[svn r57320] remove test, misc, doc, io, and code directories
that are to come from the event branch with the next commit.

--HG--
branch : trunk

This commit is contained in:
parent 9ceb61056e
commit 7428eadf7d
@ -1 +0,0 @@
""" python inspection/code generation API """
@ -1,92 +0,0 @@
import py

class Code(object):
    """ wrapper around Python code objects """
    def __init__(self, rawcode):
        rawcode = getattr(rawcode, 'im_func', rawcode)
        rawcode = getattr(rawcode, 'func_code', rawcode)
        self.raw = rawcode
        self.filename = rawcode.co_filename
        try:
            self.firstlineno = rawcode.co_firstlineno - 1
        except AttributeError:
            raise TypeError("not a code object: %r" %(rawcode,))
        self.name = rawcode.co_name

    def __eq__(self, other):
        return self.raw == other.raw

    def __ne__(self, other):
        return not self == other

    def new(self, rec=False, **kwargs):
        """ return new code object with modified attributes.
            if rec-cursive is true then dive into code
            objects contained in co_consts.
        """
        names = [x for x in dir(self.raw) if x[:3] == 'co_']
        for name in kwargs:
            if name not in names:
                raise TypeError("unknown code attribute: %r" %(name, ))
        if rec:
            newconstlist = []
            co = self.raw
            cotype = type(co)
            for c in co.co_consts:
                if isinstance(c, cotype):
                    c = self.__class__(c).new(rec=True, **kwargs)
                newconstlist.append(c)
            return self.new(rec=False, co_consts=tuple(newconstlist), **kwargs)
        for name in names:
            if name not in kwargs:
                kwargs[name] = getattr(self.raw, name)
        return py.std.new.code(
                 kwargs['co_argcount'],
                 kwargs['co_nlocals'],
                 kwargs['co_stacksize'],
                 kwargs['co_flags'],
                 kwargs['co_code'],
                 kwargs['co_consts'],
                 kwargs['co_names'],
                 kwargs['co_varnames'],
                 kwargs['co_filename'],
                 kwargs['co_name'],
                 kwargs['co_firstlineno'],
                 kwargs['co_lnotab'],
                 kwargs['co_freevars'],
                 kwargs['co_cellvars'],
        )

    def path(self):
        """ return a py.path.local object wrapping the source of the code """
        try:
            return self.raw.co_filename.__path__
        except AttributeError:
            return py.path.local(self.raw.co_filename)
    path = property(path, None, None, "path of this code object")

    def fullsource(self):
        """ return a py.code.Source object for the full source file of the code
        """
        fn = self.raw.co_filename
        try:
            return fn.__source__
        except AttributeError:
            return py.code.Source(self.path.read(mode="rU"))
    fullsource = property(fullsource, None, None,
                          "full source containing this code object")

    def source(self):
        """ return a py.code.Source object for the code object's source only
        """
        # return source only for that part of code
        import inspect
        return py.code.Source(inspect.getsource(self.raw))

    def getargs(self):
        """ return a tuple with the argument names for the code object
        """
        # handfull shortcut for getting args
        raw = self.raw
        return raw.co_varnames[:raw.co_argcount]
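
A minimal usage sketch of the Code wrapper removed above, assuming the
2008-era py lib API exactly as shown in this file (Python 2 syntax to match);
the function name add is illustrative only::

    import py

    def add(x, y=1):
        return x + y

    co = py.code.Code(add)            # accepts functions as well as raw code objects
    assert co.name == 'add'
    assert co.getargs() == ('x', 'y')
    # path/fullsource assume the code was imported from a real file on disk
    print co.path                     # py.path.local of the defining source file
    print co.fullsource[co.firstlineno]   # the 'def add(x, y=1):' line (0-based)
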
@ -1,48 +0,0 @@
from __future__ import generators
import sys
import py

class ExceptionInfo(object):
    """ wraps sys.exc_info() objects and offers
        help for navigating the traceback.
    """
    _striptext = ''
    def __init__(self, tup=None, exprinfo=None):
        # NB. all attributes are private!  Subclasses or other
        #     ExceptionInfo-like classes may have different attributes.
        if tup is None:
            tup = sys.exc_info()
            if exprinfo is None and isinstance(tup[1], py.magic.AssertionError):
                exprinfo = tup[1].msg
                if exprinfo and exprinfo.startswith('assert '):
                    self._striptext = 'AssertionError: '
        self._excinfo = tup
        self.type, self.value, tb = self._excinfo
        self.typename = self.type.__module__ + '.' + self.type.__name__
        self.traceback = py.code.Traceback(tb)

    def exconly(self, tryshort=False):
        """ return the exception as a string

            when 'tryshort' resolves to True, and the exception is a
            py.magic.AssertionError, only the actual exception part of
            the exception representation is returned (so 'AssertionError: ' is
            removed from the beginning)
        """
        lines = py.std.traceback.format_exception_only(self.type, self.value)
        text = ''.join(lines)
        if text.endswith('\n'):
            text = text[:-1]
        if tryshort:
            if text.startswith(self._striptext):
                text = text[len(self._striptext):]
        return text

    def errisinstance(self, exc):
        """ return True if the exception is an instance of exc """
        return isinstance(self.value, exc)

    def __str__(self):
        # XXX wrong str
        return self.exconly()
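
A matching sketch for the ExceptionInfo wrapper removed above, under the same
assumptions (py.code API from this diff, Python 2); the divide helper is made
up for illustration::

    import py

    def divide(a, b):
        return a / b

    try:
        divide(1, 0)
    except ZeroDivisionError:
        excinfo = py.code.ExceptionInfo()    # wraps the current sys.exc_info()
        assert excinfo.errisinstance(ArithmeticError)
        assert excinfo.typename == 'exceptions.ZeroDivisionError'
        print excinfo.exconly()              # one-line 'ZeroDivisionError: ...' text
        print len(excinfo.traceback)         # number of wrapped traceback entries
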
@ -1,55 +0,0 @@
import py
import py.__.code.safe_repr

class Frame(object):
    """Wrapper around a Python frame holding f_locals and f_globals
    in which expressions can be evaluated."""

    def __init__(self, frame):
        self.code = py.code.Code(frame.f_code)
        self.lineno = frame.f_lineno - 1
        self.f_globals = frame.f_globals
        self.f_locals = frame.f_locals
        self.raw = frame

    def statement(self):
        return self.code.fullsource.getstatement(self.lineno)
    statement = property(statement, None, None,
                         "statement this frame is at")

    def eval(self, code, **vars):
        """ evaluate 'code' in the frame

            'vars' are optional additional local variables

            returns the result of the evaluation
        """
        f_locals = self.f_locals.copy()
        f_locals.update(vars)
        return eval(code, self.f_globals, f_locals)

    def exec_(self, code, **vars):
        """ exec 'code' in the frame

            'vars' are optional additional local variables
        """
        f_locals = self.f_locals.copy()
        f_locals.update(vars)
        exec code in self.f_globals, f_locals

    def repr(self, object):
        """ return a 'safe' (non-recursive, one-line) string repr for 'object'
        """
        return py.__.code.safe_repr._repr(object)

    def is_true(self, object):
        return object

    def getargs(self):
        """ return a list of tuples (name, value) for all arguments
        """
        retval = []
        for arg in self.code.getargs():
            retval.append((arg, self.f_locals[arg]))
        return retval
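
The Frame wrapper removed above is easiest to see against a live frame; a
small sketch under the same assumptions (py lib layout as in this diff,
Python 2), with f and extra chosen only for illustration::

    import sys
    import py

    def f(x):
        return sys._getframe(0)      # hand the live frame back for inspection

    frame = py.code.Frame(f(41))
    assert frame.code.name == 'f'
    assert frame.getargs() == [('x', 41)]
    # eval() runs in the frame's globals/locals plus any extra locals passed in
    assert frame.eval("x + extra", extra=1) == 42
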
@ -1,63 +0,0 @@
"""Defines a safe repr function. This will always return a string of "reasonable" length
no matter what the object does in its own repr function. Let's examine what can go wrong
in an arbitrary repr function.

The default repr will return something like (on Win32 anyway):
<foo.bar object at 0x008D5650>. Well behaved user-defined repr() methods will do similar.
The usual expectation is that repr will return a single line string.

1. However, the repr method can raise an exception of an arbitrary type.

Also, the return value may not be as expected:
2. The return value may not be a string!
3. The return value may not be a single line string, it may contain line breaks.
4. The method may enter a loop and never return.
5. The return value may be enormous, eg range(100000)

The standard library has a nice implementation in the repr module that will do the job,
but the exception handling is silent, so the output contains no clue that a repr() call
raised an exception. I would like to be told if repr raises an exception; it's a serious
error, so a subclass of repr overrides the method that does repr for class instances."""


import repr
import __builtin__


class SafeRepr(repr.Repr):
    def __init__(self, *args, **kwargs):
        repr.Repr.__init__(self, *args, **kwargs)
        # Do we need a commandline switch for this?
        self.maxstring = 240    # 3 * 80 chars
        self.maxother = 160     # 2 * 80 chars
    def repr_instance(self, x, level):
        try:
            # Try the vanilla repr and make sure that the result is a string
            s = str(__builtin__.repr(x))
        except (KeyboardInterrupt, MemoryError, SystemExit):
            raise
        except Exception, e:
            try:
                exc_name = e.__class__.__name__
            except:
                exc_name = 'unknown'
            try:
                exc_info = str(e)
            except:
                exc_info = 'unknown'
            return '<[%s("%s") raised in repr()] %s object at 0x%x>' % \
                (exc_name, exc_info, x.__class__.__name__, id(x))
        except:
            try:
                name = x.__class__.__name__
            except:
                name = 'unknown'
            return '<[unknown exception raised in repr()] %s object at 0x%x>' % \
                (name, id(x))
        if len(s) > self.maxstring:
            i = max(0, (self.maxstring-3)//2)
            j = max(0, self.maxstring-3-i)
            s = s[:i] + '...' + s[len(s)-j:]
        return s

_repr = SafeRepr().repr
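
What the SafeRepr subclass above buys over a plain repr(), sketched with a
made-up object whose __repr__ raises (same removed module layout assumed,
Python 2)::

    from py.__.code import safe_repr

    class Evil(object):
        def __repr__(self):
            raise RuntimeError("broken repr")

    print safe_repr._repr([1, 2, 3])     # '[1, 2, 3]' - well-behaved objects pass through
    print safe_repr._repr(Evil())        # names the RuntimeError instead of propagating it
    print safe_repr._repr(range(10000))  # long containers come back truncated, not enormous
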
@ -1,282 +0,0 @@
|
||||||
from __future__ import generators
|
|
||||||
import sys
|
|
||||||
import inspect, tokenize
|
|
||||||
import py
|
|
||||||
cpy_compile = compile
|
|
||||||
|
|
||||||
# DON'T IMPORT PY HERE
|
|
||||||
|
|
||||||
class Source(object):
|
|
||||||
""" a mutable object holding a source code fragment,
|
|
||||||
possibly deindenting it.
|
|
||||||
"""
|
|
||||||
def __init__(self, *parts, **kwargs):
|
|
||||||
self.lines = lines = []
|
|
||||||
de = kwargs.get('deindent', True)
|
|
||||||
rstrip = kwargs.get('rstrip', True)
|
|
||||||
for part in parts:
|
|
||||||
if not part:
|
|
||||||
partlines = []
|
|
||||||
if isinstance(part, Source):
|
|
||||||
partlines = part.lines
|
|
||||||
elif isinstance(part, (unicode, str)):
|
|
||||||
partlines = part.split('\n')
|
|
||||||
if rstrip:
|
|
||||||
while partlines:
|
|
||||||
if partlines[-1].strip():
|
|
||||||
break
|
|
||||||
partlines.pop()
|
|
||||||
else:
|
|
||||||
partlines = getsource(part, deindent=de).lines
|
|
||||||
if de:
|
|
||||||
partlines = deindent(partlines)
|
|
||||||
lines.extend(partlines)
|
|
||||||
|
|
||||||
def __eq__(self, other):
|
|
||||||
try:
|
|
||||||
return self.lines == other.lines
|
|
||||||
except AttributeError:
|
|
||||||
if isinstance(other, str):
|
|
||||||
return str(self) == other
|
|
||||||
return False
|
|
||||||
|
|
||||||
def __getitem__(self, key):
|
|
||||||
if isinstance(key, int):
|
|
||||||
return self.lines[key]
|
|
||||||
else:
|
|
||||||
if key.step not in (None, 1):
|
|
||||||
raise IndexError("cannot slice a Source with a step")
|
|
||||||
return self.__getslice__(key.start, key.stop)
|
|
||||||
|
|
||||||
def __len__(self):
|
|
||||||
return len(self.lines)
|
|
||||||
|
|
||||||
def __getslice__(self, start, end):
|
|
||||||
newsource = Source()
|
|
||||||
newsource.lines = self.lines[start:end]
|
|
||||||
return newsource
|
|
||||||
|
|
||||||
def strip(self):
|
|
||||||
""" return new source object with trailing
|
|
||||||
and leading blank lines removed.
|
|
||||||
"""
|
|
||||||
start, end = 0, len(self)
|
|
||||||
while start < end and not self.lines[start].strip():
|
|
||||||
start += 1
|
|
||||||
while end > start and not self.lines[end-1].strip():
|
|
||||||
end -= 1
|
|
||||||
source = Source()
|
|
||||||
source.lines[:] = self.lines[start:end]
|
|
||||||
return source
|
|
||||||
|
|
||||||
def putaround(self, before='', after='', indent=' ' * 4):
|
|
||||||
""" return a copy of the source object with
|
|
||||||
'before' and 'after' wrapped around it.
|
|
||||||
"""
|
|
||||||
before = Source(before)
|
|
||||||
after = Source(after)
|
|
||||||
newsource = Source()
|
|
||||||
lines = [ (indent + line) for line in self.lines]
|
|
||||||
newsource.lines = before.lines + lines + after.lines
|
|
||||||
return newsource
|
|
||||||
|
|
||||||
def indent(self, indent=' ' * 4):
|
|
||||||
""" return a copy of the source object with
|
|
||||||
all lines indented by the given indent-string.
|
|
||||||
"""
|
|
||||||
newsource = Source()
|
|
||||||
newsource.lines = [(indent+line) for line in self.lines]
|
|
||||||
return newsource
|
|
||||||
|
|
||||||
def getstatement(self, lineno):
|
|
||||||
""" return Source statement which contains the
|
|
||||||
given linenumber (counted from 0).
|
|
||||||
"""
|
|
||||||
start, end = self.getstatementrange(lineno)
|
|
||||||
return self[start:end]
|
|
||||||
|
|
||||||
def getstatementrange(self, lineno):
|
|
||||||
""" return (start, end) tuple which spans the minimal
|
|
||||||
statement region which containing the given lineno.
|
|
||||||
"""
|
|
||||||
# XXX there must be a better than these heuristic ways ...
|
|
||||||
# XXX there may even be better heuristics :-)
|
|
||||||
if not (0 <= lineno < len(self)):
|
|
||||||
raise IndexError("lineno out of range")
|
|
||||||
|
|
||||||
# 1. find the start of the statement
|
|
||||||
from codeop import compile_command
|
|
||||||
for start in range(lineno, -1, -1):
|
|
||||||
trylines = self.lines[start:lineno+1]
|
|
||||||
# quick hack to indent the source and get it as a string in one go
|
|
||||||
trylines.insert(0, 'def xxx():')
|
|
||||||
trysource = '\n '.join(trylines)
|
|
||||||
# ^ space here
|
|
||||||
try:
|
|
||||||
compile_command(trysource)
|
|
||||||
except (SyntaxError, OverflowError, ValueError):
|
|
||||||
pass
|
|
||||||
else:
|
|
||||||
break # got a valid or incomplete statement
|
|
||||||
|
|
||||||
# 2. find the end of the statement
|
|
||||||
for end in range(lineno+1, len(self)+1):
|
|
||||||
trysource = self[start:end]
|
|
||||||
if trysource.isparseable():
|
|
||||||
break
|
|
||||||
|
|
||||||
return start, end
|
|
||||||
|
|
||||||
def getblockend(self, lineno):
|
|
||||||
# XXX
|
|
||||||
lines = [x + '\n' for x in self.lines[lineno:]]
|
|
||||||
blocklines = inspect.getblock(lines)
|
|
||||||
#print blocklines
|
|
||||||
return lineno + len(blocklines) - 1
|
|
||||||
|
|
||||||
def deindent(self, offset=None):
|
|
||||||
""" return a new source object deindented by offset.
|
|
||||||
If offset is None then guess an indentation offset from
|
|
||||||
the first non-blank line. Subsequent lines which have a
|
|
||||||
lower indentation offset will be copied verbatim as
|
|
||||||
they are assumed to be part of multilines.
|
|
||||||
"""
|
|
||||||
# XXX maybe use the tokenizer to properly handle multiline
|
|
||||||
# strings etc.pp?
|
|
||||||
newsource = Source()
|
|
||||||
newsource.lines[:] = deindent(self.lines, offset)
|
|
||||||
return newsource
|
|
||||||
|
|
||||||
def isparseable(self, deindent=True):
|
|
||||||
""" return True if source is parseable, heuristically
|
|
||||||
deindenting it by default.
|
|
||||||
"""
|
|
||||||
import parser
|
|
||||||
if deindent:
|
|
||||||
source = str(self.deindent())
|
|
||||||
else:
|
|
||||||
source = str(self)
|
|
||||||
try:
|
|
||||||
parser.suite(source+'\n')
|
|
||||||
except (parser.ParserError, SyntaxError):
|
|
||||||
return False
|
|
||||||
else:
|
|
||||||
return True
|
|
||||||
|
|
||||||
def __str__(self):
|
|
||||||
return "\n".join(self.lines)
|
|
||||||
|
|
||||||
def compile(self, filename=None, mode='exec',
|
|
||||||
flag=generators.compiler_flag, dont_inherit=0):
|
|
||||||
""" return compiled code object. if filename is None
|
|
||||||
invent an artificial filename which displays
|
|
||||||
the source/line position of the caller frame.
|
|
||||||
"""
|
|
||||||
if not filename or py.path.local(filename).check(file=0):
|
|
||||||
frame = sys._getframe(1) # the caller
|
|
||||||
filename = '%s<%s:%d>' % (filename, frame.f_code.co_filename,
|
|
||||||
frame.f_lineno)
|
|
||||||
source = "\n".join(self.lines) + '\n'
|
|
||||||
try:
|
|
||||||
co = cpy_compile(source, filename, mode, flag)
|
|
||||||
except SyntaxError, ex:
|
|
||||||
# re-represent syntax errors from parsing python strings
|
|
||||||
msglines = self.lines[:ex.lineno]
|
|
||||||
if ex.offset:
|
|
||||||
msglines.append(" "*ex.offset + '^')
|
|
||||||
msglines.append("syntax error probably generated here: %s" % filename)
|
|
||||||
newex = SyntaxError('\n'.join(msglines))
|
|
||||||
newex.offset = ex.offset
|
|
||||||
newex.lineno = ex.lineno
|
|
||||||
newex.text = ex.text
|
|
||||||
raise newex
|
|
||||||
else:
|
|
||||||
co_filename = MyStr(filename)
|
|
||||||
co_filename.__source__ = self
|
|
||||||
return py.code.Code(co).new(rec=1, co_filename=co_filename)
|
|
||||||
#return newcode_withfilename(co, co_filename)
|
|
||||||
|
|
||||||
#
|
|
||||||
# public API shortcut functions
|
|
||||||
#
|
|
||||||
|
|
||||||
def compile_(source, filename=None, mode='exec', flags=
|
|
||||||
generators.compiler_flag, dont_inherit=0):
|
|
||||||
""" compile the given source to a raw code object,
|
|
||||||
which points back to the source code through
|
|
||||||
"co_filename.__source__". All code objects
|
|
||||||
contained in the code object will recursively
|
|
||||||
also have this special subclass-of-string
|
|
||||||
filename.
|
|
||||||
"""
|
|
||||||
s = Source(source)
|
|
||||||
co = s.compile(filename, mode, flags)
|
|
||||||
return co
|
|
||||||
|
|
||||||
|
|
||||||
#
|
|
||||||
# various helper functions
|
|
||||||
#
|
|
||||||
class MyStr(str):
|
|
||||||
""" custom string which allows to add attributes. """
|
|
||||||
|
|
||||||
def getsource(obj, **kwargs):
|
|
||||||
if hasattr(obj, 'func_code'):
|
|
||||||
obj = obj.func_code
|
|
||||||
elif hasattr(obj, 'f_code'):
|
|
||||||
obj = obj.f_code
|
|
||||||
try:
|
|
||||||
fullsource = obj.co_filename.__source__
|
|
||||||
except AttributeError:
|
|
||||||
try:
|
|
||||||
strsrc = inspect.getsource(obj)
|
|
||||||
except IndentationError:
|
|
||||||
strsrc = "\"Buggy python version consider upgrading, cannot get source\""
|
|
||||||
assert isinstance(strsrc, str)
|
|
||||||
return Source(strsrc, **kwargs)
|
|
||||||
else:
|
|
||||||
lineno = obj.co_firstlineno - 1
|
|
||||||
end = fullsource.getblockend(lineno)
|
|
||||||
return fullsource[lineno:end+1]
|
|
||||||
|
|
||||||
|
|
||||||
def deindent(lines, offset=None):
|
|
||||||
if offset is None:
|
|
||||||
for line in lines:
|
|
||||||
line = line.expandtabs()
|
|
||||||
s = line.lstrip()
|
|
||||||
if s:
|
|
||||||
offset = len(line)-len(s)
|
|
||||||
break
|
|
||||||
else:
|
|
||||||
offset = 0
|
|
||||||
if offset == 0:
|
|
||||||
return list(lines)
|
|
||||||
newlines = []
|
|
||||||
def readline_generator(lines):
|
|
||||||
for line in lines:
|
|
||||||
yield line + '\n'
|
|
||||||
while True:
|
|
||||||
yield ''
|
|
||||||
|
|
||||||
readline = readline_generator(lines).next
|
|
||||||
|
|
||||||
try:
|
|
||||||
for _, _, (sline, _), (eline, _), _ in tokenize.generate_tokens(readline):
|
|
||||||
if sline > len(lines):
|
|
||||||
break # End of input reached
|
|
||||||
if sline > len(newlines):
|
|
||||||
line = lines[sline - 1].expandtabs()
|
|
||||||
if line.lstrip() and line[:offset].isspace():
|
|
||||||
line = line[offset:] # Deindent
|
|
||||||
newlines.append(line)
|
|
||||||
|
|
||||||
for i in range(sline, eline):
|
|
||||||
# Don't deindent continuing lines of
|
|
||||||
# multiline tokens (i.e. multiline strings)
|
|
||||||
newlines.append(lines[i])
|
|
||||||
except (IndentationError, tokenize.TokenError):
|
|
||||||
pass
|
|
||||||
# Add any lines we didn't see. E.g. if an exception was raised.
|
|
||||||
newlines.extend(lines[len(newlines):])
|
|
||||||
return newlines
|
|
|
@ -1 +0,0 @@
#
@ -1,57 +0,0 @@
from __future__ import generators
import py

def test_newcode():
    source = "i = 3"
    co = compile(source, '', 'exec')
    code = py.code.Code(co)
    newco = code.new()
    assert co == newco

def test_ne():
    code1 = py.code.Code(compile('foo = "bar"', '', 'exec'))
    assert code1 == code1
    code2 = py.code.Code(compile('foo = "baz"', '', 'exec'))
    assert code2 != code1

def test_newcode_unknown_args():
    code = py.code.Code(compile("", '', 'exec'))
    py.test.raises(TypeError, 'code.new(filename="hello")')

def test_newcode_withfilename():
    source = py.code.Source("""
        def f():
            def g():
                pass
    """)
    co = compile(str(source)+'\n', 'nada', 'exec')
    obj = 'hello'
    newco = py.code.Code(co).new(rec=True, co_filename=obj)
    def walkcode(co):
        for x in co.co_consts:
            if isinstance(x, type(co)):
                for y in walkcode(x):
                    yield y
        yield co

    names = []
    for code in walkcode(newco):
        assert newco.co_filename == obj
        assert newco.co_filename is obj
        names.append(code.co_name)
    assert 'f' in names
    assert 'g' in names

def test_newcode_with_filename():
    source = "i = 3"
    co = compile(source, '', 'exec')
    code = py.code.Code(co)
    class MyStr(str):
        pass
    filename = MyStr("hello")
    filename.__source__ = py.code.Source(source)
    newco = code.new(rec=True, co_filename=filename)
    assert newco.co_filename is filename
    s = py.code.Source(newco)
    assert str(s) == source
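
The last two tests above pin down the co_filename round-trip; the same idea
compressed into one sketch, again assuming the removed py.code API (the
filenames 'orig' and 'replaced' are arbitrary)::

    import py

    co = compile("def f():\n    pass\n", 'orig', 'exec')
    newco = py.code.Code(co).new(rec=True, co_filename='replaced')
    # rec=True rewrites co_filename on the outer code object and on every
    # nested code object found in co_consts (here: the code object of f)
    assert newco.co_filename == 'replaced'
    assert [c.co_filename for c in newco.co_consts
            if isinstance(c, type(newco))] == ['replaced']
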
@ -1,16 +0,0 @@

import new

def test_new_code_object_carries_filename_through():
    class mystr(str):
        pass
    filename = mystr("dummy")
    co = compile("hello\n", filename, 'exec')
    assert not isinstance(co.co_filename, mystr)
    c2 = new.code(co.co_argcount, co.co_nlocals, co.co_stacksize,
             co.co_flags, co.co_code, co.co_consts,
             co.co_names, co.co_varnames,
             filename,
             co.co_name, co.co_firstlineno, co.co_lnotab,
             co.co_freevars, co.co_cellvars)
    assert c2.co_filename is filename
@ -1,208 +0,0 @@
|
||||||
import py
|
|
||||||
mypath = py.magic.autopath()
|
|
||||||
|
|
||||||
def test_excinfo_simple():
|
|
||||||
try:
|
|
||||||
raise ValueError
|
|
||||||
except ValueError:
|
|
||||||
info = py.code.ExceptionInfo()
|
|
||||||
assert info.type == ValueError
|
|
||||||
|
|
||||||
def test_excinfo_getstatement():
|
|
||||||
def g():
|
|
||||||
raise ValueError
|
|
||||||
def f():
|
|
||||||
g()
|
|
||||||
try:
|
|
||||||
f()
|
|
||||||
except ValueError:
|
|
||||||
excinfo = py.code.ExceptionInfo()
|
|
||||||
linenumbers = [f.func_code.co_firstlineno-1+3,
|
|
||||||
f.func_code.co_firstlineno-1+1,
|
|
||||||
g.func_code.co_firstlineno-1+1,]
|
|
||||||
l = list(excinfo.traceback)
|
|
||||||
foundlinenumbers = [x.lineno for x in l]
|
|
||||||
print l[0].frame.statement
|
|
||||||
assert foundlinenumbers == linenumbers
|
|
||||||
#for x in info:
|
|
||||||
# print "%s:%d %s" %(x.path.relto(root), x.lineno, x.statement)
|
|
||||||
#xxx
|
|
||||||
|
|
||||||
# testchain for getentries test below
|
|
||||||
def f():
|
|
||||||
#
|
|
||||||
raise ValueError
|
|
||||||
#
|
|
||||||
def g():
|
|
||||||
#
|
|
||||||
__tracebackhide__ = True
|
|
||||||
f()
|
|
||||||
#
|
|
||||||
def h():
|
|
||||||
#
|
|
||||||
g()
|
|
||||||
#
|
|
||||||
|
|
||||||
class TestTraceback_f_g_h:
|
|
||||||
def setup_method(self, method):
|
|
||||||
try:
|
|
||||||
h()
|
|
||||||
except ValueError:
|
|
||||||
self.excinfo = py.code.ExceptionInfo()
|
|
||||||
|
|
||||||
def test_traceback_entries(self):
|
|
||||||
tb = self.excinfo.traceback
|
|
||||||
entries = list(tb)
|
|
||||||
assert len(tb) == 4 # maybe fragile test
|
|
||||||
assert len(entries) == 4 # maybe fragile test
|
|
||||||
names = ['f', 'g', 'h']
|
|
||||||
for entry in entries:
|
|
||||||
try:
|
|
||||||
names.remove(entry.frame.code.name)
|
|
||||||
except ValueError:
|
|
||||||
pass
|
|
||||||
assert not names
|
|
||||||
|
|
||||||
def test_traceback_entry_getsource(self):
|
|
||||||
tb = self.excinfo.traceback
|
|
||||||
s = str(tb[-1].getsource() )
|
|
||||||
assert s.startswith("def f():")
|
|
||||||
assert s.endswith("raise ValueError")
|
|
||||||
|
|
||||||
def test_traceback_entry_getsource_in_construct(self):
|
|
||||||
source = py.code.Source("""\
|
|
||||||
def xyz():
|
|
||||||
try:
|
|
||||||
raise ValueError
|
|
||||||
except somenoname:
|
|
||||||
pass
|
|
||||||
xyz()
|
|
||||||
""")
|
|
||||||
try:
|
|
||||||
exec source.compile()
|
|
||||||
except NameError:
|
|
||||||
tb = py.code.ExceptionInfo().traceback
|
|
||||||
print tb[-1].getsource()
|
|
||||||
s = str(tb[-1].getsource())
|
|
||||||
assert s.startswith("def xyz():\n try:")
|
|
||||||
assert s.endswith("except somenoname:")
|
|
||||||
|
|
||||||
def test_traceback_cut(self):
|
|
||||||
co = py.code.Code(f)
|
|
||||||
path, firstlineno = co.path, co.firstlineno
|
|
||||||
traceback = self.excinfo.traceback
|
|
||||||
newtraceback = traceback.cut(path=path, firstlineno=firstlineno)
|
|
||||||
assert len(newtraceback) == 1
|
|
||||||
newtraceback = traceback.cut(path=path, lineno=firstlineno+2)
|
|
||||||
assert len(newtraceback) == 1
|
|
||||||
|
|
||||||
def test_traceback_filter(self):
|
|
||||||
traceback = self.excinfo.traceback
|
|
||||||
ntraceback = traceback.filter()
|
|
||||||
assert len(ntraceback) == len(traceback) - 1
|
|
||||||
|
|
||||||
def test_traceback_recursion_index(self):
|
|
||||||
def f(n):
|
|
||||||
if n < 10:
|
|
||||||
n += 1
|
|
||||||
f(n)
|
|
||||||
excinfo = py.test.raises(RuntimeError, f, 8)
|
|
||||||
traceback = excinfo.traceback
|
|
||||||
recindex = traceback.recursionindex()
|
|
||||||
assert recindex == 3
|
|
||||||
|
|
||||||
def test_traceback_no_recursion_index(self):
|
|
||||||
def do_stuff():
|
|
||||||
raise RuntimeError
|
|
||||||
def reraise_me():
|
|
||||||
import sys
|
|
||||||
exc, val, tb = sys.exc_info()
|
|
||||||
raise exc, val, tb
|
|
||||||
def f(n):
|
|
||||||
try:
|
|
||||||
do_stuff()
|
|
||||||
except:
|
|
||||||
reraise_me()
|
|
||||||
excinfo = py.test.raises(RuntimeError, f, 8)
|
|
||||||
traceback = excinfo.traceback
|
|
||||||
recindex = traceback.recursionindex()
|
|
||||||
assert recindex is None
|
|
||||||
|
|
||||||
def test_traceback_getcrashentry(self):
|
|
||||||
def i():
|
|
||||||
__tracebackhide__ = True
|
|
||||||
raise ValueError
|
|
||||||
def h():
|
|
||||||
i()
|
|
||||||
def g():
|
|
||||||
__tracebackhide__ = True
|
|
||||||
h()
|
|
||||||
def f():
|
|
||||||
g()
|
|
||||||
|
|
||||||
excinfo = py.test.raises(ValueError, f)
|
|
||||||
tb = excinfo.traceback
|
|
||||||
entry = tb.getcrashentry()
|
|
||||||
co = py.code.Code(h)
|
|
||||||
assert entry.frame.code.path == co.path
|
|
||||||
assert entry.lineno == co.firstlineno + 1
|
|
||||||
assert entry.frame.code.name == 'h'
|
|
||||||
|
|
||||||
def test_traceback_getcrashentry_empty(self):
|
|
||||||
def g():
|
|
||||||
__tracebackhide__ = True
|
|
||||||
raise ValueError
|
|
||||||
def f():
|
|
||||||
__tracebackhide__ = True
|
|
||||||
g()
|
|
||||||
|
|
||||||
excinfo = py.test.raises(ValueError, f)
|
|
||||||
tb = excinfo.traceback
|
|
||||||
entry = tb.getcrashentry()
|
|
||||||
co = py.code.Code(g)
|
|
||||||
assert entry.frame.code.path == co.path
|
|
||||||
assert entry.lineno == co.firstlineno + 2
|
|
||||||
assert entry.frame.code.name == 'g'
|
|
||||||
|
|
||||||
#def test_traceback_display_func(self):
|
|
||||||
# tb = self.excinfo.traceback
|
|
||||||
# for x in tb:
|
|
||||||
# x.setdisplay(lambda entry: entry.frame.code.name + '\n')
|
|
||||||
## l = tb.display().rstrip().split('\n')
|
|
||||||
# assert l == ['setup_method', 'h', 'g', 'f']
|
|
||||||
|
|
||||||
|
|
||||||
def hello(x):
|
|
||||||
x + 5
|
|
||||||
|
|
||||||
def test_tbentry_reinterpret():
|
|
||||||
try:
|
|
||||||
hello("hello")
|
|
||||||
except TypeError:
|
|
||||||
excinfo = py.code.ExceptionInfo()
|
|
||||||
tbentry = excinfo.traceback[-1]
|
|
||||||
msg = tbentry.reinterpret()
|
|
||||||
assert msg.startswith("TypeError: ('hello' + 5)")
|
|
||||||
|
|
||||||
#def test_excinfo_getentries_type_error():
|
|
||||||
# excinfo = py.test.raises(ValueError, h)
|
|
||||||
# entries = excinfo.getentries(
|
|
||||||
# lambda x: x.frame.code.name != 'raises',
|
|
||||||
# lambda x: x.frame.code.name != 'f')
|
|
||||||
# names = [x.frame.code.name for x in entries]
|
|
||||||
# assert names == ['h','g']
|
|
||||||
|
|
||||||
def test_excinfo_exconly():
|
|
||||||
excinfo = py.test.raises(ValueError, h)
|
|
||||||
assert excinfo.exconly().startswith('ValueError')
|
|
||||||
|
|
||||||
def test_excinfo_errisinstance():
|
|
||||||
excinfo = py.test.raises(ValueError, h)
|
|
||||||
assert excinfo.errisinstance(ValueError)
|
|
||||||
|
|
||||||
def test_excinfo_no_sourcecode():
|
|
||||||
try:
|
|
||||||
exec "raise ValueError()"
|
|
||||||
except ValueError:
|
|
||||||
excinfo = py.code.ExceptionInfo()
|
|
||||||
s = str(excinfo.traceback[-1])
|
|
|
@ -1,15 +0,0 @@
import sys
import py

def test_frame_getsourcelineno_myself():
    def func():
        return sys._getframe(0)
    f = func()
    f = py.code.Frame(f)
    source, lineno = f.code.fullsource, f.lineno
    assert source[lineno].startswith("        return sys._getframe(0)")

def test_code_from_func():
    co = py.code.Code(test_frame_getsourcelineno_myself)
    assert co.firstlineno
    assert co.path
@ -1,34 +0,0 @@

import py
from py.__.code import safe_repr

def test_simple_repr():
    assert safe_repr._repr(1) == '1'
    assert safe_repr._repr(None) == 'None'

class BrokenRepr:
    def __init__(self, ex):
        self.ex = ex
        foo = 0
    def __repr__(self):
        raise self.ex

def test_exception():
    assert 'Exception' in safe_repr._repr(BrokenRepr(Exception("broken")))

class BrokenReprException(Exception):
    __str__ = None
    __repr__ = None

def test_broken_exception():
    assert 'Exception' in safe_repr._repr(BrokenRepr(BrokenReprException("really broken")))

def test_string_exception():
    assert 'unknown' in safe_repr._repr(BrokenRepr("string"))

def test_big_repr():
    assert len(safe_repr._repr(range(1000))) <= \
           len('[' + safe_repr.SafeRepr().maxlist * "1000" + ']')
@ -1,298 +0,0 @@
|
||||||
from py.code import Source
|
|
||||||
import py
|
|
||||||
import sys
|
|
||||||
|
|
||||||
def test_source_str_function():
|
|
||||||
x = Source("3")
|
|
||||||
assert str(x) == "3"
|
|
||||||
|
|
||||||
x = Source(" 3")
|
|
||||||
assert str(x) == "3"
|
|
||||||
|
|
||||||
x = Source("""
|
|
||||||
3
|
|
||||||
""", rstrip=False)
|
|
||||||
assert str(x) == "\n3\n "
|
|
||||||
|
|
||||||
x = Source("""
|
|
||||||
3
|
|
||||||
""", rstrip=True)
|
|
||||||
assert str(x) == "\n3"
|
|
||||||
|
|
||||||
def test_unicode():
|
|
||||||
x = Source(unicode("4"))
|
|
||||||
assert str(x) == "4"
|
|
||||||
|
|
||||||
|
|
||||||
def test_source_from_function():
|
|
||||||
source = py.code.Source(test_source_str_function)
|
|
||||||
assert str(source).startswith('def test_source_str_function():')
|
|
||||||
|
|
||||||
def test_source_from_inner_function():
|
|
||||||
def f():
|
|
||||||
pass
|
|
||||||
source = py.code.Source(f, deindent=False)
|
|
||||||
assert str(source).startswith(' def f():')
|
|
||||||
source = py.code.Source(f)
|
|
||||||
assert str(source).startswith('def f():')
|
|
||||||
|
|
||||||
def test_source_putaround_simple():
|
|
||||||
source = Source("raise ValueError")
|
|
||||||
source = source.putaround(
|
|
||||||
"try:", """\
|
|
||||||
except ValueError:
|
|
||||||
x = 42
|
|
||||||
else:
|
|
||||||
x = 23""")
|
|
||||||
assert str(source)=="""\
|
|
||||||
try:
|
|
||||||
raise ValueError
|
|
||||||
except ValueError:
|
|
||||||
x = 42
|
|
||||||
else:
|
|
||||||
x = 23"""
|
|
||||||
|
|
||||||
def test_source_putaround():
|
|
||||||
source = Source()
|
|
||||||
source = source.putaround("""
|
|
||||||
if 1:
|
|
||||||
x=1
|
|
||||||
""")
|
|
||||||
assert str(source).strip() == "if 1:\n x=1"
|
|
||||||
|
|
||||||
def test_source_strips():
|
|
||||||
source = Source("")
|
|
||||||
assert source == Source()
|
|
||||||
assert str(source) == ''
|
|
||||||
assert source.strip() == source
|
|
||||||
|
|
||||||
def test_source_strip_multiline():
|
|
||||||
source = Source()
|
|
||||||
source.lines = ["", " hello", " "]
|
|
||||||
source2 = source.strip()
|
|
||||||
assert source2.lines == [" hello"]
|
|
||||||
|
|
||||||
def test_syntaxerror_rerepresentation():
|
|
||||||
ex = py.test.raises(SyntaxError, py.code.compile, 'x x')
|
|
||||||
assert ex.value.lineno == 1
|
|
||||||
assert ex.value.offset == 3
|
|
||||||
assert ex.value.text.strip(), 'x x'
|
|
||||||
|
|
||||||
def test_isparseable():
|
|
||||||
assert Source("hello").isparseable()
|
|
||||||
assert Source("if 1:\n pass").isparseable()
|
|
||||||
assert Source(" \nif 1:\n pass").isparseable()
|
|
||||||
assert not Source("if 1:\n").isparseable()
|
|
||||||
assert not Source(" \nif 1:\npass").isparseable()
|
|
||||||
|
|
||||||
class TestAccesses:
|
|
||||||
source = Source("""\
|
|
||||||
def f(x):
|
|
||||||
pass
|
|
||||||
def g(x):
|
|
||||||
pass
|
|
||||||
""")
|
|
||||||
def test_getrange(self):
|
|
||||||
x = self.source[0:2]
|
|
||||||
assert x.isparseable()
|
|
||||||
assert len(x.lines) == 2
|
|
||||||
assert str(x) == "def f(x):\n pass"
|
|
||||||
|
|
||||||
def test_getline(self):
|
|
||||||
x = self.source[0]
|
|
||||||
assert x == "def f(x):"
|
|
||||||
|
|
||||||
def test_len(self):
|
|
||||||
assert len(self.source) == 4
|
|
||||||
|
|
||||||
def test_iter(self):
|
|
||||||
l = [x for x in self.source]
|
|
||||||
assert len(l) == 4
|
|
||||||
|
|
||||||
class TestSourceParsingAndCompiling:
|
|
||||||
source = Source("""\
|
|
||||||
def f(x):
|
|
||||||
assert (x ==
|
|
||||||
3 +
|
|
||||||
4)
|
|
||||||
""").strip()
|
|
||||||
|
|
||||||
def test_compile(self):
|
|
||||||
co = py.code.compile("x=3")
|
|
||||||
exec co
|
|
||||||
assert x == 3
|
|
||||||
|
|
||||||
def test_compile_unicode(self):
|
|
||||||
co = py.code.compile(unicode('u"\xc3\xa5"', 'utf8'), mode='eval')
|
|
||||||
val = eval(co)
|
|
||||||
assert isinstance(val, unicode)
|
|
||||||
|
|
||||||
def test_compile_and_getsource_simple(self):
|
|
||||||
co = py.code.compile("x=3")
|
|
||||||
exec co
|
|
||||||
source = py.code.Source(co)
|
|
||||||
assert str(source) == "x=3"
|
|
||||||
|
|
||||||
def test_getstatement(self):
|
|
||||||
#print str(self.source)
|
|
||||||
ass = str(self.source[1:])
|
|
||||||
for i in range(1, 4):
|
|
||||||
#print "trying start in line %r" % self.source[i]
|
|
||||||
s = self.source.getstatement(i)
|
|
||||||
#x = s.deindent()
|
|
||||||
assert str(s) == ass
|
|
||||||
|
|
||||||
def test_getstatementrange_within_constructs(self):
|
|
||||||
source = Source("""\
|
|
||||||
try:
|
|
||||||
try:
|
|
||||||
raise ValueError
|
|
||||||
except SomeThing:
|
|
||||||
pass
|
|
||||||
finally:
|
|
||||||
42
|
|
||||||
""")
|
|
||||||
assert len(source) == 7
|
|
||||||
assert source.getstatementrange(0) == (0, 7)
|
|
||||||
assert source.getstatementrange(1) == (1, 5)
|
|
||||||
assert source.getstatementrange(2) == (2, 3)
|
|
||||||
assert source.getstatementrange(3) == (1, 5)
|
|
||||||
assert source.getstatementrange(4) == (4, 5)
|
|
||||||
assert source.getstatementrange(5) == (0, 7)
|
|
||||||
assert source.getstatementrange(6) == (6, 7)
|
|
||||||
|
|
||||||
def test_getstatementrange_bug(self):
|
|
||||||
source = Source("""\
|
|
||||||
try:
|
|
||||||
x = (
|
|
||||||
y +
|
|
||||||
z)
|
|
||||||
except:
|
|
||||||
pass
|
|
||||||
""")
|
|
||||||
assert len(source) == 6
|
|
||||||
assert source.getstatementrange(2) == (1, 4)
|
|
||||||
|
|
||||||
def test_getstatementrange_bug2(self):
|
|
||||||
py.test.skip("fix me (issue19)")
|
|
||||||
source = Source("""\
|
|
||||||
assert (
|
|
||||||
33
|
|
||||||
==
|
|
||||||
[
|
|
||||||
X(3,
|
|
||||||
b=1, c=2
|
|
||||||
),
|
|
||||||
]
|
|
||||||
)
|
|
||||||
""")
|
|
||||||
assert len(source) == 9
|
|
||||||
assert source.getstatementrange(5) == (0, 9)
|
|
||||||
|
|
||||||
def test_compile_and_getsource(self):
|
|
||||||
co = self.source.compile()
|
|
||||||
exec co
|
|
||||||
f(7)
|
|
||||||
excinfo = py.test.raises(AssertionError, "f(6)")
|
|
||||||
frame = excinfo.traceback[-1].frame
|
|
||||||
stmt = frame.code.fullsource.getstatement(frame.lineno)
|
|
||||||
#print "block", str(block)
|
|
||||||
assert str(stmt).strip().startswith('assert')
|
|
||||||
|
|
||||||
def test_offsetless_synerr(self):
|
|
||||||
py.test.raises(SyntaxError, py.code.compile, "lambda a,a: 0", mode='eval')
|
|
||||||
|
|
||||||
def test_getstartingblock_singleline():
|
|
||||||
class A:
|
|
||||||
def __init__(self, *args):
|
|
||||||
frame = sys._getframe(1)
|
|
||||||
self.source = py.code.Frame(frame).statement
|
|
||||||
|
|
||||||
x = A('x', 'y')
|
|
||||||
|
|
||||||
l = [i for i in x.source.lines if i.strip()]
|
|
||||||
assert len(l) == 1
|
|
||||||
|
|
||||||
def test_getstartingblock_multiline():
|
|
||||||
class A:
|
|
||||||
def __init__(self, *args):
|
|
||||||
frame = sys._getframe(1)
|
|
||||||
self.source = py.code.Frame(frame).statement
|
|
||||||
|
|
||||||
x = A('x',
|
|
||||||
'y' \
|
|
||||||
,
|
|
||||||
'z')
|
|
||||||
|
|
||||||
l = [i for i in x.source.lines if i.strip()]
|
|
||||||
assert len(l) == 4
|
|
||||||
|
|
||||||
def test_getline_finally():
|
|
||||||
#py.test.skip("inner statements cannot be located yet.")
|
|
||||||
def c(): pass
|
|
||||||
excinfo = py.test.raises(TypeError, """
|
|
||||||
teardown = None
|
|
||||||
try:
|
|
||||||
c(1)
|
|
||||||
finally:
|
|
||||||
if teardown:
|
|
||||||
teardown()
|
|
||||||
""")
|
|
||||||
source = excinfo.traceback[-1].statement
|
|
||||||
assert str(source).strip() == 'c(1)'
|
|
||||||
|
|
||||||
def test_getfuncsource_dynamic():
|
|
||||||
source = """
|
|
||||||
def f():
|
|
||||||
raise ValueError
|
|
||||||
|
|
||||||
def g(): pass
|
|
||||||
"""
|
|
||||||
co = py.code.compile(source)
|
|
||||||
exec co
|
|
||||||
assert str(py.code.Source(f)).strip() == 'def f():\n raise ValueError'
|
|
||||||
assert str(py.code.Source(g)).strip() == 'def g(): pass'
|
|
||||||
|
|
||||||
|
|
||||||
def test_getfuncsource_with_multine_string():
|
|
||||||
def f():
|
|
||||||
c = '''while True:
|
|
||||||
pass
|
|
||||||
'''
|
|
||||||
assert str(py.code.Source(f)).strip() == "def f():\n c = '''while True:\n pass\n'''"
|
|
||||||
|
|
||||||
|
|
||||||
def test_deindent():
|
|
||||||
from py.__.code.source import deindent as deindent
|
|
||||||
assert deindent(['\tfoo', '\tbar', ]) == ['foo', 'bar']
|
|
||||||
|
|
||||||
def f():
|
|
||||||
c = '''while True:
|
|
||||||
pass
|
|
||||||
'''
|
|
||||||
import inspect
|
|
||||||
lines = deindent(inspect.getsource(f).splitlines())
|
|
||||||
assert lines == ["def f():", " c = '''while True:", " pass", "'''"]
|
|
||||||
|
|
||||||
source = """
|
|
||||||
def f():
|
|
||||||
def g():
|
|
||||||
pass
|
|
||||||
"""
|
|
||||||
lines = deindent(source.splitlines())
|
|
||||||
assert lines == ['', 'def f():', ' def g():', ' pass', ' ']
|
|
||||||
|
|
||||||
def test_source_of_class_at_eof_without_newline():
|
|
||||||
py.test.skip("CPython's inspect.getsource is buggy")
|
|
||||||
# this test fails because the implicit inspect.getsource(A) below
|
|
||||||
# does not return the "x = 1" last line.
|
|
||||||
tmpdir = py.test.ensuretemp("source_write_read")
|
|
||||||
source = py.code.Source('''
|
|
||||||
class A(object):
|
|
||||||
def method(self):
|
|
||||||
x = 1
|
|
||||||
''')
|
|
||||||
path = tmpdir.join("a.py")
|
|
||||||
path.write(source)
|
|
||||||
s2 = py.code.Source(tmpdir.join("a.py").pyimport().A)
|
|
||||||
assert str(source).strip() == str(s2).strip()
|
|
|
@ -1,191 +0,0 @@
|
||||||
from __future__ import generators
|
|
||||||
import py
|
|
||||||
import sys
|
|
||||||
|
|
||||||
class TracebackEntry(object):
|
|
||||||
""" a single entry in a traceback """
|
|
||||||
|
|
||||||
exprinfo = None
|
|
||||||
|
|
||||||
def __init__(self, rawentry):
|
|
||||||
self._rawentry = rawentry
|
|
||||||
self.frame = py.code.Frame(rawentry.tb_frame)
|
|
||||||
# Ugh. 2.4 and 2.5 differs here when encountering
|
|
||||||
# multi-line statements. Not sure about the solution, but
|
|
||||||
# should be portable
|
|
||||||
self.lineno = rawentry.tb_lineno - 1
|
|
||||||
self.relline = self.lineno - self.frame.code.firstlineno
|
|
||||||
|
|
||||||
def __repr__(self):
|
|
||||||
return "<TracebackEntry %s:%d>" %(self.frame.code.path, self.lineno+1)
|
|
||||||
|
|
||||||
def statement(self):
|
|
||||||
""" return a py.code.Source object for the current statement """
|
|
||||||
source = self.frame.code.fullsource
|
|
||||||
return source.getstatement(self.lineno)
|
|
||||||
statement = property(statement, None, None,
|
|
||||||
"statement of this traceback entry.")
|
|
||||||
|
|
||||||
def path(self):
|
|
||||||
return self.frame.code.path
|
|
||||||
path = property(path, None, None, "path to the full source code")
|
|
||||||
|
|
||||||
def getlocals(self):
|
|
||||||
return self.frame.f_locals
|
|
||||||
locals = property(getlocals, None, None, "locals of underlaying frame")
|
|
||||||
|
|
||||||
def reinterpret(self):
|
|
||||||
"""Reinterpret the failing statement and returns a detailed information
|
|
||||||
about what operations are performed."""
|
|
||||||
if self.exprinfo is None:
|
|
||||||
from py.__.magic import exprinfo
|
|
||||||
source = str(self.statement).strip()
|
|
||||||
x = exprinfo.interpret(source, self.frame, should_fail=True)
|
|
||||||
if not isinstance(x, str):
|
|
||||||
raise TypeError, "interpret returned non-string %r" % (x,)
|
|
||||||
self.exprinfo = x
|
|
||||||
return self.exprinfo
|
|
||||||
|
|
||||||
def getfirstlinesource(self):
|
|
||||||
return self.frame.code.firstlineno
|
|
||||||
|
|
||||||
def getsource(self):
|
|
||||||
""" return failing source code. """
|
|
||||||
source = self.frame.code.fullsource
|
|
||||||
start = self.getfirstlinesource()
|
|
||||||
end = self.lineno
|
|
||||||
try:
|
|
||||||
_, end = source.getstatementrange(end)
|
|
||||||
except IndexError:
|
|
||||||
end = self.lineno + 1
|
|
||||||
# heuristic to stop displaying source on e.g.
|
|
||||||
# if something: # assume this causes a NameError
|
|
||||||
# # _this_ lines and the one
|
|
||||||
# below we don't want from entry.getsource()
|
|
||||||
for i in range(self.lineno, end):
|
|
||||||
if source[i].rstrip().endswith(':'):
|
|
||||||
end = i + 1
|
|
||||||
break
|
|
||||||
return source[start:end]
|
|
||||||
source = property(getsource)
|
|
||||||
|
|
||||||
def ishidden(self):
|
|
||||||
""" return True if the current frame has a var __tracebackhide__
|
|
||||||
resolving to True
|
|
||||||
|
|
||||||
mostly for internal use
|
|
||||||
"""
|
|
||||||
try:
|
|
||||||
return self.frame.eval("__tracebackhide__")
|
|
||||||
except (SystemExit, KeyboardInterrupt):
|
|
||||||
raise
|
|
||||||
except:
|
|
||||||
return False
|
|
||||||
|
|
||||||
def __str__(self):
|
|
||||||
try:
|
|
||||||
fn = str(self.path)
|
|
||||||
except py.error.Error:
|
|
||||||
fn = '???'
|
|
||||||
name = self.frame.code.name
|
|
||||||
try:
|
|
||||||
line = str(self.statement).lstrip()
|
|
||||||
except EnvironmentError, e:
|
|
||||||
line = "<could not get sourceline>"
|
|
||||||
return " File %r:%d in %s\n %s\n" %(fn, self.lineno+1, name, line)
|
|
||||||
|
|
||||||
def name(self):
|
|
||||||
return self.frame.code.raw.co_name
|
|
||||||
name = property(name, None, None, "co_name of underlaying code")
|
|
||||||
|
|
||||||
class Traceback(list):
|
|
||||||
""" Traceback objects encapsulate and offer higher level
|
|
||||||
access to Traceback entries.
|
|
||||||
"""
|
|
||||||
Entry = TracebackEntry
|
|
||||||
def __init__(self, tb):
|
|
||||||
""" initialize from given python traceback object. """
|
|
||||||
if hasattr(tb, 'tb_next'):
|
|
||||||
def f(cur):
|
|
||||||
while cur is not None:
|
|
||||||
yield self.Entry(cur)
|
|
||||||
cur = cur.tb_next
|
|
||||||
list.__init__(self, f(tb))
|
|
||||||
else:
|
|
||||||
list.__init__(self, tb)
|
|
||||||
|
|
||||||
def cut(self, path=None, lineno=None, firstlineno=None):
|
|
||||||
""" return a Traceback instance wrapping part of this Traceback
|
|
||||||
|
|
||||||
by provding any combination of path, lineno and firstlineno, the
|
|
||||||
first frame to start the to-be-returned traceback is determined
|
|
||||||
|
|
||||||
this allows cutting the first part of a Traceback instance e.g.
|
|
||||||
for formatting reasons (removing some uninteresting bits that deal
|
|
||||||
with handling of the exception/traceback)
|
|
||||||
"""
|
|
||||||
for x in self:
|
|
||||||
if ((path is None or x.frame.code.path == path) and
|
|
||||||
(lineno is None or x.lineno == lineno) and
|
|
||||||
(firstlineno is None or x.frame.code.firstlineno == firstlineno)):
|
|
||||||
return Traceback(x._rawentry)
|
|
||||||
return self
|
|
||||||
|
|
||||||
def __getitem__(self, key):
|
|
||||||
val = super(Traceback, self).__getitem__(key)
|
|
||||||
if isinstance(key, type(slice(0))):
|
|
||||||
val = self.__class__(val)
|
|
||||||
return val
|
|
||||||
|
|
||||||
def filter(self, fn=lambda x: not x.ishidden()):
|
|
||||||
""" return a Traceback instance with certain items removed
|
|
||||||
|
|
||||||
fn is a function that gets a single argument, a TracebackItem
|
|
||||||
instance, and should return True when the item should be added
|
|
||||||
to the Traceback, False when not
|
|
||||||
|
|
||||||
by default this removes all the TracebackItems which are hidden
|
|
||||||
(see ishidden() above)
|
|
||||||
"""
|
|
||||||
return Traceback(filter(fn, self))
|
|
||||||
|
|
||||||
def getcrashentry(self):
|
|
||||||
""" return last non-hidden traceback entry that lead
|
|
||||||
to the exception of a traceback.
|
|
||||||
"""
|
|
||||||
tb = self.filter()
|
|
||||||
if not tb:
|
|
||||||
tb = self
|
|
||||||
return tb[-1]
|
|
||||||
|
|
||||||
def recursionindex(self):
|
|
||||||
""" return the index of the frame/TracebackItem where recursion
|
|
||||||
originates if appropriate, None if no recursion occurred
|
|
||||||
"""
|
|
||||||
cache = {}
|
|
||||||
for i, entry in py.builtin.enumerate(self):
|
|
||||||
key = entry.frame.code.path, entry.lineno
|
|
||||||
#print "checking for recursion at", key
|
|
||||||
l = cache.setdefault(key, [])
|
|
||||||
if l:
|
|
||||||
f = entry.frame
|
|
||||||
loc = f.f_locals
|
|
||||||
for otherloc in l:
|
|
||||||
if f.is_true(f.eval(co_equal,
|
|
||||||
__recursioncache_locals_1=loc,
|
|
||||||
__recursioncache_locals_2=otherloc)):
|
|
||||||
return i
|
|
||||||
l.append(entry.frame.f_locals)
|
|
||||||
return None
|
|
||||||
|
|
||||||
# def __str__(self):
|
|
||||||
# for x in self
|
|
||||||
# l = []
|
|
||||||
## for func, entry in self._tblist:
|
|
||||||
# l.append(entry.display())
|
|
||||||
# return "".join(l)
|
|
||||||
|
|
||||||
|
|
||||||
co_equal = compile('__recursioncache_locals_1 == __recursioncache_locals_2',
|
|
||||||
'?', 'eval')
|
|
||||||
|
|
621 py/doc/TODO.txt
@ -1,621 +0,0 @@
|
||||||
Things to do for 1.0.0
|
|
||||||
=========================
|
|
||||||
|
|
||||||
packaging
|
|
||||||
-------------------------------------
|
|
||||||
|
|
||||||
* setup automatical dist snapshots from trunk
|
|
||||||
when tests pass (at best for all of windows,
|
|
||||||
linux and osx).
|
|
||||||
|
|
||||||
* ensure compatibility with Python 2.3 - 2.6
|
|
||||||
|
|
||||||
* package c-extensions (greenlet) for windows
|
|
||||||
|
|
||||||
* support installation from svn, setup.py and via easy_install
|
|
||||||
|
|
||||||
* debian and TAR/zip packages for py lib,
|
|
||||||
particularly look into C module issues (greenlet most importantly)
|
|
||||||
|
|
||||||
* (partly DONE) refine and implement releasescheme/download
|
|
||||||
|
|
||||||
py.test
|
|
||||||
--------------
|
|
||||||
|
|
||||||
- BUG: write test/fix --showlocals (not showing anything)
|
|
||||||
|
|
||||||
- implement a summary at the end of a test run
|
|
||||||
FILENO:LINENO EXCONLY
|
|
||||||
FILENO:LINENO EXCONLY
|
|
||||||
...
|
|
||||||
that should be easily parseable for editors like scits, vim.
|
|
||||||
|
|
||||||
- implement "py.test --version"
|
|
||||||
|
|
||||||
- extend conftest mechanism to allow to check for:
|
|
||||||
|
|
||||||
conftest_*.py
|
|
||||||
|
|
||||||
and maybe also for
|
|
||||||
|
|
||||||
conftest/
|
|
||||||
|
|
||||||
directories.
|
|
||||||
|
|
||||||
- twisted support: checkout and integrate Samuele's twisted support files
|
|
||||||
also look at Ralf Schmitt's way of going over greenlets
|
|
||||||
|
|
||||||
- ensure that a full "py.test --boxed" passes - probably needs
|
|
||||||
a bit of temp directory handling enhancements
|
|
||||||
|
|
||||||
- (ongoing) review and refactor architecture of py.test with particular
|
|
||||||
respect to:
|
|
||||||
- allow custom reporting
|
|
||||||
- writing (stacked) extensions / plugins (compared to Nose)
|
|
||||||
- event naming and processing
|
|
||||||
- fast and stable distributed testing
|
|
||||||
- reliable cross-platform testing
|
|
||||||
|
|
||||||
- porting existing extensions (htmlconftest / PyPy's conftest's ...)
|
|
||||||
|
|
||||||
- remove "-j" "starton" option, maybe introduce keyword
|
|
||||||
expression syntax for the same purposes
|
|
||||||
|
|
||||||
- fix reporting/usage regression after reporter-merge merge:
|
|
||||||
- collapse skips with same reason and lineno into one line
|
|
||||||
|
|
||||||
- fix and investigate win32 failures
|
|
||||||
|
|
||||||
- (needs review) adjust py.test documentation to reflect new
|
|
||||||
collector/session architecture
|
|
||||||
|
|
||||||
- document py.test's conftest.py approach
|
|
||||||
|
|
||||||
- review and optimize skip-handling (it can be quite slow in
|
|
||||||
certain situations because e.g. setup/teardown is fully performed
|
|
||||||
although we have "skip by keyword" and could detect this early)
|
|
||||||
|
|
||||||
- reduce ambiguity/confusion of collection/running of tests:
|
|
||||||
- introduce "gentest" naming for generative tests and
|
|
||||||
deprecate test_* generators?
|
|
||||||
- collectors have collect() method
|
|
||||||
items have runtest() method
|
|
||||||
deprecate run()
|
|
||||||
- review source code and rename some internal methods to
|
|
||||||
help with un-confusing things
|
|
||||||
|
|
||||||
|
|
||||||
py.execnet
|
|
||||||
--------------
|
|
||||||
|
|
||||||
- thoroughly test on win32 (also in conjunction with py.test)
|
|
||||||
|
|
||||||
- cross-python version (2.2/2.3-2.5/6) and cross-platform testing of
|
|
||||||
setup/teardown semantics
|
|
||||||
|
|
||||||
- optimize general setup and rsync timing?
|
|
||||||
|
|
||||||
py.apigen
|
|
||||||
----------------
|
|
||||||
|
|
||||||
- refactor to produce intermediate data/files capturing
|
|
||||||
info of test runs
|
|
||||||
- refactor html renderer to work on intermediate
|
|
||||||
data/files rather than on the live data
|
|
||||||
|
|
||||||
- check out CodeInvestigator
|
|
||||||
http://codeinvestigator.googlepages.com/main
|
|
||||||
|
|
||||||
|
|
||||||
py.io
|
|
||||||
--------------------
|
|
||||||
|
|
||||||
- write documentation about it
|
|
||||||
- deprecate py.process.cmdexec, move it to py.io.cmdexec()
|
|
||||||
merge exceptions/handling of py.io.cmdexec, py.io.forkedfunc
|
|
||||||
|
|
||||||
ld (review and shift to above)
|
|
||||||
=================================
|
|
||||||
|
|
||||||
refactorings
|
|
||||||
------------------
|
|
||||||
|
|
||||||
- (ongoing) reporting unification, i.e. use dist-testing Reporter class
|
|
||||||
also for "normal" session, consider introduction of tkinter
|
|
||||||
session (M978)
|
|
||||||
|
|
||||||
- refine doctests usage (particularly skips of doctests if
|
|
||||||
some imports/conditions are not satisfied)
|
|
||||||
|
|
||||||
- generalization of "host specifications" for execnet and
|
|
||||||
py.test --dist usages in particular (see also revision 37500 which
|
|
||||||
contained a draft for that). The goal is to have cross-platform
|
|
||||||
testing and dist-testing and other usages of py.execnet all
|
|
||||||
use a common syntax for specifiying connection methods and
|
|
||||||
be able to instantiate gateways/connections through it.
|
|
||||||
|
|
||||||
- unification of "gateway"/host setup and teardown, including
|
|
||||||
rsyncing, i.e. cross-platform and dist-testing.
|
|
||||||
|
|
||||||
- py.apigen tool -> separate runtime-data collection and
|
|
||||||
web page generation. (see M750), provide "py.apigen" tool
|
|
||||||
for generating API documentation
|
|
||||||
|
|
||||||
- py.log: unify API, possibly deprecate duplicate ones,
|
|
||||||
base things on a Config object (hte latter almost a feature though)
|
|
||||||
(M988)
|
|
||||||
|
|
||||||
- consider setup/teardown for generative tests (M826)
|
|
||||||
|
|
||||||
- fix teardown problems regarding when teardown is done (should be done
|
|
||||||
after test run, not before the next one)
|
|
||||||
|
|
||||||
features
|
|
||||||
--------------
|
|
||||||
|
|
||||||
- have a py.test scan/run database for results and test names
|
|
||||||
etc. (to allow quicker selection of tests and post-run
|
|
||||||
information on failures etc.) (M760)
|
|
||||||
|
|
||||||
- consider features of py.apigen (recheck closed "M1016")
|
|
||||||
|
|
||||||
- integrate rlcompleter2 (make it remotely workable)
|
|
||||||
and maybe integrate with "pdb" / pdbplus (M975)
|
|
||||||
|
|
||||||
- integrate native collecting of unittest.py tests from py.test
|
|
||||||
(along the PyPy lib-python tests) (M987)
|
|
||||||
|
|
||||||
- provide an automated conversion script helper for converting
|
|
||||||
unittest.py based tests to py.test ones. (M987)
|
|
||||||
|
|
||||||
- references from ReST docs to modules, functions and classes
|
|
||||||
of apigen generated html docs (M960)
|
|
||||||
|
|
||||||
- review svn-testing (and escape characters), consider
|
|
||||||
svn-bindings (M634)
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
APIGEN / source viewer
|
|
||||||
-------------------------------------
|
|
||||||
|
|
||||||
* (DONE, XXX functions/methods?) integrate rest directive into
|
|
||||||
py/documentation/conftest.py
|
|
||||||
with help code from py.__.rest.directive....
|
|
||||||
make sure that the txt files in py/documentation/ use it
|
|
||||||
|
|
||||||
testing
|
|
||||||
-----------
|
|
||||||
|
|
||||||
* these should all work on 1.0 and on the py lib and pypy:
|
|
||||||
- running "py.test -s"
|
|
||||||
- running "py.test --pdb"
|
|
||||||
- running "py.test --looponfailing"
|
|
||||||
- running "py.test" distributed on some hosts
|
|
||||||
|
|
||||||
(guido tested all on win32, everything works except --dist (requires
|
|
||||||
os.fork to work))
|
|
||||||
|
|
||||||
code quality
|
|
||||||
-----------------
|
|
||||||
|
|
||||||
* no function implementation longer than 30 lines
|
|
||||||
|
|
||||||
* no lines longer than 80 characters
|
|
||||||
|
|
||||||
* review the pylib issue tracker
|
|
||||||
(cfbolz: done: what has a 1.0.0 tag (or lower) should be looked at again)
|
|
||||||
|
|
||||||
py.test
|
|
||||||
-------
|
|
||||||
|
|
||||||
* (postponed, likely) py.test fails to parse strangely formatted code after assertion failure
|
|
||||||
|
|
||||||
Missing docstrings
|
|
||||||
------------------
|
|
||||||
|
|
||||||
::
|
|
||||||
|
|
||||||
code.Traceback.recursionindex misses a docstring
|
|
||||||
code.Traceback.filter misses a docstring
|
|
||||||
code.Traceback.cut misses a docstring
|
|
||||||
code.Traceback.__getitem__ misses a docstring
|
|
||||||
code.Traceback.Entry misses a docstring
|
|
||||||
code.Traceback.Entry.ishidden misses a docstring
|
|
||||||
code.Traceback.Entry.getfirstlinesource misses a docstring
|
|
||||||
code.Traceback.Entry.__str__ misses a docstring
|
|
||||||
code.Traceback.Entry.__repr__ misses a docstring
|
|
||||||
code.Traceback.Entry.__init__ misses a docstring
|
|
||||||
code.Source.getblockend misses a docstring
|
|
||||||
code.Source.__str__ misses a docstring
|
|
||||||
code.Source.__len__ misses a docstring
|
|
||||||
code.Source.__init__ misses a docstring
|
|
||||||
code.Source.__getslice__ misses a docstring
|
|
||||||
code.Source.__getitem__ misses a docstring
|
|
||||||
code.Source.__eq__ misses a docstring
|
|
||||||
code.Frame.repr misses a docstring
|
|
||||||
code.Frame.is_true misses a docstring
|
|
||||||
code.Frame.getargs misses a docstring
|
|
||||||
code.Frame.exec_ misses a docstring
|
|
||||||
code.Frame.eval misses a docstring
|
|
||||||
code.Frame.__init__ misses a docstring
|
|
||||||
code.ExceptionInfo.exconly misses a docstring
|
|
||||||
code.ExceptionInfo.errisinstance misses a docstring
|
|
||||||
code.ExceptionInfo.__str__ misses a docstring
|
|
||||||
code.ExceptionInfo.__init__ misses a docstring
|
|
||||||
code.Code misses a docstring
|
|
||||||
code.Code.source misses a docstring
|
|
||||||
code.Code.getargs misses a docstring
|
|
||||||
code.Code.__init__ misses a docstring
|
|
||||||
code.Code.__eq__ misses a docstring
|
|
||||||
execnet.SshGateway misses a docstring
|
|
||||||
execnet.SshGateway.join misses a docstring
|
|
||||||
execnet.SshGateway.exit misses a docstring
|
|
||||||
execnet.SshGateway.__repr__ misses a docstring
|
|
||||||
execnet.SshGateway.__init__ misses a docstring
|
|
||||||
execnet.SshGateway.ThreadOut.write misses a docstring
|
|
||||||
execnet.SshGateway.ThreadOut.setwritefunc misses a docstring
|
|
||||||
execnet.SshGateway.ThreadOut.setdefaultwriter misses a docstring
|
|
||||||
execnet.SshGateway.ThreadOut.resetdefault misses a docstring
|
|
||||||
execnet.SshGateway.ThreadOut.isatty misses a docstring
|
|
||||||
execnet.SshGateway.ThreadOut.flush misses a docstring
|
|
||||||
execnet.SshGateway.ThreadOut.delwritefunc misses a docstring
|
|
||||||
execnet.SshGateway.ThreadOut.deinstall misses a docstring
|
|
||||||
execnet.SocketGateway misses a docstring
|
|
||||||
execnet.SocketGateway.join misses a docstring
|
|
||||||
execnet.SocketGateway.exit misses a docstring
|
|
||||||
execnet.SocketGateway.__repr__ misses a docstring
|
|
||||||
execnet.SocketGateway.__init__ misses a docstring
|
|
||||||
execnet.PopenGateway misses a docstring
|
|
||||||
execnet.PopenGateway.remote_bootstrap_gateway misses a docstring
|
|
||||||
execnet.PopenGateway.join misses a docstring
|
|
||||||
execnet.PopenGateway.exit misses a docstring
|
|
||||||
execnet.PopenGateway.__repr__ misses a docstring
|
|
||||||
execnet.PopenGateway.__init__ misses a docstring
|
|
||||||
initpkg misses a docstring
|
|
||||||
log.setconsumer misses a docstring
|
|
||||||
log.get misses a docstring
|
|
||||||
log.Syslog misses a docstring
|
|
||||||
log.STDOUT misses a docstring
|
|
||||||
log.STDERR misses a docstring
|
|
||||||
log.Producer.set_consumer misses a docstring
|
|
||||||
log.Producer.get_consumer misses a docstring
|
|
||||||
log.Producer.__repr__ misses a docstring
|
|
||||||
log.Producer.__init__ misses a docstring
|
|
||||||
log.Producer.__getattr__ misses a docstring
|
|
||||||
log.Producer.__call__ misses a docstring
|
|
||||||
log.Producer.Message misses a docstring
|
|
||||||
log.Producer.Message.prefix misses a docstring
|
|
||||||
log.Producer.Message.content misses a docstring
|
|
||||||
log.Producer.Message.__str__ misses a docstring
|
|
||||||
log.Producer.Message.__init__ misses a docstring
|
|
||||||
log.Path misses a docstring
|
|
||||||
log.Path.__init__ misses a docstring
|
|
||||||
log.Path.__call__ misses a docstring
|
|
||||||
magic.View.__viewkey__ misses a docstring
|
|
||||||
magic.View.__repr__ misses a docstring
|
|
||||||
magic.View.__new__ misses a docstring
|
|
||||||
magic.View.__matchkey__ misses a docstring
|
|
||||||
magic.View.__getattr__ misses a docstring
|
|
||||||
magic.AssertionError misses a docstring
|
|
||||||
path.svnwc.visit misses a docstring
|
|
||||||
path.svnwc.mkdir misses a docstring
|
|
||||||
path.svnwc.dump misses a docstring
|
|
||||||
path.svnwc.check misses a docstring
|
|
||||||
path.svnwc.add misses a docstring
|
|
||||||
path.svnwc.__str__ misses a docstring
|
|
||||||
path.svnwc.__repr__ misses a docstring
|
|
||||||
path.svnwc.__new__ misses a docstring
|
|
||||||
path.svnwc.__iter__ misses a docstring
|
|
||||||
path.svnwc.__hash__ misses a docstring
|
|
||||||
path.svnwc.__eq__ misses a docstring
|
|
||||||
path.svnwc.__div__ misses a docstring
|
|
||||||
path.svnwc.__contains__ misses a docstring
|
|
||||||
path.svnwc.__cmp__ misses a docstring
|
|
||||||
path.svnwc.__add__ misses a docstring
|
|
||||||
path.svnwc.Checkers misses a docstring
|
|
||||||
path.svnurl.visit misses a docstring
|
|
||||||
path.svnurl.check misses a docstring
|
|
||||||
path.svnurl.__repr__ misses a docstring
|
|
||||||
path.svnurl.__new__ misses a docstring
|
|
||||||
path.svnurl.__ne__ misses a docstring
|
|
||||||
path.svnurl.__iter__ misses a docstring
|
|
||||||
path.svnurl.__hash__ misses a docstring
|
|
||||||
path.svnurl.__div__ misses a docstring
|
|
||||||
path.svnurl.__contains__ misses a docstring
|
|
||||||
path.svnurl.__cmp__ misses a docstring
|
|
||||||
path.svnurl.__add__ misses a docstring
|
|
||||||
path.svnurl.Checkers misses a docstring
|
|
||||||
path.local.visit misses a docstring
|
|
||||||
path.local.sysexec has an 'XXX' in its docstring
|
|
||||||
path.local.check misses a docstring
|
|
||||||
path.local.__repr__ misses a docstring
|
|
||||||
path.local.__iter__ misses a docstring
|
|
||||||
path.local.__hash__ misses a docstring
|
|
||||||
path.local.__eq__ misses a docstring
|
|
||||||
path.local.__div__ misses a docstring
|
|
||||||
path.local.__contains__ misses a docstring
|
|
||||||
path.local.__cmp__ misses a docstring
|
|
||||||
path.local.__add__ misses a docstring
|
|
||||||
path.local.Checkers misses a docstring
|
|
||||||
test.rest.RestReporter misses a docstring
|
|
||||||
test.rest.RestReporter.summary misses a docstring
|
|
||||||
test.rest.RestReporter.skips misses a docstring
|
|
||||||
test.rest.RestReporter.repr_traceback misses a docstring
|
|
||||||
test.rest.RestReporter.repr_source misses a docstring
|
|
||||||
test.rest.RestReporter.repr_signal misses a docstring
|
|
||||||
test.rest.RestReporter.repr_failure misses a docstring
|
|
||||||
test.rest.RestReporter.report_unknown misses a docstring
|
|
||||||
test.rest.RestReporter.report_TestStarted misses a docstring
|
|
||||||
test.rest.RestReporter.report_TestFinished misses a docstring
|
|
||||||
test.rest.RestReporter.report_SkippedTryiter misses a docstring
|
|
||||||
test.rest.RestReporter.report_SendItem misses a docstring
|
|
||||||
test.rest.RestReporter.report_RsyncFinished misses a docstring
|
|
||||||
test.rest.RestReporter.report_ReceivedItemOutcome misses a docstring
|
|
||||||
test.rest.RestReporter.report_Nodes misses a docstring
|
|
||||||
test.rest.RestReporter.report_ItemStart misses a docstring
|
|
||||||
test.rest.RestReporter.report_ImmediateFailure misses a docstring
|
|
||||||
test.rest.RestReporter.report_HostReady misses a docstring
|
|
||||||
test.rest.RestReporter.report_HostRSyncing misses a docstring
|
|
||||||
test.rest.RestReporter.report_FailedTryiter misses a docstring
|
|
||||||
test.rest.RestReporter.report misses a docstring
|
|
||||||
test.rest.RestReporter.print_summary misses a docstring
|
|
||||||
test.rest.RestReporter.prepare_source misses a docstring
|
|
||||||
test.rest.RestReporter.hangs misses a docstring
|
|
||||||
test.rest.RestReporter.get_rootpath misses a docstring
|
|
||||||
test.rest.RestReporter.get_path_from_item misses a docstring
|
|
||||||
test.rest.RestReporter.get_linkwriter misses a docstring
|
|
||||||
test.rest.RestReporter.get_item_name misses a docstring
|
|
||||||
test.rest.RestReporter.get_host misses a docstring
|
|
||||||
test.rest.RestReporter.failures misses a docstring
|
|
||||||
test.rest.RestReporter.add_rest misses a docstring
|
|
||||||
test.rest.RestReporter.__init__ misses a docstring
|
|
||||||
test.rest.RelLinkWriter misses a docstring
|
|
||||||
test.rest.RelLinkWriter.get_link misses a docstring
|
|
||||||
test.rest.NoLinkWriter misses a docstring
|
|
||||||
test.rest.NoLinkWriter.get_link misses a docstring
|
|
||||||
test.rest.LinkWriter misses a docstring
|
|
||||||
test.rest.LinkWriter.get_link misses a docstring
|
|
||||||
test.rest.LinkWriter.__init__ misses a docstring
|
|
||||||
test.exit misses a docstring
|
|
||||||
test.deprecated_call misses a docstring
|
|
||||||
test.collect.Module misses a docstring
|
|
||||||
test.collect.Module.tryiter has an 'XXX' in its docstring
|
|
||||||
test.collect.Module.teardown misses a docstring
|
|
||||||
test.collect.Module.startcapture misses a docstring
|
|
||||||
test.collect.Module.skipbykeyword misses a docstring
|
|
||||||
test.collect.Module.setup misses a docstring
|
|
||||||
test.collect.Module.run misses a docstring
|
|
||||||
test.collect.Module.makeitem misses a docstring
|
|
||||||
test.collect.Module.listnames misses a docstring
|
|
||||||
test.collect.Module.join misses a docstring
|
|
||||||
test.collect.Module.haskeyword misses a docstring
|
|
||||||
test.collect.Module.getsortvalue misses a docstring
|
|
||||||
test.collect.Module.getpathlineno misses a docstring
|
|
||||||
test.collect.Module.getouterr misses a docstring
|
|
||||||
test.collect.Module.getitembynames misses a docstring
|
|
||||||
test.collect.Module.funcnamefilter misses a docstring
|
|
||||||
test.collect.Module.finishcapture misses a docstring
|
|
||||||
test.collect.Module.classnamefilter misses a docstring
|
|
||||||
test.collect.Module.buildname2items misses a docstring
|
|
||||||
test.collect.Module.__repr__ misses a docstring
|
|
||||||
test.collect.Module.__ne__ misses a docstring
|
|
||||||
test.collect.Module.__init__ misses a docstring
|
|
||||||
test.collect.Module.__hash__ misses a docstring
|
|
||||||
test.collect.Module.__eq__ misses a docstring
|
|
||||||
test.collect.Module.__cmp__ misses a docstring
|
|
||||||
test.collect.Module.Skipped misses a docstring
|
|
||||||
test.collect.Module.Passed misses a docstring
|
|
||||||
test.collect.Module.Outcome misses a docstring
|
|
||||||
test.collect.Module.Failed misses a docstring
|
|
||||||
test.collect.Module.ExceptionFailure misses a docstring
|
|
||||||
test.collect.Instance misses a docstring
|
|
||||||
test.collect.Instance.tryiter has an 'XXX' in its docstring
|
|
||||||
test.collect.Instance.teardown misses a docstring
|
|
||||||
test.collect.Instance.startcapture misses a docstring
|
|
||||||
test.collect.Instance.skipbykeyword misses a docstring
|
|
||||||
test.collect.Instance.setup misses a docstring
|
|
||||||
test.collect.Instance.run misses a docstring
|
|
||||||
test.collect.Instance.makeitem misses a docstring
|
|
||||||
test.collect.Instance.listnames misses a docstring
|
|
||||||
test.collect.Instance.join misses a docstring
|
|
||||||
test.collect.Instance.haskeyword misses a docstring
|
|
||||||
test.collect.Instance.getsortvalue misses a docstring
|
|
||||||
test.collect.Instance.getpathlineno misses a docstring
|
|
||||||
test.collect.Instance.getouterr misses a docstring
|
|
||||||
test.collect.Instance.getitembynames misses a docstring
|
|
||||||
test.collect.Instance.funcnamefilter misses a docstring
|
|
||||||
test.collect.Instance.finishcapture misses a docstring
|
|
||||||
test.collect.Instance.classnamefilter misses a docstring
|
|
||||||
test.collect.Instance.buildname2items misses a docstring
|
|
||||||
test.collect.Instance.__repr__ misses a docstring
|
|
||||||
test.collect.Instance.__ne__ misses a docstring
|
|
||||||
test.collect.Instance.__init__ misses a docstring
|
|
||||||
test.collect.Instance.__hash__ misses a docstring
|
|
||||||
test.collect.Instance.__eq__ misses a docstring
|
|
||||||
test.collect.Instance.__cmp__ misses a docstring
|
|
||||||
test.collect.Generator misses a docstring
|
|
||||||
test.collect.Generator.tryiter has an 'XXX' in its docstring
|
|
||||||
test.collect.Generator.teardown misses a docstring
|
|
||||||
test.collect.Generator.startcapture misses a docstring
|
|
||||||
test.collect.Generator.skipbykeyword misses a docstring
|
|
||||||
test.collect.Generator.setup misses a docstring
|
|
||||||
test.collect.Generator.run misses a docstring
|
|
||||||
test.collect.Generator.listnames misses a docstring
|
|
||||||
test.collect.Generator.join misses a docstring
|
|
||||||
test.collect.Generator.haskeyword misses a docstring
|
|
||||||
test.collect.Generator.getsortvalue misses a docstring
|
|
||||||
test.collect.Generator.getpathlineno misses a docstring
|
|
||||||
test.collect.Generator.getouterr misses a docstring
|
|
||||||
test.collect.Generator.getitembynames misses a docstring
|
|
||||||
test.collect.Generator.getcallargs misses a docstring
|
|
||||||
test.collect.Generator.finishcapture misses a docstring
|
|
||||||
test.collect.Generator.buildname2items misses a docstring
|
|
||||||
test.collect.Generator.__repr__ misses a docstring
|
|
||||||
test.collect.Generator.__ne__ misses a docstring
|
|
||||||
test.collect.Generator.__init__ misses a docstring
|
|
||||||
test.collect.Generator.__hash__ misses a docstring
|
|
||||||
test.collect.Generator.__eq__ misses a docstring
|
|
||||||
test.collect.Generator.__cmp__ misses a docstring
|
|
||||||
test.collect.DoctestFile misses a docstring
|
|
||||||
test.collect.DoctestFile.tryiter has an 'XXX' in its docstring
|
|
||||||
test.collect.DoctestFile.teardown misses a docstring
|
|
||||||
test.collect.DoctestFile.startcapture misses a docstring
|
|
||||||
test.collect.DoctestFile.skipbykeyword misses a docstring
|
|
||||||
test.collect.DoctestFile.setup misses a docstring
|
|
||||||
test.collect.DoctestFile.run misses a docstring
|
|
||||||
test.collect.DoctestFile.makeitem misses a docstring
|
|
||||||
test.collect.DoctestFile.listnames misses a docstring
|
|
||||||
test.collect.DoctestFile.join misses a docstring
|
|
||||||
test.collect.DoctestFile.haskeyword misses a docstring
|
|
||||||
test.collect.DoctestFile.getsortvalue misses a docstring
|
|
||||||
test.collect.DoctestFile.getpathlineno misses a docstring
|
|
||||||
test.collect.DoctestFile.getouterr misses a docstring
|
|
||||||
test.collect.DoctestFile.getitembynames misses a docstring
|
|
||||||
test.collect.DoctestFile.funcnamefilter misses a docstring
|
|
||||||
test.collect.DoctestFile.finishcapture misses a docstring
|
|
||||||
test.collect.DoctestFile.classnamefilter misses a docstring
|
|
||||||
test.collect.DoctestFile.buildname2items misses a docstring
|
|
||||||
test.collect.DoctestFile.__repr__ misses a docstring
|
|
||||||
test.collect.DoctestFile.__ne__ misses a docstring
|
|
||||||
test.collect.DoctestFile.__init__ misses a docstring
|
|
||||||
test.collect.DoctestFile.__hash__ misses a docstring
|
|
||||||
test.collect.DoctestFile.__eq__ misses a docstring
|
|
||||||
test.collect.DoctestFile.__cmp__ misses a docstring
|
|
||||||
test.collect.Directory misses a docstring
|
|
||||||
test.collect.Directory.tryiter has an 'XXX' in its docstring
|
|
||||||
test.collect.Directory.teardown misses a docstring
|
|
||||||
test.collect.Directory.startcapture misses a docstring
|
|
||||||
test.collect.Directory.skipbykeyword misses a docstring
|
|
||||||
test.collect.Directory.setup misses a docstring
|
|
||||||
test.collect.Directory.run misses a docstring
|
|
||||||
test.collect.Directory.recfilter misses a docstring
|
|
||||||
test.collect.Directory.makeitem misses a docstring
|
|
||||||
test.collect.Directory.listnames misses a docstring
|
|
||||||
test.collect.Directory.join misses a docstring
|
|
||||||
test.collect.Directory.haskeyword misses a docstring
|
|
||||||
test.collect.Directory.getsortvalue misses a docstring
|
|
||||||
test.collect.Directory.getpathlineno misses a docstring
|
|
||||||
test.collect.Directory.getouterr misses a docstring
|
|
||||||
test.collect.Directory.getitembynames misses a docstring
|
|
||||||
test.collect.Directory.finishcapture misses a docstring
|
|
||||||
test.collect.Directory.filefilter misses a docstring
|
|
||||||
test.collect.Directory.buildname2items misses a docstring
|
|
||||||
test.collect.Directory.__repr__ misses a docstring
|
|
||||||
test.collect.Directory.__ne__ misses a docstring
|
|
||||||
test.collect.Directory.__init__ misses a docstring
|
|
||||||
test.collect.Directory.__hash__ misses a docstring
|
|
||||||
test.collect.Directory.__eq__ misses a docstring
|
|
||||||
test.collect.Directory.__cmp__ misses a docstring
|
|
||||||
test.collect.Collector misses a docstring
|
|
||||||
test.collect.Collector.tryiter has an 'XXX' in its docstring
|
|
||||||
test.collect.Collector.teardown misses a docstring
|
|
||||||
test.collect.Collector.startcapture misses a docstring
|
|
||||||
test.collect.Collector.skipbykeyword misses a docstring
|
|
||||||
test.collect.Collector.setup misses a docstring
|
|
||||||
test.collect.Collector.run misses a docstring
|
|
||||||
test.collect.Collector.listnames misses a docstring
|
|
||||||
test.collect.Collector.join misses a docstring
|
|
||||||
test.collect.Collector.haskeyword misses a docstring
|
|
||||||
test.collect.Collector.getsortvalue misses a docstring
|
|
||||||
test.collect.Collector.getpathlineno misses a docstring
|
|
||||||
test.collect.Collector.getouterr misses a docstring
|
|
||||||
test.collect.Collector.getitembynames misses a docstring
|
|
||||||
test.collect.Collector.finishcapture misses a docstring
|
|
||||||
test.collect.Collector.buildname2items misses a docstring
|
|
||||||
test.collect.Collector.__repr__ misses a docstring
|
|
||||||
test.collect.Collector.__ne__ misses a docstring
|
|
||||||
test.collect.Collector.__init__ misses a docstring
|
|
||||||
test.collect.Collector.__hash__ misses a docstring
|
|
||||||
test.collect.Collector.__eq__ misses a docstring
|
|
||||||
test.collect.Collector.__cmp__ misses a docstring
|
|
||||||
test.collect.Class misses a docstring
|
|
||||||
test.collect.Class.tryiter has an 'XXX' in its docstring
|
|
||||||
test.collect.Class.teardown misses a docstring
|
|
||||||
test.collect.Class.startcapture misses a docstring
|
|
||||||
test.collect.Class.skipbykeyword misses a docstring
|
|
||||||
test.collect.Class.setup misses a docstring
|
|
||||||
test.collect.Class.run misses a docstring
|
|
||||||
test.collect.Class.makeitem misses a docstring
|
|
||||||
test.collect.Class.listnames misses a docstring
|
|
||||||
test.collect.Class.join misses a docstring
|
|
||||||
test.collect.Class.haskeyword misses a docstring
|
|
||||||
test.collect.Class.getsortvalue misses a docstring
|
|
||||||
test.collect.Class.getpathlineno misses a docstring
|
|
||||||
test.collect.Class.getouterr misses a docstring
|
|
||||||
test.collect.Class.getitembynames misses a docstring
|
|
||||||
test.collect.Class.funcnamefilter misses a docstring
|
|
||||||
test.collect.Class.finishcapture misses a docstring
|
|
||||||
test.collect.Class.classnamefilter misses a docstring
|
|
||||||
test.collect.Class.buildname2items misses a docstring
|
|
||||||
test.collect.Class.__repr__ misses a docstring
|
|
||||||
test.collect.Class.__ne__ misses a docstring
|
|
||||||
test.collect.Class.__init__ misses a docstring
|
|
||||||
test.collect.Class.__hash__ misses a docstring
|
|
||||||
test.collect.Class.__eq__ misses a docstring
|
|
||||||
test.collect.Class.__cmp__ misses a docstring
|
|
||||||
test.cmdline.main misses a docstring
|
|
||||||
test.Item misses a docstring
|
|
||||||
test.Item.tryiter has an 'XXX' in its docstring
|
|
||||||
test.Item.teardown misses a docstring
|
|
||||||
test.Item.startcapture misses a docstring
|
|
||||||
test.Item.skipbykeyword misses a docstring
|
|
||||||
test.Item.setup misses a docstring
|
|
||||||
test.Item.run misses a docstring
|
|
||||||
test.Item.listnames misses a docstring
|
|
||||||
test.Item.join misses a docstring
|
|
||||||
test.Item.haskeyword misses a docstring
|
|
||||||
test.Item.getsortvalue misses a docstring
|
|
||||||
test.Item.getpathlineno misses a docstring
|
|
||||||
test.Item.getouterr misses a docstring
|
|
||||||
test.Item.getitembynames misses a docstring
|
|
||||||
test.Item.finishcapture misses a docstring
|
|
||||||
test.Item.buildname2items misses a docstring
|
|
||||||
test.Item.__repr__ misses a docstring
|
|
||||||
test.Item.__ne__ misses a docstring
|
|
||||||
test.Item.__init__ misses a docstring
|
|
||||||
test.Item.__hash__ misses a docstring
|
|
||||||
test.Item.__eq__ misses a docstring
|
|
||||||
test.Item.__cmp__ misses a docstring
|
|
||||||
test.Function.tryiter has an 'XXX' in its docstring
|
|
||||||
test.Function.teardown misses a docstring
|
|
||||||
test.Function.startcapture misses a docstring
|
|
||||||
test.Function.skipbykeyword misses a docstring
|
|
||||||
test.Function.setup misses a docstring
|
|
||||||
test.Function.run misses a docstring
|
|
||||||
test.Function.listnames misses a docstring
|
|
||||||
test.Function.join misses a docstring
|
|
||||||
test.Function.haskeyword misses a docstring
|
|
||||||
test.Function.getsortvalue misses a docstring
|
|
||||||
test.Function.getpathlineno misses a docstring
|
|
||||||
test.Function.getouterr misses a docstring
|
|
||||||
test.Function.getitembynames misses a docstring
|
|
||||||
test.Function.finishcapture misses a docstring
|
|
||||||
test.Function.buildname2items misses a docstring
|
|
||||||
test.Function.__repr__ misses a docstring
|
|
||||||
test.Function.__ne__ misses a docstring
|
|
||||||
test.Function.__init__ misses a docstring
|
|
||||||
test.Function.__hash__ misses a docstring
|
|
||||||
test.Function.__eq__ misses a docstring
|
|
||||||
test.Function.__cmp__ misses a docstring
|
|
||||||
test.Config.__init__ misses a docstring
|
|
||||||
xml.raw.__init__ misses a docstring
|
|
||||||
xml.html misses a docstring
|
|
||||||
xml.html.__tagclass__ misses a docstring
|
|
||||||
xml.html.__tagclass__.unicode misses a docstring
|
|
||||||
xml.html.__tagclass__.__unicode__ misses a docstring
|
|
||||||
xml.html.__tagclass__.__repr__ misses a docstring
|
|
||||||
xml.html.__tagclass__.__init__ misses a docstring
|
|
||||||
xml.html.__tagclass__.Attr misses a docstring
|
|
||||||
xml.html.__tagclass__.Attr.__init__ misses a docstring
|
|
||||||
xml.html.__metaclass__ misses a docstring
|
|
||||||
xml.html.__metaclass__.__getattr__ misses a docstring
|
|
||||||
xml.html.Style misses a docstring
|
|
||||||
xml.html.Style.__init__ misses a docstring
|
|
||||||
xml.escape misses a docstring
|
|
||||||
xml.Tag misses a docstring
|
|
||||||
xml.Tag.unicode misses a docstring
|
|
||||||
xml.Tag.__unicode__ misses a docstring
|
|
||||||
xml.Tag.__repr__ misses a docstring
|
|
||||||
xml.Tag.__init__ misses a docstring
|
|
||||||
xml.Namespace misses a docstring
|
|
|
@ -1 +0,0 @@
|
||||||
#
|
|
|
@ -1,284 +0,0 @@
|
||||||
===========================================
|
|
||||||
apigen - API documentation generation tool
|
|
||||||
===========================================
|
|
||||||
|
|
||||||
What is it?
|
|
||||||
===========
|
|
||||||
|
|
||||||
Apigen is a tool for automatically generating API reference documentation for
|
|
||||||
Python projects. It works by examining code at runtime rather than at compile
|
|
||||||
time. This way it is capable of displaying information about the code base
|
|
||||||
after initialization. A drawback is that you cannot easily document source code
|
|
||||||
that automatically starts server processes or has some other irreversible
|
|
||||||
effects upon getting imported.
|
|
||||||
|
|
||||||
The apigen functionality is normally triggered from :api:`py.test`, and while
|
|
||||||
running the tests it gathers information such as code paths, arguments and
|
|
||||||
return values of callables, and exceptions that can be raised while the code
|
|
||||||
runs (XXX not yet!) to include in the documentation. It's also possible to
|
|
||||||
run the tracer (which collects the data) in other code if your project
|
|
||||||
does not use :api:`py.test` but still wants to collect the runtime information
|
|
||||||
and build the docs.
|
|
||||||
|
|
||||||
Apigen is written for the :api:`py` lib, but can be used to build documentation
|
|
||||||
for any project: by providing a simple script, you can hook into py.test to
|
|
||||||
build API documentation for the tested project when running py.test. Of course
|
|
||||||
this does imply :api:`py.test` is actually used: if little or no tests are
|
|
||||||
actually run, the additional information (code paths, arguments and return
|
|
||||||
values and exceptions) cannot be gathered and thus there will be less of an
|
|
||||||
advantage of apigen compared to other solutions.
|
|
||||||
|
|
||||||
Features
|
|
||||||
========
|
|
||||||
|
|
||||||
Some features were mentioned above already, but here's a complete list of all
|
|
||||||
the niceties apigen has to offer:
|
|
||||||
|
|
||||||
* source documents
|
|
||||||
|
|
||||||
Apigen not only builds the API documentation, but also a tree of
|
|
||||||
syntax-colored source files, with links from the API docs to the source
|
|
||||||
files.
|
|
||||||
|
|
||||||
* abundance of information
|
|
||||||
|
|
||||||
compared to other documentation generation tools, apigen produces an
|
|
||||||
abundant amount of information: it provides syntax-colored code snippets,
|
|
||||||
code path traces, etc.
|
|
||||||
|
|
||||||
* linking
|
|
||||||
|
|
||||||
besides links to the source files, apigen provides links all across the
|
|
||||||
documentation: callable arguments and return values link to their
|
|
||||||
definition (if part of the documented code), class definition to their
|
|
||||||
base classes (again, if they're part of the documented code), and
|
|
||||||
everywhere are links to the source files (including in traces)
|
|
||||||
|
|
||||||
* (hopefully) improves testing
|
|
||||||
|
|
||||||
because the documentation is built partially from test results, developers
|
|
||||||
may (especially if they're using the documentation themselves) be more
|
|
||||||
aware of untested parts of the code, or parts can use more tests or need
|
|
||||||
attention
|
|
||||||
|
|
||||||
Using apigen
|
|
||||||
============
|
|
||||||
|
|
||||||
To trigger apigen, all you need to do is run the :source:`py/bin/py.test` tool
|
|
||||||
with an --apigen argument, as such::
|
|
||||||
|
|
||||||
$ py.test --apigen=<path>
|
|
||||||
|
|
||||||
where <path> is a path to a script containing some special hooks to build
|
|
||||||
the documents (see below). The script to build the documents for the :api:`py`
|
|
||||||
lib can be found in :source:`py/apigen/apigen.py`, so building those documents
|
|
||||||
can be done by cd'ing to the 'py' directory, and executing::
|
|
||||||
|
|
||||||
$ py.test --apigen=apigen/apigen.py
|
|
||||||
|
|
||||||
The documents will by default be built in the *parent directory* of the
|
|
||||||
*package dir* (in this case the 'py' directory). Be careful that you don't
|
|
||||||
overwrite anything!
|
|
||||||
|
|
||||||
Other projects
|
|
||||||
==============
|
|
||||||
|
|
||||||
To use apigen from another project, there are three things that you need to do:
|
|
||||||
|
|
||||||
Use :api:`py.test` for unit tests
|
|
||||||
---------------------------------
|
|
||||||
|
|
||||||
This is a good idea anyway... ;) The more tests, the more tracing information
|
|
||||||
and such can be built, so it makes sense to have good test coverage when using
|
|
||||||
this tool.
|
|
||||||
|
|
||||||
Provide :api:`py.test` hooks
|
|
||||||
----------------------------
|
|
||||||
|
|
||||||
To hook into the unit testing framework, you will need to write a script with
|
|
||||||
two functions. The first, called 'get_documentable_items', gets a
|
|
||||||
package dir (the root of the project) as its argument, and should return a tuple
|
|
||||||
with the package name as first element, and a dict as second. The dict should
|
|
||||||
contain, for all the to-be-documented items, a dotted name as key and a
|
|
||||||
reference to the item as value.
|
|
||||||
|
|
||||||
The second function should be called 'build'; it also gets the package dir as
|
|
||||||
an argument, plus a reference to a DocStorageAccessor, which contains
|
|
||||||
information gathered by the tracer, and a reference to a
|
|
||||||
:api:`py.io.StdCaptureFD` instance that is used to capture stdout and stderr,
|
|
||||||
and allows writing to them while the docs are built.
|
|
||||||
|
|
||||||
This 'build' function is responsible for actually building the documentation,
|
|
||||||
and, depending on your needs, can be used to control each aspect of it. In most
|
|
||||||
situations you will just copy the code from :source:`py/apigen/apigen.py`'s
|
|
||||||
build() function, but if you want you can choose to build entirely different
|
|
||||||
output formats by directly accessing the DocStorageAccessor class.
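A hedged sketch of what such a hooks script could look like (it documents two
arbitrary py lib objects purely for illustration; the real reference is the
build() function in :source:`py/apigen/apigen.py`)::

    def get_documentable_items(pkgdir):
        # 'pkgdir' is the project root passed in by py.test; return the
        # package name plus a mapping of dotted names to the objects
        # that should show up in the generated API documentation
        import py
        return 'py', {'py.path.local': py.path.local,
                      'py.code.Source': py.code.Source}

    def build(pkgdir, dsa, capture):
        # 'dsa' is the DocStorageAccessor holding the collected trace
        # data, 'capture' the py.io.StdCaptureFD capturing stdout/stderr;
        # a real script would mirror py/apigen/apigen.py's build() here,
        # or drive the DocStorageAccessor directly for custom output
        pass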
|
|
||||||
|
|
||||||
Provide layout
|
|
||||||
--------------
|
|
||||||
|
|
||||||
For the :api:`py` lib tests, the 'LayoutPage' class found in
|
|
||||||
:source:`py/apigen/layout.py` is used, which produces HTML specific for that
|
|
||||||
particular library (with a menubar, etc.). To customize this, you will need to
|
|
||||||
provide a similar class, most probably using the Page base class from
|
|
||||||
:source:`py/doc/confrest.py`. Note that this step depends on how heavy the
|
|
||||||
customization in the previous step is: if you decide to directly use the
|
|
||||||
DocStorageAccessor rather than let the code in :source:`py/apigen/htmlgen.py`
|
|
||||||
build HTML for you, this can be skipped.
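As an illustration only, such a class might subclass the Page class from
:source:`py/doc/confrest.py` and override its menubar; the import path and the
link targets here are assumptions, not part of any documented API::

    from py.__.doc.confrest import Page, html

    class MyProjectPage(Page):
        def fill_menubar(self):
            # a minimal menubar: an index link plus a link into the
            # generated API documentation
            self.menubar = html.div(
                self.a_docref("index", "index.html"), " ",
                self.a_apigenref("api", "api/index.html"),
                id="menubar")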
|
|
||||||
|
|
||||||
Using apigen from code
|
|
||||||
======================
|
|
||||||
|
|
||||||
If you want to avoid using :api:`py.test`, or have another idea of how best to
|
|
||||||
collect information while running code, the apigen functionality can be
|
|
||||||
directly accessed. The most important classes are the Tracer class found in
|
|
||||||
:source:`py/apigen/tracer/tracer.py`, which holds the information gathered
|
|
||||||
during the tests, and the DocStorage and DocStorageAccessor classes from
|
|
||||||
:source:`py/apigen/tracer/docstorage.py`, which (respectively) store the data,
|
|
||||||
and make it accessible.
|
|
||||||
|
|
||||||
Gathering information
|
|
||||||
---------------------
|
|
||||||
|
|
||||||
To gather information for the documentation, you will first need to tell the tool
|
|
||||||
what objects it should investigate. Only information for registered objects
|
|
||||||
will be stored. An example::
|
|
||||||
|
|
||||||
>>> import py
|
|
||||||
>>> from py.__.apigen.tracer.docstorage import DocStorage, DocStorageAccessor
|
|
||||||
>>> from py.__.apigen.tracer.tracer import Tracer
|
|
||||||
>>> toregister = {'py.path.local': py.path.local,
|
|
||||||
... 'py.path.svnwc': py.path.svnwc}
|
|
||||||
>>> ds = DocStorage().from_dict(toregister)
|
|
||||||
>>> t = Tracer(ds)
|
|
||||||
>>> t.start_tracing()
|
|
||||||
>>> p = py.path.local('.')
|
|
||||||
>>> p.check(dir=True)
|
|
||||||
True
|
|
||||||
>>> t.end_tracing()
|
|
||||||
|
|
||||||
Now the 'ds' variable should contain all kinds of information about both the
|
|
||||||
:api:`py.path.local` and the :api:`py.path.svnwc` classes, and things like call
|
|
||||||
stacks, possible argument types, etc. as additional information about
|
|
||||||
:api:`py.path.local.check()` (since it was called from the traced code).
|
|
||||||
|
|
||||||
Using the information
|
|
||||||
---------------------
|
|
||||||
|
|
||||||
To use the information, we need to get a DocStorageAccessor instance to
|
|
||||||
provide access to the data stored in the DocStorage object::
|
|
||||||
|
|
||||||
>>> dsa = DocStorageAccessor(ds)
|
|
||||||
|
|
||||||
Currently there is no API reference available for this object, so you'll have
|
|
||||||
to read the source (:source:`py/apigen/tracer/docstorage.py`) to see what
|
|
||||||
functionality it offers.
|
|
||||||
|
|
||||||
Comparison with other documentation generation tools
|
|
||||||
====================================================
|
|
||||||
|
|
||||||
Apigen is of course not the only documentation generation tool available for
|
|
||||||
Python. Although we knew in advance that our tool had certain features the
|
|
||||||
others do not offer, we decided to investigate a bit so that we could do a
|
|
||||||
proper comparison.
|
|
||||||
|
|
||||||
Tools examined
|
|
||||||
--------------
|
|
||||||
|
|
||||||
After some 'googling around', it turned out that the number of documentation
|
|
||||||
generation tools available was surprisingly low. There were only 5 packages
|
|
||||||
I could find, of which one (called 'HappyDoc') seems dead (last release in 2001),
|
|
||||||
one (called 'Pudge') not yet born (perhaps DOA even? most of the links on the
|
|
||||||
website are dead), and one (called 'Endo') specific to the Enthought suite.
|
|
||||||
The remaining two were Epydoc, which is widely used [1]_, and PyDoctor, which is
|
|
||||||
used only by (and written for) the Twisted project, but can be used separately.
|
|
||||||
|
|
||||||
Epydoc
|
|
||||||
~~~~~~
|
|
||||||
|
|
||||||
http://epydoc.sourceforge.net/
|
|
||||||
|
|
||||||
Epydoc is the best known, and most widely used, documentation generation tool
|
|
||||||
for Python. It builds a documentation tree by inspecting imported modules and
|
|
||||||
using Python's introspection features. This way it can display information like
|
|
||||||
containment, inheritance, and docstrings.
|
|
||||||
|
|
||||||
The tool is relatively sophisticated, with support for generating HTML and PDF,
|
|
||||||
choosing different styles (CSS), generating graphs using Graphviz, etc. Also
|
|
||||||
it allows using markup (which can be ReST, JavaDoc, or their own 'epytext'
|
|
||||||
format) inside docstrings for displaying rich text in the result.
|
|
||||||
|
|
||||||
Quick overview:
|
|
||||||
|
|
||||||
* builds docs from object tree
|
|
||||||
* displays relatively little information, just inheritance trees, API and
|
|
||||||
docstrings
|
|
||||||
* supports some markup (ReST, 'epytext', JavaDoc) in docstrings
|
|
||||||
|
|
||||||
PyDoctor
|
|
||||||
~~~~~~~~
|
|
||||||
|
|
||||||
http://codespeak.net/~mwh/pydoctor/
|
|
||||||
|
|
||||||
This tool is written by Michael Hudson for the Twisted project. The major
|
|
||||||
difference between this and Epydoc is that it browses the AST (Abstract Syntax
|
|
||||||
Tree) instead of using 'live' objects, which means that code that uses special
|
|
||||||
import mechanisms, or depends on other code that is not available, can still be
|
|
||||||
inspected. On the other hand, code that, for example, puts bound methods into a
|
|
||||||
module namespace is not documented.
|
|
||||||
|
|
||||||
The tool is relatively simple and doesn't support the more advanced features
|
|
||||||
that Epydoc offers. It was written for Twisted and there are no current plans to
|
|
||||||
promote its use for unrelated projects.
|
|
||||||
|
|
||||||
Quick overview:
|
|
||||||
|
|
||||||
* inspects AST rather than object tree
|
|
||||||
* again not a lot of information, the usual API docstrings, class inheritance
|
|
||||||
and module structure, but that's it
|
|
||||||
* rather heavy dependencies (depends on Twisted/Nevow (trunk version))
|
|
||||||
* written for Twisted, but gives quite nice output for other applications
|
|
||||||
|
|
||||||
Quick overview lists of the other tools
|
|
||||||
---------------------------------------
|
|
||||||
|
|
||||||
HappyDoc
|
|
||||||
~~~~~~~~
|
|
||||||
|
|
||||||
http://happydoc.sourceforge.net/
|
|
||||||
|
|
||||||
* dead
|
|
||||||
* inspects AST
|
|
||||||
* quite flexible, different output formats (HTML, XML, SGML, PDF)
|
|
||||||
* pluggable docstring parsers
|
|
||||||
|
|
||||||
Pudge
|
|
||||||
~~~~~
|
|
||||||
|
|
||||||
http://pudge.lesscode.org/
|
|
||||||
|
|
||||||
* immature, dead?
|
|
||||||
* builds docs from live object tree (I think?)
|
|
||||||
* supports ReST
|
|
||||||
* uses Kid templates
|
|
||||||
|
|
||||||
Endo
|
|
||||||
~~~~
|
|
||||||
|
|
||||||
https://svn.enthought.com/enthought/wiki/EndoHowTo
|
|
||||||
|
|
||||||
* inspects object tree (I think?)
|
|
||||||
* 'traits' aware (see https://svn.enthought.com/enthought/wiki/Traits)
|
|
||||||
* customizable HTML output with custom templating engine
|
|
||||||
* little documentation, seems like it's written for Enthought's own use
|
|
||||||
mostly
|
|
||||||
* heavy dependencies
|
|
||||||
|
|
||||||
.. [1] Epydoc doesn't seem to be developed anymore, either, but it's so
|
|
||||||
widely used it cannot be ignored...
|
|
||||||
|
|
||||||
Questions, remarks, etc.
|
|
||||||
========================
|
|
||||||
|
|
||||||
For more information, questions, remarks, etc. see http://codespeak.net/py.
|
|
||||||
This website also contains links to mailing list and IRC channel.
|
|
|
@ -1,35 +0,0 @@
|
||||||
|
|
||||||
Proposed apigen refactorings
|
|
||||||
=============================
|
|
||||||
|
|
||||||
First of all we would like to have some kind of a persistent storage
|
|
||||||
for apigen, so we could use it for different purposes (hint! hint! pdb)
|
|
||||||
than just web pages. This would resolve the issue of having separate
|
|
||||||
apigen "data" generation and web page generation.
|
|
||||||
|
|
||||||
Apigen is a very useful feature, but we don't use it in general, which
|
|
||||||
is bad. One of the reasons is given above, and the other is that the API of
|
|
||||||
apigen is not that well defined, which makes it harder to use. So what
|
|
||||||
I think we need is:
|
|
||||||
|
|
||||||
* **py.apigen** tool, which will take tests and initpkg (or whatever
|
|
||||||
means of collecting data) and will try to store it somewhere
|
|
||||||
(not sure; a plain-text log as a first step?). Then the next step
|
|
||||||
would be to have tools for generating web pages out of it
|
|
||||||
(py.webapi or so) and other tools which will integrate it into pdb,
|
|
||||||
emacs (pick your random IDE here) or whatever.
|
|
||||||
|
|
||||||
* Another option is to have py.test generate that data and have other
|
|
||||||
tools use it.
|
|
||||||
|
|
||||||
* Data storage. A text log comes to mind, but it's not very handy.
|
|
||||||
Using any sort of SQL doesn't really count, because it makes the pylib
|
|
||||||
less standalone, especially since I wouldn't like to have to write all
|
|
||||||
that SQL myself, but rather use some kind of SQL object-relational
|
|
||||||
mapper. Another format might be some kind of structured text
|
|
||||||
(XML anyone?) or pickled stuff. Pickle has problems of its own,
|
|
||||||
so I don't have a best solution at hand.
|
|
||||||
|
|
||||||
* Accessing. The stored data is all strings and simple types built on top of them.
|
|
||||||
It would probably be good not to store all data in memory, because it might
|
|
||||||
be huge in case we would like to keep all past information there.
|
|
|
@ -1,67 +0,0 @@
|
||||||
======================
|
|
||||||
``py/bin/`` scripts
|
|
||||||
======================
|
|
||||||
|
|
||||||
The py-lib contains some scripts, most of which are
|
|
||||||
small ones (apart from ``py.test``) that help during
|
|
||||||
the python development process. If working
|
|
||||||
from an svn checkout of the py lib you may add ``py/bin``
|
|
||||||
to your shell ``PATH``, which should make the scripts
|
|
||||||
available at your command prompt.
|
|
||||||
|
|
||||||
``py.test``
|
|
||||||
===========
|
|
||||||
|
|
||||||
The ``py.test`` executable is the main entry point into the py-lib testing tool,
|
|
||||||
see the `py.test documentation`_.
|
|
||||||
|
|
||||||
.. _`py.test documentation`: test.html
|
|
||||||
|
|
||||||
``py.cleanup``
|
|
||||||
==============
|
|
||||||
|
|
||||||
Usage: ``py.cleanup [PATH]``
|
|
||||||
|
|
||||||
Delete .pyc files recursively, starting from ``PATH`` (which defaults to the
|
|
||||||
current working directory). Don't follow links and don't recurse into
|
|
||||||
directories with a ".".
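The behaviour corresponds roughly to the following sketch in terms of the
:api:`py.path` API (an illustration only, not the actual implementation)::

    import py

    def cleanup(startpath='.'):
        # remove *.pyc files below startpath, skipping links and
        # "dot" directories
        top = py.path.local(startpath)
        for pyc in top.visit(fil='*.pyc',
                             rec=lambda p: p.check(dotfile=0, link=0)):
            pyc.remove()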
|
|
||||||
|
|
||||||
|
|
||||||
``py.countloc``
|
|
||||||
===============
|
|
||||||
|
|
||||||
Usage: ``py.countloc [PATHS]``
|
|
||||||
|
|
||||||
Count (non-empty) lines of Python code and the number of Python files recursively,
|
|
||||||
starting from the ``PATHS`` given on the command line (defaulting to the current
|
|
||||||
working directory). Distinguish between test files and normal ones and report
|
|
||||||
them separately.
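A rough sketch of what gets counted (illustrative only; treating files whose
name starts with ``test_`` as test files is an assumption about the tool's
classification)::

    import py

    def countloc(paths=('.',)):
        counts = {'test': 0, 'normal': 0}
        files = {'test': 0, 'normal': 0}
        for path in paths:
            for pyfile in py.path.local(path).visit('*.py'):
                kind = (pyfile.basename.startswith('test_')
                        and 'test' or 'normal')
                # count only non-empty lines
                nonempty = [l for l in pyfile.readlines() if l.strip()]
                counts[kind] += len(nonempty)
                files[kind] += 1
        return counts, files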
|
|
||||||
|
|
||||||
``py.lookup``
|
|
||||||
=============
|
|
||||||
|
|
||||||
Usage: ``py.lookup SEARCH_STRING [options]``
|
|
||||||
|
|
||||||
Look recursively at Python files for ``SEARCH_STRING``, starting from the
|
|
||||||
current working directory. Print each matching line, with the filename and line number
|
|
||||||
prepended.
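An illustrative sketch of the described behaviour (not the tool's actual code)::

    import py

    def lookup(search_string, startpath='.'):
        top = py.path.local(startpath)
        for pyfile in top.visit('*.py'):
            for lineno, line in enumerate(pyfile.readlines()):
                if search_string in line:
                    # print filename, line number and the matching line
                    print '%s:%d: %s' % (pyfile.relto(top), lineno + 1,
                                         line.rstrip())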
|
|
||||||
|
|
||||||
``py.rest``
|
|
||||||
===========
|
|
||||||
|
|
||||||
Usage: ``py.rest [PATHS] [options]``
|
|
||||||
|
|
||||||
Look recursively for .txt files starting from ``PATHS`` and convert them to
|
|
||||||
html using docutils (or to pdf files, if the --pdf option is used).
|
|
||||||
|
|
||||||
``py.rest`` has some extra features over rst2html (which is shipped with
|
|
||||||
docutils). Most of these are still experimental; the one which is most likely
|
|
||||||
not going to change is the `graphviz`_ directive. With that you can embed .dot
|
|
||||||
files into your document and have them be converted to png (when outputting
|
|
||||||
html) and to eps (when outputting pdf). Otherwise the directive works mostly
|
|
||||||
like the image directive::
|
|
||||||
|
|
||||||
.. graphviz:: example.dot
|
|
||||||
:scale: 90
|
|
||||||
|
|
||||||
.. _`graphviz`: http://www.graphviz.org
|
|
141
py/doc/code.txt
|
@ -1,141 +0,0 @@
|
||||||
==============
|
|
||||||
:api:`py.code`
|
|
||||||
==============
|
|
||||||
|
|
||||||
The :api:`py.code` part of the 'py lib' contains some functionality to help
|
|
||||||
deal with Python code objects. Even though working with Python's internal
|
|
||||||
code objects (as found on frames and callables) can be very powerful, it's
|
|
||||||
usually also quite cumbersome, because the API provided by core Python is
|
|
||||||
relatively low level and not very accessible.
|
|
||||||
|
|
||||||
The :api:`py.code` library tries to simplify accessing the code objects as well
|
|
||||||
as creating them. There is a small set of interfaces a user needs to deal with,
|
|
||||||
all nicely bundled together, and with a rich set of 'Pythonic' functionality.
|
|
||||||
|
|
||||||
source: :source:`py/code/`
|
|
||||||
|
|
||||||
Contents of the library
|
|
||||||
=======================
|
|
||||||
|
|
||||||
Every object in the :api:`py.code` library wraps a core Python object related
|
|
||||||
to code objects, source code, frames and tracebacks: the :api:`py.code.Code`
|
|
||||||
class wraps code objects, :api:`py.code.Source` source snippets,
|
|
||||||
:api:`py.code.Traceback` exception tracebacks, :api:`py.code.Frame` frame
|
|
||||||
objects (as found in e.g. tracebacks) and :api:`py.code.ExceptionInfo` the
|
|
||||||
tuple provided by sys.exc_info() (containing exception and traceback
|
|
||||||
information when an exception occurs). Also in the library is a helper function
|
|
||||||
:api:`py.code.compile()` that provides the same functionality as Python's
|
|
||||||
built-in 'compile()' function, but returns a wrapped code object.
|
|
||||||
|
|
||||||
The wrappers
|
|
||||||
============
|
|
||||||
|
|
||||||
:api:`py.code.Code`
|
|
||||||
-------------------
|
|
||||||
|
|
||||||
Code objects are instantiated with a code object or a callable as argument,
|
|
||||||
and provide functionality to compare themselves with other Code objects, get to
|
|
||||||
the source file or its contents, create new Code objects from scratch, etc.
|
|
||||||
|
|
||||||
A quick example::
|
|
||||||
|
|
||||||
>>> import py
|
|
||||||
>>> c = py.code.Code(py.path.local.read)
|
|
||||||
>>> c.path.basename
|
|
||||||
'common.py'
|
|
||||||
>>> isinstance(c.source(), py.code.Source)
|
|
||||||
True
|
|
||||||
>>> str(c.source()).split('\n')[0]
|
|
||||||
"def read(self, mode='rb'):"
|
|
||||||
|
|
||||||
source: :source:`py/code/code.py`
|
|
||||||
|
|
||||||
:api:`py.code.Source`
|
|
||||||
---------------------
|
|
||||||
|
|
||||||
Source objects wrap snippets of Python source code, providing a simple yet
|
|
||||||
powerful interface to read, deindent, slice, compare, compile and manipulate
|
|
||||||
them, things that are not so easy in core Python.
|
|
||||||
|
|
||||||
Example::
|
|
||||||
|
|
||||||
>>> s = py.code.Source("""\
|
|
||||||
... def foo():
|
|
||||||
... print "foo"
|
|
||||||
... """)
|
|
||||||
>>> str(s).startswith('def') # automatic de-indentation!
|
|
||||||
True
|
|
||||||
>>> s.isparseable()
|
|
||||||
True
|
|
||||||
>>> sub = s.getstatement(1) # get the statement starting at line 1
|
|
||||||
>>> str(sub).strip() # XXX why is the strip() required?!?
|
|
||||||
'print "foo"'
|
|
||||||
|
|
||||||
source: :source:`py/code/source.py`
|
|
||||||
|
|
||||||
:api:`py.code.Traceback`
|
|
||||||
------------------------
|
|
||||||
|
|
||||||
Tracebacks are usually not very easy to examine: you need to access certain
|
|
||||||
somewhat hidden attributes of the traceback's items (resulting in expressions
|
|
||||||
such as 'fname = tb.tb_next.tb_frame.f_code.co_filename'). The Traceback
|
|
||||||
interface (and its TracebackItem children) tries to improve this.
|
|
||||||
|
|
||||||
Example::
|
|
||||||
|
|
||||||
>>> import sys
|
|
||||||
>>> try:
|
|
||||||
... py.path.local(100) # illegal argument
|
|
||||||
... except:
|
|
||||||
... exc, e, tb = sys.exc_info()
|
|
||||||
>>> t = py.code.Traceback(tb)
|
|
||||||
>>> first = t[1] # get the second entry (first is in this doc)
|
|
||||||
>>> first.path.basename # second is in py/path/local.py
|
|
||||||
'local.py'
|
|
||||||
>>> isinstance(first.statement, py.code.Source)
|
|
||||||
True
|
|
||||||
>>> str(first.statement).strip().startswith('raise ValueError')
|
|
||||||
True
|
|
||||||
|
|
||||||
source: :source:`py/code/traceback2.py`
|
|
||||||
|
|
||||||
:api:`py.code.Frame`
|
|
||||||
--------------------
|
|
||||||
|
|
||||||
Frame wrappers are used in :api:`py.code.Traceback` items, and will usually not
|
|
||||||
directly be instantiated. They provide some nice methods to evaluate code
|
|
||||||
'inside' the frame (using the frame's local variables), get to the underlying
|
|
||||||
code (frames have a code attribute that points to a :api:`py.code.Code` object)
|
|
||||||
and examine the arguments.
|
|
||||||
|
|
||||||
Example (using the 'first' TracebackItem instance created above)::
|
|
||||||
|
|
||||||
>>> frame = first.frame
|
|
||||||
>>> isinstance(frame.code, py.code.Code)
|
|
||||||
True
|
|
||||||
>>> isinstance(frame.eval('self'), py.__.path.local.local.LocalPath)
|
|
||||||
True
|
|
||||||
>>> [namevalue[0] for namevalue in frame.getargs()]
|
|
||||||
['cls', 'path']
|
|
||||||
|
|
||||||
:api:`py.code.ExceptionInfo`
|
|
||||||
----------------------------
|
|
||||||
|
|
||||||
A wrapper around the tuple returned by sys.exc_info() (it will call sys.exc_info()
|
|
||||||
itself if the tuple is not provided as an argument); it provides some handy
|
|
||||||
attributes to easily access the traceback and exception string.
|
|
||||||
|
|
||||||
Example::
|
|
||||||
|
|
||||||
>>> import sys
|
|
||||||
>>> try:
|
|
||||||
... foobar()
|
|
||||||
... except:
|
|
||||||
... excinfo = py.code.ExceptionInfo()
|
|
||||||
>>> excinfo.typename
|
|
||||||
'exceptions.NameError'
|
|
||||||
>>> isinstance(excinfo.traceback, py.code.Traceback)
|
|
||||||
True
|
|
||||||
>>> excinfo.exconly()
|
|
||||||
"NameError: name 'foobar' is not defined"
|
|
||||||
|
|
|
@ -1,73 +0,0 @@
|
||||||
=====================================================
|
|
||||||
Coding Style for the Py lib and friendly applications
|
|
||||||
=====================================================
|
|
||||||
|
|
||||||
.. contents::
|
|
||||||
.. sectnum::
|
|
||||||
|
|
||||||
Honour PEP 8: Style Guide for Python Code
|
|
||||||
-----------------------------------------
|
|
||||||
|
|
||||||
First of all, if you haven't already read it, read the `PEP 8
|
|
||||||
Style Guide for Python Code`_ which, if in doubt, serves as
|
|
||||||
the default coding-style for the py lib.
|
|
||||||
|
|
||||||
Documentation and Testing
|
|
||||||
-------------------------
|
|
||||||
|
|
||||||
- generally we want to drive and interweave coding of
|
|
||||||
documentation, tests and real code as much as possible.
|
|
||||||
Without good documentation others may never know about
|
|
||||||
your latest and greatest feature.
|
|
||||||
|
|
||||||
naming
|
|
||||||
------
|
|
||||||
|
|
||||||
- directories, modules and namespaces are always **lowercase**
|
|
||||||
|
|
||||||
- classes and especially Exceptions are most often **CamelCase**
|
|
||||||
|
|
||||||
- types, i.e. very widely usable classes like the ``py.path``
|
|
||||||
family are all lower case.
|
|
||||||
|
|
||||||
- never use plural names in directory and file names
|
|
||||||
|
|
||||||
- functions/methods are lowercase and ``_``-separated if
|
|
||||||
you really need to separate at all
|
|
||||||
|
|
||||||
- it's appreciated if you manage to name files in a directory
|
|
||||||
so that tab-completion on the shell level is as easy as possible.
|
|
||||||
|
|
||||||
|
|
||||||
committing
|
|
||||||
----------
|
|
||||||
|
|
||||||
- adding features requires adding appropriate tests.
|
|
||||||
|
|
||||||
- bugs should be reproduced by a test before being fixed.
|
|
||||||
|
|
||||||
- write telling log messages because several people
|
|
||||||
will read your diffs, and we plan to have a search facility
|
|
||||||
over the py lib's subversion repository.
|
|
||||||
|
|
||||||
- if you add ``.txt`` or ``.py`` files to the repository then
|
|
||||||
please make sure you have ``svn:eol-style`` set to native,
|
|
||||||
which allows checkin/checkout in native line-ending format.
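  For example (the file name is just a placeholder)::

      svn propset svn:eol-style native doc/newfile.txt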
|
|
||||||
|
|
||||||
Miscellaneous
|
|
||||||
-------------
|
|
||||||
|
|
||||||
- Tests are the insurance that your code will be maintained
|
|
||||||
further and survives major releases.
|
|
||||||
|
|
||||||
- Try to put the tests close to the tested code, don't
|
|
||||||
overload directories with names.
|
|
||||||
|
|
||||||
- If you think of exporting new py lib APIs, discuss it first on the
|
|
||||||
`py-dev mailing list`_ and possibly write a chapter in our
|
|
||||||
`future`_ book. Communication is considered key here to make
|
|
||||||
sure that the py lib develops in a consistent way.
|
|
||||||
|
|
||||||
.. _`PEP 8 Style Guide for Python Code`: http://www.python.org/peps/pep-0008.html
|
|
||||||
.. _`py-dev mailing list`: http://codespeak.net/mailman/listinfo/py-dev
|
|
||||||
.. _`future`: future.html
|
|
|
@ -1,170 +0,0 @@
|
||||||
import py
|
|
||||||
from py.__.misc.rest import convert_rest_html, strip_html_header
|
|
||||||
from py.__.misc.difftime import worded_time
|
|
||||||
from py.__.doc.conftest import get_apigenpath, get_docpath
|
|
||||||
from py.__.apigen.linker import relpath
|
|
||||||
|
|
||||||
html = py.xml.html
|
|
||||||
|
|
||||||
class Page(object):
|
|
||||||
doctype = ('<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"'
|
|
||||||
' "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">\n')
|
|
||||||
|
|
||||||
def __init__(self, project, title, targetpath, stylesheeturl=None,
|
|
||||||
type="text/html", encoding="ISO-8859-1"):
|
|
||||||
self.project = project
|
|
||||||
self.title = project.prefix_title + title
|
|
||||||
self.targetpath = targetpath
|
|
||||||
self.stylesheeturl = stylesheeturl
|
|
||||||
self.type = type
|
|
||||||
self.encoding = encoding
|
|
||||||
|
|
||||||
self.body = html.body()
|
|
||||||
self.head = html.head()
|
|
||||||
self._root = html.html(self.head, self.body)
|
|
||||||
self.fill()
|
|
||||||
|
|
||||||
def a_docref(self, name, relhtmlpath):
|
|
||||||
docpath = self.project.get_docpath()
|
|
||||||
return html.a(name, class_="menu",
|
|
||||||
href=relpath(self.targetpath.strpath,
|
|
||||||
docpath.join(relhtmlpath).strpath))
|
|
||||||
|
|
||||||
def a_apigenref(self, name, relhtmlpath):
|
|
||||||
apipath = get_apigenpath()
|
|
||||||
return html.a(name, class_="menu",
|
|
||||||
href=relpath(self.targetpath.strpath,
|
|
||||||
apipath.join(relhtmlpath).strpath))
|
|
||||||
|
|
||||||
def fill_menubar(self):
|
|
||||||
items = [
|
|
||||||
self.a_docref("index", "index.html"),
|
|
||||||
self.a_apigenref("api", "api/index.html"),
|
|
||||||
self.a_apigenref("source", "source/index.html"),
|
|
||||||
self.a_docref("contact", "contact.html"),
|
|
||||||
self.a_docref("download", "download.html"),
|
|
||||||
]
|
|
||||||
items2 = [items.pop(0)]
|
|
||||||
sep = " "
|
|
||||||
for item in items:
|
|
||||||
items2.append(sep)
|
|
||||||
items2.append(item)
|
|
||||||
self.menubar = html.div(id="menubar", *items2)
|
|
||||||
|
|
||||||
def fill(self):
|
|
||||||
content_type = "%s;charset=%s" %(self.type, self.encoding)
|
|
||||||
self.head.append(html.title(self.title))
|
|
||||||
self.head.append(html.meta(name="Content-Type", content=content_type))
|
|
||||||
if self.stylesheeturl:
|
|
||||||
self.head.append(
|
|
||||||
html.link(href=self.stylesheeturl,
|
|
||||||
media="screen", rel="stylesheet",
|
|
||||||
type="text/css"))
|
|
||||||
self.fill_menubar()
|
|
||||||
|
|
||||||
self.metaspace = html.div(
|
|
||||||
html.div(self.title, class_="project_title"),
|
|
||||||
self.menubar,
|
|
||||||
id='metaspace')
|
|
||||||
|
|
||||||
self.body.append(self.project.logo)
|
|
||||||
self.body.append(self.metaspace)
|
|
||||||
self.contentspace = html.div(id="contentspace")
|
|
||||||
self.body.append(self.contentspace)
|
|
||||||
|
|
||||||
def unicode(self, doctype=True):
|
|
||||||
page = self._root.unicode()
|
|
||||||
if doctype:
|
|
||||||
return self.doctype + page
|
|
||||||
else:
|
|
||||||
return page
|
|
||||||
|
|
||||||
class PyPage(Page):
|
|
||||||
def get_menubar(self):
|
|
||||||
menubar = super(PyPage, self).get_menubar()
|
|
||||||
# base layout
|
|
||||||
menubar.append(
|
|
||||||
html.a("issue", href="https://codespeak.net/issue/py-dev/",
|
|
||||||
class_="menu"),
|
|
||||||
)
|
|
||||||
return menubar
|
|
||||||
|
|
||||||
|
|
||||||
def getrealname(username):
|
|
||||||
try:
|
|
||||||
import uconf
|
|
||||||
except ImportError:
|
|
||||||
return username
|
|
||||||
try:
|
|
||||||
user = uconf.system.User(username)
|
|
||||||
except KeyboardInterrupt:
|
|
||||||
raise
|
|
||||||
try:
|
|
||||||
return user.realname or username
|
|
||||||
except KeyError:
|
|
||||||
return username
|
|
||||||
|
|
||||||
|
|
||||||
class Project:
|
|
||||||
mydir = py.magic.autopath().dirpath()
|
|
||||||
# string for url, path for local file
|
|
||||||
stylesheet = mydir.join('style.css')
|
|
||||||
title = "py lib"
|
|
||||||
prefix_title = "" # we have a logo already containing "py lib"
|
|
||||||
encoding = 'latin1'
|
|
||||||
logo = html.div(
|
|
||||||
html.a(
|
|
||||||
html.img(alt="py lib", id='pyimg', height=114, width=154,
|
|
||||||
src="http://codespeak.net/img/pylib.png"),
|
|
||||||
href="http://codespeak.net"))
|
|
||||||
Page = PyPage
|
|
||||||
|
|
||||||
|
|
||||||
def get_content(self, txtpath, encoding):
|
|
||||||
return unicode(txtpath.read(), encoding)
|
|
||||||
|
|
||||||
def get_docpath(self):
|
|
||||||
return get_docpath()
|
|
||||||
|
|
||||||
def get_htmloutputpath(self, txtpath):
|
|
||||||
docpath = self.get_docpath()
|
|
||||||
reloutputpath = txtpath.new(ext='.html').relto(self.mydir)
|
|
||||||
return docpath.join(reloutputpath)
|
|
||||||
|
|
||||||
def process(self, txtpath):
|
|
||||||
encoding = self.encoding
|
|
||||||
content = self.get_content(txtpath, encoding)
|
|
||||||
docpath = self.get_docpath()
|
|
||||||
outputpath = self.get_htmloutputpath(txtpath)
|
|
||||||
|
|
||||||
stylesheet = self.stylesheet
|
|
||||||
if isinstance(self.stylesheet, py.path.local):
|
|
||||||
if not docpath.join(stylesheet.basename).check():
|
|
||||||
docpath.ensure(dir=True)
|
|
||||||
stylesheet.copy(docpath)
|
|
||||||
stylesheet = relpath(outputpath.strpath,
|
|
||||||
docpath.join(stylesheet.basename).strpath)
|
|
||||||
|
|
||||||
content = convert_rest_html(content, txtpath,
|
|
||||||
stylesheet=stylesheet, encoding=encoding)
|
|
||||||
content = strip_html_header(content, encoding=encoding)
|
|
||||||
|
|
||||||
page = self.Page(self, "[%s] " % txtpath.purebasename,
|
|
||||||
outputpath, stylesheeturl=stylesheet)
|
|
||||||
|
|
||||||
try:
|
|
||||||
svninfo = txtpath.info()
|
|
||||||
modified = " modified %s by %s" % (worded_time(svninfo.mtime),
|
|
||||||
getrealname(svninfo.last_author))
|
|
||||||
except (KeyboardInterrupt, SystemExit):
|
|
||||||
raise
|
|
||||||
except:
|
|
||||||
modified = " "
|
|
||||||
|
|
||||||
page.contentspace.append(
|
|
||||||
html.div(html.div(modified, style="float: right; font-style: italic;"),
|
|
||||||
id = 'docinfoline'))
|
|
||||||
|
|
||||||
page.contentspace.append(py.xml.raw(content))
|
|
||||||
outputpath.ensure().write(page.unicode().encode(encoding))
|
|
||||||
|
|
|
@ -1,324 +0,0 @@
|
||||||
from __future__ import generators
|
|
||||||
import py
|
|
||||||
from py.__.misc import rest
|
|
||||||
from py.__.apigen.linker import relpath
|
|
||||||
import os
|
|
||||||
|
|
||||||
pypkgdir = py.path.local(py.__file__).dirpath()
|
|
||||||
|
|
||||||
mypath = py.magic.autopath().dirpath()
|
|
||||||
|
|
||||||
Option = py.test.config.Option
|
|
||||||
option = py.test.config.addoptions("documentation check options",
|
|
||||||
Option('-R', '--checkremote',
|
|
||||||
action="store_true", dest="checkremote", default=False,
|
|
||||||
help="perform tests involving remote accesses (links, svn)"
|
|
||||||
),
|
|
||||||
Option('', '--forcegen',
|
|
||||||
action="store_true", dest="forcegen", default=False,
|
|
||||||
help="force generation of html files even if they appear up-to-date"
|
|
||||||
),
|
|
||||||
)
|
|
||||||
|
|
||||||
def get_apigenpath():
|
|
||||||
from py.__.conftest import option
|
|
||||||
path = os.environ.get('APIGENPATH')
|
|
||||||
if path is None:
|
|
||||||
path = option.apigenpath
|
|
||||||
return pypkgdir.join(path, abs=True)
|
|
||||||
|
|
||||||
def get_docpath():
|
|
||||||
from py.__.conftest import option
|
|
||||||
path = os.environ.get('DOCPATH')
|
|
||||||
if path is None:
|
|
||||||
path = option.docpath
|
|
||||||
return pypkgdir.join(path, abs=True)
|
|
||||||
|
|
||||||
def get_apigen_relpath():
|
|
||||||
return relpath(get_docpath().strpath + '/',
|
|
||||||
get_apigenpath().strpath + '/')
|
|
||||||
|
|
||||||
def deindent(s, sep='\n'):
|
|
||||||
leastspaces = -1
|
|
||||||
lines = s.split(sep)
|
|
||||||
for line in lines:
|
|
||||||
if not line.strip():
|
|
||||||
continue
|
|
||||||
spaces = len(line) - len(line.lstrip())
|
|
||||||
if leastspaces == -1 or spaces < leastspaces:
|
|
||||||
leastspaces = spaces
|
|
||||||
if leastspaces == -1:
|
|
||||||
return s
|
|
||||||
for i, line in py.builtin.enumerate(lines):
|
|
||||||
if not line.strip():
|
|
||||||
lines[i] = ''
|
|
||||||
else:
|
|
||||||
lines[i] = line[leastspaces:]
|
|
||||||
return sep.join(lines)
|
|
||||||
|
|
||||||
_initialized = False
|
|
||||||
def checkdocutils():
|
|
||||||
global _initialized
|
|
||||||
try:
|
|
||||||
import docutils
|
|
||||||
except ImportError:
|
|
||||||
py.test.skip("docutils not importable")
|
|
||||||
if not _initialized:
|
|
||||||
from py.__.rest import directive
|
|
||||||
directive.register_linkrole('api', resolve_linkrole)
|
|
||||||
directive.register_linkrole('source', resolve_linkrole)
|
|
||||||
_initialized = True
|
|
||||||
|
|
||||||
def restcheck(path):
|
|
||||||
localpath = path
|
|
||||||
if hasattr(path, 'localpath'):
|
|
||||||
localpath = path.localpath
|
|
||||||
checkdocutils()
|
|
||||||
import docutils.utils
|
|
||||||
|
|
||||||
try:
|
|
||||||
cur = localpath
|
|
||||||
for x in cur.parts(reverse=True):
|
|
||||||
confrest = x.dirpath('confrest.py')
|
|
||||||
if confrest.check(file=1):
|
|
||||||
confrest = confrest.pyimport()
|
|
||||||
project = confrest.Project()
|
|
||||||
_checkskip(path, project.get_htmloutputpath(path))
|
|
||||||
project.process(path)
|
|
||||||
break
|
|
||||||
else:
|
|
||||||
# defer to default processor
|
|
||||||
_checkskip(path)
|
|
||||||
rest.process(path)
|
|
||||||
except KeyboardInterrupt:
|
|
||||||
raise
|
|
||||||
except docutils.utils.SystemMessage:
|
|
||||||
# we assume docutils printed info on stdout
|
|
||||||
py.test.fail("docutils processing failed, see captured stderr")
|
|
||||||
|
|
||||||
def _checkskip(lpath, htmlpath=None):
|
|
||||||
if not option.forcegen:
|
|
||||||
lpath = py.path.local(lpath)
|
|
||||||
if htmlpath is not None:
|
|
||||||
htmlpath = py.path.local(htmlpath)
|
|
||||||
if lpath.ext == '.txt':
|
|
||||||
htmlpath = htmlpath or lpath.new(ext='.html')
|
|
||||||
if htmlpath.check(file=1) and htmlpath.mtime() >= lpath.mtime():
|
|
||||||
py.test.skip("html file is up to date, use --forcegen to regenerate")
|
|
||||||
#return [] # no need to rebuild
|
|
||||||
|
|
||||||
class ReSTSyntaxTest(py.test.collect.Item):
|
|
||||||
def run(self):
|
|
||||||
mypath = self.fspath
|
|
||||||
restcheck(py.path.svnwc(mypath))
|
|
||||||
|
|
||||||
class DoctestText(py.test.collect.Item):
|
|
||||||
def run(self):
|
|
||||||
# XXX quite nasty... but it works (fixes win32 issues)
|
|
||||||
s = self._normalize_linesep()
|
|
||||||
l = []
|
|
||||||
prefix = '.. >>> '
|
|
||||||
mod = py.std.types.ModuleType(self.fspath.purebasename)
|
|
||||||
for line in deindent(s).split('\n'):
|
|
||||||
stripped = line.strip()
|
|
||||||
if stripped.startswith(prefix):
|
|
||||||
exec py.code.Source(stripped[len(prefix):]).compile() in \
|
|
||||||
mod.__dict__
|
|
||||||
line = ""
|
|
||||||
else:
|
|
||||||
l.append(line)
|
|
||||||
docstring = "\n".join(l)
|
|
||||||
self.execute(mod, docstring)
|
|
||||||
|
|
||||||
def execute(self, mod, docstring):
|
|
||||||
mod.__doc__ = docstring
|
|
||||||
failed, tot = py.compat.doctest.testmod(mod, verbose=1)
|
|
||||||
if failed:
|
|
||||||
py.test.fail("doctest %s: %s failed out of %s" %(
|
|
||||||
self.fspath, failed, tot))
|
|
||||||
|
|
||||||
def _normalize_linesep(self):
|
|
||||||
s = self.fspath.read()
|
|
||||||
linesep = '\n'
|
|
||||||
if '\r' in s:
|
|
||||||
if '\n' not in s:
|
|
||||||
linesep = '\r'
|
|
||||||
else:
|
|
||||||
linesep = '\r\n'
|
|
||||||
s = s.replace(linesep, '\n')
|
|
||||||
return s
|
|
||||||
|
|
||||||
class LinkCheckerMaker(py.test.collect.Collector):
|
|
||||||
def run(self):
|
|
||||||
l = []
|
|
||||||
for call, tryfn, path, lineno in genlinkchecks(self.fspath):
|
|
||||||
l.append(tryfn)
|
|
||||||
return l
|
|
||||||
|
|
||||||
def join(self, name):
|
|
||||||
for call, tryfn, path, lineno in genlinkchecks(self.fspath):
|
|
||||||
if tryfn == name:
|
|
||||||
return CheckLink(name, parent=self, args=(tryfn, path, lineno), obj=call)
|
|
||||||
|
|
||||||
class CheckLink(py.test.collect.Function):
|
|
||||||
def setup(self):
|
|
||||||
pass
|
|
||||||
def teardown(self):
|
|
||||||
pass
|
|
||||||
|
|
||||||
class ReSTChecker(py.test.collect.Module):
|
|
||||||
DoctestText = DoctestText
|
|
||||||
ReSTSyntaxTest = ReSTSyntaxTest
|
|
||||||
|
|
||||||
def __repr__(self):
|
|
||||||
return py.test.collect.Collector.__repr__(self)
|
|
||||||
|
|
||||||
def setup(self):
|
|
||||||
pass
|
|
||||||
def teardown(self):
|
|
||||||
pass
|
|
||||||
def run(self):
|
|
||||||
return [self.fspath.basename, 'checklinks', 'doctest']
|
|
||||||
def join(self, name):
|
|
||||||
if name == self.fspath.basename:
|
|
||||||
return self.ReSTSyntaxTest(name, parent=self)
|
|
||||||
elif name == 'checklinks':
|
|
||||||
return LinkCheckerMaker(name, self)
|
|
||||||
elif name == 'doctest':
|
|
||||||
return self.DoctestText(name, self)
|
|
||||||
|
|
||||||
# generating functions + args as single tests
|
|
||||||
def genlinkchecks(path):
|
|
||||||
for lineno, line in py.builtin.enumerate(path.readlines()):
|
|
||||||
line = line.strip()
|
|
||||||
if line.startswith('.. _'):
|
|
||||||
if line.startswith('.. _`'):
|
|
||||||
delim = '`:'
|
|
||||||
else:
|
|
||||||
delim = ':'
|
|
||||||
l = line.split(delim, 1)
|
|
||||||
if len(l) != 2:
|
|
||||||
continue
|
|
||||||
tryfn = l[1].strip()
|
|
||||||
if tryfn.startswith('http:') or tryfn.startswith('https:'):
|
|
||||||
if option.checkremote:
|
|
||||||
yield urlcheck, tryfn, path, lineno
|
|
||||||
elif tryfn.startswith('webcal:'):
|
|
||||||
continue
|
|
||||||
else:
|
|
||||||
i = tryfn.find('#')
|
|
||||||
if i != -1:
|
|
||||||
checkfn = tryfn[:i]
|
|
||||||
else:
|
|
||||||
checkfn = tryfn
|
|
||||||
if checkfn.strip() and (1 or checkfn.endswith('.html')):
|
|
||||||
yield localrefcheck, tryfn, path, lineno
|
|
||||||
|
|
||||||
def urlcheck(tryfn, path, lineno):
|
|
||||||
try:
|
|
||||||
print "trying remote", tryfn
|
|
||||||
py.std.urllib2.urlopen(tryfn)
|
|
||||||
except (py.std.urllib2.URLError, py.std.urllib2.HTTPError), e:
|
|
||||||
if e.code in (401, 403): # authorization required, forbidden
|
|
||||||
py.test.skip("%s: %s" %(tryfn, str(e)))
|
|
||||||
else:
|
|
||||||
py.test.fail("remote reference error %r in %s:%d\n%s" %(
|
|
||||||
tryfn, path.basename, lineno+1, e))
|
|
||||||
|
|
||||||
def localrefcheck(tryfn, path, lineno):
|
|
||||||
# assume it should be a file
|
|
||||||
i = tryfn.find('#')
|
|
||||||
if tryfn.startswith('javascript:'):
|
|
||||||
return # don't check JS refs
|
|
||||||
if i != -1:
|
|
||||||
anchor = tryfn[i+1:]
|
|
||||||
tryfn = tryfn[:i]
|
|
||||||
else:
|
|
||||||
anchor = ''
|
|
||||||
fn = path.dirpath(tryfn)
|
|
||||||
ishtml = fn.ext == '.html'
|
|
||||||
fn = ishtml and fn.new(ext='.txt') or fn
|
|
||||||
print "filename is", fn
|
|
||||||
if not fn.check(): # not ishtml or not fn.check():
|
|
||||||
if not py.path.local(tryfn).check(): # the html could be there
|
|
||||||
py.test.fail("reference error %r in %s:%d" %(
|
|
||||||
tryfn, path.basename, lineno+1))
|
|
||||||
if anchor:
|
|
||||||
source = unicode(fn.read(), 'latin1')
|
|
||||||
source = source.lower().replace('-', ' ') # aehem
|
|
||||||
|
|
||||||
anchor = anchor.replace('-', ' ')
|
|
||||||
match2 = ".. _`%s`:" % anchor
|
|
||||||
match3 = ".. _%s:" % anchor
|
|
||||||
candidates = (anchor, match2, match3)
|
|
||||||
print "candidates", repr(candidates)
|
|
||||||
for line in source.split('\n'):
|
|
||||||
line = line.strip()
|
|
||||||
if line in candidates:
|
|
||||||
break
|
|
||||||
else:
|
|
||||||
py.test.fail("anchor reference error %s#%s in %s:%d" %(
|
|
||||||
tryfn, anchor, path.basename, lineno+1))
|
|
||||||
|
|
||||||
|
|
||||||
# ___________________________________________________________
|
|
||||||
#
|
|
||||||
# hooking into py.test Directory collector's chain ...
|
|
||||||
|
|
||||||
class DocDirectory(py.test.collect.Directory):
|
|
||||||
ReSTChecker = ReSTChecker
|
|
||||||
|
|
||||||
def run(self):
|
|
||||||
results = super(DocDirectory, self).run()
|
|
||||||
for x in self.fspath.listdir('*.txt', sort=True):
|
|
||||||
results.append(x.basename)
|
|
||||||
return results
|
|
||||||
|
|
||||||
def join(self, name):
|
|
||||||
if not name.endswith('.txt'):
|
|
||||||
return super(DocDirectory, self).join(name)
|
|
||||||
p = self.fspath.join(name)
|
|
||||||
if p.check(file=1):
|
|
||||||
return self.ReSTChecker(p, parent=self)
|
|
||||||
Directory = DocDirectory
|
|
||||||
|
|
||||||
def resolve_linkrole(name, text, check=True):
|
|
||||||
apigen_relpath = get_apigen_relpath()
|
|
||||||
if name == 'api':
|
|
||||||
if text == 'py':
|
|
||||||
return ('py', apigen_relpath + 'api/index.html')
|
|
||||||
else:
|
|
||||||
assert text.startswith('py.'), (
|
|
||||||
'api link "%s" does not point to the py package') % (text,)
|
|
||||||
dotted_name = text
|
|
||||||
if dotted_name.find('(') > -1:
|
|
||||||
dotted_name = dotted_name[:text.find('(')]
|
|
||||||
# remove pkg root
|
|
||||||
path = dotted_name.split('.')[1:]
|
|
||||||
dotted_name = '.'.join(path)
|
|
||||||
obj = py
|
|
||||||
if check:
|
|
||||||
for chunk in path:
|
|
||||||
try:
|
|
||||||
obj = getattr(obj, chunk)
|
|
||||||
except AttributeError:
|
|
||||||
raise AssertionError(
|
|
||||||
'problem with linkrole :api:`%s`: can not resolve '
|
|
||||||
'dotted name %s' % (text, dotted_name,))
|
|
||||||
return (text, apigen_relpath + 'api/%s.html' % (dotted_name,))
|
|
||||||
elif name == 'source':
|
|
||||||
assert text.startswith('py/'), ('source link "%s" does not point '
|
|
||||||
'to the py package') % (text,)
|
|
||||||
relpath = '/'.join(text.split('/')[1:])
|
|
||||||
if check:
|
|
||||||
pkgroot = py.__pkg__.getpath()
|
|
||||||
abspath = pkgroot.join(relpath)
|
|
||||||
assert pkgroot.join(relpath).check(), (
|
|
||||||
'problem with linkrole :source:`%s`: '
|
|
||||||
'path %s does not exist' % (text, relpath))
|
|
||||||
if relpath.endswith('/') or not relpath:
|
|
||||||
relpath += 'index.html'
|
|
||||||
else:
|
|
||||||
relpath += '.html'
|
|
||||||
return (text, apigen_relpath + 'source/%s' % (relpath,))
|
|
||||||
|
|
|
@ -1,81 +0,0 @@
|
||||||
py lib contact and communication
|
|
||||||
===================================
|
|
||||||
|
|
||||||
.. contents::
|
|
||||||
.. sectnum::
|
|
||||||
|
|
||||||
IRC Channel #pylib on irc.freenode.net
|
|
||||||
--------------------------------------------
|
|
||||||
|
|
||||||
The #pylib channel on freenode displays all commits to the py lib
|
|
||||||
and you are welcome to lurk or to ask questions there!
|
|
||||||
|
|
||||||
`py-dev`_ developers mailing list
|
|
||||||
-----------------------------------
|
|
||||||
|
|
||||||
If you see bugs and/or can provide patches, please
|
|
||||||
subscribe to the `py-dev developers list`_.
|
|
||||||
As of February 2007 it has medium to low traffic.
|
|
||||||
|
|
||||||
|
|
||||||
`py-svn`_ commit mailing list
|
|
||||||
-----------------------------------
|
|
||||||
|
|
||||||
If you'd like to see ongoing development commits,
|
|
||||||
please subscribe to:
|
|
||||||
|
|
||||||
`py-svn general commit mailing list`_
|
|
||||||
|
|
||||||
This list (as of February 2007) has medium to high traffic.
|
|
||||||
|
|
||||||
|
|
||||||
`development bug/feature tracker`_
|
|
||||||
---------------------------------------------
|
|
||||||
|
|
||||||
This (somewhat old) roundup instance still serves
|
|
||||||
to file bugs and track issues. However, we also
|
|
||||||
keep a list of "TODOs" in various directories.
|
|
||||||
|
|
||||||
|
|
||||||
Coding and communication
|
|
||||||
------------------------
|
|
||||||
|
|
||||||
We are practicing what could be called documentation-,
vision-, discussion- and automated-test driven development.
In the `future`_ book we try to lay out visions and ideas for
the near coding future, to provide a means for preliminary
feedback before code hits the ground.
|
|
||||||
|
|
||||||
With our `coding style`_ we are mostly following
CPython guidance, with some additional restrictions,
some of which projects like twisted_ or zope3_ have
adopted in similar ways.
|
|
||||||
|
|
||||||
.. _`zope3`: http://zope3.zwiki.org/
|
|
||||||
.. _twisted: http://www.twistedmatrix.org
|
|
||||||
.. _future: future.html
|
|
||||||
|
|
||||||
.. _`get an account`:
|
|
||||||
|
|
||||||
|
|
||||||
get an account on codespeak
|
|
||||||
---------------------------
|
|
||||||
|
|
||||||
codespeak_ employs a liberal committing scheme. If you know
someone who is already active on codespeak or you are otherwise known in
the community, then you will most probably just get access. But even if
you are new to the Python developer community you may still get an account if
you want to improve things and can be expected to honour the
style of coding and communication.
|
|
||||||
|
|
||||||
.. _`coding style`: coding-style.html
|
|
||||||
.. _us: http://codespeak.net/mailman/listinfo/py-dev
|
|
||||||
.. _codespeak: http://codespeak.net/
|
|
||||||
.. _`py-dev`:
|
|
||||||
.. _`development mailing list`:
|
|
||||||
.. _`py-dev developers list`: http://codespeak.net/mailman/listinfo/py-dev
|
|
||||||
.. _`subversion commit mailing list`:
|
|
||||||
.. _`py-svn`:
|
|
||||||
.. _`py-svn general commit mailing list`: http://codespeak.net/mailman/listinfo/py-svn
|
|
||||||
.. _`development bug/feature tracker`: https://codespeak.net/issue/py-dev/
|
|
||||||
|
|
|
@ -1,113 +0,0 @@
|
||||||
Download and Installation of the py lib
|
|
||||||
===============================================
|
|
||||||
|
|
||||||
.. contents::
|
|
||||||
.. sectnum::
|
|
||||||
|
|
||||||
Downloading a tar/zip file and installing it
|
|
||||||
===================================================
|
|
||||||
|
|
||||||
The latest public release:
|
|
||||||
|
|
||||||
`download py-0.9.0.tar.gz`_
|
|
||||||
`download py-0.9.0.zip`_
|
|
||||||
|
|
||||||
.. _`download py-0.9.0.tar.gz`: http://codespeak.net/download/py/py-0.9.0.tar.gz
|
|
||||||
.. _`download py-0.9.0.zip`: http://codespeak.net/download/py/py-0.9.0.zip
|
|
||||||
|
|
||||||
The py lib can be `globally installed via setup.py`_
|
|
||||||
or `used locally`_.
|
|
||||||
|
|
||||||
WARNING: on win32 there is no pre-packaged C extension
module (greenlet) yet, and thus greenlets will not work
out of the box.
|
|
||||||
|
|
||||||
Getting (and updating) via subversion
|
|
||||||
--------------------------------------------
|
|
||||||
|
|
||||||
Use Subversion to checkout the latest 0.9.x stable release:
|
|
||||||
|
|
||||||
svn co http://codespeak.net/svn/py/release/0.9.x py-0.9.x
|
|
||||||
|
|
||||||
to obtain the complete code and documentation source.
|
|
||||||
|
|
||||||
If you experience problems with the subversion checkout, e.g.
because you have an http-proxy in between that doesn't proxy
DAV requests, you can try to use "codespeak.net:8080" instead
of just "codespeak.net". Alternatively, you may tweak
your local subversion installation.
|
|
||||||
|
|
||||||
If you want to follow stable snapshots
|
|
||||||
then you may use the equivalent of this invocation:
|
|
||||||
|
|
||||||
svn co http://codespeak.net/svn/py/dist py-dist
|
|
||||||
|
|
||||||
|
|
||||||
.. _`globally installed via setup.py`:
|
|
||||||
|
|
||||||
Installation via setup.py
|
|
||||||
------------------------------
|
|
||||||
|
|
||||||
Go to your unpacked/checked out directory
|
|
||||||
and issue:
|
|
||||||
|
|
||||||
python setup.py install
|
|
||||||
|
|
||||||
|
|
||||||
.. _`used locally`:
|
|
||||||
|
|
||||||
Local Installation/Usage
|
|
||||||
------------------------------
|
|
||||||
|
|
||||||
You need to put the checkout-directory into your ``PYTHONPATH``
|
|
||||||
and you want to have the ``py-dist/py/bin/py.test`` script in
|
|
||||||
your (unixish) system path, which lets you execute test files
|
|
||||||
and directories.
|
|
||||||
|
|
||||||
There is a convenient way for Bash/Shell based systems
|
|
||||||
to set up the ``PYTHONPATH`` as well as the shell ``PATH``, insert::
|
|
||||||
|
|
||||||
eval `python ~/path/to/py-dist/py/env.py`
|
|
||||||
|
|
||||||
into your ``.bash_profile``. Of course, you need to
|
|
||||||
specify your own checkout-directory.
|
|
||||||
|
|
||||||
|
|
||||||
.. _`svn-external scenario`:
|
|
||||||
|
|
||||||
The py lib as an svn external
|
|
||||||
-------------------------------------------------------
|
|
||||||
|
|
||||||
Add the py lib as an external to your project `DIRECTORY`
|
|
||||||
which contains your svn-controlled root package::
|
|
||||||
|
|
||||||
svn propedit 'svn:externals' DIRECTORY
|
|
||||||
|
|
||||||
which will open an editor where you can add
|
|
||||||
the following line:
|
|
||||||
|
|
||||||
py http://codespeak.net/svn/py/dist
|
|
||||||
|
|
||||||
This will make your project automatically use the
most recent stable snapshot of the py lib.
|
|
||||||
|
|
||||||
Alternatively you may use this url for
|
|
||||||
integrating the development version:
|
|
||||||
|
|
||||||
http://codespeak.net/svn/py/trunk
|
|
||||||
|
|
||||||
or the next one for following e.g. the 0.9 release branch
|
|
||||||
|
|
||||||
http://codespeak.net/svn/py/release/0.9.x
|
|
||||||
|
|
||||||
|
|
||||||
py subversion directory structure
|
|
||||||
=================================
|
|
||||||
|
|
||||||
The directory release layout of the repository is
|
|
||||||
going to follow this scheme::
|
|
||||||
|
|
||||||
http://codespeak.net/
|
|
||||||
svn/py/dist # latest stable (may or may not be a release)
|
|
||||||
svn/py/release # release tags and branches
|
|
||||||
svn/py/trunk # head development / merge point
|
|
|
@ -1,13 +0,0 @@
|
||||||
from py.xml import html
|
|
||||||
|
|
||||||
paras = "First Para", "Second para"
|
|
||||||
|
|
||||||
doc = html.html(
|
|
||||||
html.head(
|
|
||||||
html.meta(name="Content-Type", value="text/html; charset=latin1")),
|
|
||||||
html.body(
|
|
||||||
[html.p(p) for p in paras]))
|
|
||||||
|
|
||||||
print unicode(doc).encode('latin1')
|
|
||||||
|
|
||||||
|
|
|
@ -1,23 +0,0 @@
|
||||||
import py
|
|
||||||
html = py.xml.html
|
|
||||||
|
|
||||||
class my(html):
|
|
||||||
"a custom style"
|
|
||||||
class body(html.body):
|
|
||||||
style = html.Style(font_size = "120%")
|
|
||||||
|
|
||||||
class h2(html.h2):
|
|
||||||
style = html.Style(background = "grey")
|
|
||||||
|
|
||||||
class p(html.p):
|
|
||||||
style = html.Style(font_weight="bold")
|
|
||||||
|
|
||||||
doc = my.html(
|
|
||||||
my.head(),
|
|
||||||
my.body(
|
|
||||||
my.h2("hello world"),
|
|
||||||
my.p("bold as bold can")
|
|
||||||
)
|
|
||||||
)
|
|
||||||
|
|
||||||
print doc.unicode(indent=2)
|
|
|
@ -1,17 +0,0 @@
|
||||||
|
|
||||||
import py
|
|
||||||
class ns(py.xml.Namespace):
|
|
||||||
pass
|
|
||||||
|
|
||||||
doc = ns.books(
|
|
||||||
ns.book(
|
|
||||||
ns.author("May Day"),
|
|
||||||
ns.title("python for java programmers"),),
|
|
||||||
ns.book(
|
|
||||||
ns.author("why", class_="somecssclass"),
|
|
||||||
ns.title("Java for Python programmers"),),
|
|
||||||
publisher="N.N",
|
|
||||||
)
|
|
||||||
print doc.unicode(indent=2).encode('utf8')
|
|
||||||
|
|
||||||
|
|
|
@ -1,120 +0,0 @@
|
||||||
from py.test import raises
|
|
||||||
import py
|
|
||||||
|
|
||||||
def otherfunc(a,b):
|
|
||||||
assert a==b
|
|
||||||
|
|
||||||
def somefunc(x,y):
|
|
||||||
otherfunc(x,y)
|
|
||||||
|
|
||||||
def otherfunc_multi(a,b):
|
|
||||||
assert (a ==
|
|
||||||
b)
|
|
||||||
|
|
||||||
class TestFailing(object):
|
|
||||||
def test_simple(self):
|
|
||||||
def f():
|
|
||||||
return 42
|
|
||||||
def g():
|
|
||||||
return 43
|
|
||||||
|
|
||||||
assert f() == g()
|
|
||||||
|
|
||||||
def test_simple_multiline(self):
|
|
||||||
otherfunc_multi(
|
|
||||||
42,
|
|
||||||
6*9)
|
|
||||||
|
|
||||||
def test_not(self):
|
|
||||||
def f():
|
|
||||||
return 42
|
|
||||||
assert not f()
|
|
||||||
|
|
||||||
def test_complex_error(self):
|
|
||||||
def f():
|
|
||||||
return 44
|
|
||||||
def g():
|
|
||||||
return 43
|
|
||||||
somefunc(f(), g())
|
|
||||||
|
|
||||||
def test_z1_unpack_error(self):
|
|
||||||
l = []
|
|
||||||
a,b = l
|
|
||||||
|
|
||||||
def test_z2_type_error(self):
|
|
||||||
l = 3
|
|
||||||
a,b = l
|
|
||||||
|
|
||||||
def test_startswith(self):
|
|
||||||
s = "123"
|
|
||||||
g = "456"
|
|
||||||
assert s.startswith(g)
|
|
||||||
|
|
||||||
def test_startswith_nested(self):
|
|
||||||
def f():
|
|
||||||
return "123"
|
|
||||||
def g():
|
|
||||||
return "456"
|
|
||||||
assert f().startswith(g())
|
|
||||||
|
|
||||||
def test_global_func(self):
|
|
||||||
assert isinstance(globf(42), float)
|
|
||||||
|
|
||||||
def test_instance(self):
|
|
||||||
self.x = 6*7
|
|
||||||
assert self.x != 42
|
|
||||||
|
|
||||||
def test_compare(self):
|
|
||||||
assert globf(10) < 5
|
|
||||||
|
|
||||||
def test_try_finally(self):
|
|
||||||
x = 1
|
|
||||||
try:
|
|
||||||
assert x == 0
|
|
||||||
finally:
|
|
||||||
x = 0
|
|
||||||
|
|
||||||
def test_raises(self):
|
|
||||||
s = 'qwe'
|
|
||||||
raises(TypeError, "int(s)")
|
|
||||||
|
|
||||||
def test_raises_doesnt(self):
|
|
||||||
raises(IOError, "int('3')")
|
|
||||||
|
|
||||||
def test_raise(self):
|
|
||||||
raise ValueError("demo error")
|
|
||||||
|
|
||||||
def test_tupleerror(self):
|
|
||||||
a,b = [1]
|
|
||||||
|
|
||||||
def test_reinterpret_fails_with_print_for_the_fun_of_it(self):
|
|
||||||
l = [1,2,3]
|
|
||||||
print "l is", l
|
|
||||||
a,b = l.pop()
|
|
||||||
|
|
||||||
def test_some_error(self):
|
|
||||||
if namenotexi:
|
|
||||||
pass
|
|
||||||
|
|
||||||
def test_generator(self):
|
|
||||||
yield None
|
|
||||||
|
|
||||||
def func1(self):
|
|
||||||
assert 41 == 42
|
|
||||||
|
|
||||||
def test_generator2(self):
|
|
||||||
yield self.func1
|
|
||||||
|
|
||||||
# thanks to Matthew Scott for this test
|
|
||||||
def test_dynamic_compile_shows_nicely():
|
|
||||||
src = 'def foo():\n assert 1 == 0\n'
|
|
||||||
name = 'abc-123'
|
|
||||||
module = py.std.imp.new_module(name)
|
|
||||||
code = py.code.compile(src, name, 'exec')
|
|
||||||
exec code in module.__dict__
|
|
||||||
py.std.sys.modules[name] = module
|
|
||||||
module.foo()
|
|
||||||
|
|
||||||
|
|
||||||
def globf(x):
|
|
||||||
return x+1
|
|
|
@ -1,11 +0,0 @@
|
||||||
|
|
||||||
import py
|
|
||||||
failure_demo = py.magic.autopath().dirpath('failure_demo.py')
|
|
||||||
from py.__.doc.test_conftest import countoutcomes
|
|
||||||
|
|
||||||
def test_failure_demo_fails_properly():
|
|
||||||
config = py.test.config._reparse([failure_demo])
|
|
||||||
session = config.initsession()
|
|
||||||
failed, passed, skipped = countoutcomes(session)
|
|
||||||
assert failed == 21
|
|
||||||
assert passed == 0
|
|
|
@ -1,42 +0,0 @@
|
||||||
def setup_module(module):
|
|
||||||
module.TestStateFullThing.classcount = 0
|
|
||||||
|
|
||||||
class TestStateFullThing:
|
|
||||||
def setup_class(cls):
|
|
||||||
cls.classcount += 1
|
|
||||||
|
|
||||||
def teardown_class(cls):
|
|
||||||
cls.classcount -= 1
|
|
||||||
|
|
||||||
def setup_method(self, method):
|
|
||||||
self.id = eval(method.func_name[5:])
|
|
||||||
|
|
||||||
def test_42(self):
|
|
||||||
assert self.classcount == 1
|
|
||||||
assert self.id == 42
|
|
||||||
|
|
||||||
def test_23(self):
|
|
||||||
assert self.classcount == 1
|
|
||||||
assert self.id == 23
|
|
||||||
|
|
||||||
def teardown_module(module):
|
|
||||||
assert module.TestStateFullThing.classcount == 0
|
|
||||||
|
|
||||||
""" For this example the control flow happens as follows::
|
|
||||||
import test_setup_flow_example
|
|
||||||
setup_module(test_setup_flow_example)
|
|
||||||
setup_class(TestStateFullThing)
|
|
||||||
instance = TestStateFullThing()
|
|
||||||
setup_method(instance, instance.test_42)
|
|
||||||
instance.test_42()
|
|
||||||
setup_method(instance, instance.test_23)
|
|
||||||
instance.test_23()
|
|
||||||
teardown_class(TestStateFullThing)
|
|
||||||
teardown_module(test_setup_flow_example)
|
|
||||||
|
|
||||||
Note that ``setup_class(TestStateFullThing)`` is called and not
|
|
||||||
``TestStateFullThing.setup_class()`` which would require you
|
|
||||||
to insert ``setup_class = classmethod(setup_class)`` to make
|
|
||||||
your setup function callable.
|
|
||||||
"""
|
|
||||||
|
|
|
@ -1,224 +0,0 @@
|
||||||
The py.execnet library
|
|
||||||
======================
|
|
||||||
|
|
||||||
.. contents::
|
|
||||||
.. sectnum::
|
|
||||||
|
|
||||||
A new view on distributed execution
|
|
||||||
-----------------------------------
|
|
||||||
|
|
||||||
``py.execnet`` supports ad-hoc distribution of parts of
|
|
||||||
a program across process and network barriers. *Ad-hoc*
|
|
||||||
means that the client side may completely control
|
|
||||||
|
|
||||||
* which parts of a program execute remotely and
|
|
||||||
|
|
||||||
* which data protocols are used between them
|
|
||||||
|
|
||||||
without requiring any prior manual installation
|
|
||||||
of user program code on the remote side. In fact,
|
|
||||||
not even a prior installation of any server code
|
|
||||||
is required, provided there is a way to get
|
|
||||||
an input/output connection to a python interpreter
|
|
||||||
(for example via "ssh" and a "python" executable).
|
|
||||||
|
|
||||||
By comparison, traditional Remote Method Invocation (RMI)
approaches require prior installation and rather
heavy manual processes of setup, distribution and
communication between program parts.
|
|
||||||
|
|
||||||
|
|
||||||
What about Security? Are you completely nuts?
|
|
||||||
---------------------------------------------
|
|
||||||
|
|
||||||
We'll talk about that later :-)
|
|
||||||
|
|
||||||
Basic Features
|
|
||||||
==============
|
|
||||||
|
|
||||||
With ``py.execnet`` you get the means
|
|
||||||
|
|
||||||
- to navigate through the network with Process, Thread, SSH
|
|
||||||
and Socket- gateways that allow you ...
|
|
||||||
|
|
||||||
- to distribute your program across a network and define
|
|
||||||
communication protocols from the client side, making
|
|
||||||
server maintenance superfluous. In fact, there is no such
|
|
||||||
thing as a server. It's just another computer ... if it
|
|
||||||
doesn't run in a kernel-level jail [#]_ in which case
|
|
||||||
even that is virtualized.
|
|
||||||
|
|
||||||
|
|
||||||
Available Gateways/Connection methods
|
|
||||||
-----------------------------------------
|
|
||||||
|
|
||||||
You may use one of the following connection methods:
|
|
||||||
|
|
||||||
* :api:`py.execnet.PopenGateway` a subprocess on the local
|
|
||||||
machine. Useful for jailing certain parts of a program
|
|
||||||
or for making use of multiple processors.
|
|
||||||
|
|
||||||
* :api:`py.execnet.SshGateway` a way to connect to
|
|
||||||
a remote ssh server and distribute execution to it.
|
|
||||||
|
|
||||||
* :api:`py.execnet.SocketGateway` a way to connect to
|
|
||||||
a remote Socket based server. *Note* that this method
|
|
||||||
requires a manually started
|
|
||||||
:source:`py/execnet/script/socketserver.py`
|
|
||||||
script. You can run this "server script" without
|
|
||||||
having the py lib installed on that remote system.
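
For orientation, constructing the three gateway types could look roughly
like this (a minimal sketch; ``example.com`` and the port number are
placeholders, and the gateways are torn down again right away)::

    import py

    # a python subprocess on the local machine
    gw1 = py.execnet.PopenGateway()

    # a remote interpreter reached via ssh (host name is a placeholder)
    gw2 = py.execnet.SshGateway('example.com')

    # a remote interpreter behind a manually started socketserver.py
    # (host and port are placeholders)
    gw3 = py.execnet.SocketGateway('localhost', 7770)

    for gw in (gw1, gw2, gw3):
        gw.exit()    # shut down the gateway again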
|
|
||||||
|
|
||||||
Remote execution approach
|
|
||||||
-------------------------------------
|
|
||||||
|
|
||||||
All gateways offer one main high level function:
|
|
||||||
|
|
||||||
def remote_exec(source):
|
|
||||||
"""return channel object for communicating with the asynchronously
|
|
||||||
executing 'source' code which will have a corresponding 'channel'
|
|
||||||
object in its executing namespace."""
|
|
||||||
|
|
||||||
With `remote_exec` you send source code to the other
|
|
||||||
side and get both a local and a remote Channel_ object,
|
|
||||||
which you can use to have the local and remote site
|
|
||||||
communicate data in a structured way. Here is
|
|
||||||
an example:
|
|
||||||
|
|
||||||
>>> import py
|
|
||||||
>>> gw = py.execnet.PopenGateway()
|
|
||||||
>>> channel = gw.remote_exec("""
|
|
||||||
... import os
|
|
||||||
... channel.send(os.getpid())
|
|
||||||
... """)
|
|
||||||
>>> remote_pid = channel.receive()
|
|
||||||
>>> remote_pid != py.std.os.getpid()
|
|
||||||
True
|
|
||||||
|
|
||||||
`remote_exec` implements the idea of ``determining
protocol and remote code from the client/local side``.
This makes it very easy to distribute a program run in an ad-hoc
manner (using e.g. :api:`py.execnet.SshGateway`).
|
|
||||||
|
|
||||||
You should not need to maintain software on the other sides
where your code runs, other than the Python
executable itself.
|
|
||||||
|
|
||||||
.. _`Channel`:
|
|
||||||
.. _`channel-api`:
|
|
||||||
.. _`exchange data`:
|
|
||||||
|
|
||||||
The **Channel** interface for exchanging data across gateways
|
|
||||||
-------------------------------------------------------------
|
|
||||||
|
|
||||||
While executing custom strings on "the other side" is simple enough,
it is often tricky to deal with. Therefore we want a way
to send data items to and fro between the distributed parts of the running
program. The idea is to inject a Channel object for each
execution of source code. This Channel object allows the two
program parts to send data to each other.
|
|
||||||
Here is the current interface::
|
|
||||||
|
|
||||||
#
|
|
||||||
# API for sending and receiving anonymous values
|
|
||||||
#
|
|
||||||
channel.send(item):
|
|
||||||
sends the given item to the other side of the channel,
|
|
||||||
possibly blocking if the sender queue is full.
|
|
||||||
Note that items need to be marshallable (all basic
|
|
||||||
python types are):
|
|
||||||
|
|
||||||
channel.receive():
|
|
||||||
receives an item that was sent from the other side,
|
|
||||||
possibly blocking if there is none.
|
|
||||||
Note that exceptions from the other side will be
|
|
||||||
reraised as gateway.RemoteError exceptions containing
|
|
||||||
a textual representation of the remote traceback.
|
|
||||||
|
|
||||||
channel.waitclose(timeout=None):
|
|
||||||
wait until this channel is closed. Note that a closed
|
|
||||||
channel may still hold items that will be received or
|
|
||||||
sent. Note that exceptions from the other side will be
|
|
||||||
reraised as gateway.RemoteError exceptions containing
|
|
||||||
a textual representation of the remote traceback.
|
|
||||||
|
|
||||||
channel.close():
|
|
||||||
close this channel on both the local and the remote side.
|
|
||||||
A remote side blocking on receive() on this channel
|
|
||||||
will get woken up and see an EOFError exception.
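
As a small illustration of this interface, the following sketch sends a few
numbers through a channel and receives them back doubled (the doubling code
is made up purely for this example; it uses a local
:api:`py.execnet.PopenGateway`)::

    import py

    gw = py.execnet.PopenGateway()
    channel = gw.remote_exec("""
        # remote side: receive items until the channel is closed,
        # double each item and send it back
        for x in channel:
            channel.send(x * 2)
    """)
    for i in (1, 2, 3):
        channel.send(i)
        assert channel.receive() == i * 2
    channel.close()     # closes the channel on both sides
    gw.exit()           # tear down the gateway / child process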
|
|
||||||
|
|
||||||
|
|
||||||
The complete Fileserver example
|
|
||||||
........................................
|
|
||||||
|
|
||||||
problem: retrieving contents of remote files::
|
|
||||||
|
|
||||||
import py
|
|
||||||
contentserverbootstrap = py.code.Source(
|
|
||||||
"""
|
|
||||||
for fn in channel:
|
|
||||||
f = open(fn, 'rb')
|
|
||||||
try:
|
|
||||||
channel.send(f.read())
|
|
||||||
finally:
|
|
||||||
f.close()
|
|
||||||
""")
|
|
||||||
# open a gateway to a fresh child process
|
|
||||||
contentgateway = py.execnet.SshGateway('codespeak.net')
|
|
||||||
channel = contentgateway.remote_exec(contentserverbootstrap)
|
|
||||||
|
|
||||||
for fn in somefilelist:
|
|
||||||
channel.send(fn)
|
|
||||||
content = channel.receive()
|
|
||||||
# process content
|
|
||||||
|
|
||||||
# later you can exit / close down the gateway
|
|
||||||
contentgateway.exit()
|
|
||||||
|
|
||||||
|
|
||||||
A more complicated "nested" Gateway Example
|
|
||||||
...........................................
|
|
||||||
|
|
||||||
The following example opens a PopenGateway, i.e. a python
|
|
||||||
child process, starts a socket server within that process and
|
|
||||||
then opens a SocketGateway to the freshly started
|
|
||||||
socketserver. Thus it forms a "triangle"::
|
|
||||||
|
|
||||||
|
|
||||||
CLIENT < ... > PopenGateway()
|
|
||||||
< .
|
|
||||||
. .
|
|
||||||
. .
|
|
||||||
. .
|
|
||||||
> SocketGateway()
|
|
||||||
|
|
||||||
The below "socketserver" mentioned script is a small script that
|
|
||||||
basically listens and accepts socket connections, receives one
|
|
||||||
liners and executes them.
|
|
||||||
|
|
||||||
Here are 20 lines of code making the above triangle happen::
|
|
||||||
|
|
||||||
import py
|
|
||||||
port = 7770
|
|
||||||
socketserverbootstrap = py.code.Source(
|
|
||||||
mypath.dirpath().dirpath('bin', 'socketserver.py').read(),
|
|
||||||
"""
|
|
||||||
import socket
|
|
||||||
sock = bind_and_listen(("localhost", %r))
|
|
||||||
channel.send("ok")
|
|
||||||
startserver(sock)
|
|
||||||
""" % port)
|
|
||||||
# open a gateway to a fresh child process
|
|
||||||
proxygw = py.execnet.PopenGateway()
|
|
||||||
|
|
||||||
# asynchronously execute the above socketserverbootstrap on the other side
|
|
||||||
channel = proxygw.remote_exec(socketserverbootstrap)
|
|
||||||
|
|
||||||
# the other side should start the socket server now
|
|
||||||
assert channel.receive() == "ok"
|
|
||||||
gw = py.execnet.SocketGateway('localhost', port)
|
|
||||||
print "initialized socket gateway to port", cls.port
|
|
||||||
|
|
||||||
.. [#] There is an interesting emerging `Jail`_ linux technology
|
|
||||||
as well as a host of others, of course.
|
|
||||||
|
|
||||||
.. _`Jail`: http://books.rsbac.org/unstable/x2223.html
|
|
|
@ -1,139 +0,0 @@
|
||||||
=======================================================
|
|
||||||
Visions and ideas for further development of the py lib
|
|
||||||
=======================================================
|
|
||||||
|
|
||||||
.. contents::
|
|
||||||
.. sectnum::
|
|
||||||
|
|
||||||
This document tries to describe directions and guiding ideas
|
|
||||||
for the near-future development of the py lib. *Note that all
|
|
||||||
statements within this document - even if they sound factual -
|
|
||||||
mostly just express thoughts and ideas. They not always refer to
|
|
||||||
real code so read with some caution.*
|
|
||||||
|
|
||||||
|
|
||||||
Distribute tests ad-hoc across multiple platforms
|
|
||||||
======================================================
|
|
||||||
|
|
||||||
After some more refactoring and unification of
|
|
||||||
the current testing and distribution support code
|
|
||||||
we'd like to be able to run tests on multiple
|
|
||||||
platforms simultaneously and allow for interaction
|
|
||||||
and introspection into the (remote) failures.
|
|
||||||
|
|
||||||
|
|
||||||
Make APIGEN useful for more projects
|
|
||||||
================================================
|
|
||||||
|
|
||||||
The new APIGEN tool offers rich information
|
|
||||||
derived from running tests against an application:
|
|
||||||
argument types and callsites, i.e. it shows
|
|
||||||
the places where a particular API is used.
|
|
||||||
In its first incarnation, there are still
|
|
||||||
some specialties that likely prevent it
|
|
||||||
from documenting APIs for other projects.
|
|
||||||
We'd like to evolve to a `py.apigen` tool
|
|
||||||
that can make use of information provided
|
|
||||||
by a py.test run.
|
|
||||||
|
|
||||||
Consider APIGEN and pdb integration
|
|
||||||
===================================
|
|
||||||
|
|
||||||
The information provided by APIGEN can be used in many
|
|
||||||
different ways. An example of this could be to write
|
|
||||||
an extension to pdb which makes it available.
|
|
||||||
Imagine you could issue a pdb command
|
|
||||||
"info <function name>" and get information
|
|
||||||
regarding incoming and outgoing types, possible
|
|
||||||
exceptions, field types and call sites.
|
|
||||||
|
|
||||||
Distribute channels/programs across networks
|
|
||||||
================================================
|
|
||||||
|
|
||||||
Apart from stabilizing setup/teardown procedures
|
|
||||||
for `py.execnet`_, we'd like to generalize its
|
|
||||||
implementation to allow connecting two programs
|
|
||||||
across multiple hosts, i.e. we'd like to arbitrarily
|
|
||||||
send "channels" across the network. Likely this
|
|
||||||
will be done by using the "pipe" model, i.e.
|
|
||||||
that each channel is actually a pair of endpoints,
|
|
||||||
both of which can be independently transported
|
|
||||||
across the network. The programs who "own"
|
|
||||||
these endpoints remain connected.
|
|
||||||
|
|
||||||
.. _`py.execnet`: execnet.html
|
|
||||||
|
|
||||||
Benchmarking and persistent storage
|
|
||||||
=========================================
|
|
||||||
|
|
||||||
For storing test results, but also benchmarking
|
|
||||||
and other information, we need a solid way
|
|
||||||
to store all kinds of information from test runs.
|
|
||||||
We'd like to generate statistics or html overviews
|
|
||||||
out of it, but also use such information to determine when
|
|
||||||
a certain test broke, or when its performance
|
|
||||||
decreased considerably.
|
|
||||||
|
|
||||||
.. _`CPython's distutils`: http://www.python.org/dev/doc/devel/lib/module-distutils.html
|
|
||||||
|
|
||||||
.. _`restructured text`: http://docutils.sourceforge.net/docs/user/rst/quickref.html
|
|
||||||
.. _`python standard library`: http://www.python.org/doc/2.3.4/lib/lib.html
|
|
||||||
.. _`xpython EuroPython 2004 talk`: http://codespeak.net/svn/user/hpk/talks/xpython-talk.txt
|
|
||||||
.. _`under the xpy tree`: http://codespeak.net/svn/user/hpk/xpy/xml.py
|
|
||||||
.. _`future book`: future.html
|
|
||||||
.. _`PEP-324 subprocess module`: http://www.python.org/peps/pep-0324.html
|
|
||||||
.. _`subprocess implementation`: http://www.lysator.liu.se/~astrand/popen5/
|
|
||||||
.. _`py.test`: test.html
|
|
||||||
|
|
||||||
|
|
||||||
.. _`general-path`:
|
|
||||||
.. _`a more general view on path objects`:
|
|
||||||
|
|
||||||
Refactor path implementations to use a Filesystem Abstraction
|
|
||||||
=============================================================
|
|
||||||
|
|
||||||
It seems like a good idea to refactor all Python path implementations to
use an internal Filesystem abstraction. The current code base
would be transformed to have Filesystem implementations for e.g.
local, subversion and subversion "working copy" filesystems. Today
the corresponding code is scattered throughout the path-handling code.
|
|
||||||
|
|
||||||
On a related note, Armin Rigo has hacked `pylufs`_ and more recently has
|
|
||||||
written `pyfuse`_, which allow one to
implement kernel-level Linux filesystems with pure Python. Now
|
|
||||||
the idea is that the mentioned filesystem implementations would
|
|
||||||
be directly usable for such linux-filesystem glue code.
|
|
||||||
|
|
||||||
In other words, implementing a `memoryfs`_ or a `dictfs`_ would
|
|
||||||
give you two things for free: a filesystem mountable at kernel level
|
|
||||||
as well as a uniform "path" object allowing you to access your
|
|
||||||
filesystem in convenient ways. (At some point it might
|
|
||||||
even become interesting to think about interfacing to
|
|
||||||
`reiserfs v4 features`_ at the Filesystem level but that
|
|
||||||
is a can of subsequent worms).
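
To make the idea slightly more concrete, such a filesystem abstraction might
boil down to a small interface that the various path classes delegate to.
This is a purely illustrative sketch; none of these class or method names
exist in the py lib today::

    class Filesystem:
        # minimal set of operations a path object could delegate to
        def listdir(self, relpath):
            raise NotImplementedError
        def read(self, relpath):
            raise NotImplementedError
        def write(self, relpath, data):
            raise NotImplementedError

    class DictFilesystem(Filesystem):
        # an in-memory "dictfs": maps path strings to content strings
        def __init__(self, entries=None):
            self.entries = entries or {}
        def listdir(self, relpath):
            prefix = relpath.rstrip('/') + '/'
            return [name for name in self.entries if name.startswith(prefix)]
        def read(self, relpath):
            return self.entries[relpath]
        def write(self, relpath, data):
            self.entries[relpath] = data

A local or subversion filesystem would implement the same handful of
methods, and a kernel-level frontend in the spirit of `pyfuse`_ could then
presumably be layered on top of any of them.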
|
|
||||||
|
|
||||||
.. _`memoryfs`: http://codespeak.net/svn/user/arigo/hack/pyfuse/memoryfs.py
|
|
||||||
.. _`dictfs`: http://codespeak.net/pipermail/py-dev/2005-January/000191.html
|
|
||||||
.. _`pylufs`: http://codespeak.net/svn/user/arigo/hack/pylufs/
|
|
||||||
.. _`pyfuse`: http://codespeak.net/svn/user/arigo/hack/pyfuse/
|
|
||||||
.. _`reiserfs v4 features`: http://www.namesys.com/v4/v4.html
|
|
||||||
|
|
||||||
|
|
||||||
Integrate interactive completion
|
|
||||||
==================================
|
|
||||||
|
|
||||||
It'd be nice to integrate the bash-like
|
|
||||||
rlcompleter2_ python command line completer
|
|
||||||
into the py lib, and make it work remotely
|
|
||||||
and with pdb.
|
|
||||||
|
|
||||||
.. _rlcompleter2: http://codespeak.net/rlcompleter2/
|
|
||||||
|
|
||||||
Consider more features
|
|
||||||
==================================
|
|
||||||
|
|
||||||
There are many more features and useful classes
|
|
||||||
that might be nice to integrate. For example, we might put
|
|
||||||
Armin's `lazy list`_ implementation into the py lib.
|
|
||||||
|
|
||||||
.. _`lazy list`: http://codespeak.net/svn/user/arigo/hack/misc/collect.py
|
|
|
@ -1,640 +0,0 @@
|
||||||
===============================================================
|
|
||||||
py.code_template: Lightweight and flexible code template system
|
|
||||||
===============================================================
|
|
||||||
|
|
||||||
.. contents::
|
|
||||||
.. sectnum::
|
|
||||||
|
|
||||||
Motivation
|
|
||||||
==========
|
|
||||||
|
|
||||||
There are as many python templating systems as there are web frameworks
|
|
||||||
(a lot). This is partly because it is so darned easy to write a templating
|
|
||||||
system in Python. What are the distinguishing characteristics of the
|
|
||||||
py.code_template templating system?
|
|
||||||
|
|
||||||
* Optimized for generating code (Python, C, bash scripts, etc.),
|
|
||||||
not XML or HTML
|
|
||||||
|
|
||||||
* Designed for use by Python programmers, not by web artists
|
|
||||||
|
|
||||||
+ Aesthetic sensibilities are different
|
|
||||||
|
|
||||||
+ The templates should be an organic part of a module -- just more code
|
|
||||||
|
|
||||||
+ Templates do not need to be incredibly full-featured, because
|
|
||||||
programmers are perfectly capable of escaping to Python for
|
|
||||||
advanced features.
|
|
||||||
|
|
||||||
- No requirement to support inheritance
|
|
||||||
- No requirement to support exec
|
|
||||||
|
|
||||||
* Designed so that templates can be coded in the most natural way
|
|
||||||
for the task at hand
|
|
||||||
|
|
||||||
+ Generation of code and scripts often does not follow the MVC paradigm!
|
|
||||||
|
|
||||||
+ Small template fragments are typically coded *inside* Python modules
|
|
||||||
|
|
||||||
+ Sometimes it is natural to put strings inside code; sometimes it is
|
|
||||||
natural to put code inside strings. Both should be supported as
|
|
||||||
reasonably and naturally as possible.
|
|
||||||
|
|
||||||
Imaginary-world examples
|
|
||||||
========================
|
|
||||||
|
|
||||||
These would be real-world examples, but not only is this module not yet
implemented; as of now, PyPy is not incredibly useful to the average
programmer...
|
|
||||||
|
|
||||||
translator/c/genc.py
|
|
||||||
--------------------
|
|
||||||
|
|
||||||
The original function::
|
|
||||||
|
|
||||||
def gen_readable_parts_of_main_c_file(f, database, preimplementationlines=[]):
|
|
||||||
#
|
|
||||||
# All declarations
|
|
||||||
#
|
|
||||||
structdeflist = database.getstructdeflist()
|
|
||||||
print >> f
|
|
||||||
print >> f, '/***********************************************************/'
|
|
||||||
print >> f, '/*** Structure definitions ***/'
|
|
||||||
print >> f
|
|
||||||
for node in structdeflist:
|
|
||||||
print >> f, 'struct %s;' % node.name
|
|
||||||
print >> f
|
|
||||||
for node in structdeflist:
|
|
||||||
for line in node.definition():
|
|
||||||
print >> f, line
|
|
||||||
print >> f
|
|
||||||
print >> f, '/***********************************************************/'
|
|
||||||
print >> f, '/*** Forward declarations ***/'
|
|
||||||
print >> f
|
|
||||||
for node in database.globalcontainers():
|
|
||||||
for line in node.forward_declaration():
|
|
||||||
print >> f, line
|
|
||||||
|
|
||||||
#
|
|
||||||
# Implementation of functions and global structures and arrays
|
|
||||||
#
|
|
||||||
print >> f
|
|
||||||
print >> f, '/***********************************************************/'
|
|
||||||
print >> f, '/*** Implementations ***/'
|
|
||||||
print >> f
|
|
||||||
for line in preimplementationlines:
|
|
||||||
print >> f, line
|
|
||||||
print >> f, '#include "src/g_include.h"'
|
|
||||||
print >> f
|
|
||||||
blank = True
|
|
||||||
for node in database.globalcontainers():
|
|
||||||
if blank:
|
|
||||||
print >> f
|
|
||||||
blank = False
|
|
||||||
for line in node.implementation():
|
|
||||||
print >> f, line
|
|
||||||
blank = True
|
|
||||||
|
|
||||||
This could be refactored heavily. An initial starting point
|
|
||||||
would look something like this, although later, the template
|
|
||||||
instance could be passed in and reused directly, rather than
|
|
||||||
passing the file handle around::
|
|
||||||
|
|
||||||
def gen_readable_parts_of_main_c_file(f, database, preimplementationlines=[]):
|
|
||||||
def container_implementation():
|
|
||||||
# Helper function designed to introduce blank lines
|
|
||||||
# between container implementations
|
|
||||||
|
|
||||||
blank = True
|
|
||||||
for node in database.globalcontainers():
|
|
||||||
if blank:
|
|
||||||
yield ''
|
|
||||||
blank = False
|
|
||||||
for line in node.implementation():
|
|
||||||
yield line
|
|
||||||
blank = True
|
|
||||||
|
|
||||||
t = code_template.Template()
|
|
||||||
#
|
|
||||||
# All declarations
|
|
||||||
#
|
|
||||||
structdeflist = database.getstructdeflist()
|
|
||||||
t.write(dedent=8, text='''
|
|
||||||
|
|
||||||
/***********************************************************/
|
|
||||||
/*** Structure definitions ***/
|
|
||||||
|
|
||||||
{for node in structdeflist}
|
|
||||||
struct {node.name};
|
|
||||||
{endfor}
|
|
||||||
|
|
||||||
{for node in structdeflist}
|
|
||||||
{for line in node.definition}
|
|
||||||
{line}
|
|
||||||
{endfor}
|
|
||||||
{endfor}
|
|
||||||
|
|
||||||
/***********************************************************/
|
|
||||||
/*** Forward declarations ***/
|
|
||||||
|
|
||||||
{for node in database.globalcontainers()}
|
|
||||||
{for line in node.forward_declaration()}
|
|
||||||
{line}
|
|
||||||
{endfor}
|
|
||||||
{endfor}
|
|
||||||
|
|
||||||
{**
|
|
||||||
** Implementation of functions and global structures and arrays
|
|
||||||
**}
|
|
||||||
|
|
||||||
/***********************************************************/
|
|
||||||
/*** Implementations ***/
|
|
||||||
|
|
||||||
{for line in preimplementationlines}
|
|
||||||
{line}
|
|
||||||
{endfor}
|
|
||||||
|
|
||||||
#include "src/g_include.h"
|
|
||||||
|
|
||||||
{for line in container_implementation()}
|
|
||||||
{line}
|
|
||||||
{endfor}
|
|
||||||
""")
|
|
||||||
t.output(f)
|
|
||||||
|
|
||||||
translator/c/genc.py gen_makefile
|
|
||||||
---------------------------------
|
|
||||||
|
|
||||||
The original code::
|
|
||||||
|
|
||||||
MAKEFILE = '''
|
|
||||||
CC = gcc
|
|
||||||
|
|
||||||
$(TARGET): $(OBJECTS)
|
|
||||||
\t$(CC) $(LDFLAGS) -o $@ $(OBJECTS) $(LIBDIRS) $(LIBS)
|
|
||||||
|
|
||||||
%.o: %.c
|
|
||||||
\t$(CC) $(CFLAGS) -o $@ -c $< $(INCLUDEDIRS)
|
|
||||||
|
|
||||||
clean:
|
|
||||||
\trm -f $(OBJECTS)
|
|
||||||
'''
|
|
||||||
|
|
||||||
def gen_makefile(self, targetdir):
|
|
||||||
def write_list(lst, prefix):
|
|
||||||
for i, fn in enumerate(lst):
|
|
||||||
print >> f, prefix, fn,
|
|
||||||
if i < len(lst)-1:
|
|
||||||
print >> f, '\\'
|
|
||||||
else:
|
|
||||||
print >> f
|
|
||||||
prefix = ' ' * len(prefix)
|
|
||||||
|
|
||||||
compiler = self.getccompiler(extra_includes=['.'])
|
|
||||||
cfiles = []
|
|
||||||
ofiles = []
|
|
||||||
for fn in compiler.cfilenames:
|
|
||||||
fn = py.path.local(fn).basename
|
|
||||||
assert fn.endswith('.c')
|
|
||||||
cfiles.append(fn)
|
|
||||||
ofiles.append(fn[:-2] + '.o')
|
|
||||||
|
|
||||||
f = targetdir.join('Makefile').open('w')
|
|
||||||
print >> f, '# automatically generated Makefile'
|
|
||||||
print >> f
|
|
||||||
print >> f, 'TARGET =', py.path.local(compiler.outputfilename).basename
|
|
||||||
print >> f
|
|
||||||
write_list(cfiles, 'SOURCES =')
|
|
||||||
print >> f
|
|
||||||
write_list(ofiles, 'OBJECTS =')
|
|
||||||
print >> f
|
|
||||||
args = ['-l'+libname for libname in compiler.libraries]
|
|
||||||
print >> f, 'LIBS =', ' '.join(args)
|
|
||||||
args = ['-L'+path for path in compiler.library_dirs]
|
|
||||||
print >> f, 'LIBDIRS =', ' '.join(args)
|
|
||||||
args = ['-I'+path for path in compiler.include_dirs]
|
|
||||||
write_list(args, 'INCLUDEDIRS =')
|
|
||||||
print >> f
|
|
||||||
print >> f, 'CFLAGS =', ' '.join(compiler.compile_extra)
|
|
||||||
print >> f, 'LDFLAGS =', ' '.join(compiler.link_extra)
|
|
||||||
print >> f, MAKEFILE.strip()
|
|
||||||
f.close()
|
|
||||||
|
|
||||||
|
|
||||||
Could look something like this::
|
|
||||||
|
|
||||||
MAKEFILE = '''
|
|
||||||
# automatically generated Makefile
|
|
||||||
|
|
||||||
TARGET = {py.path.local(compiler.outputfilename).basename}
|
|
||||||
|
|
||||||
{for line in write_list(cfiles, 'SOURCES =')}
|
|
||||||
{line}
|
|
||||||
{endfor}
|
|
||||||
|
|
||||||
{for line in write_list(ofiles, 'OBJECTS =')}
|
|
||||||
{line}
|
|
||||||
{endfor}
|
|
||||||
|
|
||||||
LIBS ={for libname in compiler.libraries} -l{libname}{endfor}
|
|
||||||
LIBDIRS ={for path in compiler.library_dirs} -L{path}{endfor}
|
|
||||||
INCLUDEDIRS ={for path in compiler.include_dirs} -I{path}{endfor}
|
|
||||||
|
|
||||||
CFLAGS ={for extra in compiler.compile_extra} {extra}{endfor}
|
|
||||||
LDFLAGS ={for extra in compiler.link_extra} {extra}{endfor}
|
|
||||||
|
|
||||||
CC = gcc
|
|
||||||
|
|
||||||
$(TARGET): $(OBJECTS)
|
|
||||||
\t$(CC) $(LDFLAGS) -o $@ $(OBJECTS) $(LIBDIRS) $(LIBS)
|
|
||||||
|
|
||||||
%.o: %.c
|
|
||||||
\t$(CC) $(CFLAGS) -o $@ -c $< $(INCLUDEDIRS)
|
|
||||||
|
|
||||||
clean:
|
|
||||||
\trm -f $(OBJECTS)
|
|
||||||
'''
|
|
||||||
|
|
||||||
def gen_makefile(self, targetdir):
|
|
||||||
def write_list(lst, prefix):
|
|
||||||
for i, fn in enumerate(lst):
|
|
||||||
yield '%s %s %s' % (prefix, fn, i < len(lst)-1 and '\\' or '')
|
|
||||||
prefix = ' ' * len(prefix)
|
|
||||||
|
|
||||||
compiler = self.getccompiler(extra_includes=['.'])
|
|
||||||
cfiles = []
|
|
||||||
ofiles = []
|
|
||||||
for fn in compiler.cfilenames:
|
|
||||||
fn = py.path.local(fn).basename
|
|
||||||
assert fn.endswith('.c')
|
|
||||||
cfiles.append(fn)
|
|
||||||
ofiles.append(fn[:-2] + '.o')
|
|
||||||
|
|
||||||
code_template.Template(MAKEFILE).output(targetdir.join('Makefile'))
|
|
||||||
|
|
||||||
|
|
||||||
translator/llvm/module/excsupport.py
|
|
||||||
------------------------------------
|
|
||||||
|
|
||||||
The original string::
|
|
||||||
|
|
||||||
invokeunwind_code = '''
|
|
||||||
ccc %(returntype)s%%__entrypoint__%(entrypointname)s {
|
|
||||||
%%result = invoke %(cconv)s %(returntype)s%%%(entrypointname)s to label %%no_exception except label %%exception
|
|
||||||
|
|
||||||
no_exception:
|
|
||||||
store %%RPYTHON_EXCEPTION_VTABLE* null, %%RPYTHON_EXCEPTION_VTABLE** %%last_exception_type
|
|
||||||
ret %(returntype)s %%result
|
|
||||||
|
|
||||||
exception:
|
|
||||||
ret %(noresult)s
|
|
||||||
}
|
|
||||||
|
|
||||||
ccc int %%__entrypoint__raised_LLVMException() {
|
|
||||||
%%tmp = load %%RPYTHON_EXCEPTION_VTABLE** %%last_exception_type
|
|
||||||
%%result = cast %%RPYTHON_EXCEPTION_VTABLE* %%tmp to int
|
|
||||||
ret int %%result
|
|
||||||
}
|
|
||||||
|
|
||||||
internal fastcc void %%unwind() {
|
|
||||||
unwind
|
|
||||||
}
|
|
||||||
'''
|
|
||||||
|
|
||||||
Could look something like this if it was used in conjunction with a template::
|
|
||||||
|
|
||||||
invokeunwind_code = '''
|
|
||||||
ccc {returntype}%__entrypoint__{entrypointname} {
|
|
||||||
%result = invoke {cconv} {returntype}%{entrypointname} to label %no_exception except label %exception
|
|
||||||
|
|
||||||
no_exception:
|
|
||||||
store %RPYTHON_EXCEPTION_VTABLE* null, %RPYTHON_EXCEPTION_VTABLE** %last_exception_type
|
|
||||||
ret {returntype} %result
|
|
||||||
|
|
||||||
exception:
|
|
||||||
ret {noresult}
|
|
||||||
}
|
|
||||||
|
|
||||||
ccc int %__entrypoint__raised_LLVMException() {
|
|
||||||
%tmp = load %RPYTHON_EXCEPTION_VTABLE** %last_exception_type
|
|
||||||
%result = cast %RPYTHON_EXCEPTION_VTABLE* %tmp to int
|
|
||||||
ret int %result
|
|
||||||
}
|
|
||||||
|
|
||||||
internal fastcc void %unwind() {
|
|
||||||
unwind
|
|
||||||
}
|
|
||||||
'''
|
|
||||||
|
|
||||||
|
|
||||||
Template syntax
|
|
||||||
===============
|
|
||||||
|
|
||||||
Design decision
|
|
||||||
---------------
|
|
||||||
|
|
||||||
As all programmers must know by now, all the special symbols on the keyboard
|
|
||||||
are quite heavily overloaded. Often, template systems work around this fact
|
|
||||||
by having special notation like `<*` ... `*>` or {% ... %}. Some template systems
|
|
||||||
even have multiple special notations -- one for comments, one for statements,
|
|
||||||
one for expressions, etc.
|
|
||||||
|
|
||||||
I find these hard to type and ugly. Other markups are either too lightweight,
|
|
||||||
or use characters which occur so frequently in the target languages that it
|
|
||||||
becomes hard to distinguish marked-up content from content which should be
|
|
||||||
rendered as-is.
|
|
||||||
|
|
||||||
The compromise taken by *code_template* is to use braces (**{}**) for markup.
|
|
||||||
|
|
||||||
This immediately raises the question: what about when the marked-up language
|
|
||||||
is C or C++? The answer is that if the leading brace is immediately followed
|
|
||||||
by whitespace, it is normal text; if not, it is the start of markup.
|
|
||||||
|
|
||||||
To support normal text which has a leading brace immediately followed by
|
|
||||||
an identifier, if the first whitespace character after the brace is a space
|
|
||||||
character (i.e. not a newline or tab), it will be removed from the output.
|
|
||||||
|
|
||||||
Examples::
|
|
||||||
|
|
||||||
{ This is normal text and the space between { and This will be removed}
|
|
||||||
{'this must be a valid Python expression' + ' because it is treated as markup'}
|
|
||||||
{
|
|
||||||
This is normal text, but nothing is altered (the newline is kept intact)
|
|
||||||
}
|
|
||||||
|
|
||||||
{{1:'Any valid Python expression is allowed as markup'}[1].ljust(30)}
|
|
||||||
|
|
||||||
.. _`Code element`:
|
|
||||||
|
|
||||||
Elements
|
|
||||||
--------
|
|
||||||
|
|
||||||
Templates consist of normal text and code elements.
|
|
||||||
(Comments are considered to be code elements.)
|
|
||||||
|
|
||||||
All code elements start with a `left brace`_ which is not followed by
|
|
||||||
whitespace.
|
|
||||||
|
|
||||||
Keyword element
|
|
||||||
~~~~~~~~~~~~~~~
|
|
||||||
|
|
||||||
A keyword element is a `code element`_ which starts with a keyword_.
|
|
||||||
|
|
||||||
For example, *{if foo}* is a keyword element, but *{foo}* is a `substituted expression`_.
|
|
||||||
|
|
||||||
Keyword
|
|
||||||
~~~~~~~
|
|
||||||
|
|
||||||
A keyword is a word used in `conditional text`_ or in `repeated text`_, e.g.
|
|
||||||
one of *if*, *elif*, *else*, *endif*, *for*, or *endfor*.
|
|
||||||
|
|
||||||
Keywords are designed to match their Python equivalents. However, since
|
|
||||||
templates cannot use spacing to indicate expression nesting, the additional
|
|
||||||
keywords *endif* and *endfor* are required.
|
|
||||||
|
|
||||||
Left brace
|
|
||||||
~~~~~~~~~~
|
|
||||||
|
|
||||||
All elements other than normal text start with a left brace -- the symbol '{',
|
|
||||||
sometimes known as a 'curly bracket'. A left brace is itself considered
|
|
||||||
to be normal text if it is followed by whitespace. If the whitespace starts
|
|
||||||
with a space character, that space character will be stripped from the output.
|
|
||||||
If the whitespace starts with a tab or linefeed character, the whitespace will
|
|
||||||
be left in the output.
|
|
||||||
|
|
||||||
Normal Text
|
|
||||||
~~~~~~~~~~~
|
|
||||||
|
|
||||||
Normal text remains unsubstituted. Transition from text to the other elements
|
|
||||||
is effected by use of a `left brace`_ which is not followed by whitespace.
|
|
||||||
|
|
||||||
Comment
|
|
||||||
~~~~~~~
|
|
||||||
|
|
||||||
A comment starts with a left brace followed by an asterisk ('{`*`'), and
|
|
||||||
ends with an asterisk followed by a right brace ('`*`}')::
|
|
||||||
|
|
||||||
This is a template -- this text will be copied to the output.
|
|
||||||
{* This is a comment and this text will not be copied to the output *}
|
|
||||||
|
|
||||||
{*
|
|
||||||
Comments can span lines,
|
|
||||||
but cannot be nested
|
|
||||||
*}
|
|
||||||
|
|
||||||
Substituted expression
|
|
||||||
~~~~~~~~~~~~~~~~~~~~~~
|
|
||||||
|
|
||||||
Any python expression may be used::
|
|
||||||
|
|
||||||
Dear {record.name},
|
|
||||||
we are sorry to inform you that you did not win {record.contest}.
|
|
||||||
|
|
||||||
The expression must be surrounded by braces, and there must not be any
|
|
||||||
whitespace between the leftmost brace and the start of the expression.
|
|
||||||
|
|
||||||
The expression will automatically be converted to a string with str().
|
|
||||||
|
|
||||||
Conditional text
|
|
||||||
~~~~~~~~~~~~~~~~
|
|
||||||
|
|
||||||
The following template has text which is included conditionally::
|
|
||||||
|
|
||||||
This text will always be included in the output
|
|
||||||
{if foo}
|
|
||||||
This text will be included if foo is true
|
|
||||||
{elif bar}
|
|
||||||
This text will be included if foo is not true but bar is true
|
|
||||||
{else}
|
|
||||||
This text will be included if neither foo nor bar is true
|
|
||||||
{endif}
|
|
||||||
|
|
||||||
The {elif} and {else} elements are optional.
|
|
||||||
|
|
||||||
Repeated text
|
|
||||||
~~~~~~~~~~~~~
|
|
||||||
|
|
||||||
The following template shows how to pull multiple items out of a list::
|
|
||||||
|
|
||||||
{for student, score in sorted(scorelist)}
|
|
||||||
{student.ljust(20)} {score}
|
|
||||||
{endfor}
|
|
||||||
|
|
||||||
Whitespace removal or modification
|
|
||||||
----------------------------------
|
|
||||||
|
|
||||||
In general, whitespace in `Normal Text`_ is transferred unchanged to the
|
|
||||||
output. There are three exceptions to this rule:
|
|
||||||
|
|
||||||
Line separators
|
|
||||||
~~~~~~~~~~~~~~~
|
|
||||||
|
|
||||||
Each newline is converted to os.linesep in the final output.
|
|
||||||
|
|
||||||
Beginning or end of string
|
|
||||||
~~~~~~~~~~~~~~~~~~~~~~~~~~
|
|
||||||
|
|
||||||
py.code_template is designed to allow easy use of templates inside of python
|
|
||||||
modules. The canonical way to write a template is inside a triple-quoted
|
|
||||||
string, e.g.::
|
|
||||||
|
|
||||||
my_template = '''
|
|
||||||
This is my template. It can have any text at all in it except
|
|
||||||
another triple-single-quote.
|
|
||||||
'''
|
|
||||||
|
|
||||||
To support this usage, if the first character is a newline, it will be
|
|
||||||
removed, and if the last line consists solely of whitespace with no
|
|
||||||
trailing newline, it will also be removed.
|
|
||||||
|
|
||||||
A comment or single keyword element on a line
|
|
||||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
|
||||||
|
|
||||||
Whenever a `keyword element`_ or comment_ is on a line
|
|
||||||
*by itself*, that line will not be copied to the output.
|
|
||||||
|
|
||||||
This happens when:
|
|
||||||
- There is nothing on the line before the keyword element
|
|
||||||
or comment except whitespace (spaces and/or tabs).
|
|
||||||
|
|
||||||
- There is nothing on the line after the keyword element
|
|
||||||
or comment except a newline.
|
|
||||||
|
|
||||||
Note that even a multi-line comment or keyword element can
|
|
||||||
have the preceding whitespace and subsequent newline stripped
|
|
||||||
by this rule.
|
|
||||||
|
|
||||||
The primary purpose of this rule is to allow the Python
|
|
||||||
programmer to use indentation, **even inside a template**::
|
|
||||||
|
|
||||||
This is a template
|
|
||||||
|
|
||||||
{if mylist}
|
|
||||||
List items:
|
|
||||||
{for item in mylist}
|
|
||||||
- {item}
|
|
||||||
{endfor}
|
|
||||||
{endif}
|
|
||||||
|
|
||||||
Template usage
|
|
||||||
==============
|
|
||||||
|
|
||||||
Templates are used by importing the Template class from py.code_template,
|
|
||||||
constructing a template, and then sending data with the write() method.
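
For illustration only, a minimal round trip could look roughly like the
following sketch; it assumes the ``Template``/``write()``/``read()`` API
described in the sections below, and the variable values are made up::

    import py

    name = "Alice"                        # picked up from the caller's locals
    items = ["a book", "a pony"]

    t = py.code_template.Template()       # no file attached: data stays in memory
    t.write('''
    Dear {name},
    {for item in items}
      - {item}
    {endfor}
    ''')
    print t.read()                        # read and flush the substituted text
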
|
|
||||||
|
|
||||||
In general, there are four methods for getting the formatted data back out
|
|
||||||
of the template object:
|
|
||||||
|
|
||||||
- read() reads all the data currently in the object
|
|
||||||
|
|
||||||
- output(fobj) outputs the data to a file
|
|
||||||
|
|
||||||
fobj can either be an open file object, or a string. If it is
|
|
||||||
a string, the file will be opened, written, and closed.
|
|
||||||
|
|
||||||
- open(fobj) (or calling the object constructor with a file object)
|
|
||||||
|
|
||||||
If the open() method is used, or if a file object is passed to
|
|
||||||
the constructor, each write() will automatically flush the data
|
|
||||||
out to the file. If the fobj is a string, it is considered to
|
|
||||||
be *owned*, otherwise it is considered to be *borrowed*. *Owned*
|
|
||||||
file objects are closed when the class is deleted.
|
|
||||||
|
|
||||||
- write() can be explicitly called with a file object, in which case
|
|
||||||
it will invoke output() on that object after it generates the data.
|
|
||||||
|
|
||||||
Template instantiation and methods
|
|
||||||
==================================
|
|
||||||
|
|
||||||
template = code_template.Template(outf=None, cache=None)
|
|
||||||
|
|
||||||
If outf is given, it will be passed to the open() method
|
|
||||||
|
|
||||||
cache may be given as a mapping. If not given, the template will use
|
|
||||||
the shared default cache. This is not thread safe.
|
|
||||||
|
|
||||||
template.open
|
|
||||||
-------------
|
|
||||||
|
|
||||||
template.open(outf, borrowed = None)
|
|
||||||
|
|
||||||
The open method closes the internal file object if it was already open,
|
|
||||||
and then re-opens it on the given file. It is an error to call open()
|
|
||||||
if there is data in the object left over from previous writes. (Call
|
|
||||||
output() instead.)
|
|
||||||
|
|
||||||
borrowed defaults to 0 if outf is a string, and 1 if it is a file object.
|
|
||||||
|
|
||||||
borrowed can also be set explicitly if required.
|
|
||||||
|
|
||||||
template.close
|
|
||||||
--------------
|
|
||||||
|
|
||||||
close() disassociates the file from the template, and closes the file if
|
|
||||||
it was not borrowed. close() is automatically called by the destructor.
|
|
||||||
|
|
||||||
template.write
|
|
||||||
--------------
|
|
||||||
|
|
||||||
template.write(text='', outf=None, dedent=0, localvars=None, globalvars=None,
|
|
||||||
framelevel=1)
|
|
||||||
|
|
||||||
The write method has the following parameters:
|
|
||||||
|
|
||||||
- text is the template itself
|
|
||||||
|
|
||||||
- if outf is not None, the output method will be invoked on the object
|
|
||||||
after the current template is processed. If no outf is given and no
file object is associated with the template, data will be accumulated
inside the instance until a write() with outf is processed, or read()
or output() is called, whichever comes first. If a file object is
associated with the template, data is flushed to it after every write.
|
|
||||||
|
|
||||||
- dedent, if given, is applied to each line in the template, to "de-indent" it
|
|
||||||
|
|
||||||
- localvars and globalvars default to the dictionaries of the caller. A copy
|
|
||||||
of localvars is made so that the __TrueSpace__ identifier can be added.
|
|
||||||
|
|
||||||
- cache may be given as a mapping. If not given, the template will use
|
|
||||||
the shared default cache. This is not thread safe.
|
|
||||||
|
|
||||||
- framelevel is used to determine which stackframe to access for globals
|
|
||||||
and locals if localvars and/or globalvars are not specified. The default
|
|
||||||
is to use the caller's frame.
|
|
||||||
|
|
||||||
The write method supports the print >> file protocol by deleting the softspace
|
|
||||||
attribute on every invocation. This allows code like::
|
|
||||||
|
|
||||||
t = code_template.Template()
|
|
||||||
print >> t, "Hello, world"
|
|
||||||
|
|
||||||
|
|
||||||
template.read
|
|
||||||
--------------
|
|
||||||
|
|
||||||
This method reads and flushes all accumulated data in the object. Note that
|
|
||||||
if a file has been associated with the object, there will never be any data
|
|
||||||
to read.
|
|
||||||
|
|
||||||
template.output
|
|
||||||
---------------
|
|
||||||
|
|
||||||
This method takes one parameter, outf. template.output() first
|
|
||||||
invokes template.read() to read and flush all accumulated data,
|
|
||||||
and then outputs the data to the file specified by outf.
|
|
||||||
|
|
||||||
If outf has a write() method, that will be invoked with the
|
|
||||||
data. If outf has no write() method, it will be treated as
|
|
||||||
a filename, and that file will be replaced.
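
A brief sketch of the filename case (the file name is invented)::

    import py

    t = py.code_template.Template()
    t.write("hello { world\n")        # "{ " is normal text; the space is stripped
    t.output("generated.txt")         # no write() method -> treated as a filename
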
|
|
||||||
|
|
||||||
Caching and thread safety
|
|
||||||
=========================
|
|
||||||
|
|
||||||
The compiled version of every template is cached internal to the
|
|
||||||
code_template module (unless a separate cache object is specified).
|
|
||||||
|
|
||||||
This allows efficient template reuse, but is not currently thread-safe.
|
|
||||||
Alternatively, each invocation of a template object can specify a
|
|
||||||
cache object. This is thread-safe, but not very efficient. A shared
|
|
||||||
model could be implemented later.
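
A rough sketch of the second, per-invocation variant, assuming the ``cache``
argument of the constructor described above (template text and values are
made up)::

    import py

    name = "world"
    private_cache = {}                            # not shared with other threads
    t = py.code_template.Template(cache=private_cache)
    t.write("Hello {name}\n")
    print t.read()                                # -> Hello world
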
|
|
||||||
|
|
|
@ -1,64 +0,0 @@
|
||||||
Release
|
|
||||||
=======
|
|
||||||
|
|
||||||
currently working configurations
|
|
||||||
--------------------------------
|
|
||||||
|
|
||||||
2.3 - 2.4.2 work
|
|
||||||
|
|
||||||
with setuptools: 2.3 - 2.4.2 as 'develop'
|
|
||||||
|
|
||||||
regular installation: works mostly, strange test-failures
|
|
||||||
|
|
||||||
to be tested: 2.2, windows
|
|
||||||
|
|
||||||
absolutely necessary steps:
|
|
||||||
----------------------------
|
|
||||||
|
|
||||||
* documentation
|
|
||||||
|
|
||||||
* improving getting started, describe install methods
|
|
||||||
* describe the rest stuff?
|
|
||||||
* py.log
|
|
||||||
* py.path is mostly undocumented, API documentation
|
|
||||||
|
|
||||||
* basic windows testing, maybe disabling execnet?, what about the scripts in windows?
|
|
||||||
|
|
||||||
* are all c extensions compiled when installing globally?
|
|
||||||
|
|
||||||
* refactoring py.log
|
|
||||||
|
|
||||||
* write/read methods on py.path should be renamed/deprecated: setcontent, getcontent instead?
|
|
||||||
|
|
||||||
* what about _subprocess.c?
|
|
||||||
|
|
||||||
* warning for docutils
|
|
||||||
|
|
||||||
* don't expose _extpy
|
|
||||||
|
|
||||||
* py/bin should be nicefied, get optparse interface
|
|
||||||
|
|
||||||
* _findpy.py
|
|
||||||
* py.cleanup:
|
|
||||||
* py.lookup: add -i option
|
|
||||||
* pytest.cmd
|
|
||||||
* rst2pdf.py: merge with py.rest, add warnings when missing tex
|
|
||||||
* _makepyrelease.py: move somewhere
|
|
||||||
* py.countloc
|
|
||||||
* py.test
|
|
||||||
* py.rest
|
|
||||||
* win32
|
|
||||||
|
|
||||||
* skip tests if dependencies are not installed
|
|
||||||
|
|
||||||
nice to have
|
|
||||||
------------
|
|
||||||
|
|
||||||
* sets.py, subprocess.py in compat
|
|
||||||
* fix -k option to py.test
|
|
||||||
* add --report=(text|terminal|session|rest|tkinter|rest) to py.test
|
|
||||||
* put Armin's collect class into py.__builtin__ (not done)
|
|
||||||
* try to get rid of Collect.tryiter() in favour of (not done)
|
|
||||||
using Armin's collect class
|
|
||||||
|
|
||||||
|
|
|
@ -1,39 +0,0 @@
|
||||||
Missing features/bugs in pylib:
|
|
||||||
====================================
|
|
||||||
|
|
||||||
* new skip method, so web interface would show skips which
|
|
||||||
are broken (say py.test.skip("stuff", reason=py.test.BORKEN)),
|
|
||||||
proposed by me and xoraxax
|
|
||||||
|
|
||||||
* integration of session classes - needed for development
|
|
||||||
|
|
||||||
* more robust failure recovery from execnet - not sure how to perform
|
|
||||||
it, but select() approach sounds like a plan (instead of threads)
|
|
||||||
(unsure what then)
|
|
||||||
|
|
||||||
* provide a bit more patchy approach to green stuff, ie you import it and
|
|
||||||
all (known) operations on sockets are performed via the green interface,
|
|
||||||
this should allow using arbitrary applications (well, not using C-level
|
|
||||||
I/O) to mix with green threads.
|
|
||||||
|
|
||||||
* integrate green execnet a bit more (?)
|
|
||||||
|
|
||||||
* once session integration is done, it would be cool to have nightly
|
|
||||||
testing done in a systematic manner (instead of a bunch of hacks, which
is how it looks right now), so for example codespeak would be able
to store information (ie via svn) and when one wakes up he can type py.test
show and see the information of all the nightly test runs he likes.
|
|
||||||
|
|
||||||
* py.test.pdb - there is my hack for a while now, which integrates
|
|
||||||
rlcompleter2 with pdb. First of all it requires some strange changes
|
|
||||||
to rlcompleter itself, which has no tests. Long-term plan would be
|
|
||||||
to have pyrepl+rlcompleter2+pdb fixes integrated into pylib and
|
|
||||||
have it tested. This requires work though.
|
|
||||||
|
|
||||||
* add a link to pylib in pypy/lib? Since pylib features mostly work on top
|
|
||||||
of pypy-c, it would be nice to have it (as we do have it in svn anyway)
|
|
||||||
|
|
||||||
* fix generative tests.
|
|
||||||
- they should be distributed atomically (for various reasons)
|
|
||||||
- fix setup/teardown logic (ie setup_generator/teardown_generator)
|
|
||||||
- XXX there was something else
|
|
|
@ -1,37 +0,0 @@
|
||||||
Here I'm trying to list all problems regarding pypy-c <-> pylib interaction
|
|
||||||
===========================================================================
|
|
||||||
|
|
||||||
* in test/terminal/terminal.py lines around 141::
|
|
||||||
rev = py.__package__.getrev()
|
|
||||||
self.out.line("using py lib: %s <rev %s>" % (
|
|
||||||
py.path.local(py.__file__).dirpath(), rev))
|
|
||||||
|
|
||||||
* py.code issues::
|
|
||||||
def __init__(self, rawcode):
|
|
||||||
rawcode = getattr(rawcode, 'im_func', rawcode)
|
|
||||||
rawcode = getattr(rawcode, 'func_code', rawcode)
|
|
||||||
self.raw = rawcode
|
|
||||||
self.filename = rawcode.co_filename
|
|
||||||
AttributeError: 'internal-code' object has no attribute 'co_filename'
|
|
||||||
|
|
||||||
* types.BuiltinFunctionType == types.MethodType which confuses apigen
|
|
||||||
|
|
||||||
* compiler module problems - some bogus IndentationError
|
|
||||||
communicated by inspect.getsource()
|
|
||||||
|
|
||||||
* execnet just hangs
|
|
||||||
|
|
||||||
* lack of tmpfile
|
|
||||||
|
|
||||||
* assertion error magic is not working
|
|
||||||
|
|
||||||
* sha counting hangs (misc/testing/test\_initpkg)
|
|
||||||
|
|
||||||
* extpy does not work, because it does not support loops in modules
|
|
||||||
(while pypy __builtins__ module has a loop), funny :-)
|
|
||||||
|
|
||||||
* py.compat.subprocess hangs for obscure reasons
|
|
||||||
(possibly the same stuff as execnet - some threading issues and
|
|
||||||
select.select)
|
|
||||||
|
|
||||||
Armin says: "haha, select.select probably does not release the GIL"
|
|
|
@ -1,15 +0,0 @@
|
||||||
Various tasks which need to be done at some point
|
|
||||||
==================================================
|
|
||||||
|
|
||||||
* Write down pinging interface, so we'll know if hosts are responding or
|
|
||||||
are mostly down (detecting hanging nodes)
|
|
||||||
|
|
||||||
* Write down support for rsync progress
|
|
||||||
|
|
||||||
* Discovery of nodes which are available for accepting distributed testing
|
|
||||||
|
|
||||||
* Test the rescheduling of tests, so that high-latency nodes would not take
part in it.
|
|
||||||
|
|
||||||
* make sure that C-c semantics are ok (nodes are killed properly).
|
|
||||||
There was an attempt to do so, but it's not tested and does not always work.
|
|
|
@ -1,315 +0,0 @@
|
||||||
=====================================================
|
|
||||||
py.magic.greenlet: Lightweight concurrent programming
|
|
||||||
=====================================================
|
|
||||||
|
|
||||||
.. contents::
|
|
||||||
.. sectnum::
|
|
||||||
|
|
||||||
Motivation
|
|
||||||
==========
|
|
||||||
|
|
||||||
The "greenlet" package is a spin-off of `Stackless`_, a version of CPython
|
|
||||||
that supports micro-threads called "tasklets". Tasklets run
|
|
||||||
pseudo-concurrently (typically in a single or a few OS-level threads) and
|
|
||||||
are synchronized with data exchanges on "channels".
|
|
||||||
|
|
||||||
A "greenlet", on the other hand, is a still more primitive notion of
|
|
||||||
micro-thread with no implicit scheduling; coroutines, in other words.
|
|
||||||
This is useful when you want to
|
|
||||||
control exactly when your code runs. You can build custom scheduled
|
|
||||||
micro-threads on top of greenlet; however, it seems that greenlets are
|
|
||||||
useful on their own as a way to make advanced control flow structures.
|
|
||||||
For example, we can recreate generators; the difference with Python's own
|
|
||||||
generators is that our generators can call nested functions and the nested
|
|
||||||
functions can yield values too. (Additionally, you don't need a "yield"
|
|
||||||
keyword. See the example in :source:`py/c-extension/greenlet/test_generator.py`).
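
The following is only a rough sketch of that idea -- it is not the code from
``test_generator.py``, and the ``generator`` class with its ``yield_`` method
is invented here for illustration::

    from py.magic import greenlet

    class generator(object):
        def __init__(self, func, *args):
            self.func, self.args = func, args
            self.child = greenlet(self._run)   # parent is the creating greenlet
        def _run(self):
            self.func(self, *self.args)
        def yield_(self, value):
            # works at any call depth: the whole call stack lives in self.child
            greenlet.getcurrent().parent.switch(value)
        def __iter__(self):
            return self
        def next(self):
            value = self.child.switch()        # run until the next yield_()
            if self.child.dead:
                raise StopIteration
            return value

    def count_to(gen, n):
        for i in range(1, n + 1):
            gen.yield_(i)

    for value in generator(count_to, 3):
        print value                            # prints 1, 2, 3
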
|
|
||||||
|
|
||||||
Greenlets are provided as a C extension module for the regular unmodified
|
|
||||||
interpreter.
|
|
||||||
|
|
||||||
.. _`Stackless`: http://www.stackless.com
|
|
||||||
|
|
||||||
Example
|
|
||||||
-------
|
|
||||||
|
|
||||||
Let's consider a system controlled by a terminal-like console, where the user
|
|
||||||
types commands. Assume that the input comes character by character. In such
|
|
||||||
a system, there will typically be a loop like the following one::
|
|
||||||
|
|
||||||
def process_commands(*args):
|
|
||||||
while True:
|
|
||||||
line = ''
|
|
||||||
while not line.endswith('\n'):
|
|
||||||
line += read_next_char()
|
|
||||||
if line == 'quit\n':
|
|
||||||
print "are you sure?"
|
|
||||||
if read_next_char() != 'y':
|
|
||||||
continue # ignore the command
|
|
||||||
process_command(line)
|
|
||||||
|
|
||||||
Now assume that you want to plug this program into a GUI. Most GUI toolkits
|
|
||||||
are event-based. They will invoke a call-back for each character the user
|
|
||||||
presses. [Replace "GUI" with "XML expat parser" if that rings more bells to
|
|
||||||
you ``:-)``] In this setting, it is difficult to implement the
|
|
||||||
read_next_char() function needed by the code above. We have two incompatible
|
|
||||||
functions::
|
|
||||||
|
|
||||||
def event_keydown(key):
|
|
||||||
??
|
|
||||||
|
|
||||||
def read_next_char():
|
|
||||||
?? should wait for the next event_keydown() call
|
|
||||||
|
|
||||||
You might consider doing that with threads. Greenlets are an alternate
|
|
||||||
solution that doesn't have the related locking and shutdown problems. You
|
|
||||||
start the process_commands() function in its own, separate greenlet, and
|
|
||||||
then you exchange the keypresses with it as follows::
|
|
||||||
|
|
||||||
def event_keydown(key):
|
|
||||||
# jump into g_processor, sending it the key
|
|
||||||
g_processor.switch(key)
|
|
||||||
|
|
||||||
def read_next_char():
|
|
||||||
# g_self is g_processor in this simple example
|
|
||||||
g_self = greenlet.getcurrent()
|
|
||||||
# jump to the parent (main) greenlet, waiting for the next key
|
|
||||||
next_char = g_self.parent.switch()
|
|
||||||
return next_char
|
|
||||||
|
|
||||||
g_processor = greenlet(process_commands)
|
|
||||||
g_processor.switch(*args) # input arguments to process_commands()
|
|
||||||
|
|
||||||
gui.mainloop()
|
|
||||||
|
|
||||||
In this example, the execution flow is: when read_next_char() is called, it
|
|
||||||
is part of the g_processor greenlet, so when it switches to its parent
|
|
||||||
greenlet, it resumes execution in the top-level main loop (the GUI). When
|
|
||||||
the GUI calls event_keydown(), it switches to g_processor, which means that
|
|
||||||
the execution jumps back wherever it was suspended in that greenlet -- in
|
|
||||||
this case, to the switch() instruction in read_next_char() -- and the ``key``
|
|
||||||
argument in event_keydown() is passed as the return value of the switch() in
|
|
||||||
read_next_char().
|
|
||||||
|
|
||||||
Note that read_next_char() will be suspended and resumed with its call stack
|
|
||||||
preserved, so that it will itself return to different positions in
|
|
||||||
process_commands() depending on where it was originally called from. This
|
|
||||||
allows the logic of the program to be kept in a nice control-flow way; we
|
|
||||||
don't have to completely rewrite process_commands() to turn it into a state
|
|
||||||
machine.
|
|
||||||
|
|
||||||
|
|
||||||
Usage
|
|
||||||
=====
|
|
||||||
|
|
||||||
Introduction
|
|
||||||
------------
|
|
||||||
|
|
||||||
A "greenlet" is a small independent pseudo-thread. Think about it as a
|
|
||||||
small stack of frames; the outermost (bottom) frame is the initial
|
|
||||||
function you called, and the innermost frame is the one in which the
|
|
||||||
greenlet is currently paused. You work with greenlets by creating a
|
|
||||||
number of such stacks and jumping execution between them. Jumps are never
|
|
||||||
implicit: a greenlet must choose to jump to another greenlet, which will
|
|
||||||
cause the former to suspend and the latter to resume where it was
|
|
||||||
suspended. Jumping between greenlets is called "switching".
|
|
||||||
|
|
||||||
When you create a greenlet, it gets an initially empty stack; when you
|
|
||||||
first switch to it, it starts to run a specified function, which may call
|
|
||||||
other functions, switch out of the greenlet, etc. When eventually the
|
|
||||||
outermost function finishes its execution, the greenlet's stack becomes
|
|
||||||
empty again and the greenlet is "dead". Greenlets can also die of an
|
|
||||||
uncaught exception.
|
|
||||||
|
|
||||||
For example::
|
|
||||||
|
|
||||||
from py.magic import greenlet
|
|
||||||
|
|
||||||
def test1():
|
|
||||||
print 12
|
|
||||||
gr2.switch()
|
|
||||||
print 34
|
|
||||||
|
|
||||||
def test2():
|
|
||||||
print 56
|
|
||||||
gr1.switch()
|
|
||||||
print 78
|
|
||||||
|
|
||||||
gr1 = greenlet(test1)
|
|
||||||
gr2 = greenlet(test2)
|
|
||||||
gr1.switch()
|
|
||||||
|
|
||||||
The last line jumps to test1, which prints 12, jumps to test2, prints 56,
|
|
||||||
jumps back into test1, prints 34; and then test1 finishes and gr1 dies.
|
|
||||||
At this point, the execution comes back to the original ``gr1.switch()``
|
|
||||||
call. Note that 78 is never printed.
|
|
||||||
|
|
||||||
Parents
|
|
||||||
-------
|
|
||||||
|
|
||||||
Let's see where execution goes when a greenlet dies. Every greenlet has a
|
|
||||||
"parent" greenlet. The parent greenlet is initially the one in which the
|
|
||||||
greenlet was created (this can be changed at any time). The parent is
|
|
||||||
where execution continues when a greenlet dies. This way, greenlets are
|
|
||||||
organized in a tree. Top-level code that doesn't run in a user-created
|
|
||||||
greenlet runs in the implicit "main" greenlet, which is the root of the
|
|
||||||
tree.
|
|
||||||
|
|
||||||
In the above example, both gr1 and gr2 have the main greenlet as a parent.
|
|
||||||
Whenever one of them dies, the execution comes back to "main".
|
|
||||||
|
|
||||||
Uncaught exceptions are propagated into the parent, too. For example, if
|
|
||||||
the above test2() contained a typo, it would generate a NameError that
|
|
||||||
would kill gr2, and the exception would go back directly into "main".
|
|
||||||
The traceback would show test2, but not test1. Remember, switches are not
|
|
||||||
calls, but transfer of execution between parallel "stack containers", and
|
|
||||||
the "parent" defines which stack logically comes "below" the current one.
|
|
||||||
|
|
||||||
Instantiation
|
|
||||||
-------------
|
|
||||||
|
|
||||||
``py.magic.greenlet`` is the greenlet type, which supports the following
|
|
||||||
operations:
|
|
||||||
|
|
||||||
``greenlet(run=None, parent=None)``
|
|
||||||
Create a new greenlet object (without running it). ``run`` is the
|
|
||||||
callable to invoke, and ``parent`` is the parent greenlet, which
|
|
||||||
defaults to the current greenlet.
|
|
||||||
|
|
||||||
``greenlet.getcurrent()``
|
|
||||||
Returns the current greenlet (i.e. the one which called this
|
|
||||||
function).
|
|
||||||
|
|
||||||
``greenlet.GreenletExit``
|
|
||||||
This special exception does not propagate to the parent greenlet; it
|
|
||||||
can be used to kill a single greenlet.
|
|
||||||
|
|
||||||
The ``greenlet`` type can be subclassed, too. A greenlet runs by calling
|
|
||||||
its ``run`` attribute, which is normally set when the greenlet is
|
|
||||||
created; but for subclasses it also makes sense to define a ``run`` method
|
|
||||||
instead of giving a ``run`` argument to the constructor.
|
|
||||||
|
|
||||||
Switching
|
|
||||||
---------
|
|
||||||
|
|
||||||
Switches between greenlets occur when the method switch() of a greenlet is
|
|
||||||
called, in which case execution jumps to the greenlet whose switch() is
|
|
||||||
called, or when a greenlet dies, in which case execution jumps to the
|
|
||||||
parent greenlet. During a switch, an object or an exception is "sent" to
|
|
||||||
the target greenlet; this can be used as a convenient way to pass
|
|
||||||
information between greenlets. For example::
|
|
||||||
|
|
||||||
def test1(x, y):
|
|
||||||
z = gr2.switch(x+y)
|
|
||||||
print z
|
|
||||||
|
|
||||||
def test2(u):
|
|
||||||
print u
|
|
||||||
gr1.switch(42)
|
|
||||||
|
|
||||||
gr1 = greenlet(test1)
|
|
||||||
gr2 = greenlet(test2)
|
|
||||||
gr1.switch("hello", " world")
|
|
||||||
|
|
||||||
This prints "hello world" and 42, with the same order of execution as the
|
|
||||||
previous example. Note that the arguments of test1() and test2() are not
|
|
||||||
provided when the greenlet is created, but only the first time someone
|
|
||||||
switches to it.
|
|
||||||
|
|
||||||
Here are the precise rules for sending objects around:
|
|
||||||
|
|
||||||
``g.switch(obj=None or *args)``
|
|
||||||
Switches execution to the greenlet ``g``, sending it the given
|
|
||||||
``obj``. As a special case, if ``g`` did not start yet, then it will
|
|
||||||
start to run now; in this case, any number of arguments can be
|
|
||||||
provided, and ``g.run(*args)`` is called.
|
|
||||||
|
|
||||||
Dying greenlet
|
|
||||||
If a greenlet's ``run()`` finishes, its return value is the object
|
|
||||||
sent to its parent. If ``run()`` terminates with an exception, the
|
|
||||||
exception is propagated to its parent (unless it is a
|
|
||||||
``greenlet.GreenletExit`` exception, in which case the exception
|
|
||||||
object is caught and *returned* to the parent).
|
|
||||||
|
|
||||||
Apart from the cases described above, the target greenlet normally
|
|
||||||
receives the object as the return value of the call to ``switch()`` in
|
|
||||||
which it was previously suspended. Indeed, although a call to
|
|
||||||
``switch()`` does not return immediately, it will still return at some
|
|
||||||
point in the future, when some other greenlet switches back. When this
|
|
||||||
occurs, then execution resumes just after the ``switch()`` where it was
|
|
||||||
suspended, and the ``switch()`` itself appears to return the object that
|
|
||||||
was just sent. This means that ``x = g.switch(y)`` will send the object
|
|
||||||
``y`` to ``g``, and will later put the (unrelated) object that some
|
|
||||||
(unrelated) greenlet passes back to us into ``x``.
|
|
||||||
|
|
||||||
Note that any attempt to switch to a dead greenlet actually goes to the
|
|
||||||
dead greenlet's parent, or its parent's parent, and so on. (The final
|
|
||||||
parent is the "main" greenlet, which is never dead.)
|
|
||||||
|
|
||||||
Methods and attributes of greenlets
|
|
||||||
-----------------------------------
|
|
||||||
|
|
||||||
``g.switch(obj=None or *args)``
|
|
||||||
Switches execution to the greenlet ``g``. See above.
|
|
||||||
|
|
||||||
``g.run``
|
|
||||||
The callable that ``g`` will run when it starts. After ``g`` started,
|
|
||||||
this attribute no longer exists.
|
|
||||||
|
|
||||||
``g.parent``
|
|
||||||
The parent greenlet. This is writeable, but it is not allowed to
|
|
||||||
create cycles of parents.
|
|
||||||
|
|
||||||
``g.gr_frame``
|
|
||||||
The current top frame, or None.
|
|
||||||
|
|
||||||
``g.dead``
|
|
||||||
True if ``g`` is dead (i.e. it finished its execution).
|
|
||||||
|
|
||||||
``bool(g)``
|
|
||||||
True if ``g`` is active, False if it is dead or not yet started.
|
|
||||||
|
|
||||||
``g.throw([typ, [val, [tb]]])``
|
|
||||||
Switches execution to the greenlet ``g``, but immediately raises the
|
|
||||||
given exception in ``g``. If no argument is provided, the exception
|
|
||||||
defaults to ``greenlet.GreenletExit``. The normal exception
|
|
||||||
propagation rules apply, as described above. Note that calling this
|
|
||||||
method is almost equivalent to the following::
|
|
||||||
|
|
||||||
def raiser():
|
|
||||||
raise typ, val, tb
|
|
||||||
g_raiser = greenlet(raiser, parent=g)
|
|
||||||
g_raiser.switch()
|
|
||||||
|
|
||||||
except that this trick does not work for the
|
|
||||||
``greenlet.GreenletExit`` exception, which would not propagate
|
|
||||||
from ``g_raiser`` to ``g``.
|
|
||||||
|
|
||||||
Greenlets and Python threads
|
|
||||||
----------------------------
|
|
||||||
|
|
||||||
Greenlets can be combined with Python threads; in this case, each thread
|
|
||||||
contains an independent "main" greenlet with a tree of sub-greenlets. It
|
|
||||||
is not possible to mix or switch between greenlets belonging to different
|
|
||||||
threads.
|
|
||||||
|
|
||||||
Garbage-collecting live greenlets
|
|
||||||
---------------------------------
|
|
||||||
|
|
||||||
If all the references to a greenlet object go away (including the
|
|
||||||
references from the parent attribute of other greenlets), then there is no
|
|
||||||
way to ever switch back to this greenlet. In this case, a GreenletExit
|
|
||||||
exception is generated into the greenlet. This is the only case where a
|
|
||||||
greenlet receives the execution asynchronously. This gives
|
|
||||||
``try:finally:`` blocks a chance to clean up resources held by the
|
|
||||||
greenlet. This feature also enables a programming style in which
|
|
||||||
greenlets are infinite loops waiting for data and processing it. Such
|
|
||||||
loops are automatically interrupted when the last reference to the
|
|
||||||
greenlet goes away.
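
A small sketch of that style (the function and variable names are made up;
the behaviour follows the description above)::

    from py.magic import greenlet

    def consumer():
        try:
            while True:                        # wait for data forever
                data = greenlet.getcurrent().parent.switch()
                print "processing", data
        finally:
            print "cleaning up"                # runs when GreenletExit arrives

    g = greenlet(consumer)
    g.switch()               # start the loop; it blocks in its switch()
    g.switch("some data")    # prints "processing some data"
    del g                    # last reference gone: GreenletExit, then cleanup
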
|
|
||||||
|
|
||||||
The greenlet is expected to either die or be resurrected by having a new
|
|
||||||
reference to it stored somewhere; just catching and ignoring the
|
|
||||||
GreenletExit is likely to lead to an infinite loop.
|
|
||||||
|
|
||||||
Greenlets do not participate in garbage collection; cycles involving data
|
|
||||||
that is present in a greenlet's frames will not be detected. Storing
|
|
||||||
references to other greenlets cyclically may lead to leaks.
|
|
|
@ -1,284 +0,0 @@
|
||||||
===============================================
|
|
||||||
Implementation and Customization of ``py.test``
|
|
||||||
===============================================
|
|
||||||
|
|
||||||
.. contents::
|
|
||||||
.. sectnum::
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
.. _`basicpicture`:
|
|
||||||
|
|
||||||
|
|
||||||
Collecting and running tests / implementation remarks
|
|
||||||
======================================================
|
|
||||||
|
|
||||||
In order to customize ``py.test`` it's good to understand
|
|
||||||
its basic architecture (WARNING: these are not guaranteed
|
|
||||||
yet to stay the way they are now!)::
|
|
||||||
|
|
||||||
___________________
|
|
||||||
| |
|
|
||||||
| Collector |
|
|
||||||
|___________________|
|
|
||||||
/ \
|
|
||||||
| Item.run()
|
|
||||||
| ^
|
|
||||||
receive test Items /
|
|
||||||
| /execute test Item
|
|
||||||
| /
|
|
||||||
___________________/
|
|
||||||
| |
|
|
||||||
| Session |
|
|
||||||
|___________________|
|
|
||||||
|
|
||||||
.............................
|
|
||||||
. conftest.py configuration .
|
|
||||||
. cmdline options .
|
|
||||||
.............................
|
|
||||||
|
|
||||||
|
|
||||||
The *Session* basically receives test *Items* from a *Collector*,
|
|
||||||
and executes them via the ``Item.run()`` method. It monitors
|
|
||||||
the outcome of the test and reports about failures and successes.
|
|
||||||
|
|
||||||
.. _`collection process`:
|
|
||||||
|
|
||||||
Collectors and the test collection process
|
|
||||||
------------------------------------------
|
|
||||||
|
|
||||||
The collecting process is iterative, i.e. the session
|
|
||||||
traverses and generates a *collector tree*. Here is an example of such
|
|
||||||
a tree, generated with the command ``py.test --collectonly py/xmlobj``::
|
|
||||||
|
|
||||||
<Directory 'xmlobj'>
|
|
||||||
<Directory 'testing'>
|
|
||||||
<Module 'test_html.py' (py.__.xmlobj.testing.test_html)>
|
|
||||||
<Function 'test_html_name_stickyness'>
|
|
||||||
<Function 'test_stylenames'>
|
|
||||||
<Function 'test_class_None'>
|
|
||||||
<Function 'test_alternating_style'>
|
|
||||||
<Module 'test_xml.py' (py.__.xmlobj.testing.test_xml)>
|
|
||||||
<Function 'test_tag_with_text'>
|
|
||||||
<Function 'test_class_identity'>
|
|
||||||
<Function 'test_tag_with_text_and_attributes'>
|
|
||||||
<Function 'test_tag_with_subclassed_attr_simple'>
|
|
||||||
<Function 'test_tag_nested'>
|
|
||||||
<Function 'test_tag_xmlname'>
|
|
||||||
|
|
||||||
|
|
||||||
By default all directories not starting with a dot are traversed,
|
|
||||||
looking for ``test_*.py`` and ``*_test.py`` files. Those files
|
|
||||||
are imported under their `package name`_.
|
|
||||||
|
|
||||||
.. _`collector API`:
|
|
||||||
|
|
||||||
test items are collectors as well
|
|
||||||
---------------------------------
|
|
||||||
|
|
||||||
To make the reporting life simple for the session object
|
|
||||||
items offer a ``run()`` method as well. In fact the session
|
|
||||||
distinguishes "collectors" from "items" solely by interpreting
|
|
||||||
their return value. If it is a list, then we recurse into
|
|
||||||
it, otherwise we consider the "test" as passed.
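
In pseudo-code the dispatch looks roughly like this; it is a sketch only, not
the actual session implementation, and ``report_passed`` as well as the use
of ``join()`` are illustrative::

    def run_colitem(session, colitem):
        result = colitem.run()
        if isinstance(result, list):           # a collector: recurse by name
            for name in result:
                run_colitem(session, colitem.join(name))
        else:                                  # an item: consider the test passed
            session.report_passed(colitem)
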
|
|
||||||
|
|
||||||
.. _`package name`:
|
|
||||||
|
|
||||||
constructing the package name for modules
|
|
||||||
-----------------------------------------
|
|
||||||
|
|
||||||
Test modules are imported under their fully qualified
|
|
||||||
name. Given a module ``path`` the fully qualified package
|
|
||||||
name is constructed as follows:
|
|
||||||
|
|
||||||
* determine the last "upward" directory from ``path`` that
|
|
||||||
contains an ``__init__.py`` file. Going upwards
|
|
||||||
means repeatedly calling the ``dirpath()`` method
|
|
||||||
on a path object (which returns the parent directory
|
|
||||||
as a path object).
|
|
||||||
|
|
||||||
* insert this base directory into the sys.path list
|
|
||||||
as its first element
|
|
||||||
|
|
||||||
* import the root package
|
|
||||||
|
|
||||||
* determine the fully qualified name for the module located
|
|
||||||
at ``path`` ...
|
|
||||||
|
|
||||||
* if the imported root package has a __package__ object
|
|
||||||
then call ``__package__.getimportname(path)``
|
|
||||||
|
|
||||||
* otherwise use the relative path of the module path to
|
|
||||||
the base dir and turn slashes into dots and strike
|
|
||||||
the trailing ``.py``.
|
|
||||||
|
|
||||||
The Module collector will eventually trigger
|
|
||||||
``__import__(mod_fqdnname, ...)`` to finally get to
|
|
||||||
the live module object.
|
|
||||||
|
|
||||||
Side note: this whole logic is performed by local path
|
|
||||||
object's ``pyimport()`` method.
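
For illustration, the steps above can be sketched roughly as follows (this is
not the real ``pyimport()`` code and the helper name is made up)::

    import os, sys

    def fully_qualified_name(path):
        # walk upwards as long as __init__.py files are found
        basedir = os.path.dirname(os.path.abspath(path))
        while os.path.exists(os.path.join(basedir, '__init__.py')):
            basedir = os.path.dirname(basedir)
        # make the base directory importable
        if basedir not in sys.path:
            sys.path.insert(0, basedir)
        # relative path -> dotted name, minus the trailing ".py"
        relpath = os.path.abspath(path)[len(basedir) + 1:]
        assert relpath.endswith('.py')
        return relpath[:-3].replace(os.sep, '.')
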
|
|
||||||
|
|
||||||
Module Collector
|
|
||||||
-----------------
|
|
||||||
|
|
||||||
The default Module collector looks for test functions
|
|
||||||
and test classes and methods. Test functions and methods
|
|
||||||
are prefixed ``test`` by default. Test classes must
|
|
||||||
start with a capitalized ``Test`` prefix.
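
For example, in a module like the following both the function and the method
would be collected::

    # content of test_example.py
    def test_addition():              # collected: name starts with "test"
        assert 1 + 1 == 2

    class TestNumbers:                # collected: class name starts with "Test"
        def test_zero(self):          # collected test method
            assert not 0
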
|
|
||||||
|
|
||||||
|
|
||||||
Customizing the testing process
|
|
||||||
===============================
|
|
||||||
|
|
||||||
writing conftest.py files
|
|
||||||
-----------------------------------
|
|
||||||
|
|
||||||
You may put conftest.py files containing project-specific
|
|
||||||
configuration in your project's root directory; it's usually
|
|
||||||
best to put it just into the same directory level as your
|
|
||||||
topmost ``__init__.py``. In fact, ``py.test`` performs
|
|
||||||
an "upwards" search starting from the directory that you specify
|
|
||||||
to be tested and will look up configuration values right-to-left.
|
|
||||||
You may have options that reside e.g. in your home directory
|
|
||||||
but note that project specific settings will be considered
|
|
||||||
first. There is a flag that helps you debug your
|
|
||||||
conftest.py configurations::
|
|
||||||
|
|
||||||
py.test --traceconfig
|
|
||||||
|
|
||||||
adding custom options
|
|
||||||
+++++++++++++++++++++++
|
|
||||||
|
|
||||||
To register a project-specific command line option
|
|
||||||
you may have the following code within a ``conftest.py`` file::
|
|
||||||
|
|
||||||
import py
|
|
||||||
Option = py.test.config.Option
|
|
||||||
option = py.test.config.addoptions("pypy options",
|
|
||||||
Option('-V', '--view', action="store_true", dest="view", default=False,
|
|
||||||
help="view translation tests' flow graphs with Pygame"),
|
|
||||||
)
|
|
||||||
|
|
||||||
and you can then access ``option.view`` like this::
|
|
||||||
|
|
||||||
if option.view:
|
|
||||||
print "view this!"
|
|
||||||
|
|
||||||
The option will be available if you type ``py.test -h``
|
|
||||||
Note that you may only register upper case short
|
|
||||||
options. ``py.test`` reserves all lower
|
|
||||||
case short options for its own cross-project usage.
|
|
||||||
|
|
||||||
customizing the collecting and running process
|
|
||||||
-----------------------------------------------
|
|
||||||
|
|
||||||
To introduce different test items you can create
|
|
||||||
one or more ``conftest.py`` files in your project.
|
|
||||||
When the collection process traverses directories
|
|
||||||
and modules the default collectors will produce
|
|
||||||
custom Collectors and Items if they are found
|
|
||||||
in a local ``conftest.py`` file.
|
|
||||||
|
|
||||||
example: perform additional ReST checks
|
|
||||||
+++++++++++++++++++++++++++++++++++++++
|
|
||||||
|
|
||||||
With your custom collectors or items you can completely
|
|
||||||
derive from the standard way of collecting and running
|
|
||||||
tests in a localized manner. Let's look at an example.
|
|
||||||
If you invoke ``py.test --collectonly py/documentation``
|
|
||||||
then you get::
|
|
||||||
|
|
||||||
<DocDirectory 'documentation'>
|
|
||||||
<DocDirectory 'example'>
|
|
||||||
<DocDirectory 'pytest'>
|
|
||||||
<Module 'test_setup_flow_example.py' (test_setup_flow_example)>
|
|
||||||
<Class 'TestStateFullThing'>
|
|
||||||
<Instance '()'>
|
|
||||||
<Function 'test_42'>
|
|
||||||
<Function 'test_23'>
|
|
||||||
<ReSTChecker 'TODO.txt'>
|
|
||||||
<ReSTSyntaxTest 'TODO.txt'>
|
|
||||||
<LinkCheckerMaker 'checklinks'>
|
|
||||||
<ReSTChecker 'api.txt'>
|
|
||||||
<ReSTSyntaxTest 'api.txt'>
|
|
||||||
<LinkCheckerMaker 'checklinks'>
|
|
||||||
<CheckLink 'getting-started.html'>
|
|
||||||
...
|
|
||||||
|
|
||||||
In ``py/documentation/conftest.py`` you find the following
|
|
||||||
customization::
|
|
||||||
|
|
||||||
class DocDirectory(py.test.collect.Directory):
|
|
||||||
|
|
||||||
def run(self):
|
|
||||||
results = super(DocDirectory, self).run()
|
|
||||||
for x in self.fspath.listdir('*.txt', sort=True):
|
|
||||||
results.append(x.basename)
|
|
||||||
return results
|
|
||||||
|
|
||||||
def join(self, name):
|
|
||||||
if not name.endswith('.txt'):
|
|
||||||
return super(DocDirectory, self).join(name)
|
|
||||||
p = self.fspath.join(name)
|
|
||||||
if p.check(file=1):
|
|
||||||
return ReSTChecker(p, parent=self)
|
|
||||||
|
|
||||||
Directory = DocDirectory
|
|
||||||
|
|
||||||
The existence of the 'Directory' name in the
|
|
||||||
``pypy/documentation/conftest.py`` module makes the collection
|
|
||||||
process defer to our custom "DocDirectory" collector. We extend
|
|
||||||
the set of collected test items by ``ReSTChecker`` instances
|
|
||||||
which themselves create ``ReSTSyntaxTest`` and ``LinkCheckerMaker``
|
|
||||||
items. All of these instances (need to) follow the `collector API`_.
|
|
||||||
|
|
||||||
|
|
||||||
Customizing the collection process in a module
|
|
||||||
----------------------------------------------
|
|
||||||
|
|
||||||
REPEATED WARNING: details of the collection and running process are
|
|
||||||
still subject to refactorings and thus details will change.
|
|
||||||
If you are customizing py.test at "Item" level then you
|
|
||||||
definitely want to be subscribed to the `py-dev mailing list`_
|
|
||||||
to follow ongoing development.
|
|
||||||
|
|
||||||
If you have a module where you want to take responsibility for
|
|
||||||
collecting your own test Items and possibly even for executing
|
|
||||||
a test then you can provide `generative tests`_ that yield
|
|
||||||
callables and possibly arguments as a tuple. This should
|
|
||||||
serve some immediate purposes like parametrized tests.
|
|
||||||
|
|
||||||
.. _`generative tests`: test.html#generative-tests
|
|
||||||
|
|
||||||
The other extension possibility goes deeper into the machinery
|
|
||||||
and allows you to specify a custom test ``Item`` class which
|
|
||||||
is responsible for setting up and executing an underlying
|
|
||||||
test. [XXX not working: You can integrate your custom ``py.test.collect.Item`` subclass
|
|
||||||
by binding an ``Item`` name to a test class.] Or you can
|
|
||||||
extend the collection process for a whole directory tree
|
|
||||||
by putting Items in a ``conftest.py`` configuration file.
|
|
||||||
The collection process constantly looks for corresponding names
|
|
||||||
in the *chain of conftest.py* modules to determine collectors
|
|
||||||
and items at ``Directory``, ``Module``, ``Class``, ``Function``
|
|
||||||
or ``Generator`` level. Note that, right now, except for ``Function``
|
|
||||||
items all classes are pure collectors, i.e. will return a list
|
|
||||||
of names (possibly empty).
|
|
||||||
|
|
||||||
XXX implement doctests as alternatives to ``Function`` items.
|
|
||||||
|
|
||||||
Customizing execution of Functions
|
|
||||||
----------------------------------
|
|
||||||
|
|
||||||
- Function test items allow total control of executing their
|
|
||||||
contained test method. ``function.run()`` will get called by the
|
|
||||||
session in order to actually run a test. The method is responsible
|
|
||||||
for performing proper setup/teardown ("Test Fixtures") for a
|
|
||||||
Function test.
|
|
||||||
|
|
||||||
- ``Function.execute(target, *args)`` methods are invoked by
|
|
||||||
the default ``Function.run()`` to actually execute a python
|
|
||||||
function with the given (usually empty set of) arguments.
|
|
||||||
|
|
||||||
|
|
||||||
.. _`py-dev mailing list`: http://codespeak.net/mailman/listinfo/py-dev
|
|
|
@ -1,62 +0,0 @@
|
||||||
py lib documentation
|
|
||||||
=================================================
|
|
||||||
|
|
||||||
The py lib aims at supporting a decent development process
|
|
||||||
addressing deployment, versioning, testing and documentation
|
|
||||||
perspectives.
|
|
||||||
|
|
||||||
`Download and Installation`_
|
|
||||||
|
|
||||||
`0.9.0 release announcement`_
|
|
||||||
|
|
||||||
Main tools and API
|
|
||||||
----------------------
|
|
||||||
|
|
||||||
`py.test`_ introduces the **py.test** testing utility.
|
|
||||||
|
|
||||||
`py.execnet`_ distributes programs across the net.
|
|
||||||
|
|
||||||
`py.magic.greenlet`_: micro-threads (lightweight in-process concurrent programming)
|
|
||||||
|
|
||||||
`py.path`_: local and subversion Path and Filesystem access
|
|
||||||
|
|
||||||
`py lib scripts`_ describes the scripts contained in the ``py/bin`` directory.
|
|
||||||
|
|
||||||
`apigen`_: a new way to generate rich Python API documentation
|
|
||||||
|
|
||||||
support functionality
|
|
||||||
---------------------------------
|
|
||||||
|
|
||||||
`py.code`_: High-level access/manipulation of Python code and traceback objects.
|
|
||||||
|
|
||||||
`py.xml`_ for generating in-memory xml/html object trees
|
|
||||||
|
|
||||||
`py.io`_: Helper Classes for Capturing of Input/Output
|
|
||||||
|
|
||||||
`py.log`_: an alpha document about the ad-hoc logging facilities
|
|
||||||
|
|
||||||
`miscellaneous features`_ describes some small but nice py lib features.
|
|
||||||
|
|
||||||
Background and Motivation information
|
|
||||||
-------------------------------------------
|
|
||||||
|
|
||||||
`future`_ handles development visions and plans for the near future.
|
|
||||||
|
|
||||||
`why what how py?`_, describing motivation and background of the py lib.
|
|
||||||
|
|
||||||
.. _`download and installation`: download.html
|
|
||||||
.. _`py-dev at codespeak net`: http://codespeak.net/mailman/listinfo/py-dev
|
|
||||||
.. _`py.execnet`: execnet.html
|
|
||||||
.. _`py.magic.greenlet`: greenlet.html
|
|
||||||
.. _`apigen`: apigen.html
|
|
||||||
.. _`py.log`: log.html
|
|
||||||
.. _`py.io`: io.html
|
|
||||||
.. _`py.path`: path.html
|
|
||||||
.. _`py.code`: code.html
|
|
||||||
.. _`py.test`: test.html
|
|
||||||
.. _`py lib scripts`: bin.html
|
|
||||||
.. _`py.xml`: xml.html
|
|
||||||
.. _`Why What how py?`: why_py.html
|
|
||||||
.. _`future`: future.html
|
|
||||||
.. _`miscellaneous features`: misc.html
|
|
||||||
.. _`0.9.0 release announcement`: release-0.9.0.html
|
|
|
@ -1,45 +0,0 @@
|
||||||
=======
|
|
||||||
py.io
|
|
||||||
=======
|
|
||||||
|
|
||||||
.. contents::
|
|
||||||
.. sectnum::
|
|
||||||
|
|
||||||
The 'py' lib provides helper classes for capturing IO during
|
|
||||||
execution of a program.
|
|
||||||
|
|
||||||
IO Capturing examples
|
|
||||||
===============================================
|
|
||||||
|
|
||||||
:api:`py.io.StdCapture`
|
|
||||||
---------------------------
|
|
||||||
|
|
||||||
Basic Example:
|
|
||||||
|
|
||||||
>>> import py
|
|
||||||
>>> capture = py.io.StdCapture()
|
|
||||||
>>> print "hello"
|
|
||||||
>>> out,err = capture.reset()
|
|
||||||
>>> out.strip() == "hello"
|
|
||||||
True
|
|
||||||
|
|
||||||
For calling functions you may use a shortcut:
|
|
||||||
|
|
||||||
>>> import py
|
|
||||||
>>> def f(): print "hello"
|
|
||||||
>>> res, out, err = py.io.StdCapture.call(f)
|
|
||||||
>>> out.strip() == "hello"
|
|
||||||
True
|
|
||||||
|
|
||||||
:api:`py.io.StdCaptureFD`
|
|
||||||
---------------------------
|
|
||||||
|
|
||||||
If you also want to capture writes to the stdout/stderr
|
|
||||||
filedescriptors you may invoke:
|
|
||||||
|
|
||||||
>>> import py, sys
|
|
||||||
>>> capture = py.io.StdCaptureFD()
|
|
||||||
>>> sys.stderr.write("world")
|
|
||||||
>>> out,err = capture.reset()
|
|
||||||
>>> err
|
|
||||||
'world'
|
|
|
@ -1,33 +0,0 @@
|
||||||
|
|
||||||
=====
|
|
||||||
Links
|
|
||||||
=====
|
|
||||||
|
|
||||||
Some links to ongoing discussions and comments about pylib and techniques/concepts pylib uses.
|
|
||||||
|
|
||||||
* `Discussion <http://blog.ianbicking.org/site-packages-considered-harmful.html>`_
|
|
||||||
about site-packages. That's why pylib autopath and py.__.misc.dynpkg are a good idea ;-)
|
|
||||||
|
|
||||||
|
|
||||||
* `Pyinotify <http://pyinotify.sourceforge.net/>`_ uses code from pypy autopath functions.
|
|
||||||
|
|
||||||
* `Testing (WSGI) Applications with Paste <http://pythonpaste.org/testing-applications.html#the-test-environment>`_ and py.test. "This has been written with py.test in mind." Paste uses py.test.
|
|
||||||
|
|
||||||
|
|
||||||
* `Agile Testing <http://agiletesting.blogspot.com/>`_ by Grig Gheorghiu
|
|
||||||
|
|
||||||
* `Slides from 'py library overview' presentation at SoCal Piggies meeting
|
|
||||||
<http://agiletesting.blogspot.com/2005/07/slides-from-py-library-overview.html>`_
|
|
||||||
|
|
||||||
* `Python unit testing part 3: the py.test tool and library
|
|
||||||
<http://agiletesting.blogspot.com/2005/01/python-unit-testing-part-3-pytest-tool.html>`_
|
|
||||||
|
|
||||||
* `greenlets and py.xml
|
|
||||||
<http://agiletesting.blogspot.com/2005/07/py-lib-gems-greenlets-and-pyxml.html>`_
|
|
||||||
|
|
||||||
* `Keyword-based logging with the py library
|
|
||||||
<http://agiletesting.blogspot.com/2005/06/keyword-based-logging-with-py-library.html>`_
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
py/doc/log.txt
|
@ -1,208 +0,0 @@
|
||||||
.. role:: code(strong)
|
|
||||||
.. role:: file(literal)
|
|
||||||
|
|
||||||
========================================
|
|
||||||
:code:`py.log` documentation and musings
|
|
||||||
========================================
|
|
||||||
|
|
||||||
.. contents::
|
|
||||||
.. sectnum::
|
|
||||||
|
|
||||||
Foreword
|
|
||||||
========
|
|
||||||
|
|
||||||
This document is an attempt to briefly state the actual specification of the
|
|
||||||
:code:`py.log` module. It was written by Francois Pinard and also contains
|
|
||||||
some ideas for enhancing the py.log facilities.
|
|
||||||
|
|
||||||
NOTE that `py.log` is subject to refactorings; it may change with
|
|
||||||
the next release.
|
|
||||||
|
|
||||||
This document is meant to trigger or facilitate discussions. It shamelessly
|
|
||||||
steals from the `Agile Testing`__ comments, and from other sources as well,
|
|
||||||
without really trying to sort them out.
|
|
||||||
|
|
||||||
__ http://agiletesting.blogspot.com/2005/06/keyword-based-logging-with-py-library.html
|
|
||||||
|
|
||||||
|
|
||||||
Logging organisation
|
|
||||||
====================
|
|
||||||
|
|
||||||
The :code:`py.log` module aims at a niche comparable to the one of the
|
|
||||||
`logging module`__ found within the standard Python distributions, yet
|
|
||||||
with much simpler paradigms for configuration and usage.
|
|
||||||
|
|
||||||
__ http://www.python.org/doc/2.4.2/lib/module-logging.html
|
|
||||||
|
|
||||||
Holger Krekel, the main :code:`py` library developer, introduced
|
|
||||||
the idea of keyword-based logging and the idea of logging *producers* and
|
|
||||||
*consumers*. A log producer is an object used by the application code
|
|
||||||
to send messages to various log consumers. When you create a log
|
|
||||||
producer, you define a set of keywords that are then used to both route
|
|
||||||
the logging messages to consumers, and to prefix those messages.
|
|
||||||
|
|
||||||
In fact, each log producer has a few keywords associated with it for
|
|
||||||
identification purposes. These keywords form a tuple of strings, and
|
|
||||||
may be used to later retrieve a particular log producer.
|
|
||||||
|
|
||||||
A log producer may (or may not) be associated with a log consumer, meant
|
|
||||||
to handle log messages in particular ways. The log consumers can be
|
|
||||||
``STDOUT``, ``STDERR``, log files, syslog, the Windows Event Log, user
|
|
||||||
defined functions, etc. (Yet, logging to syslog or to the Windows Event
|
|
||||||
Log is only future plans for now). A log producer has never more than
|
|
||||||
one consumer at a given time, but it is possible to dynamically switch
|
|
||||||
a producer to use another consumer. On the other hand, a single log
|
|
||||||
consumer may be associated with many producers.
|
|
||||||
|
|
||||||
Note that creating and associating a producer and a consumer is done
|
|
||||||
automatically when not otherwise overridden, so using :code:`py` logging
|
|
||||||
is quite comfortable even in the smallest programs. More typically,
|
|
||||||
the application programmer will likely design a hierarchy of producers,
|
|
||||||
and will select keywords appropriately for marking the hierarchy tree.
|
|
||||||
If a node of the hierarchical tree of producers has to be divided in
|
|
||||||
sub-trees, all producers in the sub-trees share, as a common prefix, the
|
|
||||||
keywords of the node being divided. In other words, we go further down
|
|
||||||
in the hierarchy of producers merely by adding keywords.
|
|
||||||
|
|
||||||

Using the :code:`py.log` library
================================

To use the :code:`py.log` library, the user must import it into a Python
application, create at least one log producer and one log consumer, have
producers and consumers associated, and finally call the log producers
as needed, giving them log messages.

Importing
---------

Once the :code:`py` library is installed on your system, a mere::

    import py

holds enough magic for lazily importing the various facilities of the
:code:`py` library when they are first needed. This is really how
:code:`py.log` is made available to the application. For example, after
the above ``import py``, one may directly write ``py.log.Producer(...)``
and everything should work fine; the user does not have to worry about
explicitly importing more modules.

Creating a producer
-------------------

There are three ways of creating a log producer instance:

+ As soon as ``py.log`` is first evaluated within an application
  program, a default log producer is created and made available under
  the name ``py.log.default``. The keyword ``default`` is associated
  with that producer.

+ The ``py.log.Producer()`` constructor may be called explicitly
  to create a new log producer instance. That constructor
  accepts, as an argument, the keywords that should be associated with
  that producer. Keywords may be given either as a tuple of keyword
  strings or as a single space-separated string of keywords.

+ Whenever an attribute is *taken* out of a log producer instance
  for the first time, a new log producer is created. The keywords
  associated with that new producer are those of the initial producer
  instance, with the name of the attribute being taken appended to them.

The last point is especially useful, as it allows using log producers
without further declarations, merely creating them *on-the-fly*.
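
A minimal sketch of the three creation styles just described (the ``myapp``
and ``network`` keywords are made up for illustration)::

    import py

    # 1. the default producer created by py.log itself
    py.log.default("application started")

    # 2. an explicitly constructed producer with its own keywords
    netlog = py.log.Producer("myapp network")

    # 3. taking an attribute derives a new producer on the fly,
    #    appending the attribute name to the keywords
    netlog.retry("reconnecting, attempt", 2)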

Creating a consumer
-------------------

There are many ways of creating or denoting a log consumer:

+ A default consumer exists within the ``py.log`` facilities, which
  has the effect of writing log messages to the Python standard output
  stream. That consumer is associated with the very top of the producer
  hierarchy and, as such, is called whenever no other consumer is
  found.

+ The notation ``py.log.STDOUT`` accesses a log consumer which writes
  log messages to the Python standard output stream.

+ The notation ``py.log.STDERR`` accesses a log consumer which writes
  log messages to the Python standard error stream.

+ The ``py.log.File()`` constructor accepts, as its argument, either a file
  already opened in write mode or any similar file-like object, and
  creates a log consumer able to write log messages to that file.

+ The ``py.log.Path()`` constructor accepts a file name as its first
  argument, and creates a log consumer able to write log messages into
  that file. The constructor call accepts a few keyword parameters:

  + ``append``, which is ``False`` by default, may be used for
    opening the file in append mode instead of write mode.

  + ``delayed_create``, which is ``False`` by default, may be used
    for opening the file at the latest possible time. Consequently,
    the file will not be created at all if it did not already exist
    and no log message ever gets written to it.

  + ``buffering``, which is 1 by default, is used when opening the
    file. Buffering can be turned off by specifying a 0 value. The
    buffer size may also be selected through this argument.

+ Any user defined function may be used as a log consumer. Such a
  function should accept a single argument, which is the message to
  write, and do whatever is deemed appropriate by the programmer.
  When the need arises, this may be an especially useful and flexible
  feature.

+ The special value ``None`` means no consumer at all. This acts just
  as if there were a consumer which silently discarded all log
  messages sent to it.
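
A sketch of the consumer kinds listed above (the file names and the
collecting function are placeholders)::

    import py

    stdout_consumer = py.log.STDOUT              # standard output
    stderr_consumer = py.log.STDERR              # standard error

    filelog = py.log.File(open("app.log", "w"))  # wraps an already open file
    pathlog = py.log.Path("errors.log", append=True,
                          delayed_create=True, buffering=0)

    messages = []
    def remember(msg):
        # any one-argument callable may serve as a consumer
        messages.append(str(msg))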

Associating producers and consumers
-----------------------------------

Each log producer may have at most one log consumer associated with
it. A log producer gets associated with a log consumer through a
``py.log.set_consumer()`` call. That function accepts two arguments,
the first identifying a producer (a tuple of keyword strings or a single
space-separated string of keywords), the second specifying the precise
consumer to use for that producer. Until this function is called for a
producer, that producer does not have any explicit consumer associated
with it.

Now, the hierarchy of log producers establishes which consumer gets used
whenever a producer has no explicit consumer. When a log producer
has no consumer explicitly associated with it, it dynamically and
recursively inherits the consumer of its parent node, that is, the node
sitting a bit closer to the root of the hierarchy. In other words, the
rightmost keywords of that producer are dropped until another producer
is found which has an explicit consumer. A nice side-effect is that,
by explicitly associating a consumer with a producer, all consumer-less
producers which appear under that producer in the hierarchy tree
automatically *inherit* that consumer.
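
Putting the pieces together, a sketch of explicit association and of
consumer inheritance along the keyword hierarchy (it follows the
``set_consumer`` name used above; the ``myapp`` keywords are made up)::

    import py

    applog = py.log.Producer("myapp")   # keywords: ('myapp',)
    dblog = applog.db                   # keywords: ('myapp', 'db')

    # route everything under the 'myapp' keyword to stderr; 'myapp db'
    # has no consumer of its own, so it inherits this one
    py.log.set_consumer("myapp", py.log.STDERR)

    applog("starting up")
    dblog("connection established")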

Writing log messages
--------------------

All log producer instances are also callable, and it is by calling
them that log messages are generated. Each call to a producer object
produces the text for one log entry which, in turn, is sent to the log
consumer for that producer.

The log entry displays, after a prefix identifying the log producer
being used, all arguments given in the call, converted to strings and
space-separated. (This is meant by design to be fairly similar to what
the ``print`` statement does in Python.) The prefix itself is made up
of a colon-separated list of the keywords associated with the producer, the
whole being set within square brackets.

Note that the consumer is responsible for adding the newline at the end
of the log entry. That final newline is not part of the text of the
log entry.
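
For example, with a producer created as above, a call and the resulting
log entry look roughly like this (a sketch; the exact text is up to the
implementation)::

    import py

    log = py.log.Producer("myapp db")
    log("connect failed, retrying in", 3, "seconds")
    # the consumer is handed one entry whose text reads approximately:
    # [myapp:db] connect failed, retrying in 3 seconds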

Other details
-------------

+ Should speak about pickle-ability of :code:`py.log`.

+ What is :code:`log.get` (in :file:`logger.py`)?

py/doc/misc.txt
@ -1,220 +0,0 @@

====================================
Miscellaneous features of the py lib
====================================

.. contents::
.. sectnum::

Mapping the standard python library into py
===========================================

Warning: This feature is very young and thus experimental.
Be prepared to adapt your code later if you use it.

After you have worked with the py lib a bit, you might enjoy
its lazy importing: you only have to do ``import py`` and
work your way to your desired object. Using the full path
also ensures that there remains a focus on getting short paths
to objects.

The :api:`py.std` hook
----------------------

Of course, no matter what, everybody will continue to use the
python standard library because it is a very usable code base.
However, to properly support laziness the py lib offers a way
to get to many standard modules without requiring "import"
statements. For example, to get to the print-exception
functionality of the standard library you can write::

    py.std.traceback.print_exc()

without having to do anything else than the usual ``import py``
at the beginning. Note that not having imports for the
`python standard library` obviously gets rid of the *unused
import* problem. Modules only get imported when you actually
need them.

Moreover, this approach resolves some of the issues stated in
`the relative/absolute import PEP-328`_, as with the above
approach you never have ambiguity problems. The above
traceback usage is an absolute path that will not
accidentally get confused with local names. (Well, never put
a file ``py.py`` in an importable path, btw, mind you :-)
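
A few more lazy accesses in the same spirit (a small sketch; all modules
shown are standard library ones)::

    import py

    print py.std.os.getcwd()
    print py.std.textwrap.fill("imported lazily on first attribute access " * 3)
    py.std.time.sleep(0.1)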

Automagically accessing sub packages doesn't work (yet?)
--------------------------------------------------------

If you use the :api:`py.std` hook you currently cannot magically
import nested packages which otherwise need explicit imports of
their sub-packages. For example, the subversion bindings
require you to do something like::

    import svn.client

If you just do the naive thing with the py lib, i.e. write
``py.std.svn.client``, it will not work unless you have
imported it previously. The py lib currently doesn't try to
magically make this work. The :api:`py.std` hook really is
intended for Python standard modules, which very seldom (if
at all) provide such nested packages.

**Note that you may never rely** on module identity, i.e.
that ``X is py.std.X`` for any ``X``. This is to allow
us later to lazily import nested packages. Yes, laziness
is hard to resist :-)

Note: you get an AttributeError, not an ImportError
---------------------------------------------------

If you say ``py.std.XYZ`` and importing ``XYZ`` produces an
``ImportError``, it will actually show up as an
``AttributeError``. It is deemed more important to adhere to
the standard ``__getattr__`` protocol than to let the
``ImportError`` pass through. For example, you might want to
do::

    getattr(py.std.cStringIO, 'StringIO', py.std.StringIO.StringIO)

and you would expect that it works. It does work, although it will
take away some laziness because ``py.std.StringIO.StringIO`` will
be imported in any case.

.. _`the relative/absolute import PEP-328`: http://www.python.org/peps/pep-0328.html

Support for interaction with system utilities/binaries
======================================================

sources:

* :source:`py/process/`
* :source:`py/path/local/`

Currently, the py lib offers two ways to interact with
system executables. :api:`py.process.cmdexec()` invokes
the shell in order to execute a string. The other
one, :api:`py.path.local`'s ``sysexec()`` method, lets you
directly execute a binary.

Both approaches will raise an exception in case of a return
code other than 0 and otherwise return the stdout output
of the child process.

The shell based approach
------------------------

You can execute a command via your system shell
by doing something like::

    out = py.process.cmdexec('ls -v')
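
A failing command raises ``py.process.cmdexec.Error`` instead of returning;
a minimal sketch of catching it (the directory name is made up)::

    import py

    try:
        out = py.process.cmdexec('ls this-directory-does-not-exist')
    except py.process.cmdexec.Error, e:
        print "command failed:", e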

However, the ``cmdexec`` approach has a few shortcomings:

- it relies on the underlying system shell
- it necessitates shell-escaping for expressing arguments
- it does not easily allow you to "fix" the binary you want to run
- it only allows executing executables from the local
  filesystem

.. _sysexec:

local paths have ``sysexec``
----------------------------

The py lib currently offers a stripped down version of what
the new `PEP-324 subprocess module`_ offers. The main functionality,
synchronously executing a system executable, has a straightforward API::

    binsvn.sysexec('ls', 'http://codespeak.net/svn')

where ``binsvn`` is a path that points to the ``svn`` commandline
binary. Note that this function does not offer any shell-escaping,
so you really have to pass in separate arguments. This idea
fits nicely into `a more general view on path objects`_.

For a first go, we are just reusing the existing `subprocess
implementation`_ but don't expose any of its API apart
from the above ``sysexec()`` method.

Note, however, that the support for the ``sysexec`` interface on
win32 is currently not thoroughly tested. If you run into problems with it, we
are interested to hear about them. If you are running a Python older than 2.4
you will have to install the `pywin32 package`_.

.. _`future book`: future.html
.. _`PEP-324 subprocess module`: http://www.python.org/peps/pep-0324.html
.. _`subprocess implementation`: http://www.lysator.liu.se/~astrand/popen5/
.. _`a more general view on path objects`: future.html#general-path
.. _`pywin32 package`: http://pywin32.sourceforge.net/

finding an executable local path
--------------------------------

Finding an executable differs considerably between platforms.
Currently, the ``PATH`` environment variable based search on
unix platforms is supported::

    py.path.local.sysfind('svn')

which returns the first path whose ``basename`` matches ``svn``.
In principle, `sysfind` deploys platform specific algorithms
to perform the search. On Windows, for example, it may look
at the registry (XXX).

To make the story complete, a second ``checker`` argument may be
passed in, which is called for each found executable. For example, if
you have multiple binaries available you may want to select the
right version::

    def mysvn(p):
        """ check that the given svn binary has version 1.1. """
        line = p.execute('--version').readlines()[0]
        if line.find('version 1.1') != -1:
            return p

    binsvn = py.path.local.sysfind('svn', checker=mysvn)

Cross-Python Version compatibility helpers
=============================================

sources:

* :source:`py/compat/`
* :source:`py/builtin/`

The py lib contains some helpers that make it easier to write scripts
that work across Python versions.

:api:`py.compat`
----------------

:api:`py.compat` provides fixed versions (currently taken from Python 2.4.4)
of several newer standard library modules, so that they can be used on older
Python versions. Currently these are:

* doctest
* optparse
* subprocess
* textwrap

They are used by replacing the normal ``import ...`` with
``from py.compat import ...``.
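
For example, to use the bundled ``subprocess`` module even on an interpreter
that does not ship it (a minimal sketch)::

    from py.compat import subprocess

    p = subprocess.Popen(["ls"], stdout=subprocess.PIPE)
    print p.communicate()[0]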

:api:`py.builtin`
-----------------

:api:`py.builtin` provides various builtins that were added in later Python
versions. If the Python version in use does not provide these builtins, they
are pure-Python reimplementations. These currently are:

* enumerate
* reversed
* sorted
* BaseException
* set and frozenset (using either the builtin, if available, or the sets
  module)

:api:`py.builtin.BaseException` is just ``Exception`` before Python 2.5.
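
A sketch of using these helpers, which works whether or not the running
interpreter has the real builtins::

    import py

    for index, value in py.builtin.enumerate("abc"):
        print index, value

    assert py.builtin.sorted([3, 1, 2]) == [1, 2, 3]
    assert list(py.builtin.reversed([1, 2, 3])) == [3, 2, 1]
    s = py.builtin.set([1, 2, 2, 3])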

py/doc/path.txt
@ -1,254 +0,0 @@

=======
py.path
=======

.. contents::
.. sectnum::

The 'py' lib provides a uniform high-level api to deal with filesystems
and filesystem-like interfaces: :api:`py.path`. It aims to offer a central
object for fs-like object trees (reading from and writing to files, adding
files/directories, examining the types and structure, etc.), and out-of-the-box
provides a number of implementations of this API.

Path implementations provided by :api:`py.path`
===============================================

:api:`py.path.local`
--------------------

The first and most obvious of the implementations is a wrapper around a local
filesystem. It's just a bit nicer to use than the regular Python APIs, and
of course all the functionality is bundled together rather than spread over a
number of modules.

Example usage; here we use the :api:`py.test.ensuretemp()` function to create
a :api:`py.path.local` object for us (which wraps a directory)::

    >>> import py
    >>> temppath = py.test.ensuretemp('py.path_documentation')
    >>> foopath = temppath.join('foo') # get child 'foo' (lazily)
    >>> foopath.check() # check if child 'foo' exists
    False
    >>> foopath.write('bar') # write some data to it
    >>> foopath.check()
    True
    >>> foopath.read()
    'bar'
    >>> foofile = foopath.open() # return a 'real' file object
    >>> foofile.read(1)
    'b'

:api:`py.path.svnurl` and :api:`py.path.svnwc`
----------------------------------------------

Two other :api:`py.path` implementations that the py lib provides wrap the
popular `Subversion`_ revision control system: the first (called 'svnurl')
by interfacing with a remote server, the second by wrapping a local checkout.
Both allow you to access relatively advanced features such as metadata and
versioning, and both in a more user-friendly manner than other existing
solutions.

Some example usage of :api:`py.path.svnurl`::

    .. >>> import py
    .. >>> if not getattr(py.test.config.option, 'checkremote', 0): py.test.skip("use --checkremote to enable svn remote doctests")
    >>> url = py.path.svnurl('http://codespeak.net/svn/py')
    >>> info = url.info()
    >>> info.kind
    'dir'
    >>> firstentry = url.log()[-1]
    >>> import time
    >>> time.strftime('%Y-%m-%d', time.gmtime(firstentry.date))
    '2004-10-02'

Example usage of :api:`py.path.svnwc`::

    >>> temp = py.test.ensuretemp('py.path_documentation')
    >>> wc = py.path.svnwc(temp.join('svnwc'))
    >>> wc.checkout('http://codespeak.net/svn/py/dist/py/path/local')
    >>> wc.join('local.py').check()
    True

.. _`Subversion`: http://subversion.tigris.org/

Common vs. specific API
=======================

All Path objects support a common set of operations, suitable
for many use cases and allowing you to transparently switch the
path object within an application (e.g. from "local" to "svnwc").
The common set includes functions such as `path.read()` to read all data
from a file, `path.write()` to write data, `path.listdir()` to get a list
of directory entries, `path.check()` to check if a node exists
and is of a particular type, `path.join()` to get
to a (grand)child, `path.visit()` to recursively walk through a node's
children, etc. Only things that are not common on 'normal' filesystems (yet),
such as handling metadata (e.g. the Subversion "properties"), require
using specific APIs.

Examples
---------------------------------

A quick 'cookbook' of small examples that will be useful 'in real life',
which also presents parts of the 'common' API, and shows some non-common
methods:

Searching `.txt` files
+++++++++++++++++++++++++++++++++++++

Search for a particular string inside all files with a .txt extension in a
specific directory.

::

    >>> dirpath = temppath.ensure('testdir', dir=True)
    >>> dirpath.join('textfile1.txt').write('foo bar baz')
    >>> dirpath.join('textfile2.txt').write('frob bar spam eggs')
    >>> subdir = dirpath.ensure('subdir', dir=True)
    >>> subdir.join('textfile1.txt').write('foo baz')
    >>> subdir.join('textfile2.txt').write('spam eggs spam foo bar spam')
    >>> results = []
    >>> for fpath in dirpath.visit('*.txt'):
    ...     if 'bar' in fpath.read():
    ...         results.append(fpath.basename)
    >>> results
    ['textfile1.txt', 'textfile2.txt', 'textfile2.txt']

Working with Paths
++++++++++++++++++++

This example shows the :api:`py.path` features for dealing with
filesystem paths. Note that the filesystem is never touched;
all operations are performed on a string level (so the paths
don't have to exist, either)::

    >>> p1 = py.path.local('/foo/bar')
    >>> p2 = p1.join('baz/qux')
    >>> p2 == py.path.local('/foo/bar/baz/qux')
    True
    >>> sep = py.path.local.sep
    >>> p2.relto(p1).replace(sep, '/') # os-specific path sep in the string
    'baz/qux'
    >>> p3 = p1 / 'baz/qux' # the / operator allows joining, too
    >>> p2 == p3
    True
    >>> p4 = p1 + ".py"
    >>> p4.basename == "bar.py"
    True
    >>> p4.ext == ".py"
    True
    >>> p4.purebasename == "bar"
    True

This should be possible on every implementation of :api:`py.path`, so
regardless of whether the implementation wraps a UNIX filesystem, a Windows
one, or a database or object tree, these functions should be available (each
with its own notion of path separators, conversions, etc.).

Checking path types
+++++++++++++++++++++

Now we will show a bit about the powerful 'check()' method on paths, which
allows you to check whether a file exists, what type it is, etc.::

    >>> file1 = temppath.join('file1')
    >>> file1.check() # does it exist?
    False
    >>> file1 = file1.ensure(file=True) # 'touch' the file
    >>> file1.check()
    True
    >>> file1.check(dir=True) # is it a dir?
    False
    >>> file1.check(file=True) # or a file?
    True
    >>> file1.check(ext='.txt') # check the extension
    False
    >>> textfile = temppath.ensure('text.txt', file=True)
    >>> textfile.check(ext='.txt')
    True
    >>> file1.check(basename='file1') # we can use all the path's properties here
    True

Setting svn-properties
+++++++++++++++++++++++++++++++++++++++

As an example of 'uncommon' methods, we'll show how to read and write
properties in an :api:`py.path.svnwc` instance::

    >>> wc.propget('foo')
    ''
    >>> wc.propset('foo', 'bar')
    >>> wc.propget('foo')
    'bar'
    >>> len(wc.status().prop_modified) # our own props
    1
    >>> msg = wc.revert() # roll back our changes
    >>> len(wc.status().prop_modified)
    0

SVN authentication
++++++++++++++++++++++

Some uncommon functionality can also be provided as extensions, such as SVN
authentication::

    >>> auth = py.path.SvnAuth('anonymous', 'user', cache_auth=False,
    ...     interactive=False)
    >>> wc.auth = auth
    >>> wc.update() # this should work
    >>> path = wc.ensure('thisshouldnotexist.txt')
    >>> try:
    ...     path.commit('testing')
    ... except py.process.cmdexec.Error, e:
    ...     pass
    >>> 'authorization failed' in str(e)
    True

Known problems / limitations
===================================

* The SVN path objects require the "svn" command line;
  there is currently no support for python bindings.
  Parsing the svn output can lead to problems, particularly
  if you have a non-English "locale" setting.

* While the path objects basically work on windows,
  no attention has yet been paid to making unicode paths
  work or to dealing with the famous "8.3" filename issues.

Future plans
============

The Subversion path implementations are based
on the `svn` command line, not on the bindings.
It makes sense now to use the bindings directly.

Moreover, it would be good, also considering
`py.execnet`_ distribution of programs, to
be able to manipulate Windows paths on Linux
and vice versa. So we'd like to consider
refactoring the path implementations
to provide this choice (and to get rid
of platform dependencies as much as possible).

There is a small experimental approach
(:source:`py/path/gateway/`) aiming at a
convenient Remote Path implementation,
with some notes about future work
in the accompanying :source:`py/path/gateway/TODO.txt`.

There are various hacks out there for
memory filesystems and even for path objects
that are directly mountable under Linux (via `fuse`).
However, the path object implementations
do not internally have a clean abstraction
of going to the filesystem, so with some
refactoring it should become easier to
have very custom path objects that still offer
the fairly complete interface without requiring
knowledge of all the details of the full path
implementation.

.. _`py.execnet`: execnet.html

@ -1,31 +0,0 @@

py lib 0.9.0: py.test, distributed execution, greenlets and more
======================================================================

Welcome to the 0.9.0 py lib release - a library aiming to
support agile and test-driven python development on various levels.

Main API/Tool Features:

* py.test: cross-project testing tool with many advanced features
* py.execnet: ad-hoc code distribution to SSH, Socket and local sub processes
* py.magic.greenlet: micro-threads on standard CPython ("stackless-light")
* py.path: path abstractions over local and subversion files
* rich documentation of py's exported API
* tested against Linux, OSX and partly against Win32, python 2.3-2.5

All these features and their API have extensive documentation,
generated with the new "apigen", which we intend to make accessible
for other python projects as well.

Download/Install:   http://codespeak.net/py/0.9.0/download.html
Documentation/API:  http://codespeak.net/py/0.9.0/index.html

Work on the py lib has been partially funded by the
European Union IST programme and by http://merlinux.de
within the PyPy project.

best, have fun and let us know what you think!

holger krekel, Maciej Fijalkowski,
Guido Wesdorp, Carl Friedrich Bolz

@ -1,16 +0,0 @@

changes from 0.9.1 to 1.0

py.test:

- collector.run() deprecated, implement/use listdir() instead
- item.run() deprecated, item.execute() signature modified
  # item.setup() and item.execute() are the ones to override
- py.test --pdb works without implied "-s"
- uses less RAM with long test runs??? (due to keeping less
  references around)

py.builtin:

py/doc/style.css
File diff suppressed because it is too large (1078 lines)

@ -1,64 +0,0 @@

.. include:: <s5defs.txt>

=================================================
py.execnet - simple ad-hoc networking
=================================================

:Authors: Holger Krekel, merlinux GmbH
:Date: 13th March 2006

remote method invocation is cumbersome
========================================

- the CORBA/RMI/SOAP model is cumbersome
- "infection" with object references throughout your program
- need to define interfaces, generate stubs/skeletons
- need to start server processes ahead of time
- complicates programming

what you want of ad-hoc networks
====================================

- ad hoc **local protocols**
- avoid defining and maintaining global interfaces
- deploy protocols purely from the client side
- zero installation required on the server side

py.execnet model of ad-hoc networks
====================================

- *Gateways* can be set up via e.g. SSH logins or via Popen
- *Gateway.remote_exec* allows execution of arbitrary code
- means of communication between the two sides: *Channels*
  (with send & receive methods)
- example requirements: ssh login + python installed

py.execnet.SshGateway example
====================================

interactive::

    gw = py.execnet.SshGateway('codespeak.net')

    channel = gw.remote_exec("""
        for filename in channel:
            try:
                content = open(filename).read()
            except (OSError, IOError):
                content = None
            channel.send(content)
    """)
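
A sketch of driving that channel from the local side (it assumes the
``gw`` and ``channel`` objects from above; the file name is made up)::

    channel.send('/etc/hostname')   # ask the remote loop for a file
    print channel.receive()         # its content (or None) comes back
    channel.close()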

next steps / references
====================================

- ad-hoc p2p networks
- chaining channels / passing channels around
- ensure it also works nicely on win32
- btw, py.execnet is part of the py lib

  http://codespeak.net/py/

.. |bullet| unicode:: U+02022
.. footer:: Holger Krekel (merlinux) |bullet| 13th March 2006

@ -1,11 +0,0 @@

#!/usr/bin/python

import py

for x in py.path.local():
    if x.ext == '.txt':
        cmd = ("python /home/hpk/projects/docutils/tools/rst2s5.py "
               "%s %s" % (x, x.new(ext='.html')))
        print "execing", cmd
        py.std.os.system(cmd)

@ -1,54 +0,0 @@

import py
py.magic.autopath()
import py
pydir = py.path.local(py.__file__).dirpath()
distdir = pydir.dirpath()
dist_url = 'http://codespeak.net/svn/py/dist/'
#issue_url = 'http://codespeak.net/issue/py-dev/'

docdir = pydir.join('documentation')
reffile = docdir / 'talk' / '_ref.txt'

linkrex = py.std.re.compile('`(\S+)`_')

name2target = {}
def addlink(linkname, linktarget):
    assert linkname and linkname != '/'
    if linktarget in name2target:
        if linkname in name2target[linktarget]:
            return
    name2target.setdefault(linktarget, []).append(linkname)

for textfile in docdir.visit(lambda x: x.ext == '.txt',
                             lambda x: x.check(dotfile=0)):
    for linkname in linkrex.findall(textfile.read()):
        if '/' in linkname:
            for startloc in ('', 'py'):
                cand = distdir.join(startloc, linkname)
                if cand.check():
                    rel = cand.relto(distdir)
                    # we are in py/doc/x.txt
                    count = rel.count("/") + 1
                    target = '../' * count + rel
                    addlink(linkname, target)
                    break
            else:
                print "WARNING %s: link %r may be bogus" % (textfile, linkname)
        elif linkname.startswith('issue'):
            addlink(linkname, issue_url + linkname)

items = name2target.items()
items.sort()

lines = []
for linktarget, linknamelist in items:
    linknamelist.sort()
    for linkname in linknamelist[:-1]:
        lines.append(".. _`%s`:" % linkname)
    lines.append(".. _`%s`: %s" % (linknamelist[-1], linktarget))

reffile.write("\n".join(lines))
print "wrote %d references to %r" % (len(lines), reffile)
#print "last ten lines"
#for x in lines[-10:]: print x

@ -1,16 +0,0 @@

* Persistent storage layer for storing py.test output, sharing such stuff
  and presenting it (presenting mostly means combining tons of hacks here
  and there). We need to store test results, revisions and additional
  metadata like apigen output.

* Having some kind of pdbplus, which will combine rlcompleter, apigen
  information and various other fixes.

* Improve distributed testing by:

  - sharing even more code with normal testing
  - using greenexecnet wherever possible (falling back to normal
    execnet)
  - making test redistribution work somehow (in a clean way!)
  - C-c support

@ -1,200 +0,0 @@

.. include:: <s5defs.txt>

=================================================
py.test - flexible and powerful automated testing
=================================================

:Authors: Holger Krekel, merlinux GmbH
:Date: 13th March 2006

Intro: Benefits of Automated Testing
======================================

- prove that code changes actually fix a certain issue
- minimizing Time to Feedback for developers
- reducing overall Time to Market
- document usage of plugins
- tests as a means of communication
- easing entry for newcomers

py.test Purposes & Goals
===============================

- automated cross-project open source testing tool
- flexible per-project customization
- reusing test methods/reporting across projects
- various iterative test collection methods
- support for distributed testing
- the py lib is a development support library

What is Python?
===============================

- easy-to-learn flexible OO high level language
- glue-language for connecting C++, Java and scripting
- used e.g. by Google for deployment/testing/implementation
- used by BIND (mainstream DNS internet server) for testing
- Jython provides Python for the JVM
- IronPython provides Python for .NET
- CPython is the mainstream C-based platform
- PyPy - a Python-in-Python implementation

Main drivers of py.test development
======================================

- PyPy project testing needs (part of an EU project)
- needs of individual (freely contributing) projects
- at least 20 projects using py.test and the py lib
- higher level innovation plans by merlinux & experts
- commercial needs
- almost three years of (non-fulltime) development

Authors & copyrights
==============================================

- initial: Holger Krekel, Armin Rigo
- major contributors: Jan Balster, Brian Dorsey, Grig
  Gheorghiu
- many others with small patches
- MIT license

who is merlinux?
===============================

- small company founded in 2004 by Holger Krekel and Laura
  Creighton

- purpose: research and development / open source technologies

- 7 employees (no win32 experts!), 6 freelancers

- three larger projects:

  - PyPy - next generation Python implementation
  - mailwitness - digital invoicing/signatures
  - provider of development servers

- technologies: virtualization, deployment and testing

Main Features of py.test
===============================

- simplest possible ``assert`` approach
- clean setup/teardown semantics
- stdout/stderr capturing per test
- per-project/directory cmdline options (many predefined)
- test selection support
- customizable auto-collection of tests
- `more features`_ ...

.. _`more features`: ../test.html#features

Main User-Level entry points
===============================

- ``py.test.raises(Exc, func, *args, **kwargs)``
- ``py.test.fail(msg)`` -> fail a test
- ``py.test.skip(msg)`` -> skip a test
- ``py.test.ensuretemp(prefix)`` -> per-test session temporary directory
- ``conftest.py`` can modify almost arbitrary testing aspects
  (but it's a bit involved); see the sketch below
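
A minimal sketch of these entry points in use (the test bodies are made up
for illustration)::

    import py

    def test_zero_division():
        py.test.raises(ZeroDivisionError, lambda: 1 / 0)

    def test_needs_network():
        py.test.skip("no network available in this example")

    def test_tempdir():
        tmp = py.test.ensuretemp("entrypoints-demo")
        tmp.join("hello.txt").write("hello")
        assert tmp.join("hello.txt").check(file=1)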

some py lib components
===============================

- ``py.execnet`` provides ad-hoc means to distribute programs
- ``py.path`` objects abstract local and svn files
- ``py.log`` offers (preliminary) logging support
- ``py.xml.html`` for programmatic html generation
- lazy import ... ``import py`` is enough

py.test Implementation
===============================

- `basic picture`_
- Session objects (Terminal and Tcl-GUI)
- reporting hooks are on session objects
- the Collector hierarchy yields tests iteratively
- uses the py lib extensively (py.path/py.execnet)
- "conftest.py" per-directory configuration mechanism

.. _`basic picture`: ../test.html

Session objects
===============================

- responsible for driving the testing process
- make use of iterative Collector hierarchies
- responsible for reporting (XXX)
- can be split into a Frontend and a BackendSession
  for distributed testing (the GUI frontend uses it)

Collector objects
===============================

- Collectors / Test Items form a tree
- the tree is built iteratively (driven from Sessions)
- the collector tree can be viewed with ``--collectonly``
- ``run()`` returns a list of (test) names or runs the test
- ``join(name)`` returns a sub collector/item
- various helper methods to e.g. determine file/location

Extensions: ReST documentation checking
=========================================

- `py/documentation/conftest.py`_ provides test
  items for checking documentation and link integrity

- uses its own collector/testitem hierarchy

- invokes ``docutils`` processing, reports errors

.. _`py/documentation/conftest.py`: ../conftest.py

Extensions: Distributed Testing
==============================================

- using py.execnet to dispatch on different python versions
- using py.execnet to dispatch tests on other hosts/platforms
- currently: Popen, SSH and Socket gateways
- missing: support for pushing tests to "the other side"
- missing: deployment on multiple machines
- but it's already possible ...

Example using pywinauto from linux
==============================================

- start socketserver.py on windows
- connect a SocketGateway e.g. from linux
- send tests, execute and report tracebacks through the
  gateway
- remotely use pywinauto to automate testing of GUI work flows
- interactive example ...

Status of py lib
===============================

- mostly developed on linux/OSX
- basically all tests pass on win32 as well
- but missing some win32 convenience
- some support for generation of html/ReST/PDF reports
- py.execnet works rather reliably (pending deeper win32 testing)
- flexible configuration, but sometimes non-obvious/underdocumented
  (requires understanding of internals)

Next Steps: py lib / py.test
===============================

- refined py.execnet distribution of programs
- more configurable and customizable reporting
- implement support for test distribution
- explore refined win32 support
- automated collection of unittest.py based tests
- make spawning processes/gateways more robust
- doctest support
- unify logging approaches (py.log.*)
- ...


.. |bullet| unicode:: U+02022
.. footer:: Holger Krekel (merlinux) |bullet| |bullet| 13th March 2006
Binary file not shown.
Before Width: | Height: | Size: 49 B |
|
@ -1,25 +0,0 @@
|
||||||
/* This file has been placed in the public domain. */
|
|
||||||
/* The following styles size, place, and layer the slide components.
|
|
||||||
Edit these if you want to change the overall slide layout.
|
|
||||||
The commented lines can be uncommented (and modified, if necessary)
|
|
||||||
to help you with the rearrangement process. */
|
|
||||||
|
|
||||||
/* target = 1024x768 */
|
|
||||||
|
|
||||||
div#header, div#footer, .slide {width: 100%; top: 0; left: 0;}
|
|
||||||
div#header {position: fixed; top: 0; height: 3em; z-index: 1;}
|
|
||||||
div#footer {top: auto; bottom: 0; height: 2.5em; z-index: 5;}
|
|
||||||
.slide {top: 0; width: 92%; padding: 2.5em 4% 4%; z-index: 2;}
|
|
||||||
div#controls {left: 50%; bottom: 0; width: 50%; z-index: 100;}
|
|
||||||
div#controls form {position: absolute; bottom: 0; right: 0; width: 100%;
|
|
||||||
margin: 0;}
|
|
||||||
#currentSlide {position: absolute; width: 10%; left: 45%; bottom: 1em;
|
|
||||||
z-index: 10;}
|
|
||||||
html>body #currentSlide {position: fixed;}
|
|
||||||
|
|
||||||
/*
|
|
||||||
div#header {background: #FCC;}
|
|
||||||
div#footer {background: #CCF;}
|
|
||||||
div#controls {background: #BBD;}
|
|
||||||
div#currentSlide {background: #FFC;}
|
|
||||||
*/
|
|
|
@ -1,42 +0,0 @@
|
||||||
<public:component>
|
|
||||||
<public:attach event="onpropertychange" onevent="doFix()" />
|
|
||||||
|
|
||||||
<script>
|
|
||||||
|
|
||||||
// IE5.5+ PNG Alpha Fix v1.0 by Angus Turnbull http://www.twinhelix.com
|
|
||||||
// Free usage permitted as long as this notice remains intact.
|
|
||||||
|
|
||||||
// This must be a path to a blank image. That's all the configuration you need here.
|
|
||||||
var blankImg = 'ui/default/blank.gif';
|
|
||||||
|
|
||||||
var f = 'DXImageTransform.Microsoft.AlphaImageLoader';
|
|
||||||
|
|
||||||
function filt(s, m) {
|
|
||||||
if (filters[f]) {
|
|
||||||
filters[f].enabled = s ? true : false;
|
|
||||||
if (s) with (filters[f]) { src = s; sizingMethod = m }
|
|
||||||
} else if (s) style.filter = 'progid:'+f+'(src="'+s+'",sizingMethod="'+m+'")';
|
|
||||||
}
|
|
||||||
|
|
||||||
function doFix() {
|
|
||||||
if ((parseFloat(navigator.userAgent.match(/MSIE (\S+)/)[1]) < 5.5) ||
|
|
||||||
(event && !/(background|src)/.test(event.propertyName))) return;
|
|
||||||
|
|
||||||
if (tagName == 'IMG') {
|
|
||||||
if ((/\.png$/i).test(src)) {
|
|
||||||
filt(src, 'image'); // was 'scale'
|
|
||||||
src = blankImg;
|
|
||||||
} else if (src.indexOf(blankImg) < 0) filt();
|
|
||||||
} else if (style.backgroundImage) {
|
|
||||||
if (style.backgroundImage.match(/^url[("']+(.*\.png)[)"']+$/i)) {
|
|
||||||
var s = RegExp.$1;
|
|
||||||
style.backgroundImage = '';
|
|
||||||
filt(s, 'crop');
|
|
||||||
} else filt();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
doFix();
|
|
||||||
|
|
||||||
</script>
|
|
||||||
</public:component>
|
|
|
@ -1,8 +0,0 @@
|
||||||
/* This file has been placed in the public domain. */
|
|
||||||
/* DO NOT CHANGE THESE unless you really want to break Opera Show */
|
|
||||||
.slide {
|
|
||||||
visibility: visible !important;
|
|
||||||
position: static !important;
|
|
||||||
page-break-before: always;
|
|
||||||
}
|
|
||||||
#slide0 {page-break-before: avoid;}
|
|
|
@ -1,16 +0,0 @@
|
||||||
/* This file has been placed in the public domain. */
|
|
||||||
/* Don't change this unless you want the layout stuff to show up in the
|
|
||||||
outline view! */
|
|
||||||
|
|
||||||
.layout div, #footer *, #controlForm * {display: none;}
|
|
||||||
#footer, #controls, #controlForm, #navLinks, #toggle {
|
|
||||||
display: block; visibility: visible; margin: 0; padding: 0;}
|
|
||||||
#toggle {float: right; padding: 0.5em;}
|
|
||||||
html>body #toggle {position: fixed; top: 0; right: 0;}
|
|
||||||
|
|
||||||
/* making the outline look pretty-ish */
|
|
||||||
|
|
||||||
#slide0 h1, #slide0 h2, #slide0 h3, #slide0 h4 {border: none; margin: 0;}
|
|
||||||
#toggle {border: 1px solid; border-width: 0 0 1px 1px; background: #FFF;}
|
|
||||||
|
|
||||||
.outline {display: inline ! important;}
|
|
|
@ -1,121 +0,0 @@
|
||||||
/* This file has been placed in the public domain. */
|
|
||||||
/* Following are the presentation styles -- edit away! */
|
|
||||||
|
|
||||||
html, body {margin: 0; padding: 0;}
|
|
||||||
body {background: #fff color: #222; font-size: 2em;}
|
|
||||||
/* Replace the background style above with the style below (and again for
|
|
||||||
div#header) for a graphic: */
|
|
||||||
/* background: white url(bodybg.gif) -16px 0 no-repeat; */
|
|
||||||
:link, :visited {text-decoration: none; color: #00C;}
|
|
||||||
#controls :active {color: #88A !important;}
|
|
||||||
#controls :focus {outline: 1px dotted #227;}
|
|
||||||
h1, h2, h3, h4 {font-size: 100%; margin: 0; padding: 0; font-weight: inherit;}
|
|
||||||
|
|
||||||
blockquote {padding: 0 2em 0.5em; margin: 0 1.5em 0.5em;}
|
|
||||||
blockquote p {margin: 0;}
|
|
||||||
|
|
||||||
kbd {font-weight: bold; font-size: 1em;}
|
|
||||||
sup {font-size: smaller; line-height: 1px;}
|
|
||||||
|
|
||||||
.slide pre {padding: 0; margin-left: 0; margin-right: 0; font-size: 90%;}
|
|
||||||
.slide ul ul li {list-style: square; }
|
|
||||||
.slide img.leader {display: block; margin: 0 auto;}
|
|
||||||
.slide tt {font-size: 90%;}
|
|
||||||
|
|
||||||
div#header, div#footer {background: #005; color: #AAB; font-family: sans-serif;}
|
|
||||||
/* background: #005 url(bodybg.gif) -16px 0 no-repeat; */
|
|
||||||
div#footer {font-size: 0.5em; font-weight: bold; padding: 1em 0;}
|
|
||||||
#footer h1 {display: block; padding: 0 1em;}
|
|
||||||
#footer h2 {display: block; padding: 0.8em 1em 0;}
|
|
||||||
|
|
||||||
.slide {font-size: 1.5em;}
|
|
||||||
.slide li {font-size: 1.0em; padding-bottom: 0.2em;}
|
|
||||||
.slide h1 {position: absolute; top: 0.45em; z-index: 1;
|
|
||||||
margin: 0; padding-left: 0.7em; white-space: nowrap;
|
|
||||||
font: bold 110% sans-serif; color: #DDE; background: #005;}
|
|
||||||
.slide h2 {font: bold 120%/1em sans-serif; padding-top: 0.5em;}
|
|
||||||
.slide h3 {font: bold 100% sans-serif; padding-top: 0.5em;}
|
|
||||||
h1 abbr {font-variant: small-caps;}
|
|
||||||
|
|
||||||
div#controls {position: absolute; left: 50%; bottom: 0;
|
|
||||||
width: 50%; text-align: right; font: bold 0.9em sans-serif;}
|
|
||||||
html>body div#controls {position: fixed; padding: 0 0 1em 0; top: auto;}
|
|
||||||
div#controls form {position: absolute; bottom: 0; right: 0; width: 100%;
|
|
||||||
margin: 0; padding: 0;}
|
|
||||||
#controls #navLinks a {padding: 0; margin: 0 0.5em;
|
|
||||||
background: #005; border: none; color: #779; cursor: pointer;}
|
|
||||||
#controls #navList {height: 1em;}
|
|
||||||
#controls #navList #jumplist {position: absolute; bottom: 0; right: 0;
|
|
||||||
background: #DDD; color: #227;}
|
|
||||||
|
|
||||||
#currentSlide {text-align: center; font-size: 0.5em; color: #449;
|
|
||||||
font-family: sans-serif; font-weight: bold;}
|
|
||||||
|
|
||||||
#slide0 {padding-top: 1.5em}
|
|
||||||
#slide0 h1 {position: static; margin: 1em 0 0; padding: 0; color: #000;
|
|
||||||
font: bold 2em sans-serif; white-space: normal; background: transparent;}
|
|
||||||
#slide0 h2 {font: bold italic 1em sans-serif; margin: 0.25em;}
|
|
||||||
#slide0 h3 {margin-top: 1.5em; font-size: 1.5em;}
|
|
||||||
#slide0 h4 {margin-top: 0; font-size: 1em;}
|
|
||||||
|
|
||||||
ul.urls {list-style: none; display: inline; margin: 0;}
|
|
||||||
.urls li {display: inline; margin: 0;}
|
|
||||||
.external {border-bottom: 1px dotted gray;}
|
|
||||||
html>body .external {border-bottom: none;}
|
|
||||||
.external:after {content: " \274F"; font-size: smaller; color: #77B;}
|
|
||||||
|
|
||||||
.incremental, .incremental *, .incremental *:after {visibility: visible;
|
|
||||||
color: white; border: 0;}
|
|
||||||
img.incremental {visibility: hidden;}
|
|
||||||
.slide .current {color: green;}
|
|
||||||
|
|
||||||
.slide-display {display: inline ! important;}
|
|
||||||
|
|
||||||
.huge {font-family: sans-serif; font-weight: bold; font-size: 150%;}
|
|
||||||
.big {font-family: sans-serif; font-weight: bold; font-size: 120%;}
|
|
||||||
.small {font-size: 75%;}
|
|
||||||
.tiny {font-size: 50%;}
|
|
||||||
.huge tt, .big tt, .small tt, .tiny tt {font-size: 115%;}
|
|
||||||
.huge pre, .big pre, .small pre, .tiny pre {font-size: 115%;}
|
|
||||||
|
|
||||||
.maroon {color: maroon;}
|
|
||||||
.red {color: red;}
|
|
||||||
.magenta {color: magenta;}
|
|
||||||
.fuchsia {color: fuchsia;}
|
|
||||||
.pink {color: #FAA;}
|
|
||||||
.orange {color: orange;}
|
|
||||||
.yellow {color: yellow;}
|
|
||||||
.lime {color: lime;}
|
|
||||||
.green {color: green;}
|
|
||||||
.olive {color: olive;}
|
|
||||||
.teal {color: teal;}
|
|
||||||
.cyan {color: cyan;}
|
|
||||||
.aqua {color: aqua;}
|
|
||||||
.blue {color: blue;}
|
|
||||||
.navy {color: navy;}
|
|
||||||
.purple {color: purple;}
|
|
||||||
.black {color: black;}
|
|
||||||
.gray {color: gray;}
|
|
||||||
.silver {color: silver;}
|
|
||||||
.white {color: white;}
|
|
||||||
|
|
||||||
.left {text-align: left ! important;}
|
|
||||||
.center {text-align: center ! important;}
|
|
||||||
.right {text-align: right ! important;}
|
|
||||||
|
|
||||||
.animation {position: relative; margin: 1em 0; padding: 0;}
|
|
||||||
.animation img {position: absolute;}
|
|
||||||
|
|
||||||
/* Docutils-specific overrides */
|
|
||||||
|
|
||||||
.slide table.docinfo {margin: 1em 0 0.5em 2em;}
|
|
||||||
|
|
||||||
pre.literal-block, pre.doctest-block {background-color: white;}
|
|
||||||
|
|
||||||
tt.docutils {background-color: white;}
|
|
||||||
|
|
||||||
/* diagnostics */
|
|
||||||
/*
|
|
||||||
li:after {content: " [" attr(class) "]"; color: #F88;}
|
|
||||||
div:before {content: "[" attr(class) "]"; color: #F88;}
|
|
||||||
*/
|
|
|
@ -1,24 +0,0 @@
|
||||||
/* This file has been placed in the public domain. */
|
|
||||||
/* The following rule is necessary to have all slides appear in print!
|
|
||||||
DO NOT REMOVE IT! */
|
|
||||||
.slide, ul {page-break-inside: avoid; visibility: visible !important;}
|
|
||||||
h1 {page-break-after: avoid;}
|
|
||||||
|
|
||||||
body {font-size: 12pt; background: white;}
|
|
||||||
* {color: black;}
|
|
||||||
|
|
||||||
#slide0 h1 {font-size: 200%; border: none; margin: 0.5em 0 0.25em;}
|
|
||||||
#slide0 h3 {margin: 0; padding: 0;}
|
|
||||||
#slide0 h4 {margin: 0 0 0.5em; padding: 0;}
|
|
||||||
#slide0 {margin-bottom: 3em;}
|
|
||||||
|
|
||||||
#header {display: none;}
|
|
||||||
#footer h1 {margin: 0; border-bottom: 1px solid; color: gray;
|
|
||||||
font-style: italic;}
|
|
||||||
#footer h2, #controls {display: none;}
|
|
||||||
|
|
||||||
.print {display: inline ! important;}
|
|
||||||
|
|
||||||
/* The following rule keeps the layout stuff out of print.
|
|
||||||
Remove at your own risk! */
|
|
||||||
.layout, .layout * {display: none !important;}
|
|
Binary file not shown.
Before Width: | Height: | Size: 2.1 KiB |
|
@ -1,11 +0,0 @@
|
||||||
/* This file has been placed in the public domain. */
|
|
||||||
/* Do not edit or override these styles!
|
|
||||||
The system will likely break if you do. */
|
|
||||||
|
|
||||||
div#header, div#footer, div#controls, .slide {position: absolute;}
|
|
||||||
html>body div#header, html>body div#footer,
|
|
||||||
html>body div#controls, html>body .slide {position: fixed;}
|
|
||||||
.handout {display: none;}
|
|
||||||
.layout {display: block;}
|
|
||||||
.slide, .hideme, .incremental {visibility: hidden;}
|
|
||||||
#slide0 {visibility: visible;}
|
|
|
@ -1,13 +0,0 @@
|
||||||
/* This file has been placed in the public domain. */
|
|
||||||
|
|
||||||
/* required to make the slide show run at all */
|
|
||||||
@import url(s5-core.css);
|
|
||||||
|
|
||||||
/* sets basic placement and size of slide components */
|
|
||||||
@import url(framing.css);
|
|
||||||
|
|
||||||
/* styles that make the slides look good */
|
|
||||||
@import url(pretty.css);
|
|
||||||
|
|
||||||
/* pypy override */
|
|
||||||
@import url(../py.css);
|
|
|
@ -1,558 +0,0 @@
|
||||||
// S5 v1.1 slides.js -- released into the Public Domain
|
|
||||||
// Modified for Docutils (http://docutils.sf.net) by David Goodger
|
|
||||||
//
|
|
||||||
// Please see http://www.meyerweb.com/eric/tools/s5/credits.html for
|
|
||||||
// information about all the wonderful and talented contributors to this code!
|
|
||||||
|
|
||||||
var undef;
|
|
||||||
var slideCSS = '';
|
|
||||||
var snum = 0;
|
|
||||||
var smax = 1;
|
|
||||||
var slideIDs = new Array();
|
|
||||||
var incpos = 0;
|
|
||||||
var number = undef;
|
|
||||||
var s5mode = true;
|
|
||||||
var defaultView = 'slideshow';
|
|
||||||
var controlVis = 'visible';
|
|
||||||
|
|
||||||
var isIE = navigator.appName == 'Microsoft Internet Explorer' ? 1 : 0;
|
|
||||||
var isOp = navigator.userAgent.indexOf('Opera') > -1 ? 1 : 0;
|
|
||||||
var isGe = navigator.userAgent.indexOf('Gecko') > -1 && navigator.userAgent.indexOf('Safari') < 1 ? 1 : 0;
|
|
||||||
|
|
||||||
function hasClass(object, className) {
|
|
||||||
if (!object.className) return false;
|
|
||||||
return (object.className.search('(^|\\s)' + className + '(\\s|$)') != -1);
|
|
||||||
}
|
|
||||||
|
|
||||||
function hasValue(object, value) {
|
|
||||||
if (!object) return false;
|
|
||||||
return (object.search('(^|\\s)' + value + '(\\s|$)') != -1);
|
|
||||||
}
|
|
||||||
|
|
||||||
function removeClass(object,className) {
|
|
||||||
if (!object) return;
|
|
||||||
object.className = object.className.replace(new RegExp('(^|\\s)'+className+'(\\s|$)'), RegExp.$1+RegExp.$2);
|
|
||||||
}
|
|
||||||
|
|
||||||
function addClass(object,className) {
|
|
||||||
if (!object || hasClass(object, className)) return;
|
|
||||||
if (object.className) {
|
|
||||||
object.className += ' '+className;
|
|
||||||
} else {
|
|
||||||
object.className = className;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
function GetElementsWithClassName(elementName,className) {
|
|
||||||
var allElements = document.getElementsByTagName(elementName);
|
|
||||||
var elemColl = new Array();
|
|
||||||
for (var i = 0; i< allElements.length; i++) {
|
|
||||||
if (hasClass(allElements[i], className)) {
|
|
||||||
elemColl[elemColl.length] = allElements[i];
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return elemColl;
|
|
||||||
}
|
|
||||||
|
|
||||||
function isParentOrSelf(element, id) {
|
|
||||||
if (element == null || element.nodeName=='BODY') return false;
|
|
||||||
else if (element.id == id) return true;
|
|
||||||
else return isParentOrSelf(element.parentNode, id);
|
|
||||||
}
|
|
||||||
|
|
||||||
function nodeValue(node) {
|
|
||||||
var result = "";
|
|
||||||
if (node.nodeType == 1) {
|
|
||||||
var children = node.childNodes;
|
|
||||||
for (var i = 0; i < children.length; ++i) {
|
|
||||||
result += nodeValue(children[i]);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
else if (node.nodeType == 3) {
|
|
||||||
result = node.nodeValue;
|
|
||||||
}
|
|
||||||
return(result);
|
|
||||||
}
|
|
||||||
|
|
||||||
function slideLabel() {
|
|
||||||
var slideColl = GetElementsWithClassName('*','slide');
|
|
||||||
var list = document.getElementById('jumplist');
|
|
||||||
smax = slideColl.length;
|
|
||||||
for (var n = 0; n < smax; n++) {
|
|
||||||
var obj = slideColl[n];
|
|
||||||
|
|
||||||
var did = 'slide' + n.toString();
|
|
||||||
if (obj.getAttribute('id')) {
|
|
||||||
slideIDs[n] = obj.getAttribute('id');
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
obj.setAttribute('id',did);
|
|
||||||
slideIDs[n] = did;
|
|
||||||
}
|
|
||||||
if (isOp) continue;
|
|
||||||
|
|
||||||
var otext = '';
|
|
||||||
var menu = obj.firstChild;
|
|
||||||
if (!menu) continue; // to cope with empty slides
|
|
||||||
while (menu && menu.nodeType == 3) {
|
|
||||||
menu = menu.nextSibling;
|
|
||||||
}
|
|
||||||
if (!menu) continue; // to cope with slides with only text nodes
|
|
||||||
|
|
||||||
var menunodes = menu.childNodes;
|
|
||||||
for (var o = 0; o < menunodes.length; o++) {
|
|
||||||
otext += nodeValue(menunodes[o]);
|
|
||||||
}
|
|
||||||
list.options[list.length] = new Option(n + ' : ' + otext, n);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
function currentSlide() {
|
|
||||||
var cs;
|
|
||||||
var footer_nodes;
|
|
||||||
var vis = 'visible';
|
|
||||||
if (document.getElementById) {
|
|
||||||
cs = document.getElementById('currentSlide');
|
|
||||||
footer_nodes = document.getElementById('footer').childNodes;
|
|
||||||
} else {
|
|
||||||
cs = document.currentSlide;
|
|
||||||
footer_nodes = document.footer.childNodes;
|
|
||||||
}
|
|
||||||
cs.innerHTML = '<span id="csHere">' + snum + '<\/span> ' +
|
|
||||||
'<span id="csSep">\/<\/span> ' +
|
|
||||||
'<span id="csTotal">' + (smax-1) + '<\/span>';
|
|
||||||
if (snum == 0) {
|
|
||||||
vis = 'hidden';
|
|
||||||
}
|
|
||||||
cs.style.visibility = vis;
|
|
||||||
for (var i = 0; i < footer_nodes.length; i++) {
|
|
||||||
if (footer_nodes[i].nodeType == 1) {
|
|
||||||
footer_nodes[i].style.visibility = vis;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
function go(step) {
|
|
||||||
if (document.getElementById('slideProj').disabled || step == 0) return;
|
|
||||||
var jl = document.getElementById('jumplist');
|
|
||||||
var cid = slideIDs[snum];
|
|
||||||
var ce = document.getElementById(cid);
|
|
||||||
if (incrementals[snum].length > 0) {
|
|
||||||
for (var i = 0; i < incrementals[snum].length; i++) {
|
|
||||||
removeClass(incrementals[snum][i], 'current');
|
|
||||||
removeClass(incrementals[snum][i], 'incremental');
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if (step != 'j') {
|
|
||||||
snum += step;
|
|
||||||
lmax = smax - 1;
|
|
||||||
if (snum > lmax) snum = lmax;
|
|
||||||
if (snum < 0) snum = 0;
|
|
||||||
} else
|
|
||||||
snum = parseInt(jl.value);
|
|
||||||
var nid = slideIDs[snum];
|
|
||||||
var ne = document.getElementById(nid);
|
|
||||||
if (!ne) {
|
|
||||||
ne = document.getElementById(slideIDs[0]);
|
|
||||||
snum = 0;
|
|
||||||
}
|
|
||||||
if (step < 0) {incpos = incrementals[snum].length} else {incpos = 0;}
|
|
||||||
if (incrementals[snum].length > 0 && incpos == 0) {
|
|
||||||
for (var i = 0; i < incrementals[snum].length; i++) {
|
|
||||||
if (hasClass(incrementals[snum][i], 'current'))
|
|
||||||
incpos = i + 1;
|
|
||||||
else
|
|
||||||
addClass(incrementals[snum][i], 'incremental');
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if (incrementals[snum].length > 0 && incpos > 0)
|
|
||||||
addClass(incrementals[snum][incpos - 1], 'current');
|
|
||||||
ce.style.visibility = 'hidden';
|
|
||||||
ne.style.visibility = 'visible';
|
|
||||||
jl.selectedIndex = snum;
|
|
||||||
currentSlide();
|
|
||||||
number = 0;
|
|
||||||
}
|
|
||||||
|
|
||||||
function goTo(target) {
|
|
||||||
if (target >= smax || target == snum) return;
|
|
||||||
go(target - snum);
|
|
||||||
}
|
|
||||||
|
|
||||||
function subgo(step) {
|
|
||||||
if (step > 0) {
|
|
||||||
removeClass(incrementals[snum][incpos - 1],'current');
|
|
||||||
removeClass(incrementals[snum][incpos], 'incremental');
|
|
||||||
addClass(incrementals[snum][incpos],'current');
|
|
||||||
incpos++;
|
|
||||||
} else {
|
|
||||||
incpos--;
|
|
||||||
removeClass(incrementals[snum][incpos],'current');
|
|
||||||
addClass(incrementals[snum][incpos], 'incremental');
|
|
||||||
addClass(incrementals[snum][incpos - 1],'current');
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
function toggle() {
|
|
||||||
var slideColl = GetElementsWithClassName('*','slide');
|
|
||||||
var slides = document.getElementById('slideProj');
|
|
||||||
var outline = document.getElementById('outlineStyle');
|
|
||||||
if (!slides.disabled) {
|
|
||||||
slides.disabled = true;
|
|
||||||
outline.disabled = false;
|
|
||||||
s5mode = false;
|
|
||||||
fontSize('1em');
|
|
||||||
for (var n = 0; n < smax; n++) {
|
|
||||||
var slide = slideColl[n];
|
|
||||||
slide.style.visibility = 'visible';
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
slides.disabled = false;
|
|
||||||
outline.disabled = true;
|
|
||||||
s5mode = true;
|
|
||||||
fontScale();
|
|
||||||
for (var n = 0; n < smax; n++) {
|
|
||||||
var slide = slideColl[n];
|
|
||||||
slide.style.visibility = 'hidden';
|
|
||||||
}
|
|
||||||
slideColl[snum].style.visibility = 'visible';
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
function showHide(action) {
|
|
||||||
var obj = GetElementsWithClassName('*','hideme')[0];
|
|
||||||
switch (action) {
|
|
||||||
case 's': obj.style.visibility = 'visible'; break;
|
|
||||||
case 'h': obj.style.visibility = 'hidden'; break;
|
|
||||||
case 'k':
|
|
||||||
if (obj.style.visibility != 'visible') {
|
|
||||||
obj.style.visibility = 'visible';
|
|
||||||
} else {
|
|
||||||
obj.style.visibility = 'hidden';
|
|
||||||
}
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// 'keys' code adapted from MozPoint (http://mozpoint.mozdev.org/)
|
|
||||||
function keys(key) {
|
|
||||||
if (!key) {
|
|
||||||
key = event;
|
|
||||||
key.which = key.keyCode;
|
|
||||||
}
|
|
||||||
if (key.which == 84) {
|
|
||||||
toggle();
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
if (s5mode) {
|
|
||||||
switch (key.which) {
|
|
||||||
case 10: // return
|
|
||||||
case 13: // enter
|
|
||||||
if (window.event && isParentOrSelf(window.event.srcElement, 'controls')) return;
|
|
||||||
if (key.target && isParentOrSelf(key.target, 'controls')) return;
|
|
||||||
if(number != undef) {
|
|
||||||
goTo(number);
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
case 32: // spacebar
|
|
||||||
case 34: // page down
|
|
||||||
case 39: // rightkey
|
|
||||||
case 40: // downkey
|
|
||||||
if(number != undef) {
|
|
||||||
go(number);
|
|
||||||
} else if (!incrementals[snum] || incpos >= incrementals[snum].length) {
|
|
||||||
go(1);
|
|
||||||
} else {
|
|
||||||
subgo(1);
|
|
||||||
}
|
|
||||||
break;
|
|
||||||
case 33: // page up
|
|
||||||
case 37: // leftkey
|
|
||||||
case 38: // upkey
|
|
||||||
if(number != undef) {
|
|
||||||
go(-1 * number);
|
|
||||||
} else if (!incrementals[snum] || incpos <= 0) {
|
|
||||||
go(-1);
|
|
||||||
} else {
|
|
||||||
subgo(-1);
|
|
||||||
}
|
|
||||||
break;
|
|
||||||
case 36: // home
|
|
||||||
goTo(0);
|
|
||||||
break;
|
|
||||||
case 35: // end
|
|
||||||
goTo(smax-1);
|
|
||||||
break;
|
|
||||||
case 67: // c
|
|
||||||
showHide('k');
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
if (key.which < 48 || key.which > 57) {
|
|
||||||
number = undef;
|
|
||||||
} else {
|
|
||||||
if (window.event && isParentOrSelf(window.event.srcElement, 'controls')) return;
|
|
||||||
if (key.target && isParentOrSelf(key.target, 'controls')) return;
|
|
||||||
number = (((number != undef) ? number : 0) * 10) + (key.which - 48);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
|
|
||||||
function clicker(e) {
|
|
||||||
number = undef;
|
|
||||||
var target;
|
|
||||||
if (window.event) {
|
|
||||||
target = window.event.srcElement;
|
|
||||||
e = window.event;
|
|
||||||
} else target = e.target;
|
|
||||||
if (target.href != null || hasValue(target.rel, 'external') || isParentOrSelf(target, 'controls') || isParentOrSelf(target,'embed') || isParentOrSelf(target, 'object')) return true;
|
|
||||||
if (!e.which || e.which == 1) {
|
|
||||||
if (!incrementals[snum] || incpos >= incrementals[snum].length) {
|
|
||||||
go(1);
|
|
||||||
} else {
|
|
||||||
subgo(1);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
function findSlide(hash) {
|
|
||||||
var target = document.getElementById(hash);
|
|
||||||
if (target) {
|
|
||||||
for (var i = 0; i < slideIDs.length; i++) {
|
|
||||||
if (target.id == slideIDs[i]) return i;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
|
|
||||||
function slideJump() {
|
|
||||||
if (window.location.hash == null || window.location.hash == '') {
|
|
||||||
currentSlide();
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
if (window.location.hash == null) return;
|
|
||||||
var dest = null;
|
|
||||||
dest = findSlide(window.location.hash.slice(1));
|
|
||||||
if (dest == null) {
|
|
||||||
dest = 0;
|
|
||||||
}
|
|
||||||
go(dest - snum);
|
|
||||||
}
|
|
||||||
|
|
||||||
function fixLinks() {
|
|
||||||
var thisUri = window.location.href;
|
|
||||||
thisUri = thisUri.slice(0, thisUri.length - window.location.hash.length);
|
|
||||||
var aelements = document.getElementsByTagName('A');
|
|
||||||
for (var i = 0; i < aelements.length; i++) {
|
|
||||||
var a = aelements[i].href;
|
|
||||||
var slideID = a.match('\#.+');
|
|
||||||
if ((slideID) && (slideID[0].slice(0,1) == '#')) {
|
|
||||||
var dest = findSlide(slideID[0].slice(1));
|
|
||||||
if (dest != null) {
|
|
||||||
if (aelements[i].addEventListener) {
|
|
||||||
aelements[i].addEventListener("click", new Function("e",
|
|
||||||
"if (document.getElementById('slideProj').disabled) return;" +
|
|
||||||
"go("+dest+" - snum); " +
|
|
||||||
"if (e.preventDefault) e.preventDefault();"), true);
|
|
||||||
} else if (aelements[i].attachEvent) {
|
|
||||||
aelements[i].attachEvent("onclick", new Function("",
|
|
||||||
"if (document.getElementById('slideProj').disabled) return;" +
|
|
||||||
"go("+dest+" - snum); " +
|
|
||||||
"event.returnValue = false;"));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
function externalLinks() {
|
|
||||||
if (!document.getElementsByTagName) return;
|
|
||||||
var anchors = document.getElementsByTagName('a');
|
|
||||||
for (var i=0; i<anchors.length; i++) {
|
|
||||||
var anchor = anchors[i];
|
|
||||||
if (anchor.getAttribute('href') && hasValue(anchor.rel, 'external')) {
|
|
||||||
anchor.target = '_blank';
|
|
||||||
addClass(anchor,'external');
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
function createControls() {
|
|
||||||
var controlsDiv = document.getElementById("controls");
|
|
||||||
if (!controlsDiv) return;
|
|
||||||
var hider = ' onmouseover="showHide(\'s\');" onmouseout="showHide(\'h\');"';
|
|
||||||
var hideDiv, hideList = '';
|
|
||||||
if (controlVis == 'hidden') {
|
|
||||||
hideDiv = hider;
|
|
||||||
} else {
|
|
||||||
hideList = hider;
|
|
||||||
}
|
|
||||||
controlsDiv.innerHTML = '<form action="#" id="controlForm"' + hideDiv + '>' +
|
|
||||||
'<div id="navLinks">' +
|
|
||||||
'<a accesskey="t" id="toggle" href="javascript:toggle();">Ø<\/a>' +
|
|
||||||
'<a accesskey="z" id="prev" href="javascript:go(-1);">«<\/a>' +
|
|
||||||
'<a accesskey="x" id="next" href="javascript:go(1);">»<\/a>' +
|
|
||||||
'<div id="navList"' + hideList + '><select id="jumplist" onchange="go(\'j\');"><\/select><\/div>' +
|
|
||||||
'<\/div><\/form>';
|
|
||||||
if (controlVis == 'hidden') {
|
|
||||||
var hidden = document.getElementById('navLinks');
|
|
||||||
} else {
|
|
||||||
var hidden = document.getElementById('jumplist');
|
|
||||||
}
|
|
||||||
addClass(hidden,'hideme');
|
|
||||||
}
|
|
||||||
|
|
||||||
function fontScale() { // causes layout problems in FireFox that get fixed if browser's Reload is used; same may be true of other Gecko-based browsers
|
|
||||||
if (!s5mode) return false;
|
|
||||||
var vScale = 22; // both yield 32 (after rounding) at 1024x768
|
|
||||||
var hScale = 32; // perhaps should auto-calculate based on theme's declared value?
|
|
||||||
if (window.innerHeight) {
|
|
||||||
var vSize = window.innerHeight;
|
|
||||||
var hSize = window.innerWidth;
|
|
||||||
} else if (document.documentElement.clientHeight) {
|
|
||||||
var vSize = document.documentElement.clientHeight;
|
|
||||||
var hSize = document.documentElement.clientWidth;
|
|
||||||
} else if (document.body.clientHeight) {
|
|
||||||
var vSize = document.body.clientHeight;
|
|
||||||
var hSize = document.body.clientWidth;
|
|
||||||
} else {
|
|
||||||
var vSize = 700; // assuming 1024x768, minus chrome and such
|
|
||||||
var hSize = 1024; // these do not account for kiosk mode or Opera Show
|
|
||||||
}
|
|
||||||
var newSize = Math.min(Math.round(vSize/vScale),Math.round(hSize/hScale));
|
|
||||||
fontSize(newSize + 'px');
|
|
||||||
if (isGe) { // hack to counter incremental reflow bugs
|
|
||||||
var obj = document.getElementsByTagName('body')[0];
|
|
||||||
obj.style.display = 'none';
|
|
||||||
obj.style.display = 'block';
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
function fontSize(value) {
|
|
||||||
if (!(s5ss = document.getElementById('s5ss'))) {
|
|
||||||
if (!isIE) {
|
|
||||||
document.getElementsByTagName('head')[0].appendChild(s5ss = document.createElement('style'));
|
|
||||||
s5ss.setAttribute('media','screen, projection');
|
|
||||||
s5ss.setAttribute('id','s5ss');
|
|
||||||
} else {
|
|
||||||
document.createStyleSheet();
|
|
||||||
document.s5ss = document.styleSheets[document.styleSheets.length - 1];
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if (!isIE) {
|
|
||||||
while (s5ss.lastChild) s5ss.removeChild(s5ss.lastChild);
|
|
||||||
s5ss.appendChild(document.createTextNode('body {font-size: ' + value + ' !important;}'));
|
|
||||||
} else {
|
|
||||||
document.s5ss.addRule('body','font-size: ' + value + ' !important;');
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
function notOperaFix() {
|
|
||||||
slideCSS = document.getElementById('slideProj').href;
|
|
||||||
var slides = document.getElementById('slideProj');
|
|
||||||
var outline = document.getElementById('outlineStyle');
|
|
||||||
slides.setAttribute('media','screen');
|
|
||||||
outline.disabled = true;
|
|
||||||
if (isGe) {
|
|
||||||
slides.setAttribute('href','null'); // Gecko fix
|
|
||||||
slides.setAttribute('href',slideCSS); // Gecko fix
|
|
||||||
}
|
|
||||||
if (isIE && document.styleSheets && document.styleSheets[0]) {
|
|
||||||
document.styleSheets[0].addRule('img', 'behavior: url(ui/default/iepngfix.htc)');
|
|
||||||
document.styleSheets[0].addRule('div', 'behavior: url(ui/default/iepngfix.htc)');
|
|
||||||
document.styleSheets[0].addRule('.slide', 'behavior: url(ui/default/iepngfix.htc)');
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
function getIncrementals(obj) {
|
|
||||||
var incrementals = new Array();
|
|
||||||
if (!obj)
|
|
||||||
return incrementals;
|
|
||||||
var children = obj.childNodes;
|
|
||||||
for (var i = 0; i < children.length; i++) {
|
|
||||||
var child = children[i];
|
|
||||||
if (hasClass(child, 'incremental')) {
|
|
||||||
if (child.nodeName == 'OL' || child.nodeName == 'UL') {
|
|
||||||
removeClass(child, 'incremental');
|
|
||||||
for (var j = 0; j < child.childNodes.length; j++) {
|
|
||||||
if (child.childNodes[j].nodeType == 1) {
|
|
||||||
addClass(child.childNodes[j], 'incremental');
|
|
||||||
}
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
incrementals[incrementals.length] = child;
|
|
||||||
removeClass(child,'incremental');
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if (hasClass(child, 'show-first')) {
|
|
||||||
if (child.nodeName == 'OL' || child.nodeName == 'UL') {
|
|
||||||
removeClass(child, 'show-first');
|
|
||||||
if (child.childNodes[isGe].nodeType == 1) {
|
|
||||||
removeClass(child.childNodes[isGe], 'incremental');
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
incrementals[incrementals.length] = child;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
incrementals = incrementals.concat(getIncrementals(child));
|
|
||||||
}
|
|
||||||
return incrementals;
|
|
||||||
}
|
|
||||||
|
|
||||||
function createIncrementals() {
|
|
||||||
var incrementals = new Array();
|
|
||||||
for (var i = 0; i < smax; i++) {
|
|
||||||
incrementals[i] = getIncrementals(document.getElementById(slideIDs[i]));
|
|
||||||
}
|
|
||||||
return incrementals;
|
|
||||||
}
|
|
||||||
|
|
||||||
function defaultCheck() {
|
|
||||||
var allMetas = document.getElementsByTagName('meta');
|
|
||||||
for (var i = 0; i< allMetas.length; i++) {
|
|
||||||
if (allMetas[i].name == 'defaultView') {
|
|
||||||
defaultView = allMetas[i].content;
|
|
||||||
}
|
|
||||||
if (allMetas[i].name == 'controlVis') {
|
|
||||||
controlVis = allMetas[i].content;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Key trap fix, new function body for trap()
|
|
||||||
function trap(e) {
|
|
||||||
if (!e) {
|
|
||||||
e = event;
|
|
||||||
e.which = e.keyCode;
|
|
||||||
}
|
|
||||||
try {
|
|
||||||
modifierKey = e.ctrlKey || e.altKey || e.metaKey;
|
|
||||||
}
|
|
||||||
catch(e) {
|
|
||||||
modifierKey = false;
|
|
||||||
}
|
|
||||||
return modifierKey || e.which == 0;
|
|
||||||
}
|
|
||||||
|
|
||||||
function startup() {
|
|
||||||
defaultCheck();
|
|
||||||
if (!isOp) createControls();
|
|
||||||
slideLabel();
|
|
||||||
fixLinks();
|
|
||||||
externalLinks();
|
|
||||||
fontScale();
|
|
||||||
if (!isOp) {
|
|
||||||
notOperaFix();
|
|
||||||
incrementals = createIncrementals();
|
|
||||||
slideJump();
|
|
||||||
if (defaultView == 'outline') {
|
|
||||||
toggle();
|
|
||||||
}
|
|
||||||
document.onkeyup = keys;
|
|
||||||
document.onkeypress = trap;
|
|
||||||
document.onclick = clicker;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
window.onload = startup;
|
|
||||||
window.onresize = function(){setTimeout('fontScale()', 50);}
|
|
Binary file not shown.
Before Width: | Height: | Size: 2.3 KiB |
Binary file not shown.
Before Width: | Height: | Size: 2.1 KiB |
|
@ -1,74 +0,0 @@
|
||||||
body, h1, h2, h3, h4, td, p, div {
|
|
||||||
/*margin-top: 80px;
|
|
||||||
position: fixed;*/
|
|
||||||
font-family: sans-serif;
|
|
||||||
font-size: 0.9em;
|
|
||||||
}
|
|
||||||
|
|
||||||
#slide0 h1.title {
|
|
||||||
text-align: center;
|
|
||||||
font-family: sans-serif;
|
|
||||||
}
|
|
||||||
|
|
||||||
div#header, div#footer, div#controls {
|
|
||||||
background-color: white;
|
|
||||||
}
|
|
||||||
|
|
||||||
div#header {
|
|
||||||
background-image: url("merlinux-klein.png");
|
|
||||||
background-repeat: no-repeat;
|
|
||||||
margin: 3px;
|
|
||||||
height: 100px;
|
|
||||||
border-bottom: 1px solid black;
|
|
||||||
}
|
|
||||||
|
|
||||||
.slide h1 {
|
|
||||||
background-color: white;
|
|
||||||
margin-left: 180px;
|
|
||||||
padding-left: 0px;
|
|
||||||
color: black;
|
|
||||||
font-size: 1.2em;
|
|
||||||
}
|
|
||||||
|
|
||||||
div#footer {
|
|
||||||
padding: 3px;
|
|
||||||
height: 4em;
|
|
||||||
border-top: 1px solid black;
|
|
||||||
}
|
|
||||||
|
|
||||||
div#footer h1, div#footer h2, div#footer h3 {
|
|
||||||
font-family: "Times New Roman";
|
|
||||||
font-style: italic;
|
|
||||||
padding: 0px;
|
|
||||||
}
|
|
||||||
|
|
||||||
div#footer h1 {
|
|
||||||
font-size: 2em;
|
|
||||||
}
|
|
||||||
|
|
||||||
#controls {
|
|
||||||
border: 1px solid red;
|
|
||||||
background-color: red;
|
|
||||||
width: 100px;
|
|
||||||
visibility: hidden;
|
|
||||||
}
|
|
||||||
|
|
||||||
#controls #controlForm {
|
|
||||||
visibility: visible;
|
|
||||||
}
|
|
||||||
|
|
||||||
#navList, #navLinks {
|
|
||||||
background-color: transparent;
|
|
||||||
}
|
|
||||||
|
|
||||||
#navLinks a#toggle, #navLinks a#prev, #navLinks a#next {
|
|
||||||
background-color: transparent;
|
|
||||||
}
|
|
||||||
|
|
||||||
/* some hacks to fix whitespace between list items */
|
|
||||||
|
|
||||||
li, li p {
|
|
||||||
line-height: 1.2em;
|
|
||||||
font-size: 1em;
|
|
||||||
margin: 0px;
|
|
||||||
}
|
|
622
py/doc/test.txt
|
@ -1,622 +0,0 @@
|
||||||
================================
|
|
||||||
The ``py.test`` tool and library
|
|
||||||
================================
|
|
||||||
|
|
||||||
.. contents::
|
|
||||||
.. sectnum::
|
|
||||||
|
|
||||||
|
|
||||||
This document is about the *usage* of the ``py.test`` testing tool. There is
|
|
||||||
also a document describing the `implementation and the extending of py.test`_.
|
|
||||||
|
|
||||||
.. _`implementation and the extending of py.test`: impl-test.html
|
|
||||||
|
|
||||||
starting point: ``py.test`` command line tool
|
|
||||||
=============================================
|
|
||||||
|
|
||||||
We presume you have done an installation as per the
|
|
||||||
download_ page after which you should be able to execute the
|
|
||||||
'py.test' tool from a command line shell.
|
|
||||||
|
|
||||||
``py.test`` is the command line tool to run tests. You can supply it
|
|
||||||
with a Python test file (or directory) by passing it as an argument::
|
|
||||||
|
|
||||||
py.test test_sample.py
|
|
||||||
|
|
||||||
``py.test`` looks for any functions and methods in the module that
|
|
||||||
start with ``test_`` and will then run those methods. Assertions
|
|
||||||
about test outcomes are done via the standard ``assert`` statement.
|
|
||||||
|
|
||||||
This means you can write tests without any boilerplate::
|
|
||||||
|
|
||||||
# content of test_sample.py
|
|
||||||
def test_answer():
|
|
||||||
assert 42 == 43
|
|
||||||
|
|
||||||
You may have test functions and test methods, there is no
|
|
||||||
need to subclass or to put tests into a class.
|
|
||||||
You can also use ``py.test`` to run all tests in a directory structure by
|
|
||||||
invoking it without any arguments::
|
|
||||||
|
|
||||||
py.test
|
|
||||||
|
|
||||||
This will automatically collect and run any Python module whose filenames
|
|
||||||
start with ``test_`` or end with ``_test`` in the directory and any
|
|
||||||
subdirectories, starting with the current directory. Each
|
|
||||||
Python test module is inspected for test methods starting with ``test_``.
|
|
||||||
|
|
||||||
.. _download: download.html
|
|
||||||
.. _features:
|
|
||||||
|
|
||||||
Basic Features of ``py.test``
|
|
||||||
=============================
|
|
||||||
|
|
||||||
assert with the ``assert`` statement
|
|
||||||
------------------------------------
|
|
||||||
|
|
||||||
Writing assertions is very simple and this is one of py.test's
|
|
||||||
most noticeable features, as you can use the ``assert``
|
|
||||||
statement with arbitrary expressions. For example you can
|
|
||||||
write the following in your tests::
|
|
||||||
|
|
||||||
assert hasattr(x, 'attribute')
|
|
||||||
|
|
||||||
to state that your object has a certain ``attribute``. In case this
|
|
||||||
assertion fails the test ``reporter`` will provide you with a very
|
|
||||||
helpful analysis and a clean traceback.
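
As a rough sketch (file and test names below are made up for illustration),
a failing comparison like the following has its evaluated values shown in
the failure report, not just a bare ``AssertionError``::

    # content of test_assert_demo.py (hypothetical example)
    def test_answer_is_seven():
        numbers = [1, 2, 3]
        # on failure py.test's report shows the evaluated expression,
        # so you can see that sum(numbers) was 6, not 7
        assert sum(numbers) == 7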
|
|
||||||
|
|
||||||
Note that in order to display helpful analysis of a failing
|
|
||||||
``assert`` statement some magic takes place behind the
|
|
||||||
scenes. For now, you only need to know that if something
|
|
||||||
looks strange or you suspect a bug in that
|
|
||||||
*behind-the-scenes-magic* you may turn off the magic by
|
|
||||||
providing the ``--nomagic`` option.
|
|
||||||
|
|
||||||
how to write assertions about exceptions
|
|
||||||
----------------------------------------
|
|
||||||
|
|
||||||
In order to write assertions about exceptions, you use
|
|
||||||
one of two forms::
|
|
||||||
|
|
||||||
py.test.raises(Exception, func, *args, **kwargs)
|
|
||||||
py.test.raises(Exception, "func(*args, **kwargs)")
|
|
||||||
|
|
||||||
both of which execute the given function with args and kwargs and
|
|
||||||
asserts that the given ``Exception`` is raised. The reporter will
|
|
||||||
provide you with helpful output in case of failures such as *no
|
|
||||||
exception* or *wrong exception*.
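
For instance, a minimal sketch (the ``divide`` helper is invented for
illustration) could look like this::

    import py

    def divide(a, b):
        return a / b

    def test_divide_by_zero():
        # both forms assert that ZeroDivisionError is raised
        py.test.raises(ZeroDivisionError, divide, 1, 0)
        py.test.raises(ZeroDivisionError, "divide(1, 0)")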
|
|
||||||
|
|
||||||
|
|
||||||
automatic collection of tests on all levels
|
|
||||||
-------------------------------------------
|
|
||||||
|
|
||||||
The automated test collection process walks the current
|
|
||||||
directory (or the directory given as a command line argument)
|
|
||||||
and all its subdirectories and collects python modules with a
|
|
||||||
leading ``test_`` or trailing ``_test`` filename. From each
|
|
||||||
test module every function with a leading ``test_`` or class with
|
|
||||||
a leading ``Test`` name is collected. The collecting process can
|
|
||||||
be customized at directory, module or class level. (see
|
|
||||||
`collection process`_ for some implementation details).
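
The naming conventions can be summarized with a small sketch (the file and
the names in it are made up)::

    # content of test_naming_demo.py  -- collected: filename starts with test_
    def test_function():              # collected: function starts with test_
        assert True

    class TestSomething:              # collected: class starts with Test
        def test_method(self):        # collected: method starts with test_
            assert True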
|
|
||||||
|
|
||||||
.. _`generative tests`:
|
|
||||||
.. _`collection process`: impl-test.html#collection-process
|
|
||||||
|
|
||||||
generative tests: yielding more tests
|
|
||||||
-------------------------------------
|
|
||||||
|
|
||||||
*Generative tests* are test methods that are *generator functions* which
|
|
||||||
``yield`` callables and their arguments. This is most useful for running a
|
|
||||||
test function multiple times against different parameters.
|
|
||||||
Example::
|
|
||||||
|
|
||||||
def test_generative():
|
|
||||||
for x in (42,17,49):
|
|
||||||
yield check, x
|
|
||||||
|
|
||||||
def check(arg):
|
|
||||||
assert arg % 7 == 0 # second generated tests fails!
|
|
||||||
|
|
||||||
Note that ``test_generative()`` will cause three tests
|
|
||||||
to get run, notably ``check(42)``, ``check(17)`` and ``check(49)``
|
|
||||||
of which the middle one will obviously fail.
|
|
||||||
|
|
||||||
.. _`selection by keyword`:
|
|
||||||
|
|
||||||
selecting tests by keyword
|
|
||||||
--------------------------
|
|
||||||
|
|
||||||
You can selectively run tests by specifying a keyword
|
|
||||||
on the command line. Example::
|
|
||||||
|
|
||||||
py.test -k test_simple
|
|
||||||
|
|
||||||
will run all tests that are found from the current directory
|
|
||||||
and where the word "test_simple" equals the start of one part of the
|
|
||||||
path leading up to the test item. Directory and file basenames as well
|
|
||||||
as function, class and function/method names each form a possibly
|
|
||||||
matching name.
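
For example (the keywords below are purely illustrative)::

    py.test -k test_simple      # select by file/function name part
    py.test -k TestClass        # select by test class name
    py.test -k -test_slow       # exclude items matching test_slow

The exclusion form ``-k -KEYWORD`` is described with the command line
options further below.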
|
|
||||||
|
|
||||||
Note that the exact semantics are still experimental but
|
|
||||||
should always remain intuitive.
|
|
||||||
|
|
||||||
testing with multiple python versions / executables
|
|
||||||
---------------------------------------------------
|
|
||||||
|
|
||||||
With ``--exec=EXECUTABLE`` you can specify a python
|
|
||||||
executable (e.g. ``python2.2``) with which the tests
|
|
||||||
will be executed.
|
|
||||||
|
|
||||||
|
|
||||||
testing starts immediately
|
|
||||||
--------------------------
|
|
||||||
|
|
||||||
Testing starts as soon as the first ``test item``
|
|
||||||
is collected. The collection process is iterative
|
|
||||||
and does not need to complete before your first
|
|
||||||
test items are executed.
|
|
||||||
|
|
||||||
no interference with cmdline utilities
|
|
||||||
--------------------------------------
|
|
||||||
|
|
||||||
As ``py.test`` mainly operates as a separate cmdline
|
|
||||||
tool you can easily have a command line utility and
|
|
||||||
some tests in the same file.
|
|
||||||
|
|
||||||
debug with the ``print`` statement
|
|
||||||
----------------------------------
|
|
||||||
|
|
||||||
By default, ``py.test`` catches text written to stdout/stderr during
|
|
||||||
the execution of each individual test. This output will only be
|
|
||||||
displayed however if the test fails; you will not see it
|
|
||||||
otherwise. This allows you to put debugging print statements in your
|
|
||||||
code without being overwhelmed by all the output that might be
|
|
||||||
generated by tests that do not fail.
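
A minimal sketch (names invented for illustration)::

    def test_computation():
        value = 6 * 7
        print "debug: value is", value   # only shown if the test fails
        assert value == 42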
|
|
||||||
|
|
||||||
Each failing test that produced output during the running of the test
|
|
||||||
will have its output displayed in the ``recorded stdout`` section.
|
|
||||||
|
|
||||||
The catching of stdout/stderr output can be disabled using the
|
|
||||||
``--nocapture`` option to the ``py.test`` tool. Any output will
|
|
||||||
in this case be displayed as soon as it is generated.
|
|
||||||
|
|
||||||
test execution order
|
|
||||||
--------------------------------
|
|
||||||
|
|
||||||
Tests usually run in the order in which they appear in the files.
|
|
||||||
However, tests should not rely on running one after another, as
|
|
||||||
this prevents more advanced usages: running tests
|
|
||||||
in a distributed or selective way, or in "looponfailing" mode,
|
|
||||||
will cause them to run in random order.
|
|
||||||
|
|
||||||
useful tracebacks, recursion detection
|
|
||||||
--------------------------------------
|
|
||||||
|
|
||||||
A lot of care is taken to present nice tracebacks in case of test
|
|
||||||
failure. Try::
|
|
||||||
|
|
||||||
py.test py/documentation/example/pytest/failure_demo.py
|
|
||||||
|
|
||||||
to see a variety of 17 tracebacks, each tailored to a different
|
|
||||||
failure situation.
|
|
||||||
|
|
||||||
``py.test`` uses the same order for presenting tracebacks as Python
|
|
||||||
itself: the outer function is shown first, and the most recent call is
|
|
||||||
shown last. Similarly, a ``py.test`` reported traceback starts with your
|
|
||||||
failing test function and then works its way downwards. If the maximum
|
|
||||||
recursion depth has been exceeded during the running of a test, for
|
|
||||||
instance because of infinite recursion, ``py.test`` will indicate
|
|
||||||
where in the code the recursion was taking place. You can
|
|
||||||
inhibit traceback "cutting" magic by supplying ``--fulltrace``.
|
|
||||||
|
|
||||||
There is also the possibility of using ``--tb=short`` to get the regular Python
|
|
||||||
tracebacks (which can sometimes be useful when they are extremely long). Or you
|
|
||||||
can use ``--tb=no`` to not show any tracebacks at all.
|
|
||||||
|
|
||||||
no inheritance requirement
|
|
||||||
--------------------------
|
|
||||||
|
|
||||||
Test classes are recognized by their leading ``Test`` name. Unlike
|
|
||||||
``unittest.py``, you don't need to inherit from some base class to make
|
|
||||||
them be found by the test runner. Besides being easier, it also allows
|
|
||||||
you to write test classes that subclass from application level
|
|
||||||
classes.
|
|
||||||
|
|
||||||
disabling a test class
|
|
||||||
----------------------
|
|
||||||
|
|
||||||
If you want to disable a complete test class you
|
|
||||||
can set the class-level attribute ``disabled``.
|
|
||||||
For example, in order to avoid running some tests on Win32::
|
|
||||||
|
|
||||||
class TestEgSomePosixStuff:
|
|
||||||
disabled = sys.platform == 'win32'
|
|
||||||
|
|
||||||
def test_xxx(self):
|
|
||||||
...
|
|
||||||
|
|
||||||
testing for deprecated APIs
|
|
||||||
------------------------------
|
|
||||||
|
|
||||||
In your tests you can use ``py.test.deprecated_call(func, *args, **kwargs)``
|
|
||||||
to test that a particular function call triggers a DeprecationWarning.
|
|
||||||
This is useful for testing phasing out of old APIs in your projects.
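
A small sketch, with an invented ``oldapi`` function that issues the
warning itself::

    import warnings
    import py

    def oldapi():
        warnings.warn("oldapi() is deprecated", DeprecationWarning)
        return 42

    def test_oldapi_is_deprecated():
        # passes only if the call actually triggers a DeprecationWarning
        py.test.deprecated_call(oldapi)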
|
|
||||||
|
|
||||||
Managing test state across test modules, classes and methods
|
|
||||||
------------------------------------------------------------
|
|
||||||
|
|
||||||
Often you want to create some files, database connections or other
|
|
||||||
state in order to run tests in a certain environment. With
|
|
||||||
``py.test`` there are three scopes for which you can provide hooks to
|
|
||||||
manage such state. Again, ``py.test`` will detect these hooks in
|
|
||||||
modules on a name basis. The following module-level hooks will
|
|
||||||
automatically be called by the session::
|
|
||||||
|
|
||||||
def setup_module(module):
|
|
||||||
""" setup up any state specific to the execution
|
|
||||||
of the given module.
|
|
||||||
"""
|
|
||||||
|
|
||||||
def teardown_module(module):
|
|
||||||
""" teardown any state that was previously setup
|
|
||||||
with a setup_module method.
|
|
||||||
"""
|
|
||||||
|
|
||||||
The following hooks are available for test classes::
|
|
||||||
|
|
||||||
def setup_class(cls):
|
|
||||||
""" setup up any state specific to the execution
|
|
||||||
of the given class (which usually contains tests).
|
|
||||||
"""
|
|
||||||
|
|
||||||
def teardown_class(cls):
|
|
||||||
""" teardown any state that was previously setup
|
|
||||||
with a call to setup_class.
|
|
||||||
"""
|
|
||||||
|
|
||||||
def setup_method(self, method):
|
|
||||||
""" setup up any state tied to the execution of the given
|
|
||||||
method in a class. setup_method is invoked for every
|
|
||||||
test method of a class.
|
|
||||||
"""
|
|
||||||
|
|
||||||
def teardown_method(self, method):
|
|
||||||
""" teardown any state that was previously setup
|
|
||||||
with a setup_method call.
|
|
||||||
"""
|
|
||||||
|
|
||||||
The last two hooks, ``setup_method`` and ``teardown_method``, are
|
|
||||||
equivalent to ``setUp`` and ``tearDown`` in the Python standard
|
|
||||||
library's ``unittest`` module.
|
|
||||||
|
|
||||||
All setup/teardown methods are optional. You could have a
|
|
||||||
``setup_module`` but no ``teardown_module`` and the other way round.
|
|
||||||
|
|
||||||
Note that while the test session guarantees that for every ``setup`` a
|
|
||||||
corresponding ``teardown`` will be invoked (if it exists) it does
|
|
||||||
*not* guarantee that any ``setup`` is called only once. For
|
|
||||||
example, the session might decide to call the ``setup_module`` /
|
|
||||||
``teardown_module`` pair more than once during the execution of a test
|
|
||||||
module.
|
|
||||||
|
|
||||||
Experimental doctest support
|
|
||||||
------------------------------------------------------------
|
|
||||||
|
|
||||||
If you want to integrate doctests, ``py.test`` now by default
|
|
||||||
picks up files matching the ``test_*.txt`` or ``*_test.txt``
|
|
||||||
patterns and processes them as text files containing doctests.
|
|
||||||
This is an experimental feature and likely to change
|
|
||||||
its implementation.
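
For illustration, a text file named e.g. ``test_example.txt`` (the name is
made up) containing the following would be collected and its interactive
examples checked::

    A short doctest::

        >>> 1 + 1
        2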
|
|
||||||
|
|
||||||
Working Examples
|
|
||||||
================
|
|
||||||
|
|
||||||
Example for managing state at module, class and method level
|
|
||||||
------------------------------------------------------------
|
|
||||||
|
|
||||||
Here is a working example for what goes on when you setup modules,
|
|
||||||
classes and methods::
|
|
||||||
|
|
||||||
# [[from py/documentation/example/pytest/test_setup_flow_example.py]]
|
|
||||||
|
|
||||||
def setup_module(module):
|
|
||||||
module.TestStateFullThing.classcount = 0
|
|
||||||
|
|
||||||
class TestStateFullThing:
|
|
||||||
def setup_class(cls):
|
|
||||||
cls.classcount += 1
|
|
||||||
|
|
||||||
def teardown_class(cls):
|
|
||||||
cls.classcount -= 1
|
|
||||||
|
|
||||||
def setup_method(self, method):
|
|
||||||
self.id = eval(method.func_name[5:])
|
|
||||||
|
|
||||||
def test_42(self):
|
|
||||||
assert self.classcount == 1
|
|
||||||
assert self.id == 42
|
|
||||||
|
|
||||||
def test_23(self):
|
|
||||||
assert self.classcount == 1
|
|
||||||
assert self.id == 23
|
|
||||||
|
|
||||||
def teardown_module(module):
|
|
||||||
assert module.TestStateFullThing.classcount == 0
|
|
||||||
|
|
||||||
For this example the control flow happens as follows::
|
|
||||||
|
|
||||||
import test_setup_flow_example
|
|
||||||
setup_module(test_setup_flow_example)
|
|
||||||
setup_class(TestStateFullThing)
|
|
||||||
instance = TestStateFullThing()
|
|
||||||
setup_method(instance, instance.test_42)
|
|
||||||
instance.test_42()
|
|
||||||
setup_method(instance, instance.test_23)
|
|
||||||
instance.test_23()
|
|
||||||
teardown_class(TestStateFullThing)
|
|
||||||
teardown_module(test_setup_flow_example)
|
|
||||||
|
|
||||||
|
|
||||||
Note that ``setup_class(TestStateFullThing)`` is called and not
|
|
||||||
``TestStateFullThing.setup_class()`` which would require you
|
|
||||||
to insert ``setup_class = classmethod(setup_class)`` to make
|
|
||||||
your setup function callable. Did we mention that laziness
|
|
||||||
is a virtue?
|
|
||||||
|
|
||||||
Some ``py.test`` command-line options
|
|
||||||
=====================================
|
|
||||||
|
|
||||||
Regular options
|
|
||||||
---------------
|
|
||||||
|
|
||||||
``-v, --verbose``
|
|
||||||
Increase verbosity. This shows a test per line while running and also
|
|
||||||
shows the traceback after interrupting the test run with Ctrl-C.
|
|
||||||
|
|
||||||
|
|
||||||
``-x, --exitfirst``
|
|
||||||
exit instantly on the first error or the first failed test.
|
|
||||||
|
|
||||||
|
|
||||||
``-s, --nocapture``
|
|
||||||
disable catching of sys.stdout/stderr output.
|
|
||||||
|
|
||||||
|
|
||||||
``-k KEYWORD``
|
|
||||||
only run test items matching the given keyword expression. You can also
|
|
||||||
use ``-k -KEYWORD`` to exclude tests from being run. The keyword is matched
|
|
||||||
against filename, test class name, method name.
|
|
||||||
|
|
||||||
|
|
||||||
``-l, --showlocals``
|
|
||||||
show locals in tracebacks: for every frame in the traceback, show the values
|
|
||||||
of the local variables.
|
|
||||||
|
|
||||||
|
|
||||||
``--pdb``
|
|
||||||
drop into pdb (the `Python debugger`_) on exceptions. If the debugger is
|
|
||||||
exited, the next test is run. This implies ``-s``.
|
|
||||||
|
|
||||||
|
|
||||||
``--tb=TBSTYLE``
|
|
||||||
traceback verbosity: ``long`` is the default, ``short`` gives the normal
|
|
||||||
Python tracebacks, ``no`` omits tracebacks completely.
|
|
||||||
|
|
||||||
|
|
||||||
``--fulltrace``
|
|
||||||
Don't cut any tracebacks. The default is to leave out frames if an infinite
|
|
||||||
recursion is detected.
|
|
||||||
|
|
||||||
|
|
||||||
``--nomagic``
|
|
||||||
Refrain from using magic as much as possible. This can be useful if you are
|
|
||||||
suspicious that ``py.test`` somehow interferes with your program in
|
|
||||||
unintended ways (if this is the case, please contact us!).
|
|
||||||
|
|
||||||
|
|
||||||
``--collectonly``
|
|
||||||
Only collect tests, don't execute them.
|
|
||||||
|
|
||||||
|
|
||||||
``--traceconfig``
|
|
||||||
trace considerations of conftest.py files. Useful when you have various
|
|
||||||
conftest.py files around and are unsure about their interaction.
|
|
||||||
|
|
||||||
``-f, --looponfailing``
|
|
||||||
Loop on failing test set. This is a feature you can use when you are trying
|
|
||||||
to fix a number of failing tests: first, all tests are run. If a
|
|
||||||
number of tests are failing, these are run repeatedly afterwards. Every
|
|
||||||
repetition is started once a file below the directory that you started
|
|
||||||
testing in is changed. If one of the previously failing tests now passes,
|
|
||||||
it is removed from the test set.
|
|
||||||
|
|
||||||
``--exec=EXECUTABLE``
|
|
||||||
Python executable to run the tests with. Useful for testing on different
|
|
||||||
versions of Python.
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
experimental options
|
|
||||||
--------------------
|
|
||||||
|
|
||||||
**Note**: these options could change in the future.
|
|
||||||
|
|
||||||
|
|
||||||
``-d, --dist``
|
|
||||||
ad-hoc `distribute tests across machines`_ (requires conftest settings)
|
|
||||||
|
|
||||||
|
|
||||||
``-w, --startserver``
|
|
||||||
starts local web server for displaying test progress.
|
|
||||||
|
|
||||||
|
|
||||||
``-r, --runbrowser``
|
|
||||||
Run browser (implies --startserver).
|
|
||||||
|
|
||||||
|
|
||||||
``--boxed``
|
|
||||||
Use boxed tests: run each test in an external process. Very useful for testing
|
|
||||||
things that occasionally segfault (since a segfault would otherwise
|
|
||||||
stop the whole test process).
|
|
||||||
|
|
||||||
``--rest``
|
|
||||||
`reStructured Text`_ output reporting.
|
|
||||||
|
|
||||||
|
|
||||||
.. _`reStructured Text`: http://docutils.sourceforge.net
|
|
||||||
.. _`Python debugger`: http://docs.python.org/lib/module-pdb.html
|
|
||||||
|
|
||||||
|
|
||||||
.. _`distribute tests across machines`:
|
|
||||||
|
|
||||||
|
|
||||||
Automated Distributed Testing
|
|
||||||
==================================
|
|
||||||
|
|
||||||
If you have a project with a large number of tests, and you have
|
|
||||||
machines accessible through SSH, ``py.test`` can distribute
|
|
||||||
tests across the machines. It does not require any particular
|
|
||||||
installation on the remote machine sides as it uses `py.execnet`_
|
|
||||||
mechanisms to distribute execution. Using distributed testing
|
|
||||||
can speed up your development process considerably and it
|
|
||||||
may also be useful where you need to use a remote server
|
|
||||||
that has more resources (e.g. RAM/diskspace) than your
|
|
||||||
local machine.
|
|
||||||
|
|
||||||
*WARNING*: support for distributed testing is experimental,
|
|
||||||
its mechanics and configuration options may change without
|
|
||||||
prior notice. Particularly, not all reporting features
|
|
||||||
of the in-process py.test have been integrated into
|
|
||||||
the distributed testing approach.
|
|
||||||
|
|
||||||
Requirements
|
|
||||||
------------
|
|
||||||
|
|
||||||
Local requirements:
|
|
||||||
|
|
||||||
* ssh client
|
|
||||||
* python
|
|
||||||
|
|
||||||
Requirements for remote machines:
|
|
||||||
|
|
||||||
* ssh daemon running
|
|
||||||
* ssh keys setup to allow login without a password
|
|
||||||
* python
|
|
||||||
* unix like machine (reliance on ``os.fork``)
|
|
||||||
|
|
||||||
How to use it
|
|
||||||
-----------------------
|
|
||||||
|
|
||||||
When you issue ``py.test -d`` then your computer becomes
|
|
||||||
the distributor of tests ("master") and will start collecting
|
|
||||||
and distributing tests to several machines. The machines
|
|
||||||
need to be specified in a ``conftest.py`` file.
|
|
||||||
|
|
||||||
At start up, the master connects to each node using `py.execnet.SshGateway`_
|
|
||||||
and *rsyncs* all specified python packages to all nodes.
|
|
||||||
Then the master collects all of the tests and immediately sends test item
|
|
||||||
descriptions to its connected nodes. Each node has a local queue of tests
|
|
||||||
to run and begins to execute the tests, following the setup and teardown
|
|
||||||
semantics. The tests are distributed at function and method level.
|
|
||||||
When a test run on a node is completed it reports back the result
|
|
||||||
to the master.
|
|
||||||
|
|
||||||
The master can run one of three reporters to process the events
|
|
||||||
from the testing nodes: command line, ReST output and an AJAX-based web interface.
|
|
||||||
|
|
||||||
.. _`py.execnet`: execnet.html
|
|
||||||
.. _`py.execnet.SshGateway`: execnet.html
|
|
||||||
|
|
||||||
Differences from local tests
|
|
||||||
----------------------------
|
|
||||||
|
|
||||||
* Test order is rather random (instead of in file order).
|
|
||||||
* The test process may hang due to network problems.
|
|
||||||
* You may not reference files outside of rsynced directory structures.
|
|
||||||
|
|
||||||
Configuration
|
|
||||||
-------------
|
|
||||||
|
|
||||||
You must create a conftest.py in any parent directory above your tests.
|
|
||||||
|
|
||||||
The options that you need to specify in that conftest.py file are:
|
|
||||||
|
|
||||||
* `dist_hosts` - a required list of host specifications
|
|
||||||
* `dist_rsync_roots` - a list of relative locations to copy to the remote machines.
|
|
||||||
* `dist_rsync_ignore` - a list of relative locations to ignore for rsyncing
|
|
||||||
* `dist_remotepython` - the remote python executable to run.
|
|
||||||
* `dist_nicelevel` - process priority of remote nodes.
|
|
||||||
* `dist_boxed` - will run each single test in a separate process
|
|
||||||
(allowing the test run to survive segfaults, for example)
|
|
||||||
* `dist_taskspernode` - Maximum number of tasks being queued to remote nodes
|
|
||||||
|
|
||||||
Sample configuration::
|
|
||||||
|
|
||||||
dist_hosts = ['localhost', 'user@someserver:/tmp/somedir']
|
|
||||||
dist_rsync_roots = ['../pypy', '../py']
|
|
||||||
dist_remotepython = 'python2.4'
|
|
||||||
dist_nicelevel = 10
|
|
||||||
dist_boxed = False
|
|
||||||
dist_maxwait = 100
|
|
||||||
dist_taskspernode = 10
|
|
||||||
|
|
||||||
To use the browser-based reporter (with a nice AJAX interface) you have to tell
|
|
||||||
``py.test`` to run a small server locally using the ``-w`` or ``--startserver``
|
|
||||||
command line options. Afterwards you can point your browser to localhost:8000
|
|
||||||
to see the progress of the testing.
|
|
||||||
|
|
||||||
Development Notes
|
|
||||||
-----------------
|
|
||||||
|
|
||||||
Changing the behavior of the web based reporter requires `pypy`_ since the
|
|
||||||
javascript is actually generated from RPython source.
|
|
||||||
|
|
||||||
.. _`pypy`: http://codespeak.net/pypy
|
|
||||||
|
|
||||||
Future/Planned Features of py.test
|
|
||||||
==================================
|
|
||||||
|
|
||||||
integrating various test methods
|
|
||||||
-------------------------------------------
|
|
||||||
|
|
||||||
There are various conftest.py's out there
|
|
||||||
that generate HTML reports, distribute tests ad-hoc
|
|
||||||
to Windows machines, or do other fun stuff.
|
|
||||||
These approaches should be offered natively
|
|
||||||
by py.test at some point (requires refactorings).
|
|
||||||
In addition, performing special checks such
|
|
||||||
as w3c-conformance tests or ReST checks
|
|
||||||
should be offered from mainline py.test.
|
|
||||||
|
|
||||||
more distributed testing
|
|
||||||
-----------------------------------------
|
|
||||||
|
|
||||||
We'd like to generalize and extend our ad-hoc
|
|
||||||
distributed testing approach to allow for running
|
|
||||||
on multiple platforms simultaneously and selectively.
|
|
||||||
The web reporter should learn to deal with driving
|
|
||||||
complex multi-platform test runs and providing
|
|
||||||
useful introspection and interactive debugging hooks.
|
|
||||||
|
|
||||||
|
|
||||||
move to report event based architecture
|
|
||||||
--------------------------------------------
|
|
||||||
|
|
||||||
To facilitate writing of custom reporters
|
|
||||||
py.test is to learn to generate reporting events
|
|
||||||
at all levels which a reporter can choose to
|
|
||||||
interpret and present. The distributed testing
|
|
||||||
approach already uses such an approach and
|
|
||||||
we'd like to unify this with the default
|
|
||||||
in-process py.test mode.
|
|
||||||
|
|
||||||
|
|
||||||
see what other tools do currently (nose, etc.)
|
|
||||||
----------------------------------------------------
|
|
||||||
|
|
||||||
There are various tools out there, among them
|
|
||||||
the nose_ clone. It's about time to look again
|
|
||||||
at these and other tools, integrate interesting
|
|
||||||
features and maybe collaborate on some issues.
|
|
||||||
|
|
||||||
.. _nose: http://somethingaboutorange.com/mrl/projects/nose/
|
|
|
@ -1,145 +0,0 @@
|
||||||
|
|
||||||
import py
|
|
||||||
from py.__.test import repevent
|
|
||||||
|
|
||||||
def setup_module(mod):
|
|
||||||
mod.tmpdir = py.test.ensuretemp('docdoctest')
|
|
||||||
|
|
||||||
def countoutcomes(session):
|
|
||||||
l = []
|
|
||||||
session.main(l.append)
|
|
||||||
passed = failed = skipped = 0
|
|
||||||
for event in l:
|
|
||||||
if isinstance(event, repevent.ReceivedItemOutcome):
|
|
||||||
if event.outcome.passed:
|
|
||||||
passed += 1
|
|
||||||
elif event.outcome.skipped:
|
|
||||||
skipped += 1
|
|
||||||
else:
|
|
||||||
failed += 1
|
|
||||||
elif isinstance(event, repevent.FailedTryiter):
|
|
||||||
failed += 1
|
|
||||||
return failed, passed, skipped
|
|
||||||
|
|
||||||
def test_doctest_extra_exec():
|
|
||||||
# XXX get rid of the next line:
|
|
||||||
py.magic.autopath().dirpath('conftest.py').copy(tmpdir.join('conftest.py'))
|
|
||||||
xtxt = tmpdir.join('y.txt')
|
|
||||||
xtxt.write(py.code.Source("""
|
|
||||||
hello::
|
|
||||||
.. >>> raise ValueError
|
|
||||||
>>> None
|
|
||||||
"""))
|
|
||||||
config = py.test.config._reparse([xtxt])
|
|
||||||
session = config.initsession()
|
|
||||||
failed, passed, skipped = countoutcomes(session)
|
|
||||||
assert failed == 1
|
|
||||||
|
|
||||||
def test_doctest_basic():
|
|
||||||
# XXX get rid of the next line:
|
|
||||||
py.magic.autopath().dirpath('conftest.py').copy(tmpdir.join('conftest.py'))
|
|
||||||
|
|
||||||
xtxt = tmpdir.join('x.txt')
|
|
||||||
xtxt.write(py.code.Source("""
|
|
||||||
..
|
|
||||||
>>> from os.path import abspath
|
|
||||||
|
|
||||||
hello world
|
|
||||||
|
|
||||||
>>> assert abspath
|
|
||||||
>>> i=3
|
|
||||||
>>> print i
|
|
||||||
3
|
|
||||||
|
|
||||||
yes yes
|
|
||||||
|
|
||||||
>>> i
|
|
||||||
3
|
|
||||||
|
|
||||||
end
|
|
||||||
"""))
|
|
||||||
config = py.test.config._reparse([xtxt])
|
|
||||||
session = config.initsession()
|
|
||||||
failed, passed, skipped = countoutcomes(session)
|
|
||||||
assert failed == 0
|
|
||||||
assert passed + skipped == 2
|
|
||||||
|
|
||||||
def test_deindent():
|
|
||||||
from py.__.doc.conftest import deindent
|
|
||||||
assert deindent('foo') == 'foo'
|
|
||||||
assert deindent('foo\n bar') == 'foo\n bar'
|
|
||||||
assert deindent(' foo\n bar\n') == 'foo\nbar\n'
|
|
||||||
assert deindent(' foo\n\n bar\n') == 'foo\n\nbar\n'
|
|
||||||
assert deindent(' foo\n bar\n') == 'foo\n bar\n'
|
|
||||||
assert deindent(' foo\n bar\n') == ' foo\nbar\n'
|
|
||||||
|
|
||||||
def test_doctest_eol():
|
|
||||||
# XXX get rid of the next line:
|
|
||||||
py.magic.autopath().dirpath('conftest.py').copy(tmpdir.join('conftest.py'))
|
|
||||||
|
|
||||||
ytxt = tmpdir.join('y.txt')
|
|
||||||
ytxt.write(py.code.Source(".. >>> 1 + 1\r\n 2\r\n\r\n"))
|
|
||||||
config = py.test.config._reparse([ytxt])
|
|
||||||
session = config.initsession()
|
|
||||||
failed, passed, skipped = countoutcomes(session)
|
|
||||||
assert failed == 0
|
|
||||||
assert passed + skipped == 2
|
|
||||||
|
|
||||||
def test_doctest_indentation():
|
|
||||||
# XXX get rid of the next line:
|
|
||||||
py.magic.autopath().dirpath('conftest.py').copy(tmpdir.join('conftest.py'))
|
|
||||||
|
|
||||||
txt = tmpdir.join('foo.txt')
|
|
||||||
txt.write('..\n >>> print "foo\\n bar"\n foo\n bar\n')
|
|
||||||
config = py.test.config._reparse([txt])
|
|
||||||
session = config.initsession()
|
|
||||||
failed, passed, skipped = countoutcomes(session)
|
|
||||||
assert failed == 0
|
|
||||||
assert skipped + passed == 2
|
|
||||||
|
|
||||||
def test_js_ignore():
|
|
||||||
py.magic.autopath().dirpath('conftest.py').copy(tmpdir.join('conftest.py'))
|
|
||||||
tmpdir.ensure('__init__.py')
|
|
||||||
xtxt = tmpdir.join('x.txt')
|
|
||||||
xtxt.write(py.code.Source("""
|
|
||||||
`blah`_
|
|
||||||
|
|
||||||
.. _`blah`: javascript:some_function()
|
|
||||||
"""))
|
|
||||||
config = py.test.config._reparse([xtxt])
|
|
||||||
session = config.initsession()
|
|
||||||
|
|
||||||
failed, passed, skipped = countoutcomes(session)
|
|
||||||
assert failed == 0
|
|
||||||
assert skipped + passed == 3
|
|
||||||
|
|
||||||
def test_resolve_linkrole():
|
|
||||||
from py.__.doc.conftest import get_apigen_relpath
|
|
||||||
apigen_relpath = get_apigen_relpath()
|
|
||||||
from py.__.doc.conftest import resolve_linkrole
|
|
||||||
assert resolve_linkrole('api', 'py.foo.bar', False) == (
|
|
||||||
'py.foo.bar', apigen_relpath + 'api/foo.bar.html')
|
|
||||||
assert resolve_linkrole('api', 'py.foo.bar()', False) == (
|
|
||||||
'py.foo.bar()', apigen_relpath + 'api/foo.bar.html')
|
|
||||||
assert resolve_linkrole('api', 'py', False) == (
|
|
||||||
'py', apigen_relpath + 'api/index.html')
|
|
||||||
py.test.raises(AssertionError, 'resolve_linkrole("api", "foo.bar")')
|
|
||||||
assert resolve_linkrole('source', 'py/foo/bar.py', False) == (
|
|
||||||
'py/foo/bar.py', apigen_relpath + 'source/foo/bar.py.html')
|
|
||||||
assert resolve_linkrole('source', 'py/foo/', False) == (
|
|
||||||
'py/foo/', apigen_relpath + 'source/foo/index.html')
|
|
||||||
assert resolve_linkrole('source', 'py/', False) == (
|
|
||||||
'py/', apigen_relpath + 'source/index.html')
|
|
||||||
py.test.raises(AssertionError, 'resolve_linkrole("source", "/foo/bar/")')
|
|
||||||
|
|
||||||
def test_resolve_linkrole_check_api():
|
|
||||||
from py.__.doc.conftest import resolve_linkrole
|
|
||||||
assert resolve_linkrole('api', 'py.test.ensuretemp')
|
|
||||||
py.test.raises(AssertionError, "resolve_linkrole('api', 'py.foo.baz')")
|
|
||||||
|
|
||||||
def test_resolve_linkrole_check_source():
|
|
||||||
from py.__.doc.conftest import resolve_linkrole
|
|
||||||
assert resolve_linkrole('source', 'py/path/common.py')
|
|
||||||
py.test.raises(AssertionError,
|
|
||||||
"resolve_linkrole('source', 'py/foo/bar.py')")
|
|
||||||
|
|
|
@ -1,191 +0,0 @@
|
||||||
==============================================
|
|
||||||
Why, who, what and how do you do *the py lib*?
|
|
||||||
==============================================
|
|
||||||
|
|
||||||
.. contents::
|
|
||||||
.. sectnum::
|
|
||||||
|
|
||||||
|
|
||||||
Why did we start the py lib?
|
|
||||||
============================
|
|
||||||
|
|
||||||
Among the main motivations for the py lib and its flagship
|
|
||||||
py.test tool were:
|
|
||||||
|
|
||||||
- to test applications with a testing tool that provides
|
|
||||||
advanced features out of the box, yet allows full customization
|
|
||||||
per-project.
|
|
||||||
|
|
||||||
- distribute applications in an ad-hoc way both for testing
|
|
||||||
and for application integration purposes.
|
|
||||||
|
|
||||||
- help with neutralizing platform and python version differences
|
|
||||||
|
|
||||||
- offer a uniform way to access local and remote file resources
|
|
||||||
|
|
||||||
- offer some unique features like micro-threads (greenlets)
|
|
||||||
|
|
||||||
|
|
||||||
What is the py lib's current focus?
|
|
||||||
===================================
|
|
||||||
|
|
||||||
testing testing testing
|
|
||||||
-----------------------
|
|
||||||
|
|
||||||
Currently, the main focus of the py lib is to get a decent
|
|
||||||
`test environment`_, indeed to produce the best one out there.
|
|
||||||
Writing, distributing and deploying tests should become
|
|
||||||
a snap ... and fun!
|
|
||||||
|
|
||||||
On a side note: automated tests fit very well to the dynamism
|
|
||||||
of Python. Automated tests ease development and allow fast
|
|
||||||
refactoring cycles. Automated tests are a means of
|
|
||||||
communication as well.
|
|
||||||
|
|
||||||
|
|
||||||
ad-hoc distribution of programs
|
|
||||||
------------------------------------
|
|
||||||
|
|
||||||
The py lib through its `py.execnet`_ namespaces offers
|
|
||||||
support for ad-hoc distributing programs across
|
|
||||||
a network and subprocesses. We'd like to generalize
|
|
||||||
this approach further to instantiate and let whole
|
|
||||||
ad-hoc networks communicate with each other while
|
|
||||||
keeping to a simple programming model.
|
|
||||||
|
|
||||||
.. _`py.execnet`: execnet.html
|
|
||||||
|
|
||||||
|
|
||||||
allowing maximum refactoring in the future ...
|
|
||||||
----------------------------------------------
|
|
||||||
|
|
||||||
explicit name export control
|
|
||||||
............................
|
|
||||||
|
|
||||||
In order to allow a fast development pace across versions of
|
|
||||||
the py lib there is **explicit name export control**. You
|
|
||||||
should only see names which make sense to use from the outside
|
|
||||||
and which the py lib developers want to guarantee across versions.
|
|
||||||
However, you don't need to treat the ``py`` lib as
|
|
||||||
anything special. You can simply use the usual ``import``
|
|
||||||
statement and will not notice much of a difference - except that
|
|
||||||
the namespaces you'll see from the ``py`` lib are relatively
|
|
||||||
clean and have no clutter.
|
|
||||||
|
|
||||||
Release policy & API maintenance
|
|
||||||
........................................
|
|
||||||
|
|
||||||
We'll talk about major, minor and micro numbers as the three
|
|
||||||
numbers in "1.2.3" respectively. These are the
|
|
||||||
rough release policies:
|
|
||||||
|
|
||||||
- Micro-releases are bug fix releases and should not introduce
|
|
||||||
new names to the public API. They may add tests and thus
|
|
||||||
further define the behaviour of the py lib. They may
|
|
||||||
completely change the implementation but the public API
|
|
||||||
tests should continue to run (unless they needed to
|
|
||||||
get fixed themselves).
|
|
||||||
|
|
||||||
- No **tested feature** of the exported py API shall vanish
|
|
||||||
across minor releases until it is marked deprecated.
|
|
||||||
|
|
||||||
For example, pure API tests of a future version 1.0 are to
|
|
||||||
continue to fully run on 1.1 and so on. If an API gets
|
|
||||||
deprecated with a minor release it goes with the next minor
|
|
||||||
release. Thus if you don't use deprecated APIs you should
|
|
||||||
be able to use the next two minor releases. However, if
|
|
||||||
you relied on some untested implementation behaviour,
|
|
||||||
you may still get screwed. Solution: add API tests to the
|
|
||||||
py lib :-) It's really the tests that make the difference.
|
|
||||||
|
|
||||||
- Pure API tests are not allowed to access any implementation
|
|
||||||
level details. For example, accessing names starting with
|
|
||||||
a single leading '_' is generally seen as an implementation
|
|
||||||
level detail.
|
|
||||||
|
|
||||||
- major releases *should*, but are not required to, pass
|
|
||||||
all API tests of the previous latest major released
|
|
||||||
version.
|
|
||||||
|
|
||||||
|
|
||||||
the need to find the right *paths* ...
|
|
||||||
--------------------------------------
|
|
||||||
|
|
||||||
Another focus is well-tested, so-called *path* implementations
|
|
||||||
that allow you to seamlessly work with different backends,
|
|
||||||
currently a local filesystem, subversion working copies and
|
|
||||||
subversion remote URLs. The `jorendorff path.py`_ implementation
|
|
||||||
goes somewhat in the same direction but doesn't try to
|
|
||||||
systematically access local and remote file systems as well as
|
|
||||||
other hierarchic namespaces. The latter is the focus of the
|
|
||||||
``py.path`` API.
|
|
||||||
|
|
||||||
.. _`jorendorff path.py`: http://www.jorendorff.com/articles/python/path/
|
|
||||||
|
|
||||||
How does py development work?
|
|
||||||
=============================
|
|
||||||
|
|
||||||
Communication and coding style
|
|
||||||
------------------------------
|
|
||||||
|
|
||||||
We are discussing things on our `py-dev mailing list`_
|
|
||||||
and collaborate via the codespeak subversion repository.
|
|
||||||
|
|
||||||
We follow a `coding style`_ which strongly builds on `PEP 8`_,
|
|
||||||
the basic python coding style document.
|
|
||||||
|
|
||||||
It's easy to get commit rights especially if you are an
|
|
||||||
experienced python developer and share some of the
|
|
||||||
frustrations described above.
|
|
||||||
|
|
||||||
Licensing
|
|
||||||
-----------------
|
|
||||||
|
|
||||||
The Py lib is released under the MIT license and all
|
|
||||||
contributors need to release their contributions
|
|
||||||
under this license as well.
|
|
||||||
|
|
||||||
connections with PyPy_
|
|
||||||
---------------------------------
|
|
||||||
|
|
||||||
A major motivation for writing the py lib stems from needs
|
|
||||||
during PyPy_ development, most importantly testing and
|
|
||||||
file system access issues. PyPy puts a lot of pressure
|
|
||||||
on a testing environment and thus is a good **reality test**.
|
|
||||||
|
|
||||||
Who is "we"?
|
|
||||||
=============================
|
|
||||||
|
|
||||||
Some initial code was written from *Jens-Uwe Mager* and *Holger
|
|
||||||
Krekel*, after which Holger continued on previous
|
|
||||||
incarnations of the py.test tool (known first as 'utest', then
|
|
||||||
as 'std.utest', now for some 2 years 'py.test').
|
|
||||||
|
|
||||||
Helpful discussions took place with *Martijn Faassen*, *Stephan
|
|
||||||
Schwarzer*, *Brian Dorsey*, *Grigh Gheorghiu* and then
|
|
||||||
*Armin Rigo* who contributed important parts.
|
|
||||||
He and Holger came up with a couple of iterations of the
|
|
||||||
testing-code that reduced the API to basically nothing: just the
|
|
||||||
plain assert statement and a ``py.test.raises`` method to
|
|
||||||
check for occurring exceptions within tests.
|
|
||||||
|
|
||||||
Currently (as of 2007), more people are involved, working partly
funded through merlinux_ and the PyPy EU project; Carl Friedrich
Bolz, Guido Wesdorp and Maciej Fijalkowski in particular contributed
major parts of the py lib in 2006 and 2007.
|
|
||||||
|
|
||||||
.. _`talk at EP2004`: http://codespeak.net/svn/user/hpk/talks/std-talk.txt
|
|
||||||
.. _`coding style`: coding-style.html
|
|
||||||
.. _`PEP 8`: http://www.python.org/peps/pep-0008.html
|
|
||||||
.. _`py-dev mailing list`: http://codespeak.net/mailman/listinfo/py-dev
|
|
||||||
.. _`test environment`: test.html
|
|
||||||
.. _`PyPy`: http://codespeak.net/pypy
|
|
||||||
.. _future: future.html
|
|
||||||
.. _`py.test tool and library`: test.html
|
|
||||||
.. _merlinux: http://merlinux.de
|
|
||||||
|
|
||||||
--
|
|
||||||
|
|
||||||
.. [#] FOSS is an evolving acronym for Free and Open Source Software
|
|
||||||
|
|
172
py/doc/xml.txt
|
@ -1,172 +0,0 @@
|
||||||
====================================================
|
|
||||||
py.xml: Lightweight and flexible xml/html generation
|
|
||||||
====================================================
|
|
||||||
|
|
||||||
.. contents::
|
|
||||||
.. sectnum::
|
|
||||||
|
|
||||||
Motivation
|
|
||||||
==========
|
|
||||||
|
|
||||||
There are a plethora of frameworks and libraries to generate
|
|
||||||
xml and html trees. However, many of them are large, have a
|
|
||||||
steep learning curve and are often hard to debug. Not to
|
|
||||||
speak of the fact that they are frameworks to begin with.
|
|
||||||
|
|
||||||
The py lib strives to offer enough functionality to represent
|
|
||||||
itself and especially its API in html or xml.
|
|
||||||
|
|
||||||
.. _xist: http://www.livinglogic.de/Python/xist/index.html
|
|
||||||
.. _`exchange data`: execnet.html#exchange-data
|
|
||||||
|
|
||||||
a pythonic object model, please
|
|
||||||
================================
|
|
||||||
|
|
||||||
The py lib offers a pythonic way to generate xml/html, based on
|
|
||||||
ideas from xist_ which `uses python class objects`_ to build
|
|
||||||
xml trees. However, xist_'s implementation is somewhat heavy
|
|
||||||
because it has additional goals like transformations and
|
|
||||||
supporting many namespaces. But its basic idea is very easy.
|
|
||||||
|
|
||||||
.. _`uses python class objects`: http://www.livinglogic.de/Python/xist/Howto.html
|
|
||||||
|
|
||||||
generating arbitrary xml structures
|
|
||||||
-----------------------------------
|
|
||||||
|
|
||||||
With ``py.xml.Namespace`` you have the basis
|
|
||||||
to generate custom xml-fragments on the fly::
|
|
||||||
|
|
||||||
class ns(py.xml.Namespace):
|
|
||||||
"my custom xml namespace"
|
|
||||||
doc = ns.books(
|
|
||||||
ns.book(
|
|
||||||
ns.author("May Day"),
|
|
||||||
ns.title("python for java programmers"),),
|
|
||||||
ns.book(
|
|
||||||
ns.author("why"),
|
|
||||||
ns.title("Java for Python programmers"),),
|
|
||||||
publisher="N.N",
|
|
||||||
)
|
|
||||||
print doc.unicode(indent=2).encode('utf8')
|
|
||||||
|
|
||||||
will give you this representation::
|
|
||||||
|
|
||||||
<books publisher="N.N">
|
|
||||||
<book>
|
|
||||||
<author>May Day</author>
|
|
||||||
<title>python for java programmers</title></book>
|
|
||||||
<book>
|
|
||||||
<author>why</author>
|
|
||||||
<title>Java for Python programmers</title></book></books>
|
|
||||||
|
|
||||||
In a sentence: positional arguments are child-tags and
|
|
||||||
keyword-arguments are attributes.
|
|
||||||
|
|
||||||
On a side note, you'll see that the unicode-serializer
|
|
||||||
supports a nice indentation style which keeps your generated
|
|
||||||
html readable, basically through emulating python's white
|
|
||||||
space significance by putting closing-tags rightmost and
|
|
||||||
almost invisible at first glance :-)
|
|
||||||
|
|
||||||
basic example for generating html
|
|
||||||
---------------------------------
|
|
||||||
|
|
||||||
Consider this example::
|
|
||||||
|
|
||||||
from py.xml import html # html namespace
|
|
||||||
|
|
||||||
paras = "First Para", "Second para"
|
|
||||||
|
|
||||||
doc = html.html(
|
|
||||||
html.head(
|
|
||||||
html.meta(name="Content-Type", value="text/html; charset=latin1")),
|
|
||||||
html.body(
|
|
||||||
[html.p(p) for p in paras]))
|
|
||||||
|
|
||||||
print unicode(doc).encode('latin1')
|
|
||||||
|
|
||||||
Again, tags are objects which contain tags and have attributes.
|
|
||||||
More exactly, Tags inherit from the list type and thus can be
|
|
||||||
manipulated as list objects. They additionally support a default
|
|
||||||
way to represent themselves as a serialized unicode object.
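Since Tags really are lists, you can also build up a document
incrementally; here is a minimal sketch that relies only on the
list behaviour described above (``paras`` as in the example)::

    body = html.body()
    for p in paras:
        body.append(html.p(p))   # plain list.append(), Tags are lists
    doc = html.html(html.head(), body)
    print unicode(doc).encode('latin1')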
|
|
||||||
|
|
||||||
If you happen to look at the py.xml implementation you'll
|
|
||||||
note that the tag/namespace implementation consumes some 50 lines
|
|
||||||
with another 50 lines for the unicode serialization code.
|
|
||||||
|
|
||||||
CSS-styling your html Tags
|
|
||||||
--------------------------
|
|
||||||
|
|
||||||
One aspect where many of the huge python xml/html generation
|
|
||||||
frameworks utterly fail is a clean and convenient integration
|
|
||||||
of CSS styling. Often, developers are left alone with keeping
|
|
||||||
CSS style definitions in sync with some style files
|
|
||||||
represented as strings (often in a separate .css file). Not
|
|
||||||
only is this hard to debug but the missing abstractions make
|
|
||||||
it hard to modify the styling of your tags or to choose custom
|
|
||||||
style representations (inline, html.head or external). Add the
|
|
||||||
browsers' usual tolerance of messiness and errors in style
|
|
||||||
references and welcome to hell, known as the domain of
|
|
||||||
developing web applications :-)
|
|
||||||
|
|
||||||
By contrast, consider this CSS styling example::
|
|
||||||
|
|
||||||
class my(html):
|
|
||||||
"my initial custom style"
|
|
||||||
class body(html.body):
|
|
||||||
style = html.Style(font_size = "120%")
|
|
||||||
|
|
||||||
class h2(html.h2):
|
|
||||||
style = html.Style(background = "grey")
|
|
||||||
|
|
||||||
class p(html.p):
|
|
||||||
style = html.Style(font_weight="bold")
|
|
||||||
|
|
||||||
doc = my.html(
|
|
||||||
my.head(),
|
|
||||||
my.body(
|
|
||||||
my.h2("hello world"),
|
|
||||||
my.p("bold as bold can")
|
|
||||||
)
|
|
||||||
)
|
|
||||||
|
|
||||||
print doc.unicode(indent=2)
|
|
||||||
|
|
||||||
This will give you a small'n'mean, self-contained
|
|
||||||
representation by default::
|
|
||||||
|
|
||||||
<html>
|
|
||||||
<head/>
|
|
||||||
<body style="font-size: 120%">
|
|
||||||
<h2 style="background: grey">hello world</h2>
|
|
||||||
<p style="font-weight: bold">bold as bold can</p></body></html>
|
|
||||||
|
|
||||||
Most importantly, note that the inline-styling is just an
|
|
||||||
implementation detail of the unicode serialization code.
|
|
||||||
You can easily modify the serialization to put your styling into the
|
|
||||||
``html.head`` or in a separate file and autogenerate CSS-class
|
|
||||||
names or ids.
|
|
||||||
|
|
||||||
Hey, you could even write tests that you are using correct
|
|
||||||
styles suitable for specific browser requirements. Did I mention
|
|
||||||
that the ability to easily write tests for your generated
|
|
||||||
html and its serialization could help to develop _stable_ user
|
|
||||||
interfaces?
|
|
||||||
|
|
||||||
More to come ...
|
|
||||||
----------------
|
|
||||||
|
|
||||||
For now, I don't think we should strive to offer much more
|
|
||||||
than the above. However, it is probably not hard to offer
|
|
||||||
*partial serialization* to allow generating maybe hundreds of
|
|
||||||
complex html documents per second. Basically we would allow
|
|
||||||
putting callables both as Tag content and as values of
|
|
||||||
attributes. A slightly more advanced Serialization would then
|
|
||||||
produce a list of unicode objects intermingled with callables.
|
|
||||||
At HTTP-Request time the callables would get called to
|
|
||||||
complete the probably request-specific serialization of
|
|
||||||
your Tags. Hum, it's probably harder to explain this than to
|
|
||||||
actually code it :-)
|
|
||||||
|
|
||||||
.. _Nevow: http://www.divmod.org/projects/nevow
|
|
||||||
.. _`py.test`: test.html
|
|
|
@ -1 +0,0 @@
|
||||||
""" input/output helping """
|
|
|
@ -1,22 +0,0 @@
|
||||||
|
|
||||||
import os
|
|
||||||
|
|
||||||
def dupfile(f, mode=None, buffering=0, raising=False):
|
|
||||||
""" return a new open file object that's a duplicate of f
|
|
||||||
|
|
||||||
mode is duplicated if not given, 'buffering' controls
|
|
||||||
buffer size (defaulting to no buffering) and 'raising'
|
|
||||||
defines whether an exception is raised when an incompatible
|
|
||||||
file object is passed in (if raising is False, the file
|
|
||||||
object itself will be returned)
|
|
||||||
"""
|
|
||||||
try:
|
|
||||||
fd = f.fileno()
|
|
||||||
except AttributeError:
|
|
||||||
if raising:
|
|
||||||
raise
|
|
||||||
return f
|
|
||||||
newfd = os.dup(fd)
|
|
||||||
mode = mode and mode or f.mode
|
|
||||||
return os.fdopen(newfd, mode, buffering)
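# A hedged usage sketch (illustration only, not part of the original module):
# closing the duplicate leaves the original file object fully usable.
if __name__ == '__main__':
    import tempfile
    f = tempfile.TemporaryFile()
    f2 = dupfile(f)        # new file object on a dup()ed descriptor, same mode
    f2.write("hello")
    f2.close()             # only the duplicate's descriptor is closed
    f.seek(0)
    print f.read()         # prints: hello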
|
|
||||||
|
|
|
@ -1,59 +0,0 @@
|
||||||
|
|
||||||
import os
|
|
||||||
import sys
|
|
||||||
import py
|
|
||||||
import tempfile
|
|
||||||
|
|
||||||
class FDCapture:
|
|
||||||
""" Capture IO to/from a given os-level filedescriptor. """
|
|
||||||
|
|
||||||
def __init__(self, targetfd, tmpfile=None):
|
|
||||||
self.targetfd = targetfd
|
|
||||||
if tmpfile is None:
|
|
||||||
tmpfile = self.maketmpfile()
|
|
||||||
self.tmpfile = tmpfile
|
|
||||||
self._savefd = os.dup(targetfd)
|
|
||||||
os.dup2(self.tmpfile.fileno(), targetfd)
|
|
||||||
self._patched = []
|
|
||||||
|
|
||||||
def setasfile(self, name, module=sys):
|
|
||||||
""" patch <module>.<name> to self.tmpfile
|
|
||||||
"""
|
|
||||||
key = (module, name)
|
|
||||||
self._patched.append((key, getattr(module, name)))
|
|
||||||
setattr(module, name, self.tmpfile)
|
|
||||||
|
|
||||||
def unsetfiles(self):
|
|
||||||
""" unpatch all patched items
|
|
||||||
"""
|
|
||||||
while self._patched:
|
|
||||||
(module, name), value = self._patched.pop()
|
|
||||||
setattr(module, name, value)
|
|
||||||
|
|
||||||
def done(self):
|
|
||||||
""" unpatch and clean up, returns the self.tmpfile (file object)
|
|
||||||
"""
|
|
||||||
os.dup2(self._savefd, self.targetfd)
|
|
||||||
self.unsetfiles()
|
|
||||||
os.close(self._savefd)
|
|
||||||
self.tmpfile.seek(0)
|
|
||||||
return self.tmpfile
|
|
||||||
|
|
||||||
def maketmpfile(self):
|
|
||||||
""" create a temporary file
|
|
||||||
"""
|
|
||||||
f = tempfile.TemporaryFile()
|
|
||||||
newf = py.io.dupfile(f)
|
|
||||||
f.close()
|
|
||||||
return newf
|
|
||||||
|
|
||||||
def writeorg(self, str):
|
|
||||||
""" write a string to the original file descriptor
|
|
||||||
"""
|
|
||||||
tempfp = tempfile.TemporaryFile()
|
|
||||||
try:
|
|
||||||
os.dup2(self._savefd, tempfp.fileno())
|
|
||||||
tempfp.write(str)
|
|
||||||
finally:
|
|
||||||
tempfp.close()
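# A hedged usage sketch (illustration only, not part of the original module):
# capture OS-level stdout, bypass it once via writeorg(), then restore it.
if __name__ == '__main__':
    cap = FDCapture(1)                           # start capturing fd 1 (stdout)
    os.write(1, "captured")                      # lands in cap.tmpfile
    cap.writeorg("this reaches the real stdout\n")
    f = cap.done()                               # restore fd 1, get the tmpfile
    assert f.read() == "captured"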
|
|
||||||
|
|
|
@ -1,148 +0,0 @@
|
||||||
import os
|
|
||||||
import sys
|
|
||||||
import py
|
|
||||||
try: from cStringIO import StringIO
|
|
||||||
except ImportError: from StringIO import StringIO
|
|
||||||
|
|
||||||
|
|
||||||
class Capture(object):
|
|
||||||
def call(cls, func, *args, **kwargs):
|
|
||||||
""" return a (res, out, err) tuple where
|
|
||||||
out and err represent the output/error output
|
|
||||||
during function execution.
|
|
||||||
call the given function with args/kwargs
|
|
||||||
and capture output/error during its execution.
|
|
||||||
"""
|
|
||||||
so = cls()
|
|
||||||
try:
|
|
||||||
res = func(*args, **kwargs)
|
|
||||||
finally:
|
|
||||||
out, err = so.reset()
|
|
||||||
return res, out, err
|
|
||||||
call = classmethod(call)
|
|
||||||
|
|
||||||
def reset(self):
|
|
||||||
""" reset sys.stdout and sys.stderr
|
|
||||||
|
|
||||||
returns a tuple of file objects (out, err) for the captured
|
|
||||||
data
|
|
||||||
"""
|
|
||||||
outfile, errfile = self.done()
|
|
||||||
return outfile.read(), errfile.read()
|
|
||||||
|
|
||||||
|
|
||||||
class StdCaptureFD(Capture):
|
|
||||||
""" This class allows to capture writes to FD1 and FD2
|
|
||||||
and may connect a NULL file to FD0 (and prevent
|
|
||||||
reads from sys.stdin)
|
|
||||||
"""
|
|
||||||
def __init__(self, out=True, err=True, mixed=False, in_=True, patchsys=True):
|
|
||||||
if in_:
|
|
||||||
self._oldin = (sys.stdin, os.dup(0))
|
|
||||||
sys.stdin = DontReadFromInput()
|
|
||||||
fd = os.open(devnullpath, os.O_RDONLY)
|
|
||||||
os.dup2(fd, 0)
|
|
||||||
os.close(fd)
|
|
||||||
if out:
|
|
||||||
self.out = py.io.FDCapture(1)
|
|
||||||
if patchsys:
|
|
||||||
self.out.setasfile('stdout')
|
|
||||||
if err:
|
|
||||||
if mixed and out:
|
|
||||||
tmpfile = self.out.tmpfile
|
|
||||||
else:
|
|
||||||
tmpfile = None
|
|
||||||
self.err = py.io.FDCapture(2, tmpfile=tmpfile)
|
|
||||||
if patchsys:
|
|
||||||
self.err.setasfile('stderr')
|
|
||||||
|
|
||||||
def done(self):
|
|
||||||
""" return (outfile, errfile) and stop capturing. """
|
|
||||||
outfile = errfile = emptyfile
|
|
||||||
if hasattr(self, 'out'):
|
|
||||||
outfile = self.out.done()
|
|
||||||
if hasattr(self, 'err'):
|
|
||||||
errfile = self.err.done()
|
|
||||||
if hasattr(self, '_oldin'):
|
|
||||||
oldsys, oldfd = self._oldin
|
|
||||||
os.dup2(oldfd, 0)
|
|
||||||
os.close(oldfd)
|
|
||||||
sys.stdin = oldsys
|
|
||||||
return outfile, errfile
|
|
||||||
|
|
||||||
class StdCapture(Capture):
|
|
||||||
""" This class allows to capture writes to sys.stdout|stderr "in-memory"
|
|
||||||
and will raise errors on tries to read from sys.stdin. It only
|
|
||||||
modifies sys.stdout|stderr|stdin attributes and does not
|
|
||||||
touch underlying File Descriptors (use StdCaptureFD for that).
|
|
||||||
"""
|
|
||||||
def __init__(self, out=True, err=True, in_=True, mixed=False):
|
|
||||||
self._out = out
|
|
||||||
self._err = err
|
|
||||||
self._in = in_
|
|
||||||
if out:
|
|
||||||
self.oldout = sys.stdout
|
|
||||||
sys.stdout = self.newout = StringIO()
|
|
||||||
if err:
|
|
||||||
self.olderr = sys.stderr
|
|
||||||
if out and mixed:
|
|
||||||
newerr = self.newout
|
|
||||||
else:
|
|
||||||
newerr = StringIO()
|
|
||||||
sys.stderr = self.newerr = newerr
|
|
||||||
if in_:
|
|
||||||
self.oldin = sys.stdin
|
|
||||||
sys.stdin = self.newin = DontReadFromInput()
|
|
||||||
|
|
||||||
def reset(self):
|
|
||||||
""" return captured output as strings and restore sys.stdout/err."""
|
|
||||||
x, y = self.done()
|
|
||||||
return x.read(), y.read()
|
|
||||||
|
|
||||||
def done(self):
|
|
||||||
""" return (outfile, errfile) and stop capturing. """
|
|
||||||
o,e = sys.stdout, sys.stderr
|
|
||||||
outfile = errfile = emptyfile
|
|
||||||
if self._out:
|
|
||||||
try:
|
|
||||||
sys.stdout = self.oldout
|
|
||||||
except AttributeError:
|
|
||||||
raise IOError("stdout capturing already reset")
|
|
||||||
del self.oldout
|
|
||||||
outfile = self.newout
|
|
||||||
outfile.seek(0)
|
|
||||||
if self._err:
|
|
||||||
try:
|
|
||||||
sys.stderr = self.olderr
|
|
||||||
except AttributeError:
|
|
||||||
raise IOError("stderr capturing already reset")
|
|
||||||
del self.olderr
|
|
||||||
errfile = self.newerr
|
|
||||||
errfile.seek(0)
|
|
||||||
if self._in:
|
|
||||||
sys.stdin = self.oldin
|
|
||||||
return outfile, errfile
|
|
||||||
|
|
||||||
class DontReadFromInput:
|
|
||||||
"""Temporary stub class. Ideally when stdin is accessed, the
|
|
||||||
capturing should be turned off, with possibly all data captured
|
|
||||||
so far sent to the screen. This should be configurable, though,
|
|
||||||
because in automated test runs it is better to crash than
|
|
||||||
hang indefinitely.
|
|
||||||
"""
|
|
||||||
def read(self, *args):
|
|
||||||
raise IOError("reading from stdin while output is captured")
|
|
||||||
readline = read
|
|
||||||
readlines = read
|
|
||||||
__iter__ = read
|
|
||||||
|
|
||||||
try:
|
|
||||||
devnullpath = os.devnull
|
|
||||||
except AttributeError:
|
|
||||||
if os.name == 'nt':
|
|
||||||
devnullpath = 'NUL'
|
|
||||||
else:
|
|
||||||
devnullpath = '/dev/null'
|
|
||||||
|
|
||||||
emptyfile = StringIO()
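# A hedged usage sketch (illustration only, not part of the original module):
# capture sys.stdout/stderr in memory and fetch the text back as strings.
if __name__ == '__main__':
    cap = StdCapture(in_=False)       # leave sys.stdin alone for this demo
    print "hello world"
    print >>sys.stderr, "hello error"
    out, err = cap.reset()
    assert out == "hello world\n"
    assert err == "hello error\n"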
|
|
||||||
|
|
|
@ -1 +0,0 @@
|
||||||
#
|
|
|
@ -1,20 +0,0 @@
|
||||||
|
|
||||||
import py
|
|
||||||
|
|
||||||
def test_dupfile():
|
|
||||||
somefile = py.std.os.tmpfile()
|
|
||||||
flist = []
|
|
||||||
for i in range(5):
|
|
||||||
nf = py.io.dupfile(somefile)
|
|
||||||
assert nf != somefile
|
|
||||||
assert nf.fileno() != somefile.fileno()
|
|
||||||
assert nf not in flist
|
|
||||||
print >>nf, i,
|
|
||||||
flist.append(nf)
|
|
||||||
for i in range(5):
|
|
||||||
f = flist[i]
|
|
||||||
f.close()
|
|
||||||
somefile.seek(0)
|
|
||||||
s = somefile.read()
|
|
||||||
assert s.startswith("01234")
|
|
||||||
somefile.close()
|
|
|
@ -1,59 +0,0 @@
|
||||||
import os, sys
|
|
||||||
import py
|
|
||||||
|
|
||||||
class TestFDCapture:
|
|
||||||
def test_basic(self):
|
|
||||||
tmpfile = py.std.os.tmpfile()
|
|
||||||
fd = tmpfile.fileno()
|
|
||||||
cap = py.io.FDCapture(fd)
|
|
||||||
os.write(fd, "hello")
|
|
||||||
f = cap.done()
|
|
||||||
s = f.read()
|
|
||||||
assert s == "hello"
|
|
||||||
|
|
||||||
def test_stderr(self):
|
|
||||||
cap = py.io.FDCapture(2)
|
|
||||||
cap.setasfile('stderr')
|
|
||||||
print >>sys.stderr, "hello"
|
|
||||||
f = cap.done()
|
|
||||||
s = f.read()
|
|
||||||
assert s == "hello\n"
|
|
||||||
|
|
||||||
def test_stdin(self):
|
|
||||||
f = os.tmpfile()
|
|
||||||
print >>f, "3"
|
|
||||||
f.seek(0)
|
|
||||||
cap = py.io.FDCapture(0, tmpfile=f)
|
|
||||||
# check with os.read() directly instead of raw_input(), because
|
|
||||||
# sys.stdin itself may be redirected (as py.test now does by default)
|
|
||||||
x = os.read(0, 100).strip()
|
|
||||||
f = cap.done()
|
|
||||||
assert x == "3"
|
|
||||||
|
|
||||||
def test_writeorg(self):
|
|
||||||
tmppath = py.test.ensuretemp('test_writeorg').ensure('stderr',
|
|
||||||
file=True)
|
|
||||||
tmpfp = tmppath.open('w+b')
|
|
||||||
try:
|
|
||||||
cap = py.io.FDCapture(tmpfp.fileno())
|
|
||||||
print >>tmpfp, 'foo'
|
|
||||||
cap.writeorg('bar\n')
|
|
||||||
finally:
|
|
||||||
tmpfp.close()
|
|
||||||
f = cap.done()
|
|
||||||
scap = f.read()
|
|
||||||
assert scap == 'foo\n'
|
|
||||||
stmp = tmppath.read()
|
|
||||||
assert stmp == "bar\n"
|
|
||||||
|
|
||||||
def test_writeorg_wrongtype(self):
|
|
||||||
tmppath = py.test.ensuretemp('test_writeorg').ensure('stdout',
|
|
||||||
file=True)
|
|
||||||
tmpfp = tmppath.open('r')
|
|
||||||
try:
|
|
||||||
cap = py.io.FDCapture(tmpfp.fileno())
|
|
||||||
py.test.raises(IOError, "cap.writeorg('bar\\n')")
|
|
||||||
finally:
|
|
||||||
tmpfp.close()
|
|
||||||
f = cap.done()
|
|
||||||
|
|
|
@ -1,150 +0,0 @@
|
||||||
import os, sys
|
|
||||||
import py
|
|
||||||
|
|
||||||
class TestStdCapture:
|
|
||||||
def getcapture(self, **kw):
|
|
||||||
return py.io.StdCapture(**kw)
|
|
||||||
|
|
||||||
def test_capturing_done_simple(self):
|
|
||||||
cap = self.getcapture()
|
|
||||||
print "hello world"
|
|
||||||
print >>sys.stderr, "hello error"
|
|
||||||
outfile, errfile = cap.done()
|
|
||||||
assert outfile.read() == "hello world\n"
|
|
||||||
assert errfile.read() == "hello error\n"
|
|
||||||
|
|
||||||
def test_capturing_reset_simple(self):
|
|
||||||
cap = self.getcapture()
|
|
||||||
print "hello world"
|
|
||||||
print >>sys.stderr, "hello error"
|
|
||||||
out, err = cap.reset()
|
|
||||||
assert out == "hello world\n"
|
|
||||||
assert err == "hello error\n"
|
|
||||||
|
|
||||||
def test_capturing_mixed(self):
|
|
||||||
cap = self.getcapture(mixed=True)
|
|
||||||
print "hello",
|
|
||||||
print >>sys.stderr, "world",
|
|
||||||
print >>sys.stdout, ".",
|
|
||||||
out, err = cap.reset()
|
|
||||||
assert out.strip() == "hello world ."
|
|
||||||
assert not err
|
|
||||||
|
|
||||||
def test_capturing_twice_error(self):
|
|
||||||
cap = self.getcapture()
|
|
||||||
print "hello"
|
|
||||||
cap.reset()
|
|
||||||
py.test.raises(EnvironmentError, "cap.reset()")
|
|
||||||
|
|
||||||
def test_capturing_modify_sysouterr_in_between(self):
|
|
||||||
oldout = sys.stdout
|
|
||||||
olderr = sys.stderr
|
|
||||||
cap = self.getcapture()
|
|
||||||
print "hello",
|
|
||||||
print >>sys.stderr, "world",
|
|
||||||
sys.stdout = py.std.StringIO.StringIO()
|
|
||||||
sys.stderr = py.std.StringIO.StringIO()
|
|
||||||
print "not seen"
|
|
||||||
print >>sys.stderr, "not seen"
|
|
||||||
out, err = cap.reset()
|
|
||||||
assert out == "hello"
|
|
||||||
assert err == "world"
|
|
||||||
assert sys.stdout == oldout
|
|
||||||
assert sys.stderr == olderr
|
|
||||||
|
|
||||||
def test_capturing_error_recursive(self):
|
|
||||||
cap1 = self.getcapture()
|
|
||||||
print "cap1"
|
|
||||||
cap2 = self.getcapture()
|
|
||||||
print "cap2"
|
|
||||||
out2, err2 = cap2.reset()
|
|
||||||
py.test.raises(EnvironmentError, "cap2.reset()")
|
|
||||||
out1, err1 = cap1.reset()
|
|
||||||
assert out1 == "cap1\n"
|
|
||||||
assert out2 == "cap2\n"
|
|
||||||
|
|
||||||
def test_just_out_capture(self):
|
|
||||||
cap = self.getcapture(out=True, err=False)
|
|
||||||
print >>sys.stdout, "hello"
|
|
||||||
print >>sys.stderr, "world"
|
|
||||||
out, err = cap.reset()
|
|
||||||
assert out == "hello\n"
|
|
||||||
assert not err
|
|
||||||
|
|
||||||
def test_just_err_capture(self):
|
|
||||||
cap = self.getcapture(out=False, err=True)
|
|
||||||
print >>sys.stdout, "hello"
|
|
||||||
print >>sys.stderr, "world"
|
|
||||||
out, err = cap.reset()
|
|
||||||
assert err == "world\n"
|
|
||||||
assert not out
|
|
||||||
|
|
||||||
def test_stdin_restored(self):
|
|
||||||
old = sys.stdin
|
|
||||||
cap = self.getcapture(in_=True)
|
|
||||||
newstdin = sys.stdin
|
|
||||||
out, err = cap.reset()
|
|
||||||
assert newstdin != sys.stdin
|
|
||||||
assert sys.stdin is old
|
|
||||||
|
|
||||||
def test_stdin_nulled_by_default(self):
|
|
||||||
print "XXX this test may well hang instead of crashing"
|
|
||||||
print "XXX which indicates an error in the underlying capturing"
|
|
||||||
print "XXX mechanisms"
|
|
||||||
cap = self.getcapture()
|
|
||||||
py.test.raises(IOError, "sys.stdin.read()")
|
|
||||||
out, err = cap.reset()
|
|
||||||
|
|
||||||
class TestStdCaptureFD(TestStdCapture):
|
|
||||||
def getcapture(self, **kw):
|
|
||||||
return py.io.StdCaptureFD(**kw)
|
|
||||||
|
|
||||||
def test_intermingling(self):
|
|
||||||
cap = self.getcapture()
|
|
||||||
os.write(1, "1")
|
|
||||||
print >>sys.stdout, 2,
|
|
||||||
os.write(1, "3")
|
|
||||||
os.write(2, "a")
|
|
||||||
print >>sys.stderr, "b",
|
|
||||||
os.write(2, "c")
|
|
||||||
out, err = cap.reset()
|
|
||||||
assert out == "123"
|
|
||||||
assert err == "abc"
|
|
||||||
|
|
||||||
def test_callcapture(self):
|
|
||||||
def func(x, y):
|
|
||||||
print x
|
|
||||||
print >>py.std.sys.stderr, y
|
|
||||||
return 42
|
|
||||||
|
|
||||||
res, out, err = py.io.StdCaptureFD.call(func, 3, y=4)
|
|
||||||
assert res == 42
|
|
||||||
assert out.startswith("3")
|
|
||||||
assert err.startswith("4")
|
|
||||||
|
|
||||||
def test_capture_no_sys():
|
|
||||||
capsys = py.io.StdCapture()
|
|
||||||
try:
|
|
||||||
cap = py.io.StdCaptureFD(patchsys=False)
|
|
||||||
print >>sys.stdout, "hello"
|
|
||||||
print >>sys.stderr, "world"
|
|
||||||
os.write(1, "1")
|
|
||||||
os.write(2, "2")
|
|
||||||
out, err = cap.reset()
|
|
||||||
assert out == "1"
|
|
||||||
assert err == "2"
|
|
||||||
finally:
|
|
||||||
capsys.reset()
|
|
||||||
|
|
||||||
def test_callcapture_nofd():
|
|
||||||
def func(x, y):
|
|
||||||
os.write(1, "hello")
|
|
||||||
os.write(2, "hello")
|
|
||||||
print x
|
|
||||||
print >>py.std.sys.stderr, y
|
|
||||||
return 42
|
|
||||||
|
|
||||||
res, out, err = py.io.StdCapture.call(func, 3, y=4)
|
|
||||||
assert res == 42
|
|
||||||
assert out.startswith("3")
|
|
||||||
assert err.startswith("4")
|
|
|
@ -1 +0,0 @@
|
||||||
#
|
|
203
py/misc/_dist.py
|
@ -1,203 +0,0 @@
|
||||||
import py
|
|
||||||
import sys, os, re
|
|
||||||
from distutils import sysconfig
|
|
||||||
from distutils import core
|
|
||||||
|
|
||||||
winextensions = 1
|
|
||||||
if sys.platform == 'win32':
|
|
||||||
try:
|
|
||||||
import _winreg, win32gui, win32con
|
|
||||||
except ImportError:
|
|
||||||
winextensions = 0
|
|
||||||
|
|
||||||
class Params:
|
|
||||||
""" a crazy hack to convince distutils to please
|
|
||||||
install all of our files inside the package.
|
|
||||||
"""
|
|
||||||
_sitepackages = py.path.local(sysconfig.get_python_lib())
|
|
||||||
def __init__(self, pkgmod):
|
|
||||||
name = pkgmod.__name__
|
|
||||||
self._pkgdir = py.path.local(pkgmod.__file__).dirpath()
|
|
||||||
self._rootdir = self._pkgdir.dirpath()
|
|
||||||
self._pkgtarget = self._sitepackages.join(name)
|
|
||||||
self._datadict = {}
|
|
||||||
self.packages = []
|
|
||||||
self.scripts = []
|
|
||||||
self.hacktree()
|
|
||||||
self.data_files = self._datadict.items()
|
|
||||||
self.data_files.sort()
|
|
||||||
self.packages.sort()
|
|
||||||
self.scripts.sort()
|
|
||||||
|
|
||||||
def hacktree(self):
|
|
||||||
for p in self._pkgdir.visit(None, lambda x: x.basename != '.svn'):
|
|
||||||
if p.check(file=1):
|
|
||||||
if p.ext in ('.pyc', '.pyo'):
|
|
||||||
continue
|
|
||||||
if p.dirpath().basename == 'bin':
|
|
||||||
self.scripts.append(p.relto(self._rootdir))
|
|
||||||
self.adddatafile(p)
|
|
||||||
elif p.ext == '.py':
|
|
||||||
self.addpythonfile(p)
|
|
||||||
else:
|
|
||||||
self.adddatafile(p)
|
|
||||||
#else:
|
|
||||||
# if not p.listdir():
|
|
||||||
# self.adddatafile(p.ensure('dummy'))
|
|
||||||
|
|
||||||
def adddatafile(self, p):
|
|
||||||
if p.ext in ('.pyc', '.pyo'):
|
|
||||||
return
|
|
||||||
target = self._pkgtarget.join(p.dirpath().relto(self._pkgdir))
|
|
||||||
l = self._datadict.setdefault(str(target), [])
|
|
||||||
l.append(p.relto(self._rootdir))
|
|
||||||
|
|
||||||
def addpythonfile(self, p):
|
|
||||||
parts = p.parts()
|
|
||||||
for above in p.parts(reverse=True)[1:]:
|
|
||||||
if self._pkgdir.relto(above):
|
|
||||||
dottedname = p.dirpath().relto(self._rootdir).replace(p.sep, '.')
|
|
||||||
if dottedname not in self.packages:
|
|
||||||
self.packages.append(dottedname)
|
|
||||||
break
|
|
||||||
if not above.join('__init__.py').check():
|
|
||||||
self.adddatafile(p)
|
|
||||||
#print "warning, added data file", p
|
|
||||||
break
|
|
||||||
|
|
||||||
#if sys.platform != 'win32':
|
|
||||||
# scripts.remove('py/bin/pytest.cmd')
|
|
||||||
#else:
|
|
||||||
# scripts.remove('py/bin/py.test')
|
|
||||||
#
|
|
||||||
|
|
||||||
### helpers:
|
|
||||||
def checknonsvndir(p):
|
|
||||||
if p.basename != '.svn' and p.check(dir=1):
|
|
||||||
return True
|
|
||||||
|
|
||||||
def dump(params):
|
|
||||||
print "packages"
|
|
||||||
for x in params.packages:
|
|
||||||
print "package ", x
|
|
||||||
print
|
|
||||||
print "scripts"
|
|
||||||
for x in params.scripts:
|
|
||||||
print "script ", x
|
|
||||||
print
|
|
||||||
|
|
||||||
print "data files"
|
|
||||||
for x in params.data_files:
|
|
||||||
print "data file ", x
|
|
||||||
print
|
|
||||||
|
|
||||||
def addbindir2path():
|
|
||||||
if sys.platform != 'win32' or not winextensions:
|
|
||||||
return
|
|
||||||
|
|
||||||
# Add py/bin to PATH environment variable
|
|
||||||
bindir = os.path.join(sysconfig.get_python_lib(), "py", "bin", "win32")
|
|
||||||
|
|
||||||
# check for the user path
|
|
||||||
ureg = _winreg.ConnectRegistry(None, _winreg.HKEY_CURRENT_USER)
|
|
||||||
ukey = r"Environment"
|
|
||||||
|
|
||||||
# not every user has his own path on windows
|
|
||||||
try:
|
|
||||||
upath = get_registry_value(ureg, ukey, "PATH")
|
|
||||||
except WindowsError:
|
|
||||||
upath=""
|
|
||||||
# if bindir already in userpath -> do nothing
|
|
||||||
if bindir in upath:
|
|
||||||
return
|
|
||||||
|
|
||||||
reg = _winreg.ConnectRegistry(None, _winreg.HKEY_LOCAL_MACHINE)
|
|
||||||
key = r"SYSTEM\CurrentControlSet\Control\Session Manager\Environment"
|
|
||||||
path = get_registry_value(reg, key, "Path")
|
|
||||||
# if bindir already in systempath -> do nothing
|
|
||||||
if bindir in path:
|
|
||||||
return
|
|
||||||
path += ";" + bindir
|
|
||||||
print "Setting PATH to:", path
|
|
||||||
|
|
||||||
pathset=False
|
|
||||||
try:
|
|
||||||
set_registry_value(reg, key, "PATH", path)
|
|
||||||
pathset=True
|
|
||||||
except WindowsError:
|
|
||||||
print "cannot set systempath, falling back to userpath"
|
|
||||||
pass
|
|
||||||
|
|
||||||
if not pathset:
|
|
||||||
try:
|
|
||||||
if len(upath)>0: # append a separator if a user path is already present
|
|
||||||
upath += ";"
|
|
||||||
upath+=bindir
|
|
||||||
set_registry_value(ureg, ukey, "Path", upath)
|
|
||||||
pathset=True
|
|
||||||
except WindowsError:
|
|
||||||
print "cannot set userpath, please add %s to your path" % (bindir,)
|
|
||||||
return
|
|
||||||
|
|
||||||
#print "Current PATH is:", get_registry_value(reg, key, "Path")
|
|
||||||
|
|
||||||
# Propagate changes throughout the system
|
|
||||||
win32gui.SendMessageTimeout(win32con.HWND_BROADCAST,
|
|
||||||
win32con.WM_SETTINGCHANGE, 0, "Environment",
|
|
||||||
win32con.SMTO_ABORTIFHUNG, 5000)
|
|
||||||
|
|
||||||
# Propagate changes to current command prompt
|
|
||||||
os.system("set PATH=%s" % path)
|
|
||||||
|
|
||||||
def get_registry_value(reg, key, value_name):
|
|
||||||
k = _winreg.OpenKey(reg, key)
|
|
||||||
value = _winreg.QueryValueEx(k, value_name)[0]
|
|
||||||
_winreg.CloseKey(k)
|
|
||||||
return value
|
|
||||||
|
|
||||||
def set_registry_value(reg, key, value_name, value):
|
|
||||||
k = _winreg.OpenKey(reg, key, 0, _winreg.KEY_WRITE)
|
|
||||||
value_type = _winreg.REG_SZ
|
|
||||||
# if we handle the Path value, then set its type to REG_EXPAND_SZ
|
|
||||||
# so that things like %SystemRoot% get automatically expanded by the
|
|
||||||
# command prompt
|
|
||||||
if value_name == "Path":
|
|
||||||
value_type = _winreg.REG_EXPAND_SZ
|
|
||||||
_winreg.SetValueEx(k, value_name, 0, value_type, value)
|
|
||||||
_winreg.CloseKey(k)
|
|
||||||
|
|
||||||
### end helpers
|
|
||||||
|
|
||||||
def setup(pkg, **kw):
|
|
||||||
""" invoke distutils on a given package.
|
|
||||||
"""
|
|
||||||
if 'install' in sys.argv[1:]:
|
|
||||||
print "precompiling greenlet module"
|
|
||||||
try:
|
|
||||||
x = py.magic.greenlet()
|
|
||||||
except (RuntimeError, ImportError):
|
|
||||||
print "could not precompile greenlet module, skipping"
|
|
||||||
|
|
||||||
params = Params(pkg)
|
|
||||||
#dump(params)
|
|
||||||
source = getattr(pkg, '__pkg__', pkg)
|
|
||||||
namelist = list(core.setup_keywords)
|
|
||||||
namelist.extend(['packages', 'scripts', 'data_files'])
|
|
||||||
for name in namelist:
|
|
||||||
for ns in (source, params):
|
|
||||||
if hasattr(ns, name):
|
|
||||||
kw[name] = getattr(ns, name)
|
|
||||||
break
|
|
||||||
|
|
||||||
#script_args = sys.argv[1:]
|
|
||||||
#if 'install' in script_args:
|
|
||||||
# script_args = ['--quiet'] + script_args
|
|
||||||
# #print "installing", py
|
|
||||||
#py.std.pprint.pprint(kw)
|
|
||||||
core.setup(**kw)
|
|
||||||
if 'install' in sys.argv[1:]:
|
|
||||||
addbindir2path()
|
|
||||||
x = params._rootdir.join('build')
|
|
||||||
if x.check():
|
|
||||||
print "removing", x
|
|
||||||
x.remove()
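# A hedged usage sketch (hypothetical import path and keywords; not part of
# the original module): a project's setup.py would hand its package module
# to setup(), which derives packages/scripts/data_files via Params, e.g.
#
#   import py
#   from py.misc import _dist        # adjust to the real import location
#   _dist.setup(py, name='py', version='0.9.0', description='the py lib')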
|
|
|
@ -1,53 +0,0 @@
|
||||||
""" create a py/test2 hierarchy copied from py/test.
|
|
||||||
useful for refactoring py.test itself while still
using py.test itself.
|
|
||||||
"""
|
|
||||||
|
|
||||||
from _findpy import py
|
|
||||||
|
|
||||||
def change_init(initfile):
|
|
||||||
l = []
|
|
||||||
for line in initfile.readlines():
|
|
||||||
newline = line
|
|
||||||
l.append(line)
|
|
||||||
newline = newline.replace("'test.", "'test2.")
|
|
||||||
newline = newline.replace("'./test/", "'./test2/")
|
|
||||||
if newline != line:
|
|
||||||
l.append(newline)
|
|
||||||
initfile.write("".join(l))
|
|
||||||
|
|
||||||
def perform_replace(directory):
|
|
||||||
for x in directory.visit("*.py",
|
|
||||||
rec=lambda x: x.check(dir=1, dotfile=0)):
|
|
||||||
s = n = x.read()
|
|
||||||
n = n.replace("py.test", "py.test2")
|
|
||||||
n = n.replace("py.__.test.", "py.__.test2.")
|
|
||||||
n = n.replace("py.__.test ", "py.__.test2 ")
|
|
||||||
if s != n:
|
|
||||||
print "writing modified", x
|
|
||||||
x.write(n)
|
|
||||||
|
|
||||||
def cmd(command):
|
|
||||||
print "* executing:", command
|
|
||||||
return py.process.cmdexec(command)
|
|
||||||
|
|
||||||
if __name__ == '__main__':
|
|
||||||
basedir = py.path.local(py.__file__).dirpath()
|
|
||||||
#st = py.path.svnwc(basedir).status()
|
|
||||||
#assert not st.modified
|
|
||||||
olddir = basedir.chdir()
|
|
||||||
try:
|
|
||||||
initfile = basedir.join("__init__.py")
|
|
||||||
cmd("svn revert %s" % initfile)
|
|
||||||
change_init(initfile)
|
|
||||||
|
|
||||||
test2dir = basedir.join("test2")
|
|
||||||
cmd("svn revert -R test2")
|
|
||||||
cmd("rm -rf test2")
|
|
||||||
cmd("svn cp test test2")
|
|
||||||
perform_replace(test2dir)
|
|
||||||
|
|
||||||
finally:
|
|
||||||
olddir.chdir()
|
|
||||||
|
|
||||||
|
|
|
@ -1,84 +0,0 @@
|
||||||
"""
|
|
||||||
A utility to build a Python extension module from C, wrapping distutils.
|
|
||||||
"""
|
|
||||||
import py
|
|
||||||
|
|
||||||
# XXX we should run distutils in a subprocess, because it messes up the
|
|
||||||
# environment and who knows what else. Currently we just save
|
|
||||||
# and restore os.environ.
|
|
||||||
|
|
||||||
def make_module_from_c(cfile):
|
|
||||||
import os, sys, imp
|
|
||||||
from distutils.core import setup
|
|
||||||
from distutils.extension import Extension
|
|
||||||
debug = 0
|
|
||||||
|
|
||||||
#try:
|
|
||||||
# from distutils.log import set_threshold
|
|
||||||
# set_threshold(10000)
|
|
||||||
#except ImportError:
|
|
||||||
# print "ERROR IMPORTING"
|
|
||||||
# pass
|
|
||||||
|
|
||||||
dirpath = cfile.dirpath()
|
|
||||||
modname = cfile.purebasename
|
|
||||||
|
|
||||||
# find the expected extension of the compiled C module
|
|
||||||
for ext, mode, filetype in imp.get_suffixes():
|
|
||||||
if filetype == imp.C_EXTENSION:
|
|
||||||
break
|
|
||||||
else:
|
|
||||||
raise ImportError, "cannot find the file name suffix of C ext modules"
|
|
||||||
lib = dirpath.join(modname+ext)
|
|
||||||
|
|
||||||
# XXX argl! we need better "build"-locations altogether!
|
|
||||||
if lib.check():
|
|
||||||
try:
|
|
||||||
lib.remove()
|
|
||||||
except EnvironmentError:
|
|
||||||
pass # XXX we just use the existing version, bah
|
|
||||||
|
|
||||||
if not lib.check():
|
|
||||||
c = py.io.StdCaptureFD()
|
|
||||||
try:
|
|
||||||
try:
|
|
||||||
saved_environ = os.environ.items()
|
|
||||||
try:
|
|
||||||
lastdir = dirpath.chdir()
|
|
||||||
try:
|
|
||||||
setup(
|
|
||||||
name = "pylibmodules",
|
|
||||||
ext_modules=[
|
|
||||||
Extension(modname, [str(cfile)])
|
|
||||||
],
|
|
||||||
script_name = 'setup.py',
|
|
||||||
script_args = ['-q', 'build_ext', '--inplace']
|
|
||||||
#script_args = ['build_ext', '--inplace']
|
|
||||||
)
|
|
||||||
finally:
|
|
||||||
lastdir.chdir()
|
|
||||||
finally:
|
|
||||||
for key, value in saved_environ:
|
|
||||||
if os.environ.get(key) != value:
|
|
||||||
os.environ[key] = value
|
|
||||||
finally:
|
|
||||||
foutput, foutput = c.done()
|
|
||||||
except KeyboardInterrupt:
|
|
||||||
raise
|
|
||||||
except SystemExit, e:
|
|
||||||
raise RuntimeError("cannot compile %s: %s\n%s" % (cfile, e,
|
|
||||||
foutput.read()))
|
|
||||||
# XXX do we need to do some check on fout/ferr?
|
|
||||||
# XXX not a nice way to import a module
|
|
||||||
if debug:
|
|
||||||
print "inserting path to sys.path", dirpath
|
|
||||||
sys.path.insert(0, str(dirpath))
|
|
||||||
if debug:
|
|
||||||
print "import %(modname)s as testmodule" % locals()
|
|
||||||
exec py.code.compile("import %(modname)s as testmodule" % locals())
|
|
||||||
try:
|
|
||||||
sys.path.remove(str(dirpath))
|
|
||||||
except ValueError:
|
|
||||||
pass
|
|
||||||
|
|
||||||
return testmodule
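# A hedged usage sketch (hypothetical C file name; not part of the original
# module): the C source must define the matching init<modname>() entry point.
#
#   cfile = py.path.local("_demo.c")     # contains init_demo()
#   mod = make_module_from_c(cfile)      # builds in-place, imports, returns it
#   print mod.__file__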
|
|
157
py/misc/cache.py
|
@ -1,157 +0,0 @@
|
||||||
"""
|
|
||||||
This module contains multithread-safe cache implementations.
|
|
||||||
|
|
||||||
Caches mainly have a
|
|
||||||
|
|
||||||
__getitem__ and getorbuild() method
|
|
||||||
|
|
||||||
where the latter either just return a cached value or
|
|
||||||
first builds the value.
|
|
||||||
|
|
||||||
These are the current cache implementations:
|
|
||||||
|
|
||||||
BuildcostAccessCache tracks building-time and accesses. Evicts
|
|
||||||
by product of num-accesses * build-time.
|
|
||||||
|
|
||||||
"""
|
|
||||||
import py
|
|
||||||
gettime = py.std.time.time
|
|
||||||
|
|
||||||
class WeightedCountingEntry(object):
|
|
||||||
def __init__(self, value, oneweight):
|
|
||||||
self.num = 1
|
|
||||||
self._value = value
|
|
||||||
self.oneweight = oneweight
|
|
||||||
|
|
||||||
def weight():
|
|
||||||
def fget(self):
|
|
||||||
return (self.num * self.oneweight, self.num)
|
|
||||||
return property(fget, None, None, "cumulative weight")
|
|
||||||
weight = weight()
|
|
||||||
|
|
||||||
def value():
|
|
||||||
def fget(self):
|
|
||||||
# you need to protect against mt-access at caller side!
|
|
||||||
self.num += 1
|
|
||||||
return self._value
|
|
||||||
return property(fget, None, None)
|
|
||||||
value = value()
|
|
||||||
|
|
||||||
def __repr__(self):
|
|
||||||
return "<%s weight=%s>" % (self.__class__.__name__, self.weight)
|
|
||||||
|
|
||||||
class BasicCache(object):
|
|
||||||
def __init__(self, maxentries=128):
|
|
||||||
self.maxentries = maxentries
|
|
||||||
self.prunenum = int(maxentries - maxentries/8)
|
|
||||||
self._lock = py.std.threading.RLock()
|
|
||||||
self._dict = {}
|
|
||||||
|
|
||||||
def getentry(self, key):
|
|
||||||
lock = self._lock
|
|
||||||
lock.acquire()
|
|
||||||
try:
|
|
||||||
return self._dict.get(key, None)
|
|
||||||
finally:
|
|
||||||
lock.release()
|
|
||||||
|
|
||||||
def putentry(self, key, entry):
|
|
||||||
self._lock.acquire()
|
|
||||||
try:
|
|
||||||
self._prunelowestweight()
|
|
||||||
self._dict[key] = entry
|
|
||||||
finally:
|
|
||||||
self._lock.release()
|
|
||||||
|
|
||||||
def delentry(self, key, raising=False):
|
|
||||||
self._lock.acquire()
|
|
||||||
try:
|
|
||||||
try:
|
|
||||||
del self._dict[key]
|
|
||||||
except KeyError:
|
|
||||||
if raising:
|
|
||||||
raise
|
|
||||||
finally:
|
|
||||||
self._lock.release()
|
|
||||||
|
|
||||||
def getorbuild(self, key, builder, *args, **kwargs):
|
|
||||||
entry = self.getentry(key)
|
|
||||||
if entry is None:
|
|
||||||
entry = self.build(key, builder, *args, **kwargs)
|
|
||||||
return entry.value
|
|
||||||
|
|
||||||
def _prunelowestweight(self):
|
|
||||||
""" prune out entries with lowest weight. """
|
|
||||||
# note: must be called with acquired self._lock!
|
|
||||||
numentries = len(self._dict)
|
|
||||||
if numentries >= self.maxentries:
|
|
||||||
# evict according to entry's weight
|
|
||||||
items = [(entry.weight, key) for key, entry in self._dict.iteritems()]
|
|
||||||
items.sort()
|
|
||||||
index = numentries - self.prunenum
|
|
||||||
if index > 0:
|
|
||||||
for weight, key in items[:index]:
|
|
||||||
del self._dict[key]
|
|
||||||
|
|
||||||
class BuildcostAccessCache(BasicCache):
|
|
||||||
""" A BuildTime/Access-counting cache implementation.
|
|
||||||
the weight of a value is computed as the product of
|
|
||||||
|
|
||||||
num-accesses-of-a-value * time-to-build-the-value
|
|
||||||
|
|
||||||
The values with the least such weights are evicted
|
|
||||||
if the cache maxentries threshold is exceeded.
|
|
||||||
For implementation flexibility more than one object
|
|
||||||
might be evicted at a time.
|
|
||||||
"""
|
|
||||||
# time function to use for measuring build-times
|
|
||||||
_time = gettime
|
|
||||||
|
|
||||||
def __init__(self, maxentries=64):
|
|
||||||
super(BuildcostAccessCache, self).__init__(maxentries)
|
|
||||||
|
|
||||||
def build(self, key, builder, *args, **kwargs):
|
|
||||||
start = self._time()
|
|
||||||
val = builder(*args, **kwargs)
|
|
||||||
end = self._time()
|
|
||||||
entry = WeightedCountingEntry(val, end-start)
|
|
||||||
self.putentry(key, entry)
|
|
||||||
return entry
|
|
||||||
|
|
||||||
class AgingCache(BasicCache):
|
|
||||||
""" This cache prunes out cache entries that are too old.
|
|
||||||
"""
|
|
||||||
def __init__(self, maxentries=128, maxseconds=10.0):
|
|
||||||
super(AgingCache, self).__init__(maxentries)
|
|
||||||
self.maxseconds = maxseconds
|
|
||||||
|
|
||||||
def getentry(self, key):
|
|
||||||
self._lock.acquire()
|
|
||||||
try:
|
|
||||||
try:
|
|
||||||
entry = self._dict[key]
|
|
||||||
except KeyError:
|
|
||||||
entry = None
|
|
||||||
else:
|
|
||||||
if entry.isexpired():
|
|
||||||
del self._dict[key]
|
|
||||||
entry = None
|
|
||||||
return entry
|
|
||||||
finally:
|
|
||||||
self._lock.release()
|
|
||||||
|
|
||||||
def build(self, key, builder, *args, **kwargs):
|
|
||||||
ctime = gettime()
|
|
||||||
val = builder(*args, **kwargs)
|
|
||||||
entry = AgingEntry(val, ctime + self.maxseconds)
|
|
||||||
self.putentry(key, entry)
|
|
||||||
return entry
|
|
||||||
|
|
||||||
class AgingEntry(object):
|
|
||||||
def __init__(self, value, expirationtime):
|
|
||||||
self.value = value
|
|
||||||
self.weight = expirationtime
|
|
||||||
|
|
||||||
def isexpired(self):
|
|
||||||
t = py.std.time.time()
|
|
||||||
return t >= self.weight
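# A hedged usage sketch (illustration only, not part of the original module):
# getorbuild() either returns the cached value or invokes the builder once.
if __name__ == '__main__':
    cache = BuildcostAccessCache(maxentries=16)
    def double(x):
        return x * 2
    print cache.getorbuild('six', double, 3)   # builds the value: prints 6
    print cache.getorbuild('six', double, 3)   # served from the cache: prints 6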
|
|
|
@ -1 +0,0 @@
|
||||||
#
|
|
|
@ -1,84 +0,0 @@
|
||||||
#!/usr/bin/env python
|
|
||||||
|
|
||||||
# hands on script to compute the non-empty Lines of Code
|
|
||||||
# for tests and non-test code
|
|
||||||
|
|
||||||
import py
|
|
||||||
|
|
||||||
|
|
||||||
curdir = py.path.local()
|
|
||||||
|
|
||||||
|
|
||||||
def nodot(p):
|
|
||||||
return p.check(dotfile=0)
|
|
||||||
|
|
||||||
class FileCounter(object):
|
|
||||||
def __init__(self):
|
|
||||||
self.file2numlines = {}
|
|
||||||
self.numlines = 0
|
|
||||||
self.numfiles = 0
|
|
||||||
|
|
||||||
def addrecursive(self, directory, fil="*.py", rec=nodot):
|
|
||||||
for x in directory.visit(fil, rec):
|
|
||||||
self.addfile(x)
|
|
||||||
|
|
||||||
def addfile(self, fn, emptylines=False):
|
|
||||||
if emptylines:
|
|
||||||
s = len(fn.readlines())
|
|
||||||
else:
|
|
||||||
s = 0
|
|
||||||
for i in fn.readlines():
|
|
||||||
if i.strip():
|
|
||||||
s += 1
|
|
||||||
self.file2numlines[fn] = s
|
|
||||||
self.numfiles += 1
|
|
||||||
self.numlines += s
|
|
||||||
|
|
||||||
def getnumlines(self, fil):
|
|
||||||
numlines = 0
|
|
||||||
for path, value in self.file2numlines.items():
|
|
||||||
if fil(path):
|
|
||||||
numlines += value
|
|
||||||
return numlines
|
|
||||||
|
|
||||||
def getnumfiles(self, fil):
|
|
||||||
numfiles = 0
|
|
||||||
for path in self.file2numlines:
|
|
||||||
if fil(path):
|
|
||||||
numfiles += 1
|
|
||||||
return numfiles
|
|
||||||
|
|
||||||
def get_loccount(locations=None):
|
|
||||||
if locations is None:
|
|
||||||
locations = [py.path.local()]
|
|
||||||
counter = FileCounter()
|
|
||||||
for loc in locations:
|
|
||||||
counter.addrecursive(loc, '*.py', rec=nodot)
|
|
||||||
|
|
||||||
def istestfile(p):
|
|
||||||
return p.check(fnmatch='test_*.py')
|
|
||||||
isnottestfile = lambda x: not istestfile(x)
|
|
||||||
|
|
||||||
numfiles = counter.getnumfiles(isnottestfile)
|
|
||||||
numlines = counter.getnumlines(isnottestfile)
|
|
||||||
numtestfiles = counter.getnumfiles(istestfile)
|
|
||||||
numtestlines = counter.getnumlines(istestfile)
|
|
||||||
|
|
||||||
return counter, numfiles, numlines, numtestfiles, numtestlines
|
|
||||||
|
|
||||||
def countloc(paths=None):
|
|
||||||
if not paths:
|
|
||||||
paths = ['.']
|
|
||||||
locations = [py.path.local(x) for x in paths]
|
|
||||||
(counter, numfiles, numlines, numtestfiles,
|
|
||||||
numtestlines) = get_loccount(locations)
|
|
||||||
|
|
||||||
items = counter.file2numlines.items()
|
|
||||||
items.sort(lambda x,y: cmp(x[1], y[1]))
|
|
||||||
for x, y in items:
|
|
||||||
print "%3d %30s" % (y,x)
|
|
||||||
|
|
||||||
print "%30s %3d" %("number of testfiles", numtestfiles)
|
|
||||||
print "%30s %3d" %("number of non-empty testlines", numtestlines)
|
|
||||||
print "%30s %3d" %("number of files", numfiles)
|
|
||||||
print "%30s %3d" %("number of non-empty lines", numlines)
|
|
|
@ -1,63 +0,0 @@
|
||||||
"""
|
|
||||||
|
|
||||||
Put this file as 'conftest.py' somewhere upwards from py-trunk,
|
|
||||||
modify the "socketserveradr" below to point to a windows/linux
|
|
||||||
host running "py/execnet/script/loop_socketserver.py"
|
|
||||||
and invoke e.g. from linux:
|
|
||||||
|
|
||||||
py.test --session=MySession some_path_to_what_you_want_to_test
|
|
||||||
|
|
||||||
This should ad-hoc distribute the running of tests to
|
|
||||||
the remote machine (including rsyncing your WC).
|
|
||||||
|
|
||||||
"""
|
|
||||||
import py
|
|
||||||
from py.__.test.terminal.remote import RemoteTerminalSession
|
|
||||||
|
|
||||||
import os
|
|
||||||
|
|
||||||
class MyRSync(py.execnet.RSync):
|
|
||||||
def filter(self, path):
|
|
||||||
if path.endswith('.pyc') or path.endswith('~'):
|
|
||||||
return False
|
|
||||||
dir, base = os.path.split(path)
|
|
||||||
# we may want to have revision info on the other side,
|
|
||||||
# so let's not exclude .svn directories
|
|
||||||
#if base == '.svn':
|
|
||||||
# return False
|
|
||||||
return True
|
|
||||||
|
|
||||||
class MySession(RemoteTerminalSession):
|
|
||||||
socketserveradr = ('10.9.2.62', 8888)
|
|
||||||
socketserveradr = ('10.9.4.148', 8888)
|
|
||||||
|
|
||||||
def _initslavegateway(self):
|
|
||||||
print "MASTER: initializing remote socket gateway"
|
|
||||||
gw = py.execnet.SocketGateway(*self.socketserveradr)
|
|
||||||
pkgname = 'py' # xxx flexibilize
|
|
||||||
channel = gw.remote_exec("""
|
|
||||||
import os
|
|
||||||
topdir = os.path.join(os.environ['HOMEPATH'], 'pytestcache')
|
|
||||||
pkgdir = os.path.join(topdir, %r)
|
|
||||||
channel.send((topdir, pkgdir))
|
|
||||||
""" % (pkgname,))
|
|
||||||
remotetopdir, remotepkgdir = channel.receive()
|
|
||||||
sendpath = py.path.local(py.__file__).dirpath()
|
|
||||||
rsync = MyRSync(sendpath)
|
|
||||||
rsync.add_target(gw, remotepkgdir, delete=True)
|
|
||||||
rsync.send()
|
|
||||||
channel = gw.remote_exec("""
|
|
||||||
import os, sys
|
|
||||||
path = %r # os.path.abspath
|
|
||||||
sys.path.insert(0, path)
|
|
||||||
os.chdir(path)
|
|
||||||
import py
|
|
||||||
channel.send((path, py.__file__))
|
|
||||||
""" % remotetopdir)
|
|
||||||
topdir, remotepypath = channel.receive()
|
|
||||||
assert topdir == remotetopdir, (topdir, remotetopdir)
|
|
||||||
assert remotepypath.startswith(topdir), (remotepypath, topdir)
|
|
||||||
#print "remote side has rsynced pythonpath ready: %r" %(topdir,)
|
|
||||||
return gw, topdir
|
|
||||||
|
|
||||||
dist_hosts = ['localhost', 'cobra', 'cobra']
|
|
|
@ -1,32 +0,0 @@
|
||||||
import py
|
|
||||||
|
|
||||||
_time_desc = {
|
|
||||||
1 : 'second', 60 : 'minute', 3600 : 'hour', 86400 : 'day',
|
|
||||||
2628000 : 'month', 31536000 : 'year', }
|
|
||||||
|
|
||||||
def worded_diff_time(ctime):
|
|
||||||
difftime = py.std.time.time() - ctime
|
|
||||||
keys = _time_desc.keys()
|
|
||||||
keys.sort()
|
|
||||||
for i, key in py.builtin.enumerate(keys):
|
|
||||||
if key >=difftime:
|
|
||||||
break
|
|
||||||
l = []
|
|
||||||
keylist = keys[:i]
|
|
||||||
|
|
||||||
keylist.reverse()
|
|
||||||
for key in keylist[:1]:
|
|
||||||
div = int(difftime / key)
|
|
||||||
if div==0:
|
|
||||||
break
|
|
||||||
difftime -= div * key
|
|
||||||
plural = div > 1 and 's' or ''
|
|
||||||
l.append('%d %s%s' %(div, _time_desc[key], plural))
|
|
||||||
return ", ".join(l) + " ago "
|
|
||||||
|
|
||||||
_months = ['Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun',
|
|
||||||
'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec']
|
|
||||||
|
|
||||||
def worded_time(ctime):
|
|
||||||
tm = py.std.time.gmtime(ctime)
|
|
||||||
return "%s %d, %d" % (_months[tm.tm_mon-1], tm.tm_mday, tm.tm_year)
|
|
|
@ -1,84 +0,0 @@
|
||||||
"""
|
|
||||||
|
|
||||||
"""
|
|
||||||
|
|
||||||
import py
|
|
||||||
import sys
|
|
||||||
|
|
||||||
log = py.log.get("dynpkg",
|
|
||||||
info=py.log.STDOUT,
|
|
||||||
debug=py.log.STDOUT,
|
|
||||||
command=None) # py.log.STDOUT)
|
|
||||||
|
|
||||||
from distutils import util
|
|
||||||
|
|
||||||
class DistPython:
|
|
||||||
def __init__(self, location=None, python=None):
|
|
||||||
if python is None:
|
|
||||||
python = py.std.sys.executable
|
|
||||||
self.python = python
|
|
||||||
if location is None:
|
|
||||||
location = py.path.local()
|
|
||||||
self.location = location
|
|
||||||
self.plat_specifier = '.%s-%s' % (util.get_platform(), sys.version[0:3])
|
|
||||||
|
|
||||||
def clean(self):
|
|
||||||
out = self._exec("clean -a")
|
|
||||||
#print out
|
|
||||||
|
|
||||||
def build(self):
|
|
||||||
out = self._exec("build")
|
|
||||||
#print out
|
|
||||||
|
|
||||||
def _exec(self, cmd):
|
|
||||||
python = self.python
|
|
||||||
old = self.location.chdir()
|
|
||||||
try:
|
|
||||||
cmd = "%(python)s setup.py %(cmd)s" % locals()
|
|
||||||
log.command(cmd)
|
|
||||||
out = py.process.cmdexec(cmd)
|
|
||||||
finally:
|
|
||||||
old.chdir()
|
|
||||||
return out
|
|
||||||
|
|
||||||
def get_package_path(self, pkgname):
|
|
||||||
pkg = self._get_package_path(pkgname)
|
|
||||||
if pkg is None:
|
|
||||||
#self.clean()
|
|
||||||
self.build()
|
|
||||||
pkg = self._get_package_path(pkgname)
|
|
||||||
assert pkg is not None
|
|
||||||
return pkg
|
|
||||||
|
|
||||||
def _get_package_path(self, pkgname):
|
|
||||||
major, minor = py.std.sys.version_info[:2]
|
|
||||||
#assert major >=2 and minor in (3,4,5)
|
|
||||||
suffix = "%s.%s" %(major, minor)
|
|
||||||
location = self.location
|
|
||||||
for base in [location.join('build', 'lib'),
|
|
||||||
location.join('build', 'lib'+ self.plat_specifier)]:
|
|
||||||
if base.check(dir=1):
|
|
||||||
for pkg in base.visit(lambda x: x.check(dir=1)):
|
|
||||||
if pkg.basename == pkgname:
|
|
||||||
#
|
|
||||||
if pkg.dirpath().basename == 'lib'+ self.plat_specifier or \
|
|
||||||
pkg.dirpath().basename == 'lib':
|
|
||||||
return pkg
|
|
||||||
|
|
||||||
def setpkg(finalpkgname, distdir):
|
|
||||||
assert distdir.check(dir=1)
|
|
||||||
dist = DistPython(distdir)
|
|
||||||
pkg = dist.get_package_path(finalpkgname)
|
|
||||||
assert pkg.check(dir=1)
|
|
||||||
sys.path.insert(0, str(pkg.dirpath()))
|
|
||||||
try:
|
|
||||||
modname = pkg.purebasename
|
|
||||||
if modname in sys.modules:
|
|
||||||
log.debug("removing from sys.modules:", modname)
|
|
||||||
del sys.modules[modname]
|
|
||||||
sys.modules[modname] = mod = __import__(modname)
|
|
||||||
finally:
|
|
||||||
del sys.path[0] # XXX remove the temporarily inserted path again
|
|
||||||
log.info("module is at", mod.__file__)
|
|
||||||
return mod
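# A hedged usage sketch (hypothetical package name and location; not part of
# the original module):
#
#   mod = setpkg("mypkg", py.path.local("projects/mypkg-dist"))
#   log.info("using freshly built module at", mod.__file__)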
|
|
||||||
|
|
|
@ -1,79 +0,0 @@

import py
import errno

class Error(EnvironmentError):
    __module__ = 'py.error'

    def __repr__(self):
        return "%s.%s %r: %s " %(self.__class__.__module__,
                                 self.__class__.__name__,
                                 self.__class__.__doc__,
                                 " ".join(map(str, self.args)),
                                 #repr(self.args)
                                 )

    def __str__(self):
        return "[%s]: %s" %(self.__class__.__doc__,
                            " ".join(map(str, self.args)),
                            )

_winerrnomap = {
    2: errno.ENOENT,
    3: errno.ENOENT,
    17: errno.EEXIST,
    22: errno.ENOTDIR,
    267: errno.ENOTDIR,
    5: errno.EACCES,  # anything better?
}
# note: 'py.std' may not be imported yet at all, because
# the 'error' module in this file is imported very early.
# This is dependent on dict order.

ModuleType = type(py)

class py_error(ModuleType):
    """ py.error lazily provides higher level Exception classes
        for each possible POSIX errno (as defined per
        the 'errno' module). All such Exceptions derive
        from py.error.Error, which itself is a subclass
        of EnvironmentError.
    """
    Error = Error

    def _getwinerrnoclass(cls, eno):
        return cls._geterrnoclass(_winerrnomap[eno])
    _getwinerrnoclass = classmethod(_getwinerrnoclass)

    def _geterrnoclass(eno, _errno2class = {}):
        try:
            return _errno2class[eno]
        except KeyError:
            clsname = py.std.errno.errorcode.get(eno, "UnknownErrno%d" %(eno,))
            cls = type(Error)(clsname, (Error,),
                              {'__module__': 'py.error',
                               '__doc__': py.std.os.strerror(eno)})
            _errno2class[eno] = cls
            return cls
    _geterrnoclass = staticmethod(_geterrnoclass)

    def __getattr__(self, name):
        eno = getattr(py.std.errno, name)
        cls = self._geterrnoclass(eno)
        setattr(self, name, cls)
        return cls

    def getdict(self, done=[]):
        try:
            return done[0]
        except IndexError:
            for name in py.std.errno.errorcode.values():
                hasattr(self, name) # force attribute to be loaded, ignore errors
            dictdescr = ModuleType.__dict__['__dict__']
            done.append(dictdescr.__get__(self))
        return done[0]

    __dict__ = property(getdict)
    del getdict

error = py_error('py.error', py_error.__doc__)
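A brief sketch of how this lazy machinery is used in practice: attribute access on `py.error` materializes one exception class per errno, so path operations can raise and callers can catch symbolically. The path below is made up, chosen only so that the lookup fails:

import py

try:
    py.path.local("/no/such/dir/anywhere").listdir()
except py.error.ENOENT:
    print "missing path surfaced as py.error.ENOENT"

# the class is created on first access and then cached on the module
assert py.error.ENOENT is py.error.ENOENT
assert issubclass(py.error.ENOENT, EnvironmentError)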
@ -1,76 +0,0 @@
#!/usr/bin/env python

import py
import inspect
import types

def report_strange_docstring(name, obj):
    if obj.__doc__ is None:
        print "%s misses a docstring" % (name, )
    elif obj.__doc__ == "":
        print "%s has an empty docstring" % (name, )
    elif "XXX" in obj.__doc__:
        print "%s has an 'XXX' in its docstring" % (name, )

def find_code(method):
    return getattr(getattr(method, "im_func", None), "func_code", None)

def report_different_parameter_names(name, cls):
    bases = cls.__mro__
    for base in bases:
        for attr in dir(base):
            meth1 = getattr(base, attr)
            code1 = find_code(meth1)
            if code1 is None:
                continue
            if not callable(meth1):
                continue
            if not hasattr(cls, attr):
                continue
            meth2 = getattr(cls, attr)
            code2 = find_code(meth2)
            if not callable(meth2):
                continue
            if code2 is None:
                continue
            args1 = inspect.getargs(code1)[0]
            args2 = inspect.getargs(code2)[0]
            for a1, a2 in zip(args1, args2):
                if a1 != a2:
                    print "%s.%s has different argument names %s, %s than the version in %s" % (name, attr, a1, a2, base)


def find_all_exported():
    stack = [(name, getattr(py, name)) for name in dir(py)[::-1]
             if not name.startswith("_") and name != "compat"]
    seen = {}
    exported = []
    while stack:
        name, obj = stack.pop()
        if id(obj) in seen:
            continue
        else:
            seen[id(obj)] = True
            exported.append((name, obj))
            if isinstance(obj, type) or isinstance(obj, type(py)):
                stack.extend([("%s.%s" % (name, s), getattr(obj, s)) for s in dir(obj)
                              if len(s) <= 1 or not (s[0] == '_' and s[1] != '_')])
    return exported



if __name__ == '__main__':
    all_exported = find_all_exported()
    print "strange docstrings"
    print "=================="
    print
    for name, obj in all_exported:
        if callable(obj):
            report_strange_docstring(name, obj)
    print "\n\ndifferent parameters"
    print "===================="
    print
    for name, obj in all_exported:
        if isinstance(obj, type):
            report_different_parameter_names(name, obj)
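A toy demonstration of what the checker flags, assuming the functions above are in scope (for example pasted into an interactive session); the class is invented purely for illustration:

class Undocumented(object):
    def method(self):
        pass

report_strange_docstring("Undocumented", Undocumented)
# -> Undocumented misses a docstring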
@ -1,10 +0,0 @@

import py
import os, sys

def killproc(pid):
    if sys.platform == "win32":
        py.process.cmdexec("taskkill /F /PID %d" %(pid,))
    else:
        os.kill(pid, 15)
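A small usage sketch, assuming `killproc` from the module above is in scope; a throwaway child process is spawned just to have a PID to terminate:

import sys, time, subprocess

# start a child that would otherwise sleep for a long time
child = subprocess.Popen([sys.executable, "-c", "import time; time.sleep(100)"])
time.sleep(1)
killproc(child.pid)      # SIGTERM on POSIX, "taskkill /F" on win32
child.wait()
print "child terminated with returncode", child.returncode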
@ -1,73 +0,0 @@
import py
import sys, os, traceback
import re

if hasattr(sys.stdout, 'fileno') and os.isatty(sys.stdout.fileno()):
    def log(msg):
        print msg
else:
    def log(msg):
        pass

def convert_rest_html(source, source_path, stylesheet=None, encoding='latin1'):
    from py.__.rest import directive
    """ return html latin1-encoded document for the given input.
        source      a ReST-string
        sourcepath  where to look for includes (basically)
        stylesheet  path (to be used if any)
    """
    from docutils.core import publish_string
    directive.set_backend_and_register_directives("html")
    kwargs = {
        'stylesheet' : stylesheet,
        'stylesheet_path': None,
        'traceback' : 1,
        'embed_stylesheet': 0,
        'output_encoding' : encoding,
        #'halt' : 0, # 'info',
        'halt_level' : 2,
    }
    # docutils uses os.getcwd() :-(
    source_path = os.path.abspath(str(source_path))
    prevdir = os.getcwd()
    try:
        os.chdir(os.path.dirname(source_path))
        return publish_string(source, source_path, writer_name='html',
                              settings_overrides=kwargs)
    finally:
        os.chdir(prevdir)

def process(txtpath, encoding='latin1'):
    """ process a textfile """
    log("processing %s" % txtpath)
    assert txtpath.check(ext='.txt')
    if isinstance(txtpath, py.path.svnwc):
        txtpath = txtpath.localpath
    htmlpath = txtpath.new(ext='.html')
    #svninfopath = txtpath.localpath.new(ext='.svninfo')

    style = txtpath.dirpath('style.css')
    if style.check():
        stylesheet = style.basename
    else:
        stylesheet = None
    content = unicode(txtpath.read(), encoding)
    doc = convert_rest_html(content, txtpath, stylesheet=stylesheet, encoding=encoding)
    htmlpath.write(doc)
    #log("wrote %r" % htmlpath)
    #if txtpath.check(svnwc=1, versioned=1):
    #    info = txtpath.info()
    #    svninfopath.dump(info)

rex1 = re.compile(ur'.*<body>(.*)</body>.*', re.MULTILINE | re.DOTALL)
rex2 = re.compile(ur'.*<div class="document">(.*)</div>.*', re.MULTILINE | re.DOTALL)

def strip_html_header(string, encoding='utf8'):
    """ return the content of the body-tag """
    uni = unicode(string, encoding)
    for rex in rex1, rex2:
        match = rex.search(uni)
        if not match:
            break
        uni = match.group(1)
    return uni
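A hedged sketch of the intended call pattern, converting a single ReST text file sitting next to an optional `style.css`; the file name is made up and docutils must be installed for `publish_string` to work:

import py

txt = py.path.local("example.txt")           # hypothetical input file
txt.write("Hello\n=====\n\nSome *ReST* text.\n")
process(txt)                                 # writes example.html next to the .txt file
html = txt.new(ext='.html').read()
print strip_html_header(html, encoding='latin1')[:60]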
@ -1,19 +0,0 @@

import sys

class Std(object):
    """ makes all standard python modules available as a lazily
        computed attribute.
    """

    def __init__(self):
        self.__dict__ = sys.modules

    def __getattr__(self, name):
        try:
            m = __import__(name)
        except ImportError:
            raise AttributeError("py.std: could not import %s" % name)
        return m

std = Std()
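Typical use, as a short sketch: any top-level standard-library module becomes an attribute of the singleton and is imported on first access, so callers never write the import themselves:

import py

# no explicit 'import textwrap' or 'import math' needed in the caller
print py.std.textwrap.dedent("    hello\n    world")
print py.std.math.sqrt(2)

# unknown names surface as AttributeError rather than ImportError
try:
    py.std.no_such_module_xyz
except AttributeError, e:
    print "lookup failed:", e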
@ -1,34 +0,0 @@

import py

class ChangeItem:
    def __init__(self, repo, revision, line):
        self.repo = py.path.local(repo)
        self.revision = int(revision)
        self.action = action = line[:4]
        self.path = line[4:].strip()
        self.added = action[0] == "A"
        self.modified = action[0] == "M"
        self.propchanged = action[1] == "U"
        self.deleted = action[0] == "D"

    def svnurl(self):
        return py.path.svnurl("file://%s/%s" %(self.repo, self.path), self.revision)

    def __repr__(self):
        return "<ChangeItem %r>" %(self.action + self.path)

def changed(repo, revision):
    out = py.process.cmdexec("svnlook changed -r %s %s" %(revision, repo))
    l = []
    for line in out.strip().split('\n'):
        l.append(ChangeItem(repo, revision, line))
    return l

def author(repo, revision):
    out = py.process.cmdexec("svnlook author -r %s %s" %(revision, repo))
    return out.strip()

def youngest(repo):
    out = py.process.cmdexec("svnlook youngest %s" %(repo,))
    return int(out)
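A sketch of how these wrappers were meant to be driven, e.g. from a post-commit hook; the repository path below is hypothetical and `svnlook` must be on PATH:

repo = "/svn/repos/py"               # hypothetical local repository path
rev = youngest(repo)
print "youngest revision:", rev, "by", author(repo, rev)
for item in changed(repo, rev):
    if item.added or item.modified:
        print item, "->", item.svnurl()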
@ -1,34 +0,0 @@
import sys, os

def get_terminal_width():
    try:
        import termios, fcntl, struct
        call = fcntl.ioctl(0, termios.TIOCGWINSZ, "\000"*8)
        height, width = struct.unpack("hhhh", call)[:2]
        terminal_width = width
    except (SystemExit, KeyboardInterrupt), e:
        raise
    except:
        # FALLBACK
        terminal_width = int(os.environ.get('COLUMNS', 80))-1
    return terminal_width

terminal_width = get_terminal_width()

def ansi_print(text, esc, file=None, newline=True, flush=False):
    if file is None:
        file = sys.stderr
    text = text.rstrip()
    if esc and sys.platform != "win32" and file.isatty():
        if not isinstance(esc, tuple):
            esc = (esc,)
        text = (''.join(['\x1b[%sm' % cod for cod in esc]) +
                text +
                '\x1b[0m')    # ANSI color code "reset"
    if newline:
        text += '\n'
    file.write(text)
    if flush:
        file.flush()
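A short usage sketch, assuming the helpers above are in scope; 31 and the ("1", "32") pair are standard ANSI SGR codes for red and bold green, and the escapes are only emitted when stdout is a tty on a non-Windows platform:

import sys

print "terminal width:", get_terminal_width()
ansi_print("error: something went wrong", 31, file=sys.stdout)                  # red
ansi_print("ok: all checks passed", ("1", "32"), file=sys.stdout, flush=True)   # bold green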
@ -1 +0,0 @@
#
@ -1,160 +0,0 @@
SVN-fs-dump-format-version: 2

UUID: 9cb23565-b10c-0410-b2e2-dde77f08022e

Revision-number: 0
Prop-content-length: 56
Content-length: 56

K 8
svn:date
V 27
2006-02-13T18:39:13.605561Z
PROPS-END

Revision-number: 1
Prop-content-length: 111
Content-length: 111

K 7
svn:log
V 13
A testdir

K 10
svn:author
V 3
hpk
K 8
svn:date
V 27
2006-02-13T18:39:27.723346Z
PROPS-END

Node-path: testdir
Node-kind: dir
Node-action: add
Prop-content-length: 10
Content-length: 10

PROPS-END


Revision-number: 2
Prop-content-length: 111
Content-length: 111

K 7
svn:log
V 13
_M testdir

K 10
svn:author
V 3
hpk
K 8
svn:date
V 27
2006-02-13T18:39:48.595729Z
PROPS-END

Node-path: testdir
Node-kind: dir
Node-action: change
Prop-content-length: 28
Content-length: 28

K 4
key1
V 4
val2
PROPS-END


Revision-number: 3
Prop-content-length: 113
Content-length: 113

K 7
svn:log
V 15
AM testdir2


K 10
svn:author
V 3
hpk
K 8
svn:date
V 27
2006-02-13T18:40:53.307540Z
PROPS-END

Node-path: testdir2
Node-kind: dir
Node-action: add
Prop-content-length: 28
Content-length: 28

K 4
key2
V 4
val2
PROPS-END


Revision-number: 4
Prop-content-length: 113
Content-length: 113

K 7
svn:log
V 15
D testdir2


K 10
svn:author
V 3
hpk
K 8
svn:date
V 27
2006-02-13T18:41:07.188024Z
PROPS-END

Node-path: testdir2
Node-action: delete


Revision-number: 5
Prop-content-length: 112
Content-length: 112

K 7
svn:log
V 14
_M testdir


K 10
svn:author
V 3
hpk
K 8
svn:date
V 27
2006-02-13T18:42:03.179177Z
PROPS-END

Node-path: testdir
Node-kind: dir
Node-action: change
Prop-content-length: 10
Content-length: 10

PROPS-END

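For reference, a hedged sketch of how such a dump fixture is typically materialized into a scratch repository from test code; the temporary paths and the dump file name are illustrative, and `svnadmin`/`svnlook` must be on PATH:

import py

tmpdir = py.path.local.mkdtemp()
repo = tmpdir.join("repo")
dumpfile = tmpdir.join("fixture.dump")      # hypothetical name for the dump shown above
# dumpfile.write(... the dump contents ...)
py.process.cmdexec("svnadmin create %s" % repo)
py.process.cmdexec("svnadmin load %s < %s" % (repo, dumpfile))
print py.process.cmdexec("svnlook youngest %s" % repo).strip()   # -> 5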
@ -1,44 +0,0 @@

from py.test import raises
import py
import sys
import inspect

class TestAPI_V0_namespace_consistence:
    def test_path_entrypoints(self):
        assert inspect.ismodule(py.path)
        assert_class('py.path', 'local')
        assert_class('py.path', 'svnwc')
        assert_class('py.path', 'svnurl')

    def test_magic_entrypoints(self):
        assert_function('py.magic', 'invoke')
        assert_function('py.magic', 'revoke')
        assert_function('py.magic', 'patch')
        assert_function('py.magic', 'revoke')

    def test_process_entrypoints(self):
        assert_function('py.process', 'cmdexec')

    def XXXtest_utest_entrypoints(self):
        # XXX TOBECOMPLETED
        assert_function('py.test', 'main')
        #assert_module('std.utest', 'collect')

def assert_class(modpath, name):
    mod = __import__(modpath, None, None, [name])
    obj = getattr(mod, name)
    assert inspect.isclass(obj)

    # we don't test anymore that the exported classes have
    # the exported module path and name on them.
    #fullpath = modpath + '.' + name
    #assert obj.__module__ == modpath
    #if sys.version_info >= (2,3):
    #    assert obj.__name__ == name

def assert_function(modpath, name):
    mod = __import__(modpath, None, None, [name])
    obj = getattr(mod, name)
    assert hasattr(obj, 'func_doc')
    #assert obj.func_name == name