* fix some syntax and 3k issues for py/path and py/process, tests only partially working
* have py.process.cmdexec return unicode/text (for now)
* rename py.builtin.basestring to _basestring

--HG--
branch : trunk
This commit is contained in:
parent
0f7a9e2da2
commit
b930565d56
@@ -147,7 +147,7 @@ initpkg(__name__,
     'builtin.print_' : ('./builtin/builtin31.py', 'print_'),
     'builtin._reraise' : ('./builtin/builtin31.py', '_reraise'),
     'builtin.exec_' : ('./builtin/builtin31.py', 'exec_'),
-    'builtin.basestring' : ('./builtin/builtin31.py', 'basestring'),
+    'builtin._basestring' : ('./builtin/builtin31.py', '_basestring'),
     'builtin._totext' : ('./builtin/builtin31.py', '_totext'),
     'builtin.builtins' : ('./builtin/builtin31.py', 'builtins'),
@@ -3,7 +3,9 @@ import sys
 if sys.version_info >= (3, 0):
     exec ("print_ = print ; exec_=exec")
     import builtins
-    basestring = str
+
+    # some backward compatibility helpers
+    _basestring = str
     def _totext(obj, encoding):
         if isinstance(obj, str):
             obj = obj.encode(encoding)
@@ -11,7 +13,8 @@ if sys.version_info >= (3, 0):
 
 else:
     _totext = unicode
-    basestring = basestring
+    _basestring = basestring
+
     import __builtin__ as builtins
     def print_(*args, **kwargs):
         """ minimal backport of py3k print statement. """
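Note: the renamed helpers give one spelling that works on both interpreters — py.builtin._basestring is str on Python 3 and basestring on Python 2, while _totext converts raw command output to text. A minimal usage sketch (illustrative only; it assumes the rest of the _totext definition, cut off above, decodes back to text as in the released py lib):

    import py

    # isinstance checks against _basestring work unchanged on 2.x and 3.x
    assert isinstance("hello", py.builtin._basestring)

    # _totext(obj, encoding) is expected to return text (unicode on 2.x, str on 3.x)
    text = py.builtin._totext("hello", "ascii")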
@@ -25,7 +25,7 @@ class Source(object):
             partlines = []
             if isinstance(part, Source):
                 partlines = part.lines
-            elif isinstance(part, py.builtin.basestring):
+            elif isinstance(part, py.builtin._basestring):
                 partlines = part.split('\n')
                 if rstrip:
                     while partlines:
@@ -133,7 +133,7 @@ class LocalPath(FSBase):
         self = object.__new__(cls)
         if not path:
             self.strpath = os.getcwd()
-        elif isinstance(path, str):
+        elif isinstance(path, py.builtin._basestring):
             self.strpath = os.path.abspath(os.path.normpath(str(path)))
         else:
             raise ValueError("can only pass None, Path instances "
@@ -56,7 +56,7 @@ class SvnCommandPath(svncommon.SvnPathBase):
         # fixing the locale because we can't otherwise parse
         string = " ".join(l)
         if DEBUG:
-            print "execing", string
+            print("execing %s" % string)
         out = self._svncmdexecauth(string)
         return out
 
@@ -70,7 +70,8 @@ class SvnCommandPath(svncommon.SvnPathBase):
     def _cmdexec(self, cmd):
         try:
             out = process.cmdexec(cmd)
-        except py.process.cmdexec.Error, e:
+        except py.process.cmdexec.Error:
+            e = sys.exc_info()[1]
             if (e.err.find('File Exists') != -1 or
                 e.err.find('File already exists') != -1):
                 raise py.error.EEXIST(self)
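Note: the old `except SomeError, e:` binding syntax is a SyntaxError on Python 3, and `except ... as e:` only exists from Python 2.6 on, so the diff repeatedly switches to reading the active exception from sys.exc_info(). A small self-contained sketch of the idiom (CommandError is a hypothetical stand-in for py.process.cmdexec.Error):

    import sys

    class CommandError(Exception):
        def __init__(self, err):
            Exception.__init__(self, err)
            self.err = err

    def run():
        try:
            raise CommandError("File Exists")
        except CommandError:
            # version-agnostic way to obtain the exception instance
            e = sys.exc_info()[1]
            return e.err

    print(run())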
@@ -207,7 +208,7 @@ checkin message msg."""
     def _proplist(self):
         res = self._svnwithrev('proplist')
         lines = res.split('\n')
-        lines = map(str.strip, lines[1:])
+        lines = [x.strip() for x in lines[1:]]
         return svncommon.PropListDict(self, lines)
 
     def _listdir_nameinfo(self):
@@ -215,7 +216,8 @@ checkin message msg."""
         def builder():
             try:
                 res = self._svnwithrev('ls', '-v')
-            except process.cmdexec.Error, e:
+            except process.cmdexec.Error:
+                e = sys.exc_info()[1]
                 if e.err.find('non-existent in that revision') != -1:
                     raise py.error.ENOENT(self, e.err)
                 elif e.err.find('File not found') != -1:
@@ -201,7 +201,7 @@ class SvnPathBase(common.PathBase):
             elif name == 'ext':
                 res.append(ext)
             else:
-                raise NameError, "Don't know part %r" % name
+                raise NameError("Don't know part %r" % name)
         return res
 
     def __eq__(self, other):
@@ -251,7 +251,7 @@ class SvnPathBase(common.PathBase):
         if fil or sort:
             paths = filter(fil, paths)
             paths = isinstance(paths, list) and paths or list(paths)
-            if callable(sort):
+            if hasattr(sort, '__call__'):
                 paths.sort(sort)
             elif sort:
                 paths.sort()
@@ -345,7 +345,7 @@ class SvnPathBase(common.PathBase):
 def parse_apr_time(timestr):
     i = timestr.rfind('.')
     if i == -1:
-        raise ValueError, "could not parse %s" % timestr
+        raise ValueError("could not parse %s" % timestr)
     timestr = timestr[:i]
     parsedtime = time.strptime(timestr, "%Y-%m-%dT%H:%M:%S")
     return time.mktime(parsedtime)
@@ -469,7 +469,7 @@ class SvnWCCommandPath(common.PathBase):
         if getattr(self, '_url', None) is None:
             info = self.info()
             self._url = info.url #SvnPath(info.url, info.rev)
-        assert isinstance(self._url, str)
+        assert isinstance(self._url, py.builtin._basestring)
         return self._url
 
     url = property(_geturl, None, None, "url of this WC item")
@@ -520,7 +520,8 @@ class SvnWCCommandPath(common.PathBase):
                     os.environ[key] = hold
                 else:
                     del os.environ[key]
-        except py.process.cmdexec.Error, e:
+        except py.process.cmdexec.Error:
+            e = sys.exc_info()[1]
             strerr = e.err.lower()
             if strerr.find('file not found') != -1:
                 raise py.error.ENOENT(self)
@@ -577,21 +578,18 @@ class SvnWCCommandPath(common.PathBase):
         a file). if you specify a keyword argument 'directory=True'
         then the path is forced to be a directory path.
         """
-        try:
-            p = self.join(*args)
-            if p.check():
-                if p.check(versioned=False):
-                    p.add()
-                return p
-            if kwargs.get('dir', 0):
-                return p._ensuredirs()
-            parent = p.dirpath()
-            parent._ensuredirs()
-            p.write("")
-            p.add()
-            return p
-        except:
-            error_enhance(sys.exc_info())
+        p = self.join(*args)
+        if p.check():
+            if p.check(versioned=False):
+                p.add()
+            return p
+        if kwargs.get('dir', 0):
+            return p._ensuredirs()
+        parent = p.dirpath()
+        parent._ensuredirs()
+        p.write("")
+        p.add()
+        return p
 
     def mkdir(self, *args):
         """ create & return the directory joined with args. """
@@ -762,7 +760,7 @@ If rec is True, then return a dictionary mapping sub-paths to such mappings.
         else:
             res = self._svn('proplist')
             lines = res.split('\n')
-            lines = map(str.strip, lines[1:])
+            lines = [x.strip() for x in lines[1:]]
             return PropListDict(self, lines)
 
     def revert(self, rec=0):
@@ -806,7 +804,8 @@ recursively. """
         if not info:
             try:
                 output = self._svn('info')
-            except py.process.cmdexec.Error, e:
+            except py.process.cmdexec.Error:
+                e = sys.exc_info()[1]
                 if e.err.find('Path is not a working copy directory') != -1:
                     raise py.error.ENOENT(self, e.err)
                 elif e.err.find("is not under version control") != -1:
@@ -849,7 +848,7 @@ recursively. """
         if fil or sort:
             paths = filter(fil, paths)
             paths = isinstance(paths, list) and paths or list(paths)
-            if callable(sort):
+            if hasattr(sort, '__call__'):
                 paths.sort(sort)
             elif sort:
                 paths.sort()
@@ -871,7 +870,8 @@ recursively. """
             s = self.svnwcpath.info()
         except (py.error.ENOENT, py.error.EEXIST):
             return False
-        except py.process.cmdexec.Error, e:
+        except py.process.cmdexec.Error:
+            e = sys.exc_info()[1]
             if e.err.find('is not a working copy')!=-1:
                 return False
             if e.err.lower().find('not a versioned resource') != -1:
@@ -1007,7 +1007,7 @@ class WCStatus:
                     # because of the way SVN presents external items
                     continue
                 # keep trying
-                raise ValueError, "could not parse line %r" % line
+                raise ValueError("could not parse line %r" % line)
             else:
                 rev, modrev, author, fn = m.groups()
                 wcpath = rootwcpath.join(fn, abs=1)
@@ -1065,7 +1065,8 @@ class XMLWCStatus(WCStatus):
         minidom, ExpatError = importxml()
         try:
             doc = minidom.parseString(data)
-        except ExpatError, e:
+        except ExpatError:
+            e = sys.exc_info()[1]
             raise ValueError(str(e))
         urevels = doc.getElementsByTagName('against')
         if urevels:
@@ -1179,7 +1180,7 @@ class InfoSvnWCCommand:
         try:
             self.url = d['url']
         except KeyError:
-            raise ValueError, "Not a versioned resource"
+            raise ValueError("Not a versioned resource")
             #raise ValueError, "Not a versioned resource %r" % path
         self.kind = d['nodekind'] == 'directory' and 'dir' or d['nodekind']
         self.rev = int(d['revision'])
@@ -1201,7 +1202,7 @@ def parse_wcinfotime(timestr):
     # example: 2003-10-27 20:43:14 +0100 (Mon, 27 Oct 2003)
     m = re.match(r'(\d+-\d+-\d+ \d+:\d+:\d+) ([+-]\d+) .*', timestr)
     if not m:
-        raise ValueError, "timestring %r does not match" % timestr
+        raise ValueError("timestring %r does not match" % timestr)
     timestr, timezone = m.groups()
     # do not handle timezone specially, return value should be UTC
     parsedtime = time.strptime(timestr, "%Y-%m-%d %H:%M:%S")
@@ -1217,7 +1218,7 @@ def make_recursive_propdict(wcroot,
         line = lines.pop(0)
         m = rex.match(line)
         if not m:
-            raise ValueError, "could not parse propget-line: %r" % line
+            raise ValueError("could not parse propget-line: %r" % line)
         path = m.groups()[0]
         wcpath = wcroot.join(path, abs=1)
         propnames = []
@@ -1228,8 +1229,6 @@ def make_recursive_propdict(wcroot,
         pdict[wcpath] = PropListDict(wcpath, propnames)
     return pdict
 
-def error_enhance((cls, error, tb)):
-    raise cls, error, tb
 
 def importxml(cache=[]):
     if cache:
@@ -1244,18 +1243,18 @@ class LogEntry:
         self.rev = int(logentry.getAttribute('revision'))
         for lpart in filter(None, logentry.childNodes):
             if lpart.nodeType == lpart.ELEMENT_NODE:
-                if lpart.nodeName == u'author':
+                if lpart.nodeName == 'author':
                     self.author = lpart.firstChild.nodeValue.encode('UTF-8')
-                elif lpart.nodeName == u'msg':
+                elif lpart.nodeName == 'msg':
                     if lpart.firstChild:
                         self.msg = lpart.firstChild.nodeValue.encode('UTF-8')
                     else:
                         self.msg = ''
-                elif lpart.nodeName == u'date':
+                elif lpart.nodeName == 'date':
                     #2003-07-29T20:05:11.598637Z
                     timestr = lpart.firstChild.nodeValue.encode('UTF-8')
                     self.date = parse_apr_time(timestr)
-                elif lpart.nodeName == u'paths':
+                elif lpart.nodeName == 'paths':
                     self.strpaths = []
                     for ppart in filter(None, lpart.childNodes):
                         if ppart.nodeType == ppart.ELEMENT_NODE:
@@ -3,6 +3,7 @@ import py
 from py import path, test, process
 from py.__.path.testing.fscommon import CommonFSTests, setuptestfs
 from py.__.path import svnwc as svncommon
+from py.builtin import print_
 
 repodump = py.path.local(__file__).dirpath('repotest.dump')
 
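Note: the test module now imports print_ from py.builtin, the function-style backport defined in builtin31.py above, so the same call works whether print is a statement (Python 2) or a function (Python 3). A tiny illustrative sketch (the repository path is made up):

    from py.builtin import print_

    # behaves like the py3k print() function on both interpreters
    print_("created svn repository", "/tmp/some-repo")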
@@ -26,15 +27,15 @@ def getrepowc(reponame='basetestrepo', wcname='wc'):
                            svncommon._escape_helper(repo))
         py.process.cmdexec('svnadmin load -q "%s" <"%s"' %
                            (svncommon._escape_helper(repo), repodump))
-        print "created svn repository", repo
+        print_("created svn repository", repo)
         wcdir.ensure(dir=1)
         wc = py.path.svnwc(wcdir)
         if py.std.sys.platform == 'win32':
             repo = '/' + str(repo).replace('\\', '/')
         wc.checkout(url='file://%s' % repo)
-        print "checked out new repo into", wc
+        print_("checked out new repo into", wc)
     else:
-        print "using repository at", repo
+        print_("using repository at", repo)
         wc = py.path.svnwc(wcdir)
     return ("file://%s" % repo, wc)
 
@@ -49,12 +50,13 @@ def save_repowc():
     wc.localpath.copy(savedwc.localpath)
     return savedrepo, savedwc
 
-def restore_repowc((savedrepo, savedwc)):
+def restore_repowc(obj):
+    savedrepo, savedwc = obj
     repo, wc = getrepowc()
-    print repo
-    print repo[len("file://"):]
+    print (repo)
+    print (repo[len("file://"):])
     repo = py.path.local(repo[len("file://"):])
-    print repo
+    print (repo)
     assert repo.check()
     # repositories have read only files on windows
     #repo.chmod(0777, rec=True)
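Note: tuple parameters in a function signature, as in the old restore_repowc((savedrepo, savedwc)), are a SyntaxError on Python 3, so the tuple is now unpacked inside the body; the removal of error_enhance((cls, error, tb)) in svnwc.py follows the same rule. A minimal sketch of the version-agnostic spelling (names are hypothetical):

    def restore(obj):
        saved_repo, saved_wc = obj   # unpack inside the body, not in the signature
        return saved_repo, saved_wc

    print(restore(("/tmp/repo", "/tmp/wc")))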
@@ -79,7 +81,7 @@ def make_test_repo(name="test-repository"):
 class CommonSvnTests(CommonFSTests):
 
     def setup_method(self, meth):
-        bn = meth.func_name
+        bn = meth.__name__
         for x in 'test_remove', 'test_move', 'test_status_deleted':
             if bn.startswith(x):
                 self._savedrepowc = save_repowc()
@@ -78,7 +78,6 @@ class TestSvnURLCommandPath(CommonCommandAndBindingTests):
         bar = foo.join('bar').ensure(file=True)
         bar.write('bar\n')
         rev1 = foo.commit('testing something')
-        print 'rev1:', rev1
         baz = foo.join('baz').ensure(file=True)
         baz.write('baz\n')
         rev2 = foo.commit('testing more')
@@ -247,7 +247,7 @@ class TestWCSvnCommandPath(CommonSvnTests):
 
     def test_status_wrong_xml(self):
         # testing for XML without author - this used to raise an exception
-        xml = u'<entry path="/home/jean/zope/venv/projectdb/parts/development-products/DataGridField">\n<wc-status item="incomplete" props="none" revision="784">\n</wc-status>\n</entry>'
+        xml = '<entry path="/home/jean/zope/venv/projectdb/parts/development-products/DataGridField">\n<wc-status item="incomplete" props="none" revision="784">\n</wc-status>\n</entry>'
         st = XMLWCStatus.fromstring(xml, self.root)
         assert len(st.incomplete) == 1
 
@@ -23,7 +23,7 @@ def posix_exec_cmd(cmd):
     the exception will provide an 'err' attribute containing
     the error-output from the command.
     """
-    __tracebackhide__ = True
+    #__tracebackhide__ = True
     try:
         from subprocess import Popen, PIPE
     except ImportError:
@@ -31,7 +31,6 @@ def posix_exec_cmd(cmd):
 
     import errno
 
-    #print "execing", cmd
     child = Popen(cmd, shell=True, stdout=PIPE, stderr=PIPE,
                   close_fds=True)
     stdin, stdout, stderr = child.stdin, child.stdout, child.stderr
@@ -53,12 +52,13 @@ def posix_exec_cmd(cmd):
     import select
     out, err = [], []
     while 1:
-        r_list = filter(lambda x: x and not x.closed, [stdout, stderr])
+        r_list = [x for x in [stdout, stderr] if x and not x.closed]
         if not r_list:
             break
         try:
             r_list = select.select(r_list, [], [])[0]
-        except (select.error, IOError), se:
+        except (select.error, IOError):
+            se = sys.exc_info()[1]
             if se.args[0] == errno.EINTR:
                 continue
             else:
@@ -66,12 +66,14 @@ def posix_exec_cmd(cmd):
         for r in r_list:
             try:
                 data = r.read() # XXX see XXX above
-            except IOError, io:
+            except IOError:
+                io = sys.exc_info()[1]
                 if io.args[0] == errno.EAGAIN:
                     continue
                 # Connection Lost
                 raise
-            except OSError, ose:
+            except OSError:
+                ose = sys.exc_info()[1]
                 if ose.errno == errno.EPIPE:
                     # Connection Lost
                     raise
@@ -88,15 +90,19 @@ def posix_exec_cmd(cmd):
             err.append(data)
     pid, systemstatus = os.waitpid(child.pid, 0)
     if pid != child.pid:
-        raise ExecutionFailed, "child process disappeared during: "+ cmd
+        raise ExecutionFailed("child process disappeared during: "+ cmd)
     if systemstatus:
         if os.WIFSIGNALED(systemstatus):
             status = os.WTERMSIG(systemstatus) + 128
         else:
             status = os.WEXITSTATUS(systemstatus)
         raise ExecutionFailed(status, systemstatus, cmd,
-                              ''.join(out), ''.join(err))
-    return "".join(out)
+                              joiner(out), joiner(err))
+    return joiner(out)
 
+def joiner(out):
+    encoding = sys.getdefaultencoding()
+    return "".join([py.builtin._totext(x, encoding) for x in out])
 
 #-----------------------------------------------------------
 # simple win32 external command execution
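Note: the new joiner() helper is what makes cmdexec return text rather than bytes, as announced in the commit message — each chunk read from the child's pipes goes through py.builtin._totext with the default encoding before being joined. A rough standalone equivalent (illustrative only, not the library code):

    import sys

    def join_output(chunks):
        # decode each chunk (bytes on Python 3, str on Python 2) to text
        encoding = sys.getdefaultencoding()
        return "".join(c.decode(encoding) if isinstance(c, bytes) else c
                       for c in chunks)

    print(join_output([b"hello ", b"world"]))   # -> hello world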
@@ -1,6 +1,9 @@
 import py
 from py.process import cmdexec
 
+def exvalue():
+    return py.std.sys.exc_info()[1]
+
 class Test_exec_cmd:
     def test_simple(self):
         out = cmdexec('echo hallo')
@@ -12,14 +15,16 @@ class Test_exec_cmd:
     def test_simple_error_exact_status(self):
         try:
             cmdexec('exit 1')
-        except cmdexec.Error, e:
+        except cmdexec.Error:
+            e = exvalue()
             assert e.status == 1
 
     def test_err(self):
         try:
             cmdexec('echoqweqwe123 hallo')
-            raise AssertionError, "command succeeded but shouldn't"
-        except cmdexec.Error, e:
+            raise AssertionError("command succeeded but shouldn't")
+        except cmdexec.Error:
+            e = exvalue()
             assert hasattr(e, 'err')
             assert hasattr(e, 'out')
             assert e.err or e.out
@@ -69,7 +69,7 @@ def test_forkedfunc_huge_data():
 
 def test_box_seq():
     # we run many boxes with huge data, just one after another
-    for i in xrange(50):
+    for i in range(50):
         result = py.process.ForkedFunc(boxhuge).waitfinish()
         assert result.out
         assert result.exitstatus == 0
@@ -79,8 +79,8 @@ def test_box_seq():
 def test_box_in_a_box():
     def boxfun():
         result = py.process.ForkedFunc(boxf2).waitfinish()
-        print result.out
-        print >>sys.stderr, result.err
+        print (result.out)
+        sys.stderr.write(result.err + "\n")
         return result.retval
 
     result = py.process.ForkedFunc(boxfun).waitfinish()
@@ -114,25 +114,26 @@ def test_kill_func_forked():
 #
 
 def boxf1():
-    print "some out"
-    print >>sys.stderr, "some err"
+    sys.stdout.write("some out\n")
+    sys.stderr.write("some err\n")
     return 1
 
 def boxf2():
-    os.write(1, "someout")
-    os.write(2, "someerr")
+    os.write(1, "someout".encode('ascii'))
+    os.write(2, "someerr".encode('ascii'))
     return 2
 
 def boxseg():
     os.kill(os.getpid(), 11)
 
 def boxhuge():
-    os.write(1, " " * 10000)
-    os.write(2, " " * 10000)
-    os.write(1, " " * 10000)
+    s = " ".encode('ascii')
+    os.write(1, s * 10000)
+    os.write(2, s * 10000)
+    os.write(1, s * 10000)
 
-    os.write(1, " " * 10000)
-    os.write(2, " " * 10000)
-    os.write(2, " " * 10000)
-    os.write(1, " " * 10000)
+    os.write(1, s * 10000)
+    os.write(2, s * 10000)
+    os.write(2, s * 10000)
+    os.write(1, s * 10000)
     return 3
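Note: os.write() only accepts bytes on Python 3, which is why the box helpers now encode their payload first; for ASCII data the .encode('ascii') call is a harmless round-trip on Python 2 as well. A tiny sketch:

    import os, sys

    payload = "some out\n".encode('ascii')   # bytes on 3.x, str on 2.x
    os.write(sys.stdout.fileno(), payload)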
@@ -305,7 +305,7 @@ class Generator(FunctionMixin, PyCollectorMixin, py.test.collect.Collector):
         if not isinstance(obj, (tuple, list)):
             obj = (obj,)
         # explict naming
-        if isinstance(obj[0], basestring):
+        if isinstance(obj[0], py.builtin._basestring):
             name = obj[0]
             obj = obj[1:]
         else: