Commit d6390c15 authored by Todd Gamblin, committed by GitHub

Fix issues related to the switchover to pytest. (#2685)

- Remove stale reference to `import nose` from `bin/spack` script.
- Add `py` to externals (missing dependency for `pytest`)
parent 7ea10e76
Showing changes with 3539 additions and 10 deletions
@@ -46,14 +46,6 @@ sys.path.insert(0, SPACK_LIB_PATH)
SPACK_EXTERNAL_LIBS = os.path.join(SPACK_LIB_PATH, "external")
sys.path.insert(0, SPACK_EXTERNAL_LIBS)
import warnings
# Avoid warnings when nose is installed with the python exe being used to run
# spack. Note this must be done after Spack's external libs directory is added
# to sys.path.
with warnings.catch_warnings():
warnings.filterwarnings("ignore", ".*nose was already imported")
import nose
# Quick and dirty check to clean orphaned .pyc files left over from
# previous revisions. These files were present in earlier versions of
# Spack, were removed, but shadow system modules that Spack still
@@ -35,12 +35,15 @@
jsonschema: An implementation of JSON Schema for Python.
ordereddict: We include our own version to be Python 2.6 compatible.
py: Needed by pytest. Library with cross-python path,
ini-parsing, io, code, and log facilities.
pyqver2: External script to query required python version of
python source code. Used for ensuring 2.6 compatibility.
pytest: Testing framework used by Spack.
yaml: Used for config files.
"""
Holger Krekel, holger at merlinux eu
Benjamin Peterson, benjamin at python org
Ronny Pfannschmidt, Ronny.Pfannschmidt at gmx de
Guido Wesdorp, johnny at johnnydebris net
Samuele Pedroni, pedronis at openend se
Carl Friedrich Bolz, cfbolz at gmx de
Armin Rigo, arigo at tunes org
Maciek Fijalkowski, fijal at genesilico pl
Brian Dorsey, briandorsey at gmail com
Floris Bruynooghe, flub at devork be
merlinux GmbH, Germany, office at merlinux eu
Contributors include::
Ross Lawley
Ralf Schmitt
Chris Lamb
Harald Armin Massa
Martijn Faassen
Ian Bicking
Jan Balster
Grig Gheorghiu
Bob Ippolito
Christian Tismer
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
.. image:: https://img.shields.io/pypi/pyversions/pytest.svg
:target: https://pypi.org/project/py
.. image:: https://img.shields.io/travis/pytest-dev/py.svg
:target: https://travis-ci.org/pytest-dev/py
The py lib is a Python development support library featuring
the following tools and modules:
* ``py.path``: uniform local and svn path objects
* ``py.apipkg``: explicit API control and lazy-importing
* ``py.iniconfig``: easy parsing of .ini files
* ``py.code``: dynamic code generation and introspection
NOTE: prior to the 1.4 release this distribution used to
contain py.test which is now its own package, see http://pytest.org
For questions and more information please visit http://pylib.readthedocs.org
Bugs and issues: https://github.com/pytest-dev/py
Authors: Holger Krekel and others, 2004-2016
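As a quick orientation for the vendored library, here is a minimal sketch of how the modules listed above are typically used (a sketch only, assuming the bundled py package is importable; the /tmp path and file names are hypothetical):

import py

# py.path: uniform path objects for the local filesystem
tmpdir = py.path.local("/tmp/py-demo")          # hypothetical directory
tmpdir.ensure(dir=True)                          # create it if missing
cfg = tmpdir.join("settings.ini")
cfg.write("[server]\nport = 8080\n")

# py.iniconfig: easy parsing of .ini files
ini = py.iniconfig.IniConfig(str(cfg))
print(ini["server"]["port"])                     # -> '8080'

# py.code: introspection and source helpers
src = py.code.Source("def f():\n    return 1\n")
print(src.lines)                                 # -> ['def f():', '    return 1']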
"""
py.test and pylib: rapid testing and development utils
this module uses apipkg.py for lazy-loading sub modules
and classes. The initpkg-dictionary below specifies
name->value mappings where value can be another namespace
dictionary or an import path.
(c) Holger Krekel and others, 2004-2014
"""
__version__ = '1.4.32'
from py import _apipkg
# so that py.error.* instances are picklable
import sys
sys.modules['py.error'] = _apipkg.AliasModule("py.error", "py._error", 'error')
_apipkg.initpkg(__name__, attr={'_apipkg': _apipkg}, exportdefs={
# access to all standard lib modules
'std': '._std:std',
# access to all posix errno's as classes
'error': '._error:error',
'_pydir' : '.__metainfo:pydir',
'version': 'py:__version__', # backward compatibility
# pytest-2.0 has a flat namespace, we use alias modules
# to keep old references compatible
'test' : 'pytest',
'test.collect' : 'pytest',
'test.cmdline' : 'pytest',
# hook into the top-level standard library
'process' : {
'__doc__' : '._process:__doc__',
'cmdexec' : '._process.cmdexec:cmdexec',
'kill' : '._process.killproc:kill',
'ForkedFunc' : '._process.forkedfunc:ForkedFunc',
},
'apipkg' : {
'initpkg' : '._apipkg:initpkg',
'ApiModule' : '._apipkg:ApiModule',
},
'iniconfig' : {
'IniConfig' : '._iniconfig:IniConfig',
'ParseError' : '._iniconfig:ParseError',
},
'path' : {
'__doc__' : '._path:__doc__',
'svnwc' : '._path.svnwc:SvnWCCommandPath',
'svnurl' : '._path.svnurl:SvnCommandPath',
'local' : '._path.local:LocalPath',
'SvnAuth' : '._path.svnwc:SvnAuth',
},
# python inspection/code-generation API
'code' : {
'__doc__' : '._code:__doc__',
'compile' : '._code.source:compile_',
'Source' : '._code.source:Source',
'Code' : '._code.code:Code',
'Frame' : '._code.code:Frame',
'ExceptionInfo' : '._code.code:ExceptionInfo',
'Traceback' : '._code.code:Traceback',
'getfslineno' : '._code.source:getfslineno',
'getrawcode' : '._code.code:getrawcode',
'patch_builtins' : '._code.code:patch_builtins',
'unpatch_builtins' : '._code.code:unpatch_builtins',
'_AssertionError' : '._code.assertion:AssertionError',
'_reinterpret_old' : '._code.assertion:reinterpret_old',
'_reinterpret' : '._code.assertion:reinterpret',
'_reprcompare' : '._code.assertion:_reprcompare',
'_format_explanation' : '._code.assertion:_format_explanation',
},
# backports and additions of builtins
'builtin' : {
'__doc__' : '._builtin:__doc__',
'enumerate' : '._builtin:enumerate',
'reversed' : '._builtin:reversed',
'sorted' : '._builtin:sorted',
'any' : '._builtin:any',
'all' : '._builtin:all',
'set' : '._builtin:set',
'frozenset' : '._builtin:frozenset',
'BaseException' : '._builtin:BaseException',
'GeneratorExit' : '._builtin:GeneratorExit',
'_sysex' : '._builtin:_sysex',
'print_' : '._builtin:print_',
'_reraise' : '._builtin:_reraise',
'_tryimport' : '._builtin:_tryimport',
'exec_' : '._builtin:exec_',
'_basestring' : '._builtin:_basestring',
'_totext' : '._builtin:_totext',
'_isbytes' : '._builtin:_isbytes',
'_istext' : '._builtin:_istext',
'_getimself' : '._builtin:_getimself',
'_getfuncdict' : '._builtin:_getfuncdict',
'_getcode' : '._builtin:_getcode',
'builtins' : '._builtin:builtins',
'execfile' : '._builtin:execfile',
'callable' : '._builtin:callable',
'bytes' : '._builtin:bytes',
'text' : '._builtin:text',
},
# input-output helping
'io' : {
'__doc__' : '._io:__doc__',
'dupfile' : '._io.capture:dupfile',
'TextIO' : '._io.capture:TextIO',
'BytesIO' : '._io.capture:BytesIO',
'FDCapture' : '._io.capture:FDCapture',
'StdCapture' : '._io.capture:StdCapture',
'StdCaptureFD' : '._io.capture:StdCaptureFD',
'TerminalWriter' : '._io.terminalwriter:TerminalWriter',
'ansi_print' : '._io.terminalwriter:ansi_print',
'get_terminal_width' : '._io.terminalwriter:get_terminal_width',
'saferepr' : '._io.saferepr:saferepr',
},
# small and mean xml/html generation
'xml' : {
'__doc__' : '._xmlgen:__doc__',
'html' : '._xmlgen:html',
'Tag' : '._xmlgen:Tag',
'raw' : '._xmlgen:raw',
'Namespace' : '._xmlgen:Namespace',
'escape' : '._xmlgen:escape',
},
'log' : {
# logging API ('producers' and 'consumers' connected via keywords)
'__doc__' : '._log:__doc__',
'_apiwarn' : '._log.warning:_apiwarn',
'Producer' : '._log.log:Producer',
'setconsumer' : '._log.log:setconsumer',
'_setstate' : '._log.log:setstate',
'_getstate' : '._log.log:getstate',
'Path' : '._log.log:Path',
'STDOUT' : '._log.log:STDOUT',
'STDERR' : '._log.log:STDERR',
'Syslog' : '._log.log:Syslog',
},
})
import py
pydir = py.path.local(py.__file__).dirpath()
"""
apipkg: control the exported namespace of a python package.
see http://pypi.python.org/pypi/apipkg
(c) holger krekel, 2009 - MIT license
"""
import os
import sys
from types import ModuleType
__version__ = '1.3.dev'
def _py_abspath(path):
"""
special version of abspath
that will leave paths from jython jars alone
"""
if path.startswith('__pyclasspath__'):
return path
else:
return os.path.abspath(path)
def initpkg(pkgname, exportdefs, attr=dict()):
""" initialize given package from the export definitions. """
oldmod = sys.modules.get(pkgname)
d = {}
f = getattr(oldmod, '__file__', None)
if f:
f = _py_abspath(f)
d['__file__'] = f
if hasattr(oldmod, '__version__'):
d['__version__'] = oldmod.__version__
if hasattr(oldmod, '__loader__'):
d['__loader__'] = oldmod.__loader__
if hasattr(oldmod, '__path__'):
d['__path__'] = [_py_abspath(p) for p in oldmod.__path__]
if '__doc__' not in exportdefs and getattr(oldmod, '__doc__', None):
d['__doc__'] = oldmod.__doc__
d.update(attr)
if hasattr(oldmod, "__dict__"):
oldmod.__dict__.update(d)
mod = ApiModule(pkgname, exportdefs, implprefix=pkgname, attr=d)
sys.modules[pkgname] = mod
def importobj(modpath, attrname):
module = __import__(modpath, None, None, ['__doc__'])
if not attrname:
return module
retval = module
names = attrname.split(".")
for x in names:
retval = getattr(retval, x)
return retval
class ApiModule(ModuleType):
def __docget(self):
try:
return self.__doc
except AttributeError:
if '__doc__' in self.__map__:
return self.__makeattr('__doc__')
def __docset(self, value):
self.__doc = value
__doc__ = property(__docget, __docset)
def __init__(self, name, importspec, implprefix=None, attr=None):
self.__name__ = name
self.__all__ = [x for x in importspec if x != '__onfirstaccess__']
self.__map__ = {}
self.__implprefix__ = implprefix or name
if attr:
for name, val in attr.items():
# print "setting", self.__name__, name, val
setattr(self, name, val)
for name, importspec in importspec.items():
if isinstance(importspec, dict):
subname = '%s.%s' % (self.__name__, name)
apimod = ApiModule(subname, importspec, implprefix)
sys.modules[subname] = apimod
setattr(self, name, apimod)
else:
parts = importspec.split(':')
modpath = parts.pop(0)
attrname = parts and parts[0] or ""
if modpath[0] == '.':
modpath = implprefix + modpath
if not attrname:
subname = '%s.%s' % (self.__name__, name)
apimod = AliasModule(subname, modpath)
sys.modules[subname] = apimod
if '.' not in name:
setattr(self, name, apimod)
else:
self.__map__[name] = (modpath, attrname)
def __repr__(self):
l = []
if hasattr(self, '__version__'):
l.append("version=" + repr(self.__version__))
if hasattr(self, '__file__'):
l.append('from ' + repr(self.__file__))
if l:
return '<ApiModule %r %s>' % (self.__name__, " ".join(l))
return '<ApiModule %r>' % (self.__name__,)
def __makeattr(self, name):
"""lazily compute value for name or raise AttributeError if unknown."""
# print "makeattr", self.__name__, name
target = None
if '__onfirstaccess__' in self.__map__:
target = self.__map__.pop('__onfirstaccess__')
importobj(*target)()
try:
modpath, attrname = self.__map__[name]
except KeyError:
if target is not None and name != '__onfirstaccess__':
# retry, onfirstaccess might have set attrs
return getattr(self, name)
raise AttributeError(name)
else:
result = importobj(modpath, attrname)
setattr(self, name, result)
try:
del self.__map__[name]
except KeyError:
pass # in a recursive-import situation a double-del can happen
return result
__getattr__ = __makeattr
def __dict__(self):
# force all the content of the module to be loaded when __dict__ is read
dictdescr = ModuleType.__dict__['__dict__']
dict = dictdescr.__get__(self)
if dict is not None:
hasattr(self, 'some')
for name in self.__all__:
try:
self.__makeattr(name)
except AttributeError:
pass
return dict
__dict__ = property(__dict__)
def AliasModule(modname, modpath, attrname=None):
mod = []
def getmod():
if not mod:
x = importobj(modpath, None)
if attrname is not None:
x = getattr(x, attrname)
mod.append(x)
return mod[0]
class AliasModule(ModuleType):
def __repr__(self):
x = modpath
if attrname:
x += "." + attrname
return '<AliasModule %r for %r>' % (modname, x)
def __getattribute__(self, name):
try:
return getattr(getmod(), name)
except ImportError:
return None
def __setattr__(self, name, value):
setattr(getmod(), name, value)
def __delattr__(self, name):
delattr(getmod(), name)
return AliasModule(str(modname))
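A minimal sketch of how a package can use the apipkg module above to expose a lazily imported namespace; the package name mypkg and its _impl submodule are hypothetical:

# mypkg/__init__.py (hypothetical package built on apipkg)
from py import _apipkg as apipkg   # the vendored module shown above

apipkg.initpkg(__name__, {
    # resolved from mypkg._impl only on first attribute access
    'hello': '._impl:hello',
    'sub': {
        'greet': '._impl:hello',   # nested dicts become child ApiModules
    },
})

# mypkg/_impl.py (hypothetical implementation module)
def hello():
    return "hello from the implementation"

After `import mypkg`, accessing `mypkg.hello` or `mypkg.sub.greet` triggers the real import lazily, which is the same pattern py/__init__.py uses for its own submodules above.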
import sys
try:
reversed = reversed
except NameError:
def reversed(sequence):
"""reversed(sequence) -> reverse iterator over values of the sequence
Return a reverse iterator
"""
if hasattr(sequence, '__reversed__'):
return sequence.__reversed__()
if not hasattr(sequence, '__getitem__'):
raise TypeError("argument to reversed() must be a sequence")
return reversed_iterator(sequence)
class reversed_iterator(object):
def __init__(self, seq):
self.seq = seq
self.remaining = len(seq)
def __iter__(self):
return self
def next(self):
i = self.remaining
if i > 0:
i -= 1
item = self.seq[i]
self.remaining = i
return item
raise StopIteration
def __length_hint__(self):
return self.remaining
try:
any = any
except NameError:
def any(iterable):
for x in iterable:
if x:
return True
return False
try:
all = all
except NameError:
def all(iterable):
for x in iterable:
if not x:
return False
return True
try:
sorted = sorted
except NameError:
builtin_cmp = cmp # need to use cmp as keyword arg
def sorted(iterable, cmp=None, key=None, reverse=0):
use_cmp = None
if key is not None:
if cmp is None:
def use_cmp(x, y):
return builtin_cmp(x[0], y[0])
else:
def use_cmp(x, y):
return cmp(x[0], y[0])
l = [(key(element), element) for element in iterable]
else:
if cmp is not None:
use_cmp = cmp
l = list(iterable)
if use_cmp is not None:
l.sort(use_cmp)
else:
l.sort()
if reverse:
l.reverse()
if key is not None:
return [element for (_, element) in l]
return l
try:
set, frozenset = set, frozenset
except NameError:
from sets import set, frozenset
# pass through
enumerate = enumerate
try:
BaseException = BaseException
except NameError:
BaseException = Exception
try:
GeneratorExit = GeneratorExit
except NameError:
class GeneratorExit(Exception):
""" This exception is never raised, it is there to make it possible to
write code compatible with CPython 2.5 even in lower CPython
versions."""
pass
GeneratorExit.__module__ = 'exceptions'
_sysex = (KeyboardInterrupt, SystemExit, MemoryError, GeneratorExit)
try:
callable = callable
except NameError:
def callable(obj):
return hasattr(obj, "__call__")
if sys.version_info >= (3, 0):
exec ("print_ = print ; exec_=exec")
import builtins
# some backward compatibility helpers
_basestring = str
def _totext(obj, encoding=None, errors=None):
if isinstance(obj, bytes):
if errors is None:
obj = obj.decode(encoding)
else:
obj = obj.decode(encoding, errors)
elif not isinstance(obj, str):
obj = str(obj)
return obj
def _isbytes(x):
return isinstance(x, bytes)
def _istext(x):
return isinstance(x, str)
text = str
bytes = bytes
def _getimself(function):
return getattr(function, '__self__', None)
def _getfuncdict(function):
return getattr(function, "__dict__", None)
def _getcode(function):
return getattr(function, "__code__", None)
def execfile(fn, globs=None, locs=None):
if globs is None:
back = sys._getframe(1)
globs = back.f_globals
locs = back.f_locals
del back
elif locs is None:
locs = globs
fp = open(fn, "r")
try:
source = fp.read()
finally:
fp.close()
co = compile(source, fn, "exec", dont_inherit=True)
exec_(co, globs, locs)
else:
import __builtin__ as builtins
_totext = unicode
_basestring = basestring
text = unicode
bytes = str
execfile = execfile
callable = callable
def _isbytes(x):
return isinstance(x, str)
def _istext(x):
return isinstance(x, unicode)
def _getimself(function):
return getattr(function, 'im_self', None)
def _getfuncdict(function):
return getattr(function, "__dict__", None)
def _getcode(function):
try:
return getattr(function, "__code__")
except AttributeError:
return getattr(function, "func_code", None)
def print_(*args, **kwargs):
""" minimal backport of py3k print statement. """
sep = ' '
if 'sep' in kwargs:
sep = kwargs.pop('sep')
end = '\n'
if 'end' in kwargs:
end = kwargs.pop('end')
file = 'file' in kwargs and kwargs.pop('file') or sys.stdout
if kwargs:
args = ", ".join([str(x) for x in kwargs])
raise TypeError("invalid keyword arguments: %s" % args)
at_start = True
for x in args:
if not at_start:
file.write(sep)
file.write(str(x))
at_start = False
file.write(end)
def exec_(obj, globals=None, locals=None):
""" minimal backport of py3k exec statement. """
__tracebackhide__ = True
if globals is None:
frame = sys._getframe(1)
globals = frame.f_globals
if locals is None:
locals = frame.f_locals
elif locals is None:
locals = globals
exec2(obj, globals, locals)
if sys.version_info >= (3, 0):
def _reraise(cls, val, tb):
__tracebackhide__ = True
assert hasattr(val, '__traceback__')
raise cls.with_traceback(val, tb)
else:
exec ("""
def _reraise(cls, val, tb):
__tracebackhide__ = True
raise cls, val, tb
def exec2(obj, globals, locals):
__tracebackhide__ = True
exec obj in globals, locals
""")
def _tryimport(*names):
""" return the first successfully imported module. """
assert names
for name in names:
try:
__import__(name)
except ImportError:
excinfo = sys.exc_info()
else:
return sys.modules[name]
_reraise(*excinfo)
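The helpers above are exposed as py.builtin.* through the exportdefs in py/__init__.py; a small sketch of using them portably across Python 2 and 3 (assuming the vendored py package is importable):

from py import builtin

builtin.print_("answer:", 42, end="\n")      # backported print function

ns = {}
builtin.exec_("x = 6 * 7", ns)               # portable exec
assert ns["x"] == 42

# _tryimport returns the first module that imports successfully
json_mod = builtin._tryimport("simplejson", "json")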
""" python inspection/code generation API """
"""
Find intermediate evaluation results in assert statements through builtin AST.
This should replace _assertionold.py eventually.
"""
import sys
import ast
import py
from py._code.assertion import _format_explanation, BuiltinAssertionError
if sys.platform.startswith("java") and sys.version_info < (2, 5, 2):
# See http://bugs.jython.org/issue1497
_exprs = ("BoolOp", "BinOp", "UnaryOp", "Lambda", "IfExp", "Dict",
"ListComp", "GeneratorExp", "Yield", "Compare", "Call",
"Repr", "Num", "Str", "Attribute", "Subscript", "Name",
"List", "Tuple")
_stmts = ("FunctionDef", "ClassDef", "Return", "Delete", "Assign",
"AugAssign", "Print", "For", "While", "If", "With", "Raise",
"TryExcept", "TryFinally", "Assert", "Import", "ImportFrom",
"Exec", "Global", "Expr", "Pass", "Break", "Continue")
_expr_nodes = set(getattr(ast, name) for name in _exprs)
_stmt_nodes = set(getattr(ast, name) for name in _stmts)
def _is_ast_expr(node):
return node.__class__ in _expr_nodes
def _is_ast_stmt(node):
return node.__class__ in _stmt_nodes
else:
def _is_ast_expr(node):
return isinstance(node, ast.expr)
def _is_ast_stmt(node):
return isinstance(node, ast.stmt)
class Failure(Exception):
"""Error found while interpreting AST."""
def __init__(self, explanation=""):
self.cause = sys.exc_info()
self.explanation = explanation
def interpret(source, frame, should_fail=False):
mod = ast.parse(source)
visitor = DebugInterpreter(frame)
try:
visitor.visit(mod)
except Failure:
failure = sys.exc_info()[1]
return getfailure(failure)
if should_fail:
return ("(assertion failed, but when it was re-run for "
"printing intermediate values, it did not fail. Suggestions: "
"compute assert expression before the assert or use --no-assert)")
def run(offending_line, frame=None):
if frame is None:
frame = py.code.Frame(sys._getframe(1))
return interpret(offending_line, frame)
def getfailure(failure):
explanation = _format_explanation(failure.explanation)
value = failure.cause[1]
if str(value):
lines = explanation.splitlines()
if not lines:
lines.append("")
lines[0] += " << %s" % (value,)
explanation = "\n".join(lines)
text = "%s: %s" % (failure.cause[0].__name__, explanation)
if text.startswith("AssertionError: assert "):
text = text[16:]
return text
operator_map = {
ast.BitOr : "|",
ast.BitXor : "^",
ast.BitAnd : "&",
ast.LShift : "<<",
ast.RShift : ">>",
ast.Add : "+",
ast.Sub : "-",
ast.Mult : "*",
ast.Div : "/",
ast.FloorDiv : "//",
ast.Mod : "%",
ast.Eq : "==",
ast.NotEq : "!=",
ast.Lt : "<",
ast.LtE : "<=",
ast.Gt : ">",
ast.GtE : ">=",
ast.Pow : "**",
ast.Is : "is",
ast.IsNot : "is not",
ast.In : "in",
ast.NotIn : "not in"
}
unary_map = {
ast.Not : "not %s",
ast.Invert : "~%s",
ast.USub : "-%s",
ast.UAdd : "+%s"
}
class DebugInterpreter(ast.NodeVisitor):
"""Interpret AST nodes to gleam useful debugging information. """
def __init__(self, frame):
self.frame = frame
def generic_visit(self, node):
# Fallback when we don't have a special implementation.
if _is_ast_expr(node):
mod = ast.Expression(node)
co = self._compile(mod)
try:
result = self.frame.eval(co)
except Exception:
raise Failure()
explanation = self.frame.repr(result)
return explanation, result
elif _is_ast_stmt(node):
mod = ast.Module([node])
co = self._compile(mod, "exec")
try:
self.frame.exec_(co)
except Exception:
raise Failure()
return None, None
else:
raise AssertionError("can't handle %s" %(node,))
def _compile(self, source, mode="eval"):
return compile(source, "<assertion interpretation>", mode)
def visit_Expr(self, expr):
return self.visit(expr.value)
def visit_Module(self, mod):
for stmt in mod.body:
self.visit(stmt)
def visit_Name(self, name):
explanation, result = self.generic_visit(name)
# See if the name is local.
source = "%r in locals() is not globals()" % (name.id,)
co = self._compile(source)
try:
local = self.frame.eval(co)
except Exception:
# have to assume it isn't
local = False
if not local:
return name.id, result
return explanation, result
def visit_Compare(self, comp):
left = comp.left
left_explanation, left_result = self.visit(left)
for op, next_op in zip(comp.ops, comp.comparators):
next_explanation, next_result = self.visit(next_op)
op_symbol = operator_map[op.__class__]
explanation = "%s %s %s" % (left_explanation, op_symbol,
next_explanation)
source = "__exprinfo_left %s __exprinfo_right" % (op_symbol,)
co = self._compile(source)
try:
result = self.frame.eval(co, __exprinfo_left=left_result,
__exprinfo_right=next_result)
except Exception:
raise Failure(explanation)
try:
if not result:
break
except KeyboardInterrupt:
raise
except:
break
left_explanation, left_result = next_explanation, next_result
rcomp = py.code._reprcompare
if rcomp:
res = rcomp(op_symbol, left_result, next_result)
if res:
explanation = res
return explanation, result
def visit_BoolOp(self, boolop):
is_or = isinstance(boolop.op, ast.Or)
explanations = []
for operand in boolop.values:
explanation, result = self.visit(operand)
explanations.append(explanation)
if result == is_or:
break
name = is_or and " or " or " and "
explanation = "(" + name.join(explanations) + ")"
return explanation, result
def visit_UnaryOp(self, unary):
pattern = unary_map[unary.op.__class__]
operand_explanation, operand_result = self.visit(unary.operand)
explanation = pattern % (operand_explanation,)
co = self._compile(pattern % ("__exprinfo_expr",))
try:
result = self.frame.eval(co, __exprinfo_expr=operand_result)
except Exception:
raise Failure(explanation)
return explanation, result
def visit_BinOp(self, binop):
left_explanation, left_result = self.visit(binop.left)
right_explanation, right_result = self.visit(binop.right)
symbol = operator_map[binop.op.__class__]
explanation = "(%s %s %s)" % (left_explanation, symbol,
right_explanation)
source = "__exprinfo_left %s __exprinfo_right" % (symbol,)
co = self._compile(source)
try:
result = self.frame.eval(co, __exprinfo_left=left_result,
__exprinfo_right=right_result)
except Exception:
raise Failure(explanation)
return explanation, result
def visit_Call(self, call):
func_explanation, func = self.visit(call.func)
arg_explanations = []
ns = {"__exprinfo_func" : func}
arguments = []
for arg in call.args:
arg_explanation, arg_result = self.visit(arg)
arg_name = "__exprinfo_%s" % (len(ns),)
ns[arg_name] = arg_result
arguments.append(arg_name)
arg_explanations.append(arg_explanation)
for keyword in call.keywords:
arg_explanation, arg_result = self.visit(keyword.value)
arg_name = "__exprinfo_%s" % (len(ns),)
ns[arg_name] = arg_result
keyword_source = "%s=%%s" % (keyword.arg)
arguments.append(keyword_source % (arg_name,))
arg_explanations.append(keyword_source % (arg_explanation,))
if call.starargs:
arg_explanation, arg_result = self.visit(call.starargs)
arg_name = "__exprinfo_star"
ns[arg_name] = arg_result
arguments.append("*%s" % (arg_name,))
arg_explanations.append("*%s" % (arg_explanation,))
if call.kwargs:
arg_explanation, arg_result = self.visit(call.kwargs)
arg_name = "__exprinfo_kwds"
ns[arg_name] = arg_result
arguments.append("**%s" % (arg_name,))
arg_explanations.append("**%s" % (arg_explanation,))
args_explained = ", ".join(arg_explanations)
explanation = "%s(%s)" % (func_explanation, args_explained)
args = ", ".join(arguments)
source = "__exprinfo_func(%s)" % (args,)
co = self._compile(source)
try:
result = self.frame.eval(co, **ns)
except Exception:
raise Failure(explanation)
pattern = "%s\n{%s = %s\n}"
rep = self.frame.repr(result)
explanation = pattern % (rep, rep, explanation)
return explanation, result
def _is_builtin_name(self, name):
pattern = "%r not in globals() and %r not in locals()"
source = pattern % (name.id, name.id)
co = self._compile(source)
try:
return self.frame.eval(co)
except Exception:
return False
def visit_Attribute(self, attr):
if not isinstance(attr.ctx, ast.Load):
return self.generic_visit(attr)
source_explanation, source_result = self.visit(attr.value)
explanation = "%s.%s" % (source_explanation, attr.attr)
source = "__exprinfo_expr.%s" % (attr.attr,)
co = self._compile(source)
try:
result = self.frame.eval(co, __exprinfo_expr=source_result)
except Exception:
raise Failure(explanation)
explanation = "%s\n{%s = %s.%s\n}" % (self.frame.repr(result),
self.frame.repr(result),
source_explanation, attr.attr)
# Check if the attr is from an instance.
source = "%r in getattr(__exprinfo_expr, '__dict__', {})"
source = source % (attr.attr,)
co = self._compile(source)
try:
from_instance = self.frame.eval(co, __exprinfo_expr=source_result)
except Exception:
from_instance = True
if from_instance:
rep = self.frame.repr(result)
pattern = "%s\n{%s = %s\n}"
explanation = pattern % (rep, rep, explanation)
return explanation, result
def visit_Assert(self, assrt):
test_explanation, test_result = self.visit(assrt.test)
if test_explanation.startswith("False\n{False =") and \
test_explanation.endswith("\n"):
test_explanation = test_explanation[15:-2]
explanation = "assert %s" % (test_explanation,)
if not test_result:
try:
raise BuiltinAssertionError
except Exception:
raise Failure(explanation)
return explanation, test_result
def visit_Assign(self, assign):
value_explanation, value_result = self.visit(assign.value)
explanation = "... = %s" % (value_explanation,)
name = ast.Name("__exprinfo_expr", ast.Load(),
lineno=assign.value.lineno,
col_offset=assign.value.col_offset)
new_assign = ast.Assign(assign.targets, name, lineno=assign.lineno,
col_offset=assign.col_offset)
mod = ast.Module([new_assign])
co = self._compile(mod, "exec")
try:
self.frame.exec_(co, __exprinfo_expr=value_result)
except Exception:
raise Failure(explanation)
return explanation, value_result
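A sketch of driving the reinterpreter above directly; normally py/pytest invokes it when an assert fails so the expression is re-evaluated to show intermediate values (assumes the vendored py package on a Python version this code supports):

import sys
import py
from py._code._assertionnew import interpret

def explain_failure():
    x, y = 1, 2
    frame = py.code.Frame(sys._getframe())
    # re-run the failing expression and build an explanation string
    return interpret("assert x + 1 == y * 2", frame)

print(explain_failure())     # roughly: assert (1 + 1) == (2 * 2)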
import py
import sys, inspect
from compiler import parse, ast, pycodegen
from py._code.assertion import BuiltinAssertionError, _format_explanation
passthroughex = py.builtin._sysex
class Failure:
def __init__(self, node):
self.exc, self.value, self.tb = sys.exc_info()
self.node = node
class View(object):
"""View base class.
If C is a subclass of View, then C(x) creates a proxy object around
the object x. The actual class of the proxy is not C in general,
but a *subclass* of C determined by the rules below. To avoid confusion
we call view class the class of the proxy (a subclass of C, so of View)
and object class the class of x.
Attributes and methods not found in the proxy are automatically read on x.
Other operations like setting attributes are performed on the proxy, as
determined by its view class. The object x is available from the proxy
as its __obj__ attribute.
The view class selection is determined by the __view__ tuples and the
optional __viewkey__ method. By default, the selected view class is the
most specific subclass of C whose __view__ mentions the class of x.
If no such subclass is found, the search proceeds with the parent
object classes. For example, C(True) will first look for a subclass
of C with __view__ = (..., bool, ...) and only if it doesn't find any
look for one with __view__ = (..., int, ...), and then ..., object,...
If everything fails the class C itself is considered to be the default.
Alternatively, the view class selection can be driven by another aspect
of the object x, instead of the class of x, by overriding __viewkey__.
See last example at the end of this module.
"""
_viewcache = {}
__view__ = ()
def __new__(rootclass, obj, *args, **kwds):
self = object.__new__(rootclass)
self.__obj__ = obj
self.__rootclass__ = rootclass
key = self.__viewkey__()
try:
self.__class__ = self._viewcache[key]
except KeyError:
self.__class__ = self._selectsubclass(key)
return self
def __getattr__(self, attr):
# attributes not found in the normal hierarchy rooted on View
# are looked up in the object's real class
return getattr(self.__obj__, attr)
def __viewkey__(self):
return self.__obj__.__class__
def __matchkey__(self, key, subclasses):
if inspect.isclass(key):
keys = inspect.getmro(key)
else:
keys = [key]
for key in keys:
result = [C for C in subclasses if key in C.__view__]
if result:
return result
return []
def _selectsubclass(self, key):
subclasses = list(enumsubclasses(self.__rootclass__))
for C in subclasses:
if not isinstance(C.__view__, tuple):
C.__view__ = (C.__view__,)
choices = self.__matchkey__(key, subclasses)
if not choices:
return self.__rootclass__
elif len(choices) == 1:
return choices[0]
else:
# combine the multiple choices
return type('?', tuple(choices), {})
def __repr__(self):
return '%s(%r)' % (self.__rootclass__.__name__, self.__obj__)
def enumsubclasses(cls):
for subcls in cls.__subclasses__():
for subsubclass in enumsubclasses(subcls):
yield subsubclass
yield cls
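A small sketch of the view-class selection rules described in the View docstring; the Shower classes are hypothetical (note that _assertionold itself only runs on Python 2 because of the compiler-module import above):

class Shower(View):
    def show(self):
        return "object %r" % (self.__obj__,)

class IntShower(Shower):
    __view__ = int                 # chosen when the wrapped object is an int
    def show(self):
        return "int %d" % (self.__obj__,)

class BoolShower(IntShower):
    __view__ = bool                # more specific: bool is matched before int
    def show(self):
        return "bool %s" % (self.__obj__,)

Shower("hi").show()    # -> "object 'hi'"  (no match, falls back to Shower)
Shower(3).show()       # -> "int 3"
Shower(True).show()    # -> "bool True"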
class Interpretable(View):
"""A parse tree node with a few extra methods."""
explanation = None
def is_builtin(self, frame):
return False
def eval(self, frame):
# fall-back for unknown expression nodes
try:
expr = ast.Expression(self.__obj__)
expr.filename = '<eval>'
self.__obj__.filename = '<eval>'
co = pycodegen.ExpressionCodeGenerator(expr).getCode()
result = frame.eval(co)
except passthroughex:
raise
except:
raise Failure(self)
self.result = result
self.explanation = self.explanation or frame.repr(self.result)
def run(self, frame):
# fall-back for unknown statement nodes
try:
expr = ast.Module(None, ast.Stmt([self.__obj__]))
expr.filename = '<run>'
co = pycodegen.ModuleCodeGenerator(expr).getCode()
frame.exec_(co)
except passthroughex:
raise
except:
raise Failure(self)
def nice_explanation(self):
return _format_explanation(self.explanation)
class Name(Interpretable):
__view__ = ast.Name
def is_local(self, frame):
source = '%r in locals() is not globals()' % self.name
try:
return frame.is_true(frame.eval(source))
except passthroughex:
raise
except:
return False
def is_global(self, frame):
source = '%r in globals()' % self.name
try:
return frame.is_true(frame.eval(source))
except passthroughex:
raise
except:
return False
def is_builtin(self, frame):
source = '%r not in locals() and %r not in globals()' % (
self.name, self.name)
try:
return frame.is_true(frame.eval(source))
except passthroughex:
raise
except:
return False
def eval(self, frame):
super(Name, self).eval(frame)
if not self.is_local(frame):
self.explanation = self.name
class Compare(Interpretable):
__view__ = ast.Compare
def eval(self, frame):
expr = Interpretable(self.expr)
expr.eval(frame)
for operation, expr2 in self.ops:
if hasattr(self, 'result'):
# shortcutting in chained expressions
if not frame.is_true(self.result):
break
expr2 = Interpretable(expr2)
expr2.eval(frame)
self.explanation = "%s %s %s" % (
expr.explanation, operation, expr2.explanation)
source = "__exprinfo_left %s __exprinfo_right" % operation
try:
self.result = frame.eval(source,
__exprinfo_left=expr.result,
__exprinfo_right=expr2.result)
except passthroughex:
raise
except:
raise Failure(self)
expr = expr2
class And(Interpretable):
__view__ = ast.And
def eval(self, frame):
explanations = []
for expr in self.nodes:
expr = Interpretable(expr)
expr.eval(frame)
explanations.append(expr.explanation)
self.result = expr.result
if not frame.is_true(expr.result):
break
self.explanation = '(' + ' and '.join(explanations) + ')'
class Or(Interpretable):
__view__ = ast.Or
def eval(self, frame):
explanations = []
for expr in self.nodes:
expr = Interpretable(expr)
expr.eval(frame)
explanations.append(expr.explanation)
self.result = expr.result
if frame.is_true(expr.result):
break
self.explanation = '(' + ' or '.join(explanations) + ')'
# == Unary operations ==
keepalive = []
for astclass, astpattern in {
ast.Not : 'not __exprinfo_expr',
ast.Invert : '(~__exprinfo_expr)',
}.items():
class UnaryArith(Interpretable):
__view__ = astclass
def eval(self, frame, astpattern=astpattern):
expr = Interpretable(self.expr)
expr.eval(frame)
self.explanation = astpattern.replace('__exprinfo_expr',
expr.explanation)
try:
self.result = frame.eval(astpattern,
__exprinfo_expr=expr.result)
except passthroughex:
raise
except:
raise Failure(self)
keepalive.append(UnaryArith)
# == Binary operations ==
for astclass, astpattern in {
ast.Add : '(__exprinfo_left + __exprinfo_right)',
ast.Sub : '(__exprinfo_left - __exprinfo_right)',
ast.Mul : '(__exprinfo_left * __exprinfo_right)',
ast.Div : '(__exprinfo_left / __exprinfo_right)',
ast.Mod : '(__exprinfo_left % __exprinfo_right)',
ast.Power : '(__exprinfo_left ** __exprinfo_right)',
}.items():
class BinaryArith(Interpretable):
__view__ = astclass
def eval(self, frame, astpattern=astpattern):
left = Interpretable(self.left)
left.eval(frame)
right = Interpretable(self.right)
right.eval(frame)
self.explanation = (astpattern
.replace('__exprinfo_left', left .explanation)
.replace('__exprinfo_right', right.explanation))
try:
self.result = frame.eval(astpattern,
__exprinfo_left=left.result,
__exprinfo_right=right.result)
except passthroughex:
raise
except:
raise Failure(self)
keepalive.append(BinaryArith)
class CallFunc(Interpretable):
__view__ = ast.CallFunc
def is_bool(self, frame):
source = 'isinstance(__exprinfo_value, bool)'
try:
return frame.is_true(frame.eval(source,
__exprinfo_value=self.result))
except passthroughex:
raise
except:
return False
def eval(self, frame):
node = Interpretable(self.node)
node.eval(frame)
explanations = []
vars = {'__exprinfo_fn': node.result}
source = '__exprinfo_fn('
for a in self.args:
if isinstance(a, ast.Keyword):
keyword = a.name
a = a.expr
else:
keyword = None
a = Interpretable(a)
a.eval(frame)
argname = '__exprinfo_%d' % len(vars)
vars[argname] = a.result
if keyword is None:
source += argname + ','
explanations.append(a.explanation)
else:
source += '%s=%s,' % (keyword, argname)
explanations.append('%s=%s' % (keyword, a.explanation))
if self.star_args:
star_args = Interpretable(self.star_args)
star_args.eval(frame)
argname = '__exprinfo_star'
vars[argname] = star_args.result
source += '*' + argname + ','
explanations.append('*' + star_args.explanation)
if self.dstar_args:
dstar_args = Interpretable(self.dstar_args)
dstar_args.eval(frame)
argname = '__exprinfo_kwds'
vars[argname] = dstar_args.result
source += '**' + argname + ','
explanations.append('**' + dstar_args.explanation)
self.explanation = "%s(%s)" % (
node.explanation, ', '.join(explanations))
if source.endswith(','):
source = source[:-1]
source += ')'
try:
self.result = frame.eval(source, **vars)
except passthroughex:
raise
except:
raise Failure(self)
if not node.is_builtin(frame) or not self.is_bool(frame):
r = frame.repr(self.result)
self.explanation = '%s\n{%s = %s\n}' % (r, r, self.explanation)
class Getattr(Interpretable):
__view__ = ast.Getattr
def eval(self, frame):
expr = Interpretable(self.expr)
expr.eval(frame)
source = '__exprinfo_expr.%s' % self.attrname
try:
self.result = frame.eval(source, __exprinfo_expr=expr.result)
except passthroughex:
raise
except:
raise Failure(self)
self.explanation = '%s.%s' % (expr.explanation, self.attrname)
# if the attribute comes from the instance, its value is interesting
source = ('hasattr(__exprinfo_expr, "__dict__") and '
'%r in __exprinfo_expr.__dict__' % self.attrname)
try:
from_instance = frame.is_true(
frame.eval(source, __exprinfo_expr=expr.result))
except passthroughex:
raise
except:
from_instance = True
if from_instance:
r = frame.repr(self.result)
self.explanation = '%s\n{%s = %s\n}' % (r, r, self.explanation)
# == Re-interpretation of full statements ==
class Assert(Interpretable):
__view__ = ast.Assert
def run(self, frame):
test = Interpretable(self.test)
test.eval(frame)
# simplify 'assert False where False = ...'
if (test.explanation.startswith('False\n{False = ') and
test.explanation.endswith('\n}')):
test.explanation = test.explanation[15:-2]
# print the result as 'assert <explanation>'
self.result = test.result
self.explanation = 'assert ' + test.explanation
if not frame.is_true(test.result):
try:
raise BuiltinAssertionError
except passthroughex:
raise
except:
raise Failure(self)
class Assign(Interpretable):
__view__ = ast.Assign
def run(self, frame):
expr = Interpretable(self.expr)
expr.eval(frame)
self.result = expr.result
self.explanation = '... = ' + expr.explanation
# fall-back-run the rest of the assignment
ass = ast.Assign(self.nodes, ast.Name('__exprinfo_expr'))
mod = ast.Module(None, ast.Stmt([ass]))
mod.filename = '<run>'
co = pycodegen.ModuleCodeGenerator(mod).getCode()
try:
frame.exec_(co, __exprinfo_expr=expr.result)
except passthroughex:
raise
except:
raise Failure(self)
class Discard(Interpretable):
__view__ = ast.Discard
def run(self, frame):
expr = Interpretable(self.expr)
expr.eval(frame)
self.result = expr.result
self.explanation = expr.explanation
class Stmt(Interpretable):
__view__ = ast.Stmt
def run(self, frame):
for stmt in self.nodes:
stmt = Interpretable(stmt)
stmt.run(frame)
def report_failure(e):
explanation = e.node.nice_explanation()
if explanation:
explanation = ", in: " + explanation
else:
explanation = ""
sys.stdout.write("%s: %s%s\n" % (e.exc.__name__, e.value, explanation))
def check(s, frame=None):
if frame is None:
frame = sys._getframe(1)
frame = py.code.Frame(frame)
expr = parse(s, 'eval')
assert isinstance(expr, ast.Expression)
node = Interpretable(expr.node)
try:
node.eval(frame)
except passthroughex:
raise
except Failure:
e = sys.exc_info()[1]
report_failure(e)
else:
if not frame.is_true(node.result):
sys.stderr.write("assertion failed: %s\n" % node.nice_explanation())
###########################################################
# API / Entry points
# #########################################################
def interpret(source, frame, should_fail=False):
module = Interpretable(parse(source, 'exec').node)
#print "got module", module
if isinstance(frame, py.std.types.FrameType):
frame = py.code.Frame(frame)
try:
module.run(frame)
except Failure:
e = sys.exc_info()[1]
return getfailure(e)
except passthroughex:
raise
except:
import traceback
traceback.print_exc()
if should_fail:
return ("(assertion failed, but when it was re-run for "
"printing intermediate values, it did not fail. Suggestions: "
"compute assert expression before the assert or use --nomagic)")
else:
return None
def getmsg(excinfo):
if isinstance(excinfo, tuple):
excinfo = py.code.ExceptionInfo(excinfo)
#frame, line = gettbline(tb)
#frame = py.code.Frame(frame)
#return interpret(line, frame)
tb = excinfo.traceback[-1]
source = str(tb.statement).strip()
x = interpret(source, tb.frame, should_fail=True)
if not isinstance(x, str):
raise TypeError("interpret returned non-string %r" % (x,))
return x
def getfailure(e):
explanation = e.node.nice_explanation()
if str(e.value):
lines = explanation.split('\n')
lines[0] += " << %s" % (e.value,)
explanation = '\n'.join(lines)
text = "%s: %s" % (e.exc.__name__, explanation)
if text.startswith('AssertionError: assert '):
text = text[16:]
return text
def run(s, frame=None):
if frame is None:
frame = sys._getframe(1)
frame = py.code.Frame(frame)
module = Interpretable(parse(s, 'exec').node)
try:
module.run(frame)
except Failure:
e = sys.exc_info()[1]
report_failure(e)
if __name__ == '__main__':
# example:
def f():
return 5
def g():
return 3
def h(x):
return 'never'
check("f() * g() == 5")
check("not f()")
check("not (f() and g() or 0)")
check("f() == g()")
i = 4
check("i == f()")
check("len(f()) == 0")
check("isinstance(2+3+4, float)")
run("x = i")
check("x == 5")
run("assert not f(), 'oops'")
run("a, b, c = 1, 2")
run("a, b, c = f()")
check("max([f(),g()]) == 4")
check("'hello'[g()] == 'h'")
run("'guk%d' % h(f())")
# copied from python-2.7.3's traceback.py
# CHANGES:
# - some_str is replaced, trying to create unicode strings
#
import types
def format_exception_only(etype, value):
"""Format the exception part of a traceback.
The arguments are the exception type and value such as given by
sys.last_type and sys.last_value. The return value is a list of
strings, each ending in a newline.
Normally, the list contains a single string; however, for
SyntaxError exceptions, it contains several lines that (when
printed) display detailed information about where the syntax
error occurred.
The message indicating which exception occurred is always the last
string in the list.
"""
# An instance should not have a meaningful value parameter, but
# sometimes does, particularly for string exceptions, such as
# >>> raise string1, string2 # deprecated
#
# Clear these out first because issubclass(string1, SyntaxError)
# would throw another exception and mask the original problem.
if (isinstance(etype, BaseException) or
isinstance(etype, types.InstanceType) or
etype is None or type(etype) is str):
return [_format_final_exc_line(etype, value)]
stype = etype.__name__
if not issubclass(etype, SyntaxError):
return [_format_final_exc_line(stype, value)]
# It was a syntax error; show exactly where the problem was found.
lines = []
try:
msg, (filename, lineno, offset, badline) = value.args
except Exception:
pass
else:
filename = filename or "<string>"
lines.append(' File "%s", line %d\n' % (filename, lineno))
if badline is not None:
lines.append(' %s\n' % badline.strip())
if offset is not None:
caretspace = badline.rstrip('\n')[:offset].lstrip()
# non-space whitespace (like tabs) must be kept for alignment
caretspace = ((c.isspace() and c or ' ') for c in caretspace)
# only three spaces to account for offset1 == pos 0
lines.append(' %s^\n' % ''.join(caretspace))
value = msg
lines.append(_format_final_exc_line(stype, value))
return lines
def _format_final_exc_line(etype, value):
"""Return a list of a single line -- normal case for format_exception_only"""
valuestr = _some_str(value)
if value is None or not valuestr:
line = "%s\n" % etype
else:
line = "%s: %s\n" % (etype, valuestr)
return line
def _some_str(value):
try:
return unicode(value)
except Exception:
try:
return str(value)
except Exception:
pass
return '<unprintable %s object>' % type(value).__name__
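A brief sketch of the formatter above in use (Python 2 only, since it relies on types.InstanceType and unicode; the "<demo>" source is hypothetical):

import sys

try:
    compile("def broken(:\n", "<demo>", "exec")
except SyntaxError:
    etype, value = sys.exc_info()[:2]
    # prints the 'File "<demo>", line 1' header, the bad line, a caret,
    # and finally the "SyntaxError: ..." line
    for line in format_exception_only(etype, value):
        sys.stdout.write(line)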
import sys
import py
BuiltinAssertionError = py.builtin.builtins.AssertionError
_reprcompare = None # if set, will be called by assert reinterp for comparison ops
def _format_explanation(explanation):
"""This formats an explanation
Normally all embedded newlines are escaped, however there are
three exceptions: \n{, \n} and \n~. The first two are intended to
cover nested explanations, see function and attribute explanations
for examples (.visit_Call(), visit_Attribute()). The last one is
for when one explanation needs to span multiple lines, e.g. when
displaying diffs.
"""
raw_lines = (explanation or '').split('\n')
# escape newlines not followed by {, } and ~
lines = [raw_lines[0]]
for l in raw_lines[1:]:
if l.startswith('{') or l.startswith('}') or l.startswith('~'):
lines.append(l)
else:
lines[-1] += '\\n' + l
result = lines[:1]
stack = [0]
stackcnt = [0]
for line in lines[1:]:
if line.startswith('{'):
if stackcnt[-1]:
s = 'and '
else:
s = 'where '
stack.append(len(result))
stackcnt[-1] += 1
stackcnt.append(0)
result.append(' +' + ' '*(len(stack)-1) + s + line[1:])
elif line.startswith('}'):
assert line.startswith('}')
stack.pop()
stackcnt.pop()
result[stack[-1]] += line[1:]
else:
assert line.startswith('~')
result.append(' '*len(stack) + line[1:])
assert len(stack) == 1
return '\n'.join(result)
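For reference, a tiny sketch of the nesting syntax handled above (hypothetical explanation string):

explanation = "False\n{False = f()\n}"
print(_format_explanation(explanation))
# prints, roughly:
# False
#  + where False = f()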
class AssertionError(BuiltinAssertionError):
def __init__(self, *args):
BuiltinAssertionError.__init__(self, *args)
if args:
try:
self.msg = str(args[0])
except py.builtin._sysex:
raise
except:
self.msg = "<[broken __repr__] %s at %0xd>" %(
args[0].__class__, id(args[0]))
else:
f = py.code.Frame(sys._getframe(1))
try:
source = f.code.fullsource
if source is not None:
try:
source = source.getstatement(f.lineno, assertion=True)
except IndexError:
source = None
else:
source = str(source.deindent()).strip()
except py.error.ENOENT:
source = None
# this can also occur during reinterpretation, when the
# co_filename is set to "<run>".
if source:
self.msg = reinterpret(source, f, should_fail=True)
else:
self.msg = "<could not determine information>"
if not self.args:
self.args = (self.msg,)
if sys.version_info > (3, 0):
AssertionError.__module__ = "builtins"
reinterpret_old = "old reinterpretation not available for py3"
else:
from py._code._assertionold import interpret as reinterpret_old
if sys.version_info >= (2, 6) or (sys.platform.startswith("java")):
from py._code._assertionnew import interpret as reinterpret
else:
reinterpret = reinterpret_old
from __future__ import generators
from bisect import bisect_right
import sys
import inspect, tokenize
import py
from types import ModuleType
cpy_compile = compile
try:
import _ast
from _ast import PyCF_ONLY_AST as _AST_FLAG
except ImportError:
_AST_FLAG = 0
_ast = None
class Source(object):
""" a immutable object holding a source code fragment,
possibly deindenting it.
"""
_compilecounter = 0
def __init__(self, *parts, **kwargs):
self.lines = lines = []
de = kwargs.get('deindent', True)
rstrip = kwargs.get('rstrip', True)
for part in parts:
if not part:
partlines = []
if isinstance(part, Source):
partlines = part.lines
elif isinstance(part, (tuple, list)):
partlines = [x.rstrip("\n") for x in part]
elif isinstance(part, py.builtin._basestring):
partlines = part.split('\n')
if rstrip:
while partlines:
if partlines[-1].strip():
break
partlines.pop()
else:
partlines = getsource(part, deindent=de).lines
if de:
partlines = deindent(partlines)
lines.extend(partlines)
def __eq__(self, other):
try:
return self.lines == other.lines
except AttributeError:
if isinstance(other, str):
return str(self) == other
return False
def __getitem__(self, key):
if isinstance(key, int):
return self.lines[key]
else:
if key.step not in (None, 1):
raise IndexError("cannot slice a Source with a step")
return self.__getslice__(key.start, key.stop)
def __len__(self):
return len(self.lines)
def __getslice__(self, start, end):
newsource = Source()
newsource.lines = self.lines[start:end]
return newsource
def strip(self):
""" return new source object with trailing
and leading blank lines removed.
"""
start, end = 0, len(self)
while start < end and not self.lines[start].strip():
start += 1
while end > start and not self.lines[end-1].strip():
end -= 1
source = Source()
source.lines[:] = self.lines[start:end]
return source
def putaround(self, before='', after='', indent=' ' * 4):
""" return a copy of the source object with
'before' and 'after' wrapped around it.
"""
before = Source(before)
after = Source(after)
newsource = Source()
lines = [ (indent + line) for line in self.lines]
newsource.lines = before.lines + lines + after.lines
return newsource
def indent(self, indent=' ' * 4):
""" return a copy of the source object with
all lines indented by the given indent-string.
"""
newsource = Source()
newsource.lines = [(indent+line) for line in self.lines]
return newsource
def getstatement(self, lineno, assertion=False):
""" return Source statement which contains the
given linenumber (counted from 0).
"""
start, end = self.getstatementrange(lineno, assertion)
return self[start:end]
def getstatementrange(self, lineno, assertion=False):
""" return (start, end) tuple which spans the minimal
statement region containing the given lineno.
"""
if not (0 <= lineno < len(self)):
raise IndexError("lineno out of range")
ast, start, end = getstatementrange_ast(lineno, self)
return start, end
def deindent(self, offset=None):
""" return a new source object deindented by offset.
If offset is None then guess an indentation offset from
the first non-blank line. Subsequent lines which have a
lower indentation offset will be copied verbatim as
they are assumed to be part of multilines.
"""
# XXX maybe use the tokenizer to properly handle multiline
# strings etc.pp?
newsource = Source()
newsource.lines[:] = deindent(self.lines, offset)
return newsource
def isparseable(self, deindent=True):
""" return True if source is parseable, heuristically
deindenting it by default.
"""
try:
import parser
except ImportError:
syntax_checker = lambda x: compile(x, 'asd', 'exec')
else:
syntax_checker = parser.suite
if deindent:
source = str(self.deindent())
else:
source = str(self)
try:
#compile(source+'\n', "x", "exec")
syntax_checker(source+'\n')
except KeyboardInterrupt:
raise
except Exception:
return False
else:
return True
def __str__(self):
return "\n".join(self.lines)
def compile(self, filename=None, mode='exec',
flag=generators.compiler_flag,
dont_inherit=0, _genframe=None):
""" return compiled code object. if filename is None
invent an artificial filename which displays
the source/line position of the caller frame.
"""
if not filename or py.path.local(filename).check(file=0):
if _genframe is None:
_genframe = sys._getframe(1) # the caller
fn,lineno = _genframe.f_code.co_filename, _genframe.f_lineno
base = "<%d-codegen " % self._compilecounter
self.__class__._compilecounter += 1
if not filename:
filename = base + '%s:%d>' % (fn, lineno)
else:
filename = base + '%r %s:%d>' % (filename, fn, lineno)
source = "\n".join(self.lines) + '\n'
try:
co = cpy_compile(source, filename, mode, flag)
except SyntaxError:
ex = sys.exc_info()[1]
# re-represent syntax errors from parsing python strings
msglines = self.lines[:ex.lineno]
if ex.offset:
msglines.append(" "*ex.offset + '^')
msglines.append("(code was compiled probably from here: %s)" % filename)
newex = SyntaxError('\n'.join(msglines))
newex.offset = ex.offset
newex.lineno = ex.lineno
newex.text = ex.text
raise newex
else:
if flag & _AST_FLAG:
return co
lines = [(x + "\n") for x in self.lines]
py.std.linecache.cache[filename] = (1, None, lines, filename)
return co
#
# public API shortcut functions
#
def compile_(source, filename=None, mode='exec', flags=
generators.compiler_flag, dont_inherit=0):
""" compile the given source to a raw code object,
and maintain an internal cache which allows later
retrieval of the source code for the code object
and any recursively created code objects.
"""
if _ast is not None and isinstance(source, _ast.AST):
# XXX should Source support having AST?
return cpy_compile(source, filename, mode, flags, dont_inherit)
_genframe = sys._getframe(1) # the caller
s = Source(source)
co = s.compile(filename, mode, flags, _genframe=_genframe)
return co
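A minimal sketch of the cached-source behaviour described above, using the exported py.code.compile / py.code.Source names (assuming the vendored py package is importable):

import py

co = py.code.compile("a = 40 + 2")     # invents a "<N-codegen ...>" filename
ns = {}
exec(co, ns)
assert ns["a"] == 42

# the linecache entry registered during compile lets the source be recovered
src = py.code.Source(co)
print(str(src))                        # -> a = 40 + 2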
def getfslineno(obj):
""" Return source location (path, lineno) for the given object.
If the source cannot be determined return ("", -1)
"""
try:
code = py.code.Code(obj)
except TypeError:
try:
fn = (py.std.inspect.getsourcefile(obj) or
py.std.inspect.getfile(obj))
except TypeError:
return "", -1
fspath = fn and py.path.local(fn) or None
lineno = -1
if fspath:
try:
_, lineno = findsource(obj)
except IOError:
pass
else:
fspath = code.path
lineno = code.firstlineno
assert isinstance(lineno, int)
return fspath, lineno
#
# helper functions
#
def findsource(obj):
try:
sourcelines, lineno = py.std.inspect.findsource(obj)
except py.builtin._sysex:
raise
except:
return None, -1
source = Source()
source.lines = [line.rstrip() for line in sourcelines]
return source, lineno
def getsource(obj, **kwargs):
obj = py.code.getrawcode(obj)
try:
strsrc = inspect.getsource(obj)
except IndentationError:
strsrc = "\"Buggy python version consider upgrading, cannot get source\""
assert isinstance(strsrc, str)
return Source(strsrc, **kwargs)
def deindent(lines, offset=None):
if offset is None:
for line in lines:
line = line.expandtabs()
s = line.lstrip()
if s:
offset = len(line)-len(s)
break
else:
offset = 0
if offset == 0:
return list(lines)
newlines = []
def readline_generator(lines):
for line in lines:
yield line + '\n'
while True:
yield ''
it = readline_generator(lines)
try:
for _, _, (sline, _), (eline, _), _ in tokenize.generate_tokens(lambda: next(it)):
if sline > len(lines):
break # End of input reached
if sline > len(newlines):
line = lines[sline - 1].expandtabs()
if line.lstrip() and line[:offset].isspace():
line = line[offset:] # Deindent
newlines.append(line)
for i in range(sline, eline):
# Don't deindent continuing lines of
# multiline tokens (i.e. multiline strings)
newlines.append(lines[i])
except (IndentationError, tokenize.TokenError):
pass
# Add any lines we didn't see. E.g. if an exception was raised.
newlines.extend(lines[len(newlines):])
return newlines
def get_statement_startend2(lineno, node):
import ast
# flatten all statements and except handlers into one lineno-list
# AST's line numbers start indexing at 1
l = []
for x in ast.walk(node):
if isinstance(x, _ast.stmt) or isinstance(x, _ast.ExceptHandler):
l.append(x.lineno - 1)
for name in "finalbody", "orelse":
val = getattr(x, name, None)
if val:
# treat the finally/orelse part as its own statement
l.append(val[0].lineno - 1 - 1)
l.sort()
insert_index = bisect_right(l, lineno)
start = l[insert_index - 1]
if insert_index >= len(l):
end = None
else:
end = l[insert_index]
return start, end
def getstatementrange_ast(lineno, source, assertion=False, astnode=None):
if astnode is None:
content = str(source)
if sys.version_info < (2,7):
content += "\n"
try:
astnode = compile(content, "source", "exec", 1024) # 1024 for AST
except ValueError:
start, end = getstatementrange_old(lineno, source, assertion)
return None, start, end
start, end = get_statement_startend2(lineno, astnode)
# we need to correct the end:
# - ast-parsing strips comments
# - there might be empty lines
# - we might have lesser indented code blocks at the end
if end is None:
end = len(source.lines)
if end > start + 1:
# make sure we don't span differently indented code blocks
# by using the BlockFinder helper which inspect.getsource() itself uses
block_finder = inspect.BlockFinder()
# if we start with an indented line, put blockfinder to "started" mode
block_finder.started = source.lines[start][0].isspace()
it = ((x + "\n") for x in source.lines[start:end])
try:
for tok in tokenize.generate_tokens(lambda: next(it)):
block_finder.tokeneater(*tok)
except (inspect.EndOfBlock, IndentationError):
end = block_finder.last + start
except Exception:
pass
# the end might still point to a comment or empty line, correct it
while end:
line = source.lines[end - 1].lstrip()
if line.startswith("#") or not line:
end -= 1
else:
break
return astnode, start, end
def getstatementrange_old(lineno, source, assertion=False):
""" return (start, end) tuple which spans the minimal
statement region containing the given lineno.
raise an IndexError if no such statementrange can be found.
"""
# XXX this logic is only used on python2.4 and below
# 1. find the start of the statement
from codeop import compile_command
for start in range(lineno, -1, -1):
if assertion:
line = source.lines[start]
# the following lines are not fully tested, change with care
if 'super' in line and 'self' in line and '__init__' in line:
raise IndexError("likely a subclass")
if "assert" not in line and "raise" not in line:
continue
trylines = source.lines[start:lineno+1]
# quick hack to prepare parsing an indented line with
# compile_command() (which errors on "return" outside defs)
trylines.insert(0, 'def xxx():')
trysource = '\n '.join(trylines)
# ^ space here
try:
compile_command(trysource)
except (SyntaxError, OverflowError, ValueError):
continue
# 2. find the end of the statement
for end in range(lineno+1, len(source)+1):
trysource = source[start:end]
if trysource.isparseable():
return start, end
raise SyntaxError("no valid source range around line %d " % (lineno,))
"""
create errno-specific classes for IO or os calls.
"""
import sys, os, errno
class Error(EnvironmentError):
def __repr__(self):
return "%s.%s %r: %s " %(self.__class__.__module__,
self.__class__.__name__,
self.__class__.__doc__,
" ".join(map(str, self.args)),
#repr(self.args)
)
def __str__(self):
s = "[%s]: %s" %(self.__class__.__doc__,
" ".join(map(str, self.args)),
)
return s
_winerrnomap = {
2: errno.ENOENT,
3: errno.ENOENT,
17: errno.EEXIST,
18: errno.EXDEV,
13: errno.EBUSY, # empty cd drive, but ENOMEDIUM seems unavailable
22: errno.ENOTDIR,
20: errno.ENOTDIR,
267: errno.ENOTDIR,
5: errno.EACCES, # anything better?
}
class ErrorMaker(object):
""" lazily provides Exception classes for each possible POSIX errno
(as defined in the 'errno' module). All such classes
subclass EnvironmentError.
"""
Error = Error
_errno2class = {}
def __getattr__(self, name):
if name[0] == "_":
raise AttributeError(name)
eno = getattr(errno, name)
cls = self._geterrnoclass(eno)
setattr(self, name, cls)
return cls
def _geterrnoclass(self, eno):
try:
return self._errno2class[eno]
except KeyError:
clsname = errno.errorcode.get(eno, "UnknownErrno%d" %(eno,))
errorcls = type(Error)(clsname, (Error,),
{'__module__':'py.error',
'__doc__': os.strerror(eno)})
self._errno2class[eno] = errorcls
return errorcls
def checked_call(self, func, *args, **kwargs):
""" call a function and raise an errno-exception if applicable. """
__tracebackhide__ = True
try:
return func(*args, **kwargs)
except self.Error:
raise
except (OSError, EnvironmentError):
cls, value, tb = sys.exc_info()
if not hasattr(value, 'errno'):
raise
__tracebackhide__ = False
errno = value.errno
try:
if not isinstance(value, WindowsError):
raise NameError
except NameError:
# we are not on Windows, or we got a proper OSError
cls = self._geterrnoclass(errno)
else:
try:
cls = self._geterrnoclass(_winerrnomap[errno])
except KeyError:
raise value
raise cls("%s%r" % (func.__name__, args))
__tracebackhide__ = True
error = ErrorMaker()
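# A minimal usage sketch for the ErrorMaker instance above (not part of the
# original module): checked_call() re-raises OS-level failures as
# errno-specific Error subclasses, created lazily on attribute access.
# The path below is illustrative only.
if __name__ == "__main__":
    try:
        error.checked_call(os.stat, "/no/such/path")
    except error.ENOENT:
        pass  # the missing file surfaces as the ENOENT-specific subclass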
""" brain-dead simple parser for ini-style files.
(C) Ronny Pfannschmidt, Holger Krekel -- MIT licensed
"""
__version__ = "0.2.dev2"
__all__ = ['IniConfig', 'ParseError']
COMMENTCHARS = "#;"
class ParseError(Exception):
def __init__(self, path, lineno, msg):
Exception.__init__(self, path, lineno, msg)
self.path = path
self.lineno = lineno
self.msg = msg
def __str__(self):
return "%s:%s: %s" %(self.path, self.lineno+1, self.msg)
class SectionWrapper(object):
def __init__(self, config, name):
self.config = config
self.name = name
def lineof(self, name):
return self.config.lineof(self.name, name)
def get(self, key, default=None, convert=str):
return self.config.get(self.name, key, convert=convert, default=default)
def __getitem__(self, key):
return self.config.sections[self.name][key]
def __iter__(self):
section = self.config.sections.get(self.name, [])
def lineof(key):
return self.config.lineof(self.name, key)
for name in sorted(section, key=lineof):
yield name
def items(self):
for name in self:
yield name, self[name]
class IniConfig(object):
def __init__(self, path, data=None):
self.path = str(path) # convenience
if data is None:
f = open(self.path)
try:
tokens = self._parse(iter(f))
finally:
f.close()
else:
tokens = self._parse(data.splitlines(True))
self._sources = {}
self.sections = {}
for lineno, section, name, value in tokens:
if section is None:
self._raise(lineno, 'no section header defined')
self._sources[section, name] = lineno
if name is None:
if section in self.sections:
self._raise(lineno, 'duplicate section %r'%(section, ))
self.sections[section] = {}
else:
if name in self.sections[section]:
self._raise(lineno, 'duplicate name %r'%(name, ))
self.sections[section][name] = value
def _raise(self, lineno, msg):
raise ParseError(self.path, lineno, msg)
def _parse(self, line_iter):
result = []
section = None
for lineno, line in enumerate(line_iter):
name, data = self._parseline(line, lineno)
# new value
if name is not None and data is not None:
result.append((lineno, section, name, data))
# new section
elif name is not None and data is None:
if not name:
self._raise(lineno, 'empty section name')
section = name
result.append((lineno, section, None, None))
# continuation
elif name is None and data is not None:
if not result:
self._raise(lineno, 'unexpected value continuation')
last = result.pop()
last_name, last_data = last[-2:]
if last_name is None:
self._raise(lineno, 'unexpected value continuation')
if last_data:
data = '%s\n%s' % (last_data, data)
result.append(last[:-1] + (data,))
return result
def _parseline(self, line, lineno):
# blank lines
if iscommentline(line):
line = ""
else:
line = line.rstrip()
if not line:
return None, None
# section
if line[0] == '[':
realline = line
for c in COMMENTCHARS:
line = line.split(c)[0].rstrip()
if line[-1] == "]":
return line[1:-1], None
return None, realline.strip()
# value
elif not line[0].isspace():
try:
name, value = line.split('=', 1)
if ":" in name:
raise ValueError()
except ValueError:
try:
name, value = line.split(":", 1)
except ValueError:
self._raise(lineno, 'unexpected line: %r' % line)
return name.strip(), value.strip()
# continuation
else:
return None, line.strip()
def lineof(self, section, name=None):
lineno = self._sources.get((section, name))
if lineno is not None:
return lineno + 1
def get(self, section, name, default=None, convert=str):
try:
return convert(self.sections[section][name])
except KeyError:
return default
def __getitem__(self, name):
if name not in self.sections:
raise KeyError(name)
return SectionWrapper(self, name)
def __iter__(self):
for name in sorted(self.sections, key=self.lineof):
yield SectionWrapper(self, name)
def __contains__(self, arg):
return arg in self.sections
def iscommentline(line):
c = line.lstrip()[:1]
return c in COMMENTCHARS
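# A short usage sketch for the parser above (not part of the original
# module); the file name, section, and keys are made up for illustration.
if __name__ == "__main__":
    sample = "[pytest]\naddopts = -ra\ntimeout = 30\n"
    config = IniConfig("example.ini", data=sample)
    assert "pytest" in config
    assert config["pytest"]["addopts"] == "-ra"
    assert config.get("pytest", "timeout", convert=int) == 30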
""" input/output helping """
import os
import sys
import py
import tempfile
try:
from io import StringIO
except ImportError:
from StringIO import StringIO
if sys.version_info < (3,0):
class TextIO(StringIO):
def write(self, data):
if not isinstance(data, unicode):
data = unicode(data, getattr(self, '_encoding', 'UTF-8'), 'replace')
StringIO.write(self, data)
else:
TextIO = StringIO
try:
from io import BytesIO
except ImportError:
class BytesIO(StringIO):
def write(self, data):
if isinstance(data, unicode):
raise TypeError("not a byte value: %r" %(data,))
StringIO.write(self, data)
patchsysdict = {0: 'stdin', 1: 'stdout', 2: 'stderr'}
class FDCapture:
""" Capture IO to/from a given os-level filedescriptor. """
def __init__(self, targetfd, tmpfile=None, now=True, patchsys=False):
""" save targetfd descriptor, and open a new
temporary file there. If no tmpfile is
specified a tempfile.Tempfile() will be opened
in text mode.
"""
self.targetfd = targetfd
if tmpfile is None and targetfd != 0:
f = tempfile.TemporaryFile('wb+')
tmpfile = dupfile(f, encoding="UTF-8")
f.close()
self.tmpfile = tmpfile
self._savefd = os.dup(self.targetfd)
if patchsys:
self._oldsys = getattr(sys, patchsysdict[targetfd])
if now:
self.start()
def start(self):
try:
os.fstat(self._savefd)
except OSError:
raise ValueError("saved filedescriptor not valid, "
"did you call start() twice?")
if self.targetfd == 0 and not self.tmpfile:
fd = os.open(devnullpath, os.O_RDONLY)
os.dup2(fd, 0)
os.close(fd)
if hasattr(self, '_oldsys'):
setattr(sys, patchsysdict[self.targetfd], DontReadFromInput())
else:
os.dup2(self.tmpfile.fileno(), self.targetfd)
if hasattr(self, '_oldsys'):
setattr(sys, patchsysdict[self.targetfd], self.tmpfile)
def done(self):
""" unpatch and clean up, returns the self.tmpfile (file object)
"""
os.dup2(self._savefd, self.targetfd)
os.close(self._savefd)
if self.targetfd != 0:
self.tmpfile.seek(0)
if hasattr(self, '_oldsys'):
setattr(sys, patchsysdict[self.targetfd], self._oldsys)
return self.tmpfile
def writeorg(self, data):
""" write a string to the original file descriptor
"""
tempfp = tempfile.TemporaryFile()
try:
os.dup2(self._savefd, tempfp.fileno())
tempfp.write(data)
finally:
tempfp.close()
def dupfile(f, mode=None, buffering=0, raising=False, encoding=None):
""" return a new open file object that's a duplicate of f
mode is duplicated if not given, 'buffering' controls
buffer size (defaulting to no buffering) and 'raising'
defines whether an exception is raised when an incompatible
file object is passed in (if raising is False, the file
object itself will be returned)
"""
try:
fd = f.fileno()
mode = mode or f.mode
except AttributeError:
if raising:
raise
return f
newfd = os.dup(fd)
if sys.version_info >= (3,0):
if encoding is not None:
mode = mode.replace("b", "")
buffering = True
return os.fdopen(newfd, mode, buffering, encoding, closefd=True)
else:
f = os.fdopen(newfd, mode, buffering)
if encoding is not None:
return EncodedFile(f, encoding)
return f
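# A minimal sketch of dupfile() above (not part of the original module): the
# returned object writes through a private duplicate of the underlying file
# descriptor, so closing it leaves the original stream open. The file name is
# illustrative only.
if __name__ == "__main__":
    with open("example.log", "w") as f:
        twin = dupfile(f, encoding="utf-8")
        twin.write("written via the duplicated descriptor\n")
        twin.close()  # closes only the duplicate, not f itself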
class EncodedFile(object):
def __init__(self, _stream, encoding):
self._stream = _stream
self.encoding = encoding
def write(self, obj):
if isinstance(obj, unicode):
obj = obj.encode(self.encoding)
elif isinstance(obj, str):
pass
else:
obj = str(obj)
self._stream.write(obj)
def writelines(self, linelist):
data = ''.join(linelist)
self.write(data)
def __getattr__(self, name):
return getattr(self._stream, name)
class Capture(object):
def call(cls, func, *args, **kwargs):
""" return a (res, out, err) tuple where
out and err represent the output/error output
during function execution.
call the given function with args/kwargs
and capture output/error during its execution.
"""
so = cls()
try:
res = func(*args, **kwargs)
finally:
out, err = so.reset()
return res, out, err
call = classmethod(call)
def reset(self):
""" reset sys.stdout/stderr and return captured output as strings. """
if hasattr(self, '_reset'):
raise ValueError("was already reset")
self._reset = True
outfile, errfile = self.done(save=False)
out, err = "", ""
if outfile and not outfile.closed:
out = outfile.read()
outfile.close()
if errfile and errfile != outfile and not errfile.closed:
err = errfile.read()
errfile.close()
return out, err
def suspend(self):
""" return current snapshot captures, memorize tempfiles. """
outerr = self.readouterr()
outfile, errfile = self.done()
return outerr
class StdCaptureFD(Capture):
""" This class allows to capture writes to FD1 and FD2
and may connect a NULL file to FD0 (and prevent
reads from sys.stdin). If any of the 0,1,2 file descriptors
is invalid it will not be captured.
"""
def __init__(self, out=True, err=True, mixed=False,
in_=True, patchsys=True, now=True):
self._options = {
"out": out,
"err": err,
"mixed": mixed,
"in_": in_,
"patchsys": patchsys,
"now": now,
}
self._save()
if now:
self.startall()
def _save(self):
in_ = self._options['in_']
out = self._options['out']
err = self._options['err']
mixed = self._options['mixed']
patchsys = self._options['patchsys']
if in_:
try:
self.in_ = FDCapture(0, tmpfile=None, now=False,
patchsys=patchsys)
except OSError:
pass
if out:
tmpfile = None
if hasattr(out, 'write'):
tmpfile = out
try:
self.out = FDCapture(1, tmpfile=tmpfile,
now=False, patchsys=patchsys)
self._options['out'] = self.out.tmpfile
except OSError:
pass
if err:
if out and mixed:
tmpfile = self.out.tmpfile
elif hasattr(err, 'write'):
tmpfile = err
else:
tmpfile = None
try:
self.err = FDCapture(2, tmpfile=tmpfile,
now=False, patchsys=patchsys)
self._options['err'] = self.err.tmpfile
except OSError:
pass
def startall(self):
if hasattr(self, 'in_'):
self.in_.start()
if hasattr(self, 'out'):
self.out.start()
if hasattr(self, 'err'):
self.err.start()
def resume(self):
""" resume capturing with original temp files. """
self.startall()
def done(self, save=True):
""" return (outfile, errfile) and stop capturing. """
outfile = errfile = None
if hasattr(self, 'out') and not self.out.tmpfile.closed:
outfile = self.out.done()
if hasattr(self, 'err') and not self.err.tmpfile.closed:
errfile = self.err.done()
if hasattr(self, 'in_'):
tmpfile = self.in_.done()
if save:
self._save()
return outfile, errfile
def readouterr(self):
""" return snapshot value of stdout/stderr capturings. """
if hasattr(self, "out"):
out = self._readsnapshot(self.out.tmpfile)
else:
out = ""
if hasattr(self, "err"):
err = self._readsnapshot(self.err.tmpfile)
else:
err = ""
return [out, err]
def _readsnapshot(self, f):
f.seek(0)
res = f.read()
enc = getattr(f, "encoding", None)
if enc:
res = py.builtin._totext(res, enc, "replace")
f.truncate(0)
f.seek(0)
return res
class StdCapture(Capture):
""" This class allows to capture writes to sys.stdout|stderr "in-memory"
and will raise errors on tries to read from sys.stdin. It only
modifies sys.stdout|stderr|stdin attributes and does not
touch underlying File Descriptors (use StdCaptureFD for that).
"""
def __init__(self, out=True, err=True, in_=True, mixed=False, now=True):
self._oldout = sys.stdout
self._olderr = sys.stderr
self._oldin = sys.stdin
if out and not hasattr(out, 'file'):
out = TextIO()
self.out = out
if err:
if mixed:
err = out
elif not hasattr(err, 'write'):
err = TextIO()
self.err = err
self.in_ = in_
if now:
self.startall()
def startall(self):
if self.out:
sys.stdout = self.out
if self.err:
sys.stderr = self.err
if self.in_:
sys.stdin = self.in_ = DontReadFromInput()
def done(self, save=True):
""" return (outfile, errfile) and stop capturing. """
outfile = errfile = None
if self.out and not self.out.closed:
sys.stdout = self._oldout
outfile = self.out
outfile.seek(0)
if self.err and not self.err.closed:
sys.stderr = self._olderr
errfile = self.err
errfile.seek(0)
if self.in_:
sys.stdin = self._oldin
return outfile, errfile
def resume(self):
""" resume capturing with original temp files. """
self.startall()
def readouterr(self):
""" return snapshot value of stdout/stderr capturings. """
out = err = ""
if self.out:
out = self.out.getvalue()
self.out.truncate(0)
self.out.seek(0)
if self.err:
err = self.err.getvalue()
self.err.truncate(0)
self.err.seek(0)
return out, err
class DontReadFromInput:
"""Temporary stub class. Ideally when stdin is accessed, the
capturing should be turned off, with possibly all data captured
so far sent to the screen. This should be configurable, though,
because in automated test runs it is better to crash than
hang indefinitely.
"""
def read(self, *args):
raise IOError("reading from stdin while output is captured")
readline = read
readlines = read
__iter__ = read
def fileno(self):
raise ValueError("redirected Stdin is pseudofile, has no fileno()")
def isatty(self):
return False
def close(self):
pass
try:
devnullpath = os.devnull
except AttributeError:
if os.name == 'nt':
devnullpath = 'NUL'
else:
devnullpath = '/dev/null'
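# A minimal sketch of in-memory capturing with StdCapture above (not part of
# the original module): everything written to stdout/stderr between
# construction and reset() comes back as strings instead of reaching the
# real streams.
if __name__ == "__main__":
    cap = StdCapture()
    print("hello")
    sys.stderr.write("oops\n")
    out, err = cap.reset()
    assert out == "hello\n" and err == "oops\n"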