[pypy-commit] pypy arm-backend-2: merge default
bivab
noreply at buildbot.pypy.org
Fri Apr 13 11:00:51 CEST 2012
Author: David Schneider <david.schneider at picle.org>
Branch: arm-backend-2
Changeset: r54325:308075361821
Date: 2012-04-10 13:16 +0000
http://bitbucket.org/pypy/pypy/changeset/308075361821/
Log: merge default
diff --git a/_pytest/__init__.py b/_pytest/__init__.py
--- a/_pytest/__init__.py
+++ b/_pytest/__init__.py
@@ -1,2 +1,2 @@
#
-__version__ = '2.1.0.dev4'
+__version__ = '2.2.4.dev2'
diff --git a/_pytest/assertion/__init__.py b/_pytest/assertion/__init__.py
--- a/_pytest/assertion/__init__.py
+++ b/_pytest/assertion/__init__.py
@@ -2,35 +2,25 @@
support for presenting detailed information in failing assertions.
"""
import py
-import imp
-import marshal
-import struct
import sys
import pytest
from _pytest.monkeypatch import monkeypatch
-from _pytest.assertion import reinterpret, util
-
-try:
- from _pytest.assertion.rewrite import rewrite_asserts
-except ImportError:
- rewrite_asserts = None
-else:
- import ast
+from _pytest.assertion import util
def pytest_addoption(parser):
group = parser.getgroup("debugconfig")
- group.addoption('--assertmode', action="store", dest="assertmode",
- choices=("on", "old", "off", "default"), default="default",
- metavar="on|old|off",
+ group.addoption('--assert', action="store", dest="assertmode",
+ choices=("rewrite", "reinterp", "plain",),
+ default="rewrite", metavar="MODE",
help="""control assertion debugging tools.
-'off' performs no assertion debugging.
-'old' reinterprets the expressions in asserts to glean information.
-'on' (the default) rewrites the assert statements in test modules to provide
-sub-expression results.""")
+'plain' performs no assertion debugging.
+'reinterp' reinterprets assert statements after they failed to provide assertion expression information.
+'rewrite' (the default) rewrites assert statements in test modules on import
+to provide assert expression information. """)
group.addoption('--no-assert', action="store_true", default=False,
- dest="noassert", help="DEPRECATED equivalent to --assertmode=off")
+ dest="noassert", help="DEPRECATED equivalent to --assert=plain")
group.addoption('--nomagic', action="store_true", default=False,
- dest="nomagic", help="DEPRECATED equivalent to --assertmode=off")
+ dest="nomagic", help="DEPRECATED equivalent to --assert=plain")
class AssertionState:
"""State for the assertion plugin."""
@@ -40,89 +30,90 @@
self.trace = config.trace.root.get("assertion")
def pytest_configure(config):
- warn_about_missing_assertion()
mode = config.getvalue("assertmode")
if config.getvalue("noassert") or config.getvalue("nomagic"):
- if mode not in ("off", "default"):
- raise pytest.UsageError("assertion options conflict")
- mode = "off"
- elif mode == "default":
- mode = "on"
- if mode != "off":
- def callbinrepr(op, left, right):
- hook_result = config.hook.pytest_assertrepr_compare(
- config=config, op=op, left=left, right=right)
- for new_expl in hook_result:
- if new_expl:
- return '\n~'.join(new_expl)
+ mode = "plain"
+ if mode == "rewrite":
+ try:
+ import ast
+ except ImportError:
+ mode = "reinterp"
+ else:
+ if sys.platform.startswith('java'):
+ mode = "reinterp"
+ if mode != "plain":
+ _load_modules(mode)
m = monkeypatch()
config._cleanup.append(m.undo)
m.setattr(py.builtin.builtins, 'AssertionError',
reinterpret.AssertionError)
- m.setattr(util, '_reprcompare', callbinrepr)
- if mode == "on" and rewrite_asserts is None:
- mode = "old"
+ hook = None
+ if mode == "rewrite":
+ hook = rewrite.AssertionRewritingHook()
+ sys.meta_path.append(hook)
+ warn_about_missing_assertion(mode)
config._assertstate = AssertionState(config, mode)
+ config._assertstate.hook = hook
config._assertstate.trace("configured with mode set to %r" % (mode,))
-def _write_pyc(co, source_path):
- if hasattr(imp, "cache_from_source"):
- # Handle PEP 3147 pycs.
- pyc = py.path.local(imp.cache_from_source(str(source_path)))
- pyc.ensure()
- else:
- pyc = source_path + "c"
- mtime = int(source_path.mtime())
- fp = pyc.open("wb")
- try:
- fp.write(imp.get_magic())
- fp.write(struct.pack("<l", mtime))
- marshal.dump(co, fp)
- finally:
- fp.close()
- return pyc
+def pytest_unconfigure(config):
+ hook = config._assertstate.hook
+ if hook is not None:
+ sys.meta_path.remove(hook)
-def before_module_import(mod):
- if mod.config._assertstate.mode != "on":
- return
- # Some deep magic: load the source, rewrite the asserts, and write a
- # fake pyc, so that it'll be loaded when the module is imported.
- source = mod.fspath.read()
- try:
- tree = ast.parse(source)
- except SyntaxError:
- # Let this pop up again in the real import.
- mod.config._assertstate.trace("failed to parse: %r" % (mod.fspath,))
- return
- rewrite_asserts(tree)
- try:
- co = compile(tree, str(mod.fspath), "exec")
- except SyntaxError:
- # It's possible that this error is from some bug in the assertion
- # rewriting, but I don't know of a fast way to tell.
- mod.config._assertstate.trace("failed to compile: %r" % (mod.fspath,))
- return
- mod._pyc = _write_pyc(co, mod.fspath)
- mod.config._assertstate.trace("wrote pyc: %r" % (mod._pyc,))
+def pytest_collection(session):
+ # this hook is only called when test modules are collected
+ # so for example not in the master process of pytest-xdist
+ # (which does not collect test modules)
+ hook = session.config._assertstate.hook
+ if hook is not None:
+ hook.set_session(session)
-def after_module_import(mod):
- if not hasattr(mod, "_pyc"):
- return
- state = mod.config._assertstate
- try:
- mod._pyc.remove()
- except py.error.ENOENT:
- state.trace("couldn't find pyc: %r" % (mod._pyc,))
- else:
- state.trace("removed pyc: %r" % (mod._pyc,))
+def pytest_runtest_setup(item):
+ def callbinrepr(op, left, right):
+ hook_result = item.ihook.pytest_assertrepr_compare(
+ config=item.config, op=op, left=left, right=right)
+ for new_expl in hook_result:
+ if new_expl:
+ res = '\n~'.join(new_expl)
+ if item.config.getvalue("assertmode") == "rewrite":
+ # The result will be fed back a python % formatting
+ # operation, which will fail if there are extraneous
+ # '%'s in the string. Escape them here.
+ res = res.replace("%", "%%")
+ return res
+ util._reprcompare = callbinrepr
-def warn_about_missing_assertion():
+def pytest_runtest_teardown(item):
+ util._reprcompare = None
+
+def pytest_sessionfinish(session):
+ hook = session.config._assertstate.hook
+ if hook is not None:
+ hook.session = None
+
+def _load_modules(mode):
+ """Lazily import assertion related code."""
+ global rewrite, reinterpret
+ from _pytest.assertion import reinterpret
+ if mode == "rewrite":
+ from _pytest.assertion import rewrite
+
+def warn_about_missing_assertion(mode):
try:
assert False
except AssertionError:
pass
else:
- sys.stderr.write("WARNING: failing tests may report as passing because "
- "assertions are turned off! (are you using python -O?)\n")
+ if mode == "rewrite":
+ specifically = ("assertions which are not in test modules "
+ "will be ignored")
+ else:
+ specifically = "failing tests may report as passing"
+
+ sys.stderr.write("WARNING: " + specifically +
+ " because assert statements are not executed "
+ "by the underlying Python interpreter "
+ "(are you using python -O?)\n")
pytest_assertrepr_compare = util.assertrepr_compare
diff --git a/_pytest/assertion/newinterpret.py b/_pytest/assertion/newinterpret.py
--- a/_pytest/assertion/newinterpret.py
+++ b/_pytest/assertion/newinterpret.py
@@ -53,7 +53,7 @@
if should_fail:
return ("(assertion failed, but when it was re-run for "
"printing intermediate values, it did not fail. Suggestions: "
- "compute assert expression before the assert or use --no-assert)")
+ "compute assert expression before the assert or use --assert=plain)")
def run(offending_line, frame=None):
if frame is None:
diff --git a/_pytest/assertion/oldinterpret.py b/_pytest/assertion/oldinterpret.py
--- a/_pytest/assertion/oldinterpret.py
+++ b/_pytest/assertion/oldinterpret.py
@@ -482,7 +482,7 @@
if should_fail:
return ("(assertion failed, but when it was re-run for "
"printing intermediate values, it did not fail. Suggestions: "
- "compute assert expression before the assert or use --nomagic)")
+ "compute assert expression before the assert or use --assert=plain)")
else:
return None
diff --git a/_pytest/assertion/rewrite.py b/_pytest/assertion/rewrite.py
--- a/_pytest/assertion/rewrite.py
+++ b/_pytest/assertion/rewrite.py
@@ -1,14 +1,258 @@
"""Rewrite assertion AST to produce nice error messages"""
import ast
-import collections
+import errno
import itertools
+import imp
+import marshal
+import os
+import struct
import sys
+import types
import py
from _pytest.assertion import util
+# Windows gives ENOENT in places *nix gives ENOTDIR.
+if sys.platform.startswith("win"):
+ PATH_COMPONENT_NOT_DIR = errno.ENOENT
+else:
+ PATH_COMPONENT_NOT_DIR = errno.ENOTDIR
+
+# py.test caches rewritten pycs in __pycache__.
+if hasattr(imp, "get_tag"):
+ PYTEST_TAG = imp.get_tag() + "-PYTEST"
+else:
+ if hasattr(sys, "pypy_version_info"):
+ impl = "pypy"
+ elif sys.platform == "java":
+ impl = "jython"
+ else:
+ impl = "cpython"
+ ver = sys.version_info
+ PYTEST_TAG = "%s-%s%s-PYTEST" % (impl, ver[0], ver[1])
+ del ver, impl
+
+PYC_EXT = ".py" + "c" if __debug__ else "o"
+PYC_TAIL = "." + PYTEST_TAG + PYC_EXT
+
+REWRITE_NEWLINES = sys.version_info[:2] != (2, 7) and sys.version_info < (3, 2)
+
+class AssertionRewritingHook(object):
+ """Import hook which rewrites asserts."""
+
+ def __init__(self):
+ self.session = None
+ self.modules = {}
+
+ def set_session(self, session):
+ self.fnpats = session.config.getini("python_files")
+ self.session = session
+
+ def find_module(self, name, path=None):
+ if self.session is None:
+ return None
+ sess = self.session
+ state = sess.config._assertstate
+ state.trace("find_module called for: %s" % name)
+ names = name.rsplit(".", 1)
+ lastname = names[-1]
+ pth = None
+ if path is not None and len(path) == 1:
+ pth = path[0]
+ if pth is None:
+ try:
+ fd, fn, desc = imp.find_module(lastname, path)
+ except ImportError:
+ return None
+ if fd is not None:
+ fd.close()
+ tp = desc[2]
+ if tp == imp.PY_COMPILED:
+ if hasattr(imp, "source_from_cache"):
+ fn = imp.source_from_cache(fn)
+ else:
+ fn = fn[:-1]
+ elif tp != imp.PY_SOURCE:
+ # Don't know what this is.
+ return None
+ else:
+ fn = os.path.join(pth, name.rpartition(".")[2] + ".py")
+ fn_pypath = py.path.local(fn)
+ # Is this a test file?
+ if not sess.isinitpath(fn):
+ # We have to be very careful here because imports in this code can
+ # trigger a cycle.
+ self.session = None
+ try:
+ for pat in self.fnpats:
+ if fn_pypath.fnmatch(pat):
+ state.trace("matched test file %r" % (fn,))
+ break
+ else:
+ return None
+ finally:
+ self.session = sess
+ else:
+ state.trace("matched test file (was specified on cmdline): %r" % (fn,))
+ # The requested module looks like a test file, so rewrite it. This is
+ # the most magical part of the process: load the source, rewrite the
+ # asserts, and load the rewritten source. We also cache the rewritten
+ # module code in a special pyc. We must be aware of the possibility of
+ # concurrent py.test processes rewriting and loading pycs. To avoid
+ # tricky race conditions, we maintain the following invariant: The
+ # cached pyc is always a complete, valid pyc. Operations on it must be
+ # atomic. POSIX's atomic rename comes in handy.
+ write = not sys.dont_write_bytecode
+ cache_dir = os.path.join(fn_pypath.dirname, "__pycache__")
+ if write:
+ try:
+ os.mkdir(cache_dir)
+ except OSError:
+ e = sys.exc_info()[1].errno
+ if e == errno.EEXIST:
+ # Either the __pycache__ directory already exists (the
+ # common case) or it's blocked by a non-dir node. In the
+ # latter case, we'll ignore it in _write_pyc.
+ pass
+ elif e == PATH_COMPONENT_NOT_DIR:
+ # One of the path components was not a directory, likely
+ # because we're in a zip file.
+ write = False
+ elif e == errno.EACCES:
+ state.trace("read only directory: %r" % (fn_pypath.dirname,))
+ write = False
+ else:
+ raise
+ cache_name = fn_pypath.basename[:-3] + PYC_TAIL
+ pyc = os.path.join(cache_dir, cache_name)
+ # Notice that even if we're in a read-only directory, I'm going to check
+ # for a cached pyc. This may not be optimal...
+ co = _read_pyc(fn_pypath, pyc)
+ if co is None:
+ state.trace("rewriting %r" % (fn,))
+ co = _rewrite_test(state, fn_pypath)
+ if co is None:
+ # Probably a SyntaxError in the test.
+ return None
+ if write:
+ _make_rewritten_pyc(state, fn_pypath, pyc, co)
+ else:
+ state.trace("found cached rewritten pyc for %r" % (fn,))
+ self.modules[name] = co, pyc
+ return self
+
+ def load_module(self, name):
+ co, pyc = self.modules.pop(name)
+ # I wish I could just call imp.load_compiled here, but __file__ has to
+ # be set properly. In Python 3.2+, this all would be handled correctly
+ # by load_compiled.
+ mod = sys.modules[name] = imp.new_module(name)
+ try:
+ mod.__file__ = co.co_filename
+ # Normally, this attribute is 3.2+.
+ mod.__cached__ = pyc
+ py.builtin.exec_(co, mod.__dict__)
+ except:
+ del sys.modules[name]
+ raise
+ return sys.modules[name]
+
+def _write_pyc(co, source_path, pyc):
+ # Technically, we don't have to have the same pyc format as (C)Python, since
+ # these "pycs" should never be seen by builtin import. However, there's
+ little reason to deviate, and I hope sometime to be able to use
+ # imp.load_compiled to load them. (See the comment in load_module above.)
+ mtime = int(source_path.mtime())
+ try:
+ fp = open(pyc, "wb")
+ except IOError:
+ err = sys.exc_info()[1].errno
+ if err == PATH_COMPONENT_NOT_DIR:
+ # This happens when we get an EEXIST in find_module creating the
+ # __pycache__ directory and __pycache__ is blocked by some non-dir node.
+ return False
+ raise
+ try:
+ fp.write(imp.get_magic())
+ fp.write(struct.pack("<l", mtime))
+ marshal.dump(co, fp)
+ finally:
+ fp.close()
+ return True
+
+RN = "\r\n".encode("utf-8")
+N = "\n".encode("utf-8")
+
+def _rewrite_test(state, fn):
+ """Try to read and rewrite *fn* and return the code object."""
+ try:
+ source = fn.read("rb")
+ except EnvironmentError:
+ return None
+ # On Python versions which are not 2.7 and less than or equal to 3.1, the
+ # parser expects *nix newlines.
+ if REWRITE_NEWLINES:
+ source = source.replace(RN, N) + N
+ try:
+ tree = ast.parse(source)
+ except SyntaxError:
+ # Let this pop up again in the real import.
+ state.trace("failed to parse: %r" % (fn,))
+ return None
+ rewrite_asserts(tree)
+ try:
+ co = compile(tree, fn.strpath, "exec")
+ except SyntaxError:
+ # It's possible that this error is from some bug in the
+ # assertion rewriting, but I don't know of a fast way to tell.
+ state.trace("failed to compile: %r" % (fn,))
+ return None
+ return co
+
+def _make_rewritten_pyc(state, fn, pyc, co):
+ """Try to dump rewritten code to *pyc*."""
+ if sys.platform.startswith("win"):
+ # Windows grants exclusive access to open files and doesn't have atomic
+ # rename, so just write into the final file.
+ _write_pyc(co, fn, pyc)
+ else:
+ # When not on windows, assume rename is atomic. Dump the code object
+ # into a file specific to this process and atomically replace it.
+ proc_pyc = pyc + "." + str(os.getpid())
+ if _write_pyc(co, fn, proc_pyc):
+ os.rename(proc_pyc, pyc)
+
+def _read_pyc(source, pyc):
+ """Possibly read a py.test pyc containing rewritten code.
+
+ Return rewritten code if successful or None if not.
+ """
+ try:
+ fp = open(pyc, "rb")
+ except IOError:
+ return None
+ try:
+ try:
+ mtime = int(source.mtime())
+ data = fp.read(8)
+ except EnvironmentError:
+ return None
+ # Check for invalid or out of date pyc file.
+ if (len(data) != 8 or
+ data[:4] != imp.get_magic() or
+ struct.unpack("<l", data[4:])[0] != mtime):
+ return None
+ co = marshal.load(fp)
+ if not isinstance(co, types.CodeType):
+ # That's interesting....
+ return None
+ return co
+ finally:
+ fp.close()
+
+
def rewrite_asserts(mod):
"""Rewrite the assert statements in mod."""
AssertionRewriter().run(mod)
@@ -17,13 +261,8 @@
_saferepr = py.io.saferepr
from _pytest.assertion.util import format_explanation as _format_explanation
-def _format_boolop(operands, explanations, is_or):
- show_explanations = []
- for operand, expl in zip(operands, explanations):
- show_explanations.append(expl)
- if operand == is_or:
- break
- return "(" + (is_or and " or " or " and ").join(show_explanations) + ")"
+def _format_boolop(explanations, is_or):
+ return "(" + (is_or and " or " or " and ").join(explanations) + ")"
def _call_reprcompare(ops, results, expls, each_obj):
for i, res, expl in zip(range(len(ops)), results, expls):
@@ -109,8 +348,8 @@
return
lineno += len(doc) - 1
expect_docstring = False
- elif (not isinstance(item, ast.ImportFrom) or item.level > 0 and
- item.identifier != "__future__"):
+ elif (not isinstance(item, ast.ImportFrom) or item.level > 0 or
+ item.module != "__future__"):
lineno = item.lineno
break
pos += 1
@@ -118,9 +357,9 @@
for alias in aliases]
mod.body[pos:pos] = imports
# Collect asserts.
- nodes = collections.deque([mod])
+ nodes = [mod]
while nodes:
- node = nodes.popleft()
+ node = nodes.pop()
for name, field in ast.iter_fields(node):
if isinstance(field, list):
new = []
@@ -143,7 +382,7 @@
"""Get a new variable."""
# Use a character invalid in python identifiers to avoid clashing.
name = "@py_assert" + str(next(self.variable_counter))
- self.variables.add(name)
+ self.variables.append(name)
return name
def assign(self, expr):
@@ -198,7 +437,8 @@
# There's already a message. Don't mess with it.
return [assert_]
self.statements = []
- self.variables = set()
+ self.cond_chain = ()
+ self.variables = []
self.variable_counter = itertools.count()
self.stack = []
self.on_failure = []
@@ -220,11 +460,11 @@
else:
raise_ = ast.Raise(exc, None, None)
body.append(raise_)
- # Delete temporary variables.
- names = [ast.Name(name, ast.Del()) for name in self.variables]
- if names:
- delete = ast.Delete(names)
- self.statements.append(delete)
+ # Clear temporary variables by setting them to None.
+ if self.variables:
+ variables = [ast.Name(name, ast.Store()) for name in self.variables]
+ clear = ast.Assign(variables, ast.Name("None", ast.Load()))
+ self.statements.append(clear)
# Fix line numbers.
for stmt in self.statements:
set_location(stmt, assert_.lineno, assert_.col_offset)
@@ -240,21 +480,38 @@
return name, self.explanation_param(expr)
def visit_BoolOp(self, boolop):
- operands = []
- explanations = []
+ res_var = self.variable()
+ expl_list = self.assign(ast.List([], ast.Load()))
+ app = ast.Attribute(expl_list, "append", ast.Load())
+ is_or = int(isinstance(boolop.op, ast.Or))
+ body = save = self.statements
+ fail_save = self.on_failure
+ levels = len(boolop.values) - 1
self.push_format_context()
- for operand in boolop.values:
- res, explanation = self.visit(operand)
- operands.append(res)
- explanations.append(explanation)
- expls = ast.Tuple([ast.Str(expl) for expl in explanations], ast.Load())
- is_or = ast.Num(isinstance(boolop.op, ast.Or))
- expl_template = self.helper("format_boolop",
- ast.Tuple(operands, ast.Load()), expls,
- is_or)
+ # Process each operand, short-circuiting if needed.
+ for i, v in enumerate(boolop.values):
+ if i:
+ fail_inner = []
+ self.on_failure.append(ast.If(cond, fail_inner, []))
+ self.on_failure = fail_inner
+ self.push_format_context()
+ res, expl = self.visit(v)
+ body.append(ast.Assign([ast.Name(res_var, ast.Store())], res))
+ expl_format = self.pop_format_context(ast.Str(expl))
+ call = ast.Call(app, [expl_format], [], None, None)
+ self.on_failure.append(ast.Expr(call))
+ if i < levels:
+ cond = res
+ if is_or:
+ cond = ast.UnaryOp(ast.Not(), cond)
+ inner = []
+ self.statements.append(ast.If(cond, inner, []))
+ self.statements = body = inner
+ self.statements = save
+ self.on_failure = fail_save
+ expl_template = self.helper("format_boolop", expl_list, ast.Num(is_or))
expl = self.pop_format_context(expl_template)
- res = self.assign(ast.BoolOp(boolop.op, operands))
- return res, self.explanation_param(expl)
+ return ast.Name(res_var, ast.Load()), self.explanation_param(expl)
def visit_UnaryOp(self, unary):
pattern = unary_map[unary.op.__class__]
@@ -288,7 +545,7 @@
new_star, expl = self.visit(call.starargs)
arg_expls.append("*" + expl)
if call.kwargs:
- new_kwarg, expl = self.visit(call.kwarg)
+ new_kwarg, expl = self.visit(call.kwargs)
arg_expls.append("**" + expl)
expl = "%s(%s)" % (func_expl, ', '.join(arg_expls))
new_call = ast.Call(new_func, new_args, new_kwargs, new_star, new_kwarg)
diff --git a/_pytest/capture.py b/_pytest/capture.py
--- a/_pytest/capture.py
+++ b/_pytest/capture.py
@@ -11,22 +11,22 @@
group._addoption('-s', action="store_const", const="no", dest="capture",
help="shortcut for --capture=no.")
+ at pytest.mark.tryfirst
+def pytest_cmdline_parse(pluginmanager, args):
+ # we want to perform capturing already for plugin/conftest loading
+ if '-s' in args or "--capture=no" in args:
+ method = "no"
+ elif hasattr(os, 'dup') and '--capture=sys' not in args:
+ method = "fd"
+ else:
+ method = "sys"
+ capman = CaptureManager(method)
+ pluginmanager.register(capman, "capturemanager")
+
def addouterr(rep, outerr):
- repr = getattr(rep, 'longrepr', None)
- if not hasattr(repr, 'addsection'):
- return
for secname, content in zip(["out", "err"], outerr):
if content:
- repr.addsection("Captured std%s" % secname, content.rstrip())
-
-def pytest_unconfigure(config):
- # registered in config.py during early conftest.py loading
- capman = config.pluginmanager.getplugin('capturemanager')
- while capman._method2capture:
- name, cap = capman._method2capture.popitem()
- # XXX logging module may wants to close it itself on process exit
- # otherwise we could do finalization here and call "reset()".
- cap.suspend()
+ rep.sections.append(("Captured std%s" % secname, content))
class NoCapture:
def startall(self):
@@ -39,8 +39,9 @@
return "", ""
class CaptureManager:
- def __init__(self):
+ def __init__(self, defaultmethod=None):
self._method2capture = {}
+ self._defaultmethod = defaultmethod
def _maketempfile(self):
f = py.std.tempfile.TemporaryFile()
@@ -65,14 +66,6 @@
else:
raise ValueError("unknown capturing method: %r" % method)
- def _getmethod_preoptionparse(self, args):
- if '-s' in args or "--capture=no" in args:
- return "no"
- elif hasattr(os, 'dup') and '--capture=sys' not in args:
- return "fd"
- else:
- return "sys"
-
def _getmethod(self, config, fspath):
if config.option.capture:
method = config.option.capture
@@ -85,16 +78,22 @@
method = "sys"
return method
+ def reset_capturings(self):
+ for name, cap in self._method2capture.items():
+ cap.reset()
+
def resumecapture_item(self, item):
method = self._getmethod(item.config, item.fspath)
if not hasattr(item, 'outerr'):
item.outerr = ('', '') # we accumulate outerr on the item
return self.resumecapture(method)
- def resumecapture(self, method):
+ def resumecapture(self, method=None):
if hasattr(self, '_capturing'):
raise ValueError("cannot resume, already capturing with %r" %
(self._capturing,))
+ if method is None:
+ method = self._defaultmethod
cap = self._method2capture.get(method)
self._capturing = method
if cap is None:
@@ -164,17 +163,6 @@
def pytest_runtest_teardown(self, item):
self.resumecapture_item(item)
- def pytest__teardown_final(self, __multicall__, session):
- method = self._getmethod(session.config, None)
- self.resumecapture(method)
- try:
- rep = __multicall__.execute()
- finally:
- outerr = self.suspendcapture()
- if rep:
- addouterr(rep, outerr)
- return rep
-
def pytest_keyboard_interrupt(self, excinfo):
if hasattr(self, '_capturing'):
self.suspendcapture()
diff --git a/_pytest/config.py b/_pytest/config.py
--- a/_pytest/config.py
+++ b/_pytest/config.py
@@ -8,13 +8,15 @@
def pytest_cmdline_parse(pluginmanager, args):
config = Config(pluginmanager)
config.parse(args)
- if config.option.debug:
- config.trace.root.setwriter(sys.stderr.write)
return config
def pytest_unconfigure(config):
- for func in config._cleanup:
- func()
+ while 1:
+ try:
+ fin = config._cleanup.pop()
+ except IndexError:
+ break
+ fin()
class Parser:
""" Parser for command line arguments. """
@@ -81,6 +83,7 @@
self._inidict[name] = (help, type, default)
self._ininames.append(name)
+
class OptionGroup:
def __init__(self, name, description="", parser=None):
self.name = name
@@ -256,11 +259,14 @@
self.hook = self.pluginmanager.hook
self._inicache = {}
self._cleanup = []
-
+
@classmethod
def fromdictargs(cls, option_dict, args):
""" constructor useable for subprocesses. """
config = cls()
+ # XXX slightly crude way to initialize capturing
+ import _pytest.capture
+ _pytest.capture.pytest_cmdline_parse(config.pluginmanager, args)
config._preparse(args, addopts=False)
config.option.__dict__.update(option_dict)
for x in config.option.plugins:
@@ -285,11 +291,10 @@
def _setinitialconftest(self, args):
# capture output during conftest init (#issue93)
- from _pytest.capture import CaptureManager
- capman = CaptureManager()
- self.pluginmanager.register(capman, 'capturemanager')
- # will be unregistered in capture.py's unconfigure()
- capman.resumecapture(capman._getmethod_preoptionparse(args))
+ # XXX introduce load_conftest hook to avoid needing to know
+ # about capturing plugin here
+ capman = self.pluginmanager.getplugin("capturemanager")
+ capman.resumecapture()
try:
try:
self._conftest.setinitial(args)
@@ -334,6 +339,7 @@
# Note that this can only be called once per testing process.
assert not hasattr(self, 'args'), (
"can only parse cmdline args at most once per Config object")
+ self._origargs = args
self._preparse(args)
self._parser.hints.extend(self.pluginmanager._hints)
args = self._parser.parse_setoption(args, self.option)
@@ -341,6 +347,14 @@
args.append(py.std.os.getcwd())
self.args = args
+ def addinivalue_line(self, name, line):
+ """ add a line to an ini-file option. The option must have been
+ declared but might not yet be set in which case the line becomes the
+ first line in its value. """
+ x = self.getini(name)
+ assert isinstance(x, list)
+ x.append(line) # modifies the cached list inline
+
def getini(self, name):
""" return configuration value from an ini file. If the
specified name hasn't been registered through a prior ``parse.addini``
@@ -422,7 +436,7 @@
def getcfg(args, inibasenames):
- args = [x for x in args if str(x)[0] != "-"]
+ args = [x for x in args if not str(x).startswith("-")]
if not args:
args = [py.path.local()]
for arg in args:
diff --git a/_pytest/core.py b/_pytest/core.py
--- a/_pytest/core.py
+++ b/_pytest/core.py
@@ -16,11 +16,10 @@
"junitxml resultlog doctest").split()
class TagTracer:
- def __init__(self, prefix="[pytest] "):
+ def __init__(self):
self._tag2proc = {}
self.writer = None
self.indent = 0
- self.prefix = prefix
def get(self, name):
return TagTracerSub(self, (name,))
@@ -30,7 +29,7 @@
if args:
indent = " " * self.indent
content = " ".join(map(str, args))
- self.writer("%s%s%s\n" %(self.prefix, indent, content))
+ self.writer("%s%s [%s]\n" %(indent, content, ":".join(tags)))
try:
self._tag2proc[tags](tags, args)
except KeyError:
@@ -212,6 +211,14 @@
self.register(mod, modname)
self.consider_module(mod)
+ def pytest_configure(self, config):
+ config.addinivalue_line("markers",
+ "tryfirst: mark a hook implementation function such that the "
+ "plugin machinery will try to call it first/as early as possible.")
+ config.addinivalue_line("markers",
+ "trylast: mark a hook implementation function such that the "
+ "plugin machinery will try to call it last/as late as possible.")
+
def pytest_plugin_registered(self, plugin):
import pytest
dic = self.call_plugin(plugin, "pytest_namespace", {}) or {}
@@ -432,10 +439,7 @@
def _preloadplugins():
_preinit.append(PluginManager(load=True))
-def main(args=None, plugins=None):
- """ returned exit code integer, after an in-process testing run
- with the given command line arguments, preloading an optional list
- of passed in plugin objects. """
+def _prepareconfig(args=None, plugins=None):
if args is None:
args = sys.argv[1:]
elif isinstance(args, py.path.local):
@@ -449,13 +453,19 @@
else: # subsequent calls to main will create a fresh instance
_pluginmanager = PluginManager(load=True)
hook = _pluginmanager.hook
+ if plugins:
+ for plugin in plugins:
+ _pluginmanager.register(plugin)
+ return hook.pytest_cmdline_parse(
+ pluginmanager=_pluginmanager, args=args)
+
+def main(args=None, plugins=None):
+ """ returned exit code integer, after an in-process testing run
+ with the given command line arguments, preloading an optional list
+ of passed in plugin objects. """
try:
- if plugins:
- for plugin in plugins:
- _pluginmanager.register(plugin)
- config = hook.pytest_cmdline_parse(
- pluginmanager=_pluginmanager, args=args)
- exitstatus = hook.pytest_cmdline_main(config=config)
+ config = _prepareconfig(args, plugins)
+ exitstatus = config.hook.pytest_cmdline_main(config=config)
except UsageError:
e = sys.exc_info()[1]
sys.stderr.write("ERROR: %s\n" %(e.args[0],))
diff --git a/_pytest/helpconfig.py b/_pytest/helpconfig.py
--- a/_pytest/helpconfig.py
+++ b/_pytest/helpconfig.py
@@ -1,7 +1,7 @@
""" version info, help messages, tracing configuration. """
import py
import pytest
-import inspect, sys
+import os, inspect, sys
from _pytest.core import varnames
def pytest_addoption(parser):
@@ -18,7 +18,29 @@
help="trace considerations of conftest.py files."),
group.addoption('--debug',
action="store_true", dest="debug", default=False,
- help="generate and show internal debugging information.")
+ help="store internal tracing debug information in 'pytestdebug.log'.")
+
+
+def pytest_cmdline_parse(__multicall__):
+ config = __multicall__.execute()
+ if config.option.debug:
+ path = os.path.abspath("pytestdebug.log")
+ f = open(path, 'w')
+ config._debugfile = f
+ f.write("versions pytest-%s, py-%s, python-%s\ncwd=%s\nargs=%s\n\n" %(
+ pytest.__version__, py.__version__, ".".join(map(str, sys.version_info)),
+ os.getcwd(), config._origargs))
+ config.trace.root.setwriter(f.write)
+ sys.stderr.write("writing pytestdebug information to %s\n" % path)
+ return config
+
+ at pytest.mark.trylast
+def pytest_unconfigure(config):
+ if hasattr(config, '_debugfile'):
+ config._debugfile.close()
+ sys.stderr.write("wrote pytestdebug information to %s\n" %
+ config._debugfile.name)
+ config.trace.root.setwriter(None)
def pytest_cmdline_main(config):
@@ -34,6 +56,7 @@
elif config.option.help:
config.pluginmanager.do_configure(config)
showhelp(config)
+ config.pluginmanager.do_unconfigure(config)
return 0
def showhelp(config):
@@ -91,7 +114,7 @@
verinfo = getpluginversioninfo(config)
if verinfo:
lines.extend(verinfo)
-
+
if config.option.traceconfig:
lines.append("active plugins:")
plugins = []
diff --git a/_pytest/hookspec.py b/_pytest/hookspec.py
--- a/_pytest/hookspec.py
+++ b/_pytest/hookspec.py
@@ -121,16 +121,23 @@
def pytest_itemstart(item, node=None):
""" (deprecated, use pytest_runtest_logstart). """
-def pytest_runtest_protocol(item):
- """ implements the standard runtest_setup/call/teardown protocol including
- capturing exceptions and calling reporting hooks on the results accordingly.
+def pytest_runtest_protocol(item, nextitem):
+ """ implements the runtest_setup/call/teardown protocol for
+ the given test item, including capturing exceptions and calling
+ reporting hooks.
+
+ :arg item: test item for which the runtest protocol is performed.
+
+ :arg nextitem: the scheduled-to-be-next test item (or None if this
+ is the end my friend). This argument is passed on to
+ :py:func:`pytest_runtest_teardown`.
:return boolean: True if no further hook implementations should be invoked.
"""
pytest_runtest_protocol.firstresult = True
def pytest_runtest_logstart(nodeid, location):
- """ signal the start of a test run. """
+ """ signal the start of running a single test item. """
def pytest_runtest_setup(item):
""" called before ``pytest_runtest_call(item)``. """
@@ -138,8 +145,14 @@
def pytest_runtest_call(item):
""" called to execute the test ``item``. """
-def pytest_runtest_teardown(item):
- """ called after ``pytest_runtest_call``. """
+def pytest_runtest_teardown(item, nextitem):
+ """ called after ``pytest_runtest_call``.
+
+ :arg nexitem: the scheduled-to-be-next test item (None if no further
+ test item is scheduled). This argument can be used to
+ perform exact teardowns, i.e. calling just enough finalizers
+ so that nextitem only needs to call setup-functions.
+ """
def pytest_runtest_makereport(item, call):
""" return a :py:class:`_pytest.runner.TestReport` object
@@ -149,15 +162,8 @@
pytest_runtest_makereport.firstresult = True
def pytest_runtest_logreport(report):
- """ process item test report. """
-
-# special handling for final teardown - somewhat internal for now
-def pytest__teardown_final(session):
- """ called before test session finishes. """
-pytest__teardown_final.firstresult = True
-
-def pytest__teardown_final_logerror(report, session):
- """ called if runtest_teardown_final failed. """
+ """ process a test setup/call/teardown report relating to
+ the respective phase of executing a test. """
# -------------------------------------------------------------------------
# test session related hooks
diff --git a/_pytest/junitxml.py b/_pytest/junitxml.py
--- a/_pytest/junitxml.py
+++ b/_pytest/junitxml.py
@@ -25,21 +25,39 @@
long = int
+class Junit(py.xml.Namespace):
+ pass
+
+
# We need to get the subset of the invalid unicode ranges according to
# XML 1.0 which are valid in this python build. Hence we calculate
# this dynamically instead of hardcoding it. The spec range of valid
# chars is: Char ::= #x9 | #xA | #xD | [#x20-#xD7FF] | [#xE000-#xFFFD]
# | [#x10000-#x10FFFF]
-_illegal_unichrs = [(0x00, 0x08), (0x0B, 0x0C), (0x0E, 0x19),
- (0xD800, 0xDFFF), (0xFDD0, 0xFFFF)]
-_illegal_ranges = [unicode("%s-%s") % (unichr(low), unichr(high))
- for (low, high) in _illegal_unichrs
+_legal_chars = (0x09, 0x0A, 0x0d)
+_legal_ranges = (
+ (0x20, 0xD7FF),
+ (0xE000, 0xFFFD),
+ (0x10000, 0x10FFFF),
+)
+_legal_xml_re = [unicode("%s-%s") % (unichr(low), unichr(high))
+ for (low, high) in _legal_ranges
if low < sys.maxunicode]
-illegal_xml_re = re.compile(unicode('[%s]') %
- unicode('').join(_illegal_ranges))
-del _illegal_unichrs
-del _illegal_ranges
+_legal_xml_re = [unichr(x) for x in _legal_chars] + _legal_xml_re
+illegal_xml_re = re.compile(unicode('[^%s]') %
+ unicode('').join(_legal_xml_re))
+del _legal_chars
+del _legal_ranges
+del _legal_xml_re
+def bin_xml_escape(arg):
+ def repl(matchobj):
+ i = ord(matchobj.group())
+ if i <= 0xFF:
+ return unicode('#x%02X') % i
+ else:
+ return unicode('#x%04X') % i
+ return illegal_xml_re.sub(repl, py.xml.escape(arg))
def pytest_addoption(parser):
group = parser.getgroup("terminal reporting")
@@ -68,117 +86,97 @@
logfile = os.path.expanduser(os.path.expandvars(logfile))
self.logfile = os.path.normpath(logfile)
self.prefix = prefix
- self.test_logs = []
+ self.tests = []
self.passed = self.skipped = 0
self.failed = self.errors = 0
- self._durations = {}
def _opentestcase(self, report):
names = report.nodeid.split("::")
names[0] = names[0].replace("/", '.')
- names = tuple(names)
- d = {'time': self._durations.pop(report.nodeid, "0")}
names = [x.replace(".py", "") for x in names if x != "()"]
classnames = names[:-1]
if self.prefix:
classnames.insert(0, self.prefix)
- d['classname'] = ".".join(classnames)
- d['name'] = py.xml.escape(names[-1])
- attrs = ['%s="%s"' % item for item in sorted(d.items())]
- self.test_logs.append("\n<testcase %s>" % " ".join(attrs))
+ self.tests.append(Junit.testcase(
+ classname=".".join(classnames),
+ name=names[-1],
+ time=getattr(report, 'duration', 0)
+ ))
- def _closetestcase(self):
- self.test_logs.append("</testcase>")
-
- def appendlog(self, fmt, *args):
- def repl(matchobj):
- i = ord(matchobj.group())
- if i <= 0xFF:
- return unicode('#x%02X') % i
- else:
- return unicode('#x%04X') % i
- args = tuple([illegal_xml_re.sub(repl, py.xml.escape(arg))
- for arg in args])
- self.test_logs.append(fmt % args)
+ def append(self, obj):
+ self.tests[-1].append(obj)
def append_pass(self, report):
self.passed += 1
- self._opentestcase(report)
- self._closetestcase()
def append_failure(self, report):
- self._opentestcase(report)
#msg = str(report.longrepr.reprtraceback.extraline)
if "xfail" in report.keywords:
- self.appendlog(
- '<skipped message="xfail-marked test passes unexpectedly"/>')
+ self.append(
+ Junit.skipped(message="xfail-marked test passes unexpectedly"))
self.skipped += 1
else:
- self.appendlog('<failure message="test failure">%s</failure>',
- report.longrepr)
+ sec = dict(report.sections)
+ fail = Junit.failure(message="test failure")
+ fail.append(str(report.longrepr))
+ self.append(fail)
+ for name in ('out', 'err'):
+ content = sec.get("Captured std%s" % name)
+ if content:
+ tag = getattr(Junit, 'system-'+name)
+ self.append(tag(bin_xml_escape(content)))
self.failed += 1
- self._closetestcase()
def append_collect_failure(self, report):
- self._opentestcase(report)
#msg = str(report.longrepr.reprtraceback.extraline)
- self.appendlog('<failure message="collection failure">%s</failure>',
- report.longrepr)
- self._closetestcase()
+ self.append(Junit.failure(str(report.longrepr),
+ message="collection failure"))
self.errors += 1
def append_collect_skipped(self, report):
- self._opentestcase(report)
#msg = str(report.longrepr.reprtraceback.extraline)
- self.appendlog('<skipped message="collection skipped">%s</skipped>',
- report.longrepr)
- self._closetestcase()
+ self.append(Junit.skipped(str(report.longrepr),
+ message="collection skipped"))
self.skipped += 1
def append_error(self, report):
- self._opentestcase(report)
- self.appendlog('<error message="test setup failure">%s</error>',
- report.longrepr)
- self._closetestcase()
+ self.append(Junit.error(str(report.longrepr),
+ message="test setup failure"))
self.errors += 1
def append_skipped(self, report):
- self._opentestcase(report)
if "xfail" in report.keywords:
- self.appendlog(
- '<skipped message="expected test failure">%s</skipped>',
- report.keywords['xfail'])
+ self.append(Junit.skipped(str(report.keywords['xfail']),
+ message="expected test failure"))
else:
filename, lineno, skipreason = report.longrepr
if skipreason.startswith("Skipped: "):
skipreason = skipreason[9:]
- self.appendlog('<skipped type="pytest.skip" '
- 'message="%s">%s</skipped>',
- skipreason, "%s:%s: %s" % report.longrepr,
- )
- self._closetestcase()
+ self.append(
+ Junit.skipped("%s:%s: %s" % report.longrepr,
+ type="pytest.skip",
+ message=skipreason
+ ))
self.skipped += 1
def pytest_runtest_logreport(self, report):
if report.passed:
- self.append_pass(report)
+ if report.when == "call": # ignore setup/teardown
+ self._opentestcase(report)
+ self.append_pass(report)
elif report.failed:
+ self._opentestcase(report)
if report.when != "call":
self.append_error(report)
else:
self.append_failure(report)
elif report.skipped:
+ self._opentestcase(report)
self.append_skipped(report)
- def pytest_runtest_call(self, item, __multicall__):
- start = time.time()
- try:
- return __multicall__.execute()
- finally:
- self._durations[item.nodeid] = time.time() - start
-
def pytest_collectreport(self, report):
if not report.passed:
+ self._opentestcase(report)
if report.failed:
self.append_collect_failure(report)
else:
@@ -187,10 +185,11 @@
def pytest_internalerror(self, excrepr):
self.errors += 1
data = py.xml.escape(excrepr)
- self.test_logs.append(
- '\n<testcase classname="pytest" name="internal">'
- ' <error message="internal error">'
- '%s</error></testcase>' % data)
+ self.tests.append(
+ Junit.testcase(
+ Junit.error(data, message="internal error"),
+ classname="pytest",
+ name="internal"))
def pytest_sessionstart(self, session):
self.suite_start_time = time.time()
@@ -204,17 +203,17 @@
suite_stop_time = time.time()
suite_time_delta = suite_stop_time - self.suite_start_time
numtests = self.passed + self.failed
+
logfile.write('<?xml version="1.0" encoding="utf-8"?>')
- logfile.write('<testsuite ')
- logfile.write('name="" ')
- logfile.write('errors="%i" ' % self.errors)
- logfile.write('failures="%i" ' % self.failed)
- logfile.write('skips="%i" ' % self.skipped)
- logfile.write('tests="%i" ' % numtests)
- logfile.write('time="%.3f"' % suite_time_delta)
- logfile.write(' >')
- logfile.writelines(self.test_logs)
- logfile.write('</testsuite>')
+ logfile.write(Junit.testsuite(
+ self.tests,
+ name="",
+ errors=self.errors,
+ failures=self.failed,
+ skips=self.skipped,
+ tests=numtests,
+ time="%.3f" % suite_time_delta,
+ ).unicode(indent=0))
logfile.close()
def pytest_terminal_summary(self, terminalreporter):
diff --git a/_pytest/main.py b/_pytest/main.py
--- a/_pytest/main.py
+++ b/_pytest/main.py
@@ -2,7 +2,7 @@
import py
import pytest, _pytest
-import os, sys
+import os, sys, imp
tracebackcutdir = py.path.local(_pytest.__file__).dirpath()
# exitcodes for the command line
@@ -11,6 +11,8 @@
EXIT_INTERRUPTED = 2
EXIT_INTERNALERROR = 3
+name_re = py.std.re.compile("^[a-zA-Z_]\w*$")
+
def pytest_addoption(parser):
parser.addini("norecursedirs", "directory patterns to avoid for recursion",
type="args", default=('.*', 'CVS', '_darcs', '{arch}'))
@@ -27,6 +29,9 @@
action="store", type="int", dest="maxfail", default=0,
help="exit after first num failures or errors.")
+ group._addoption('--strict', action="store_true",
+ help="run pytest in strict mode, warnings become errors.")
+
group = parser.getgroup("collect", "collection")
group.addoption('--collectonly',
action="store_true", dest="collectonly",
@@ -48,7 +53,7 @@
def pytest_namespace():
collect = dict(Item=Item, Collector=Collector, File=File, Session=Session)
return dict(collect=collect)
-
+
def pytest_configure(config):
py.test.config = config # compatibiltiy
if config.option.exitfirst:
@@ -77,11 +82,11 @@
session.exitstatus = EXIT_INTERNALERROR
if excinfo.errisinstance(SystemExit):
sys.stderr.write("mainloop: caught Spurious SystemExit!\n")
+ if initstate >= 2:
+ config.hook.pytest_sessionfinish(session=session,
+ exitstatus=session.exitstatus or (session._testsfailed and 1))
if not session.exitstatus and session._testsfailed:
session.exitstatus = EXIT_TESTSFAILED
- if initstate >= 2:
- config.hook.pytest_sessionfinish(session=session,
- exitstatus=session.exitstatus)
if initstate >= 1:
config.pluginmanager.do_unconfigure(config)
return session.exitstatus
@@ -101,8 +106,12 @@
def pytest_runtestloop(session):
if session.config.option.collectonly:
return True
- for item in session.session.items:
- item.config.hook.pytest_runtest_protocol(item=item)
+ for i, item in enumerate(session.items):
+ try:
+ nextitem = session.items[i+1]
+ except IndexError:
+ nextitem = None
+ item.config.hook.pytest_runtest_protocol(item=item, nextitem=nextitem)
if session.shouldstop:
raise session.Interrupted(session.shouldstop)
return True
@@ -132,7 +141,7 @@
return getattr(pytest, name)
return property(fget, None, None,
"deprecated attribute %r, use pytest.%s" % (name,name))
-
+
class Node(object):
""" base class for all Nodes in the collection tree.
Collector subclasses have children, Items are terminal nodes."""
@@ -143,13 +152,13 @@
#: the parent collector node.
self.parent = parent
-
+
#: the test config object
self.config = config or parent.config
#: the collection this node is part of
self.session = session or parent.session
-
+
#: filesystem path where this node was collected from
self.fspath = getattr(parent, 'fspath', None)
self.ihook = self.session.gethookproxy(self.fspath)
@@ -224,13 +233,13 @@
def listchain(self):
""" return list of all parent collectors up to self,
starting from root of collection tree. """
- l = [self]
- while 1:
- x = l[0]
- if x.parent is not None: # and x.parent.parent is not None:
- l.insert(0, x.parent)
- else:
- return l
+ chain = []
+ item = self
+ while item is not None:
+ chain.append(item)
+ item = item.parent
+ chain.reverse()
+ return chain
def listnames(self):
return [x.name for x in self.listchain()]
@@ -325,6 +334,8 @@
""" a basic test invocation item. Note that for a single function
there might be multiple test invocation items.
"""
+ nextitem = None
+
def reportinfo(self):
return self.fspath, None, ""
@@ -399,6 +410,7 @@
self._notfound = []
self._initialpaths = set()
self._initialparts = []
+ self.items = items = []
for arg in args:
parts = self._parsearg(arg)
self._initialparts.append(parts)
@@ -414,7 +426,6 @@
if not genitems:
return rep.result
else:
- self.items = items = []
if rep.passed:
for node in rep.result:
self.items.extend(self.genitems(node))
@@ -469,16 +480,29 @@
return True
def _tryconvertpyarg(self, x):
- try:
- mod = __import__(x, None, None, ['__doc__'])
- except (ValueError, ImportError):
- return x
- p = py.path.local(mod.__file__)
- if p.purebasename == "__init__":
- p = p.dirpath()
- else:
- p = p.new(basename=p.purebasename+".py")
- return str(p)
+ mod = None
+ path = [os.path.abspath('.')] + sys.path
+ for name in x.split('.'):
+ # ignore anything that's not a proper name here
+ # else something like --pyargs will mess up '.'
+ # since imp.find_module will actually sometimes work for it
+ # but it's supposed to be considered a filesystem path
+ # not a package
+ if name_re.match(name) is None:
+ return x
+ try:
+ fd, mod, type_ = imp.find_module(name, path)
+ except ImportError:
+ return x
+ else:
+ if fd is not None:
+ fd.close()
+
+ if type_[2] != imp.PKG_DIRECTORY:
+ path = [os.path.dirname(mod)]
+ else:
+ path = [mod]
+ return mod
def _parsearg(self, arg):
""" return (fspath, names) tuple after checking the file exists. """
@@ -496,7 +520,7 @@
raise pytest.UsageError(msg + arg)
parts[0] = path
return parts
-
+
def matchnodes(self, matching, names):
self.trace("matchnodes", matching, names)
self.trace.root.indent += 1
diff --git a/_pytest/mark.py b/_pytest/mark.py
--- a/_pytest/mark.py
+++ b/_pytest/mark.py
@@ -14,12 +14,37 @@
"Terminate expression with ':' to make the first match match "
"all subsequent tests (usually file-order). ")
+ group._addoption("-m",
+ action="store", dest="markexpr", default="", metavar="MARKEXPR",
+ help="only run tests matching given mark expression. "
+ "example: -m 'mark1 and not mark2'."
+ )
+
+ group.addoption("--markers", action="store_true", help=
+ "show markers (builtin, plugin and per-project ones).")
+
+ parser.addini("markers", "markers for test functions", 'linelist')
+
+def pytest_cmdline_main(config):
+ if config.option.markers:
+ config.pluginmanager.do_configure(config)
+ tw = py.io.TerminalWriter()
+ for line in config.getini("markers"):
+ name, rest = line.split(":", 1)
+ tw.write("@pytest.mark.%s:" % name, bold=True)
+ tw.line(rest)
+ tw.line()
+ config.pluginmanager.do_unconfigure(config)
+ return 0
+pytest_cmdline_main.tryfirst = True
+
def pytest_collection_modifyitems(items, config):
keywordexpr = config.option.keyword
- if not keywordexpr:
+ matchexpr = config.option.markexpr
+ if not keywordexpr and not matchexpr:
return
selectuntil = False
- if keywordexpr[-1] == ":":
+ if keywordexpr[-1:] == ":":
selectuntil = True
keywordexpr = keywordexpr[:-1]
@@ -29,21 +54,38 @@
if keywordexpr and skipbykeyword(colitem, keywordexpr):
deselected.append(colitem)
else:
- remaining.append(colitem)
if selectuntil:
keywordexpr = None
+ if matchexpr:
+ if not matchmark(colitem, matchexpr):
+ deselected.append(colitem)
+ continue
+ remaining.append(colitem)
if deselected:
config.hook.pytest_deselected(items=deselected)
items[:] = remaining
+class BoolDict:
+ def __init__(self, mydict):
+ self._mydict = mydict
+ def __getitem__(self, name):
+ return name in self._mydict
+
+def matchmark(colitem, matchexpr):
+ return eval(matchexpr, {}, BoolDict(colitem.obj.__dict__))
+
+def pytest_configure(config):
+ if config.option.strict:
+ pytest.mark._config = config
+
def skipbykeyword(colitem, keywordexpr):
""" return True if they given keyword expression means to
skip this collector/item.
"""
if not keywordexpr:
return
-
+
itemkeywords = getkeywords(colitem)
for key in filter(None, keywordexpr.split()):
eor = key[:1] == '-'
@@ -77,15 +119,31 @@
@py.test.mark.slowtest
def test_function():
pass
-
+
will set a 'slowtest' :class:`MarkInfo` object
on the ``test_function`` object. """
def __getattr__(self, name):
if name[0] == "_":
raise AttributeError(name)
+ if hasattr(self, '_config'):
+ self._check(name)
return MarkDecorator(name)
+ def _check(self, name):
+ try:
+ if name in self._markers:
+ return
+ except AttributeError:
+ pass
+ self._markers = l = set()
+ for line in self._config.getini("markers"):
+ beginning = line.split(":", 1)
+ x = beginning[0].split("(", 1)[0]
+ l.add(x)
+ if name not in self._markers:
+ raise AttributeError("%r not a registered marker" % (name,))
+
class MarkDecorator:
""" A decorator for test functions and test classes. When applied
it will create :class:`MarkInfo` objects which may be
@@ -133,8 +191,7 @@
holder = MarkInfo(self.markname, self.args, self.kwargs)
setattr(func, self.markname, holder)
else:
- holder.kwargs.update(self.kwargs)
- holder.args += self.args
+ holder.add(self.args, self.kwargs)
return func
kw = self.kwargs.copy()
kw.update(kwargs)
@@ -150,27 +207,20 @@
self.args = args
#: keyword argument dictionary, empty if nothing specified
self.kwargs = kwargs
+ self._arglist = [(args, kwargs.copy())]
def __repr__(self):
return "<MarkInfo %r args=%r kwargs=%r>" % (
self.name, self.args, self.kwargs)
-def pytest_itemcollected(item):
- if not isinstance(item, pytest.Function):
- return
- try:
- func = item.obj.__func__
- except AttributeError:
- func = getattr(item.obj, 'im_func', item.obj)
- pyclasses = (pytest.Class, pytest.Module)
- for node in item.listchain():
- if isinstance(node, pyclasses):
- marker = getattr(node.obj, 'pytestmark', None)
- if marker is not None:
- if isinstance(marker, list):
- for mark in marker:
- mark(func)
- else:
- marker(func)
- node = node.parent
- item.keywords.update(py.builtin._getfuncdict(func))
+ def add(self, args, kwargs):
+ """ add a MarkInfo with the given args and kwargs. """
+ self._arglist.append((args, kwargs))
+ self.args += args
+ self.kwargs.update(kwargs)
+
+ def __iter__(self):
+ """ yield MarkInfo objects each relating to a marking-call. """
+ for args, kwargs in self._arglist:
+ yield MarkInfo(self.name, args, kwargs)
+
diff --git a/_pytest/monkeypatch.py b/_pytest/monkeypatch.py
--- a/_pytest/monkeypatch.py
+++ b/_pytest/monkeypatch.py
@@ -13,6 +13,7 @@
monkeypatch.setenv(name, value, prepend=False)
monkeypatch.delenv(name, value, raising=True)
monkeypatch.syspath_prepend(path)
+ monkeypatch.chdir(path)
All modifications will be undone after the requesting
test function has finished. The ``raising``
@@ -30,6 +31,7 @@
def __init__(self):
self._setattr = []
self._setitem = []
+ self._cwd = None
def setattr(self, obj, name, value, raising=True):
""" set attribute ``name`` on ``obj`` to ``value``, by default
@@ -83,6 +85,17 @@
self._savesyspath = sys.path[:]
sys.path.insert(0, str(path))
+ def chdir(self, path):
+ """ change the current working directory to the specified path
+ path can be a string or a py.path.local object
+ """
+ if self._cwd is None:
+ self._cwd = os.getcwd()
+ if hasattr(path, "chdir"):
+ path.chdir()
+ else:
+ os.chdir(path)
+
def undo(self):
""" undo previous changes. This call consumes the
undo stack. Calling it a second time has no effect unless
@@ -95,9 +108,17 @@
self._setattr[:] = []
for dictionary, name, value in self._setitem:
if value is notset:
- del dictionary[name]
+ try:
+ del dictionary[name]
+ except KeyError:
+ pass # was already deleted, so we have the desired state
else:
dictionary[name] = value
self._setitem[:] = []
if hasattr(self, '_savesyspath'):
sys.path[:] = self._savesyspath
+ del self._savesyspath
+
+ if self._cwd is not None:
+ os.chdir(self._cwd)
+ self._cwd = None
diff --git a/_pytest/nose.py b/_pytest/nose.py
--- a/_pytest/nose.py
+++ b/_pytest/nose.py
@@ -13,6 +13,7 @@
call.excinfo = call2.excinfo
+ at pytest.mark.trylast
def pytest_runtest_setup(item):
if isinstance(item, (pytest.Function)):
if isinstance(item.parent, pytest.Generator):
diff --git a/_pytest/pastebin.py b/_pytest/pastebin.py
--- a/_pytest/pastebin.py
+++ b/_pytest/pastebin.py
@@ -38,7 +38,11 @@
del tr._tw.__dict__['write']
def getproxy():
- return py.std.xmlrpclib.ServerProxy(url.xmlrpc).pastes
+ if sys.version_info < (3, 0):
+ from xmlrpclib import ServerProxy
+ else:
+ from xmlrpc.client import ServerProxy
+ return ServerProxy(url.xmlrpc).pastes
def pytest_terminal_summary(terminalreporter):
if terminalreporter.config.option.pastebin != "failed":
diff --git a/_pytest/pdb.py b/_pytest/pdb.py
--- a/_pytest/pdb.py
+++ b/_pytest/pdb.py
@@ -19,11 +19,13 @@
class pytestPDB:
""" Pseudo PDB that defers to the real pdb. """
item = None
+ collector = None
def set_trace(self):
""" invoke PDB set_trace debugging, dropping any IO capturing. """
frame = sys._getframe().f_back
- item = getattr(self, 'item', None)
+ item = self.item or self.collector
+
if item is not None:
capman = item.config.pluginmanager.getplugin("capturemanager")
out, err = capman.suspendcapture()
@@ -38,6 +40,14 @@
pytestPDB.item = item
pytest_runtest_setup = pytest_runtest_call = pytest_runtest_teardown = pdbitem
+ at pytest.mark.tryfirst
+def pytest_make_collect_report(__multicall__, collector):
+ try:
+ pytestPDB.collector = collector
+ return __multicall__.execute()
+ finally:
+ pytestPDB.collector = None
+
def pytest_runtest_makereport():
pytestPDB.item = None
@@ -60,7 +70,13 @@
tw.sep(">", "traceback")
rep.toterminal(tw)
tw.sep(">", "entering PDB")
- post_mortem(call.excinfo._excinfo[2])
+ # A doctest.UnexpectedException is not useful for post_mortem.
+ # Use the underlying exception instead:
+ if isinstance(call.excinfo.value, py.std.doctest.UnexpectedException):
+ tb = call.excinfo.value.exc_info[2]
+ else:
+ tb = call.excinfo._excinfo[2]
+ post_mortem(tb)
rep._pdbshown = True
return rep
diff --git a/_pytest/pytester.py b/_pytest/pytester.py
--- a/_pytest/pytester.py
+++ b/_pytest/pytester.py
@@ -25,6 +25,7 @@
_pytest_fullpath
except NameError:
_pytest_fullpath = os.path.abspath(pytest.__file__.rstrip("oc"))
+ _pytest_fullpath = _pytest_fullpath.replace("$py.class", ".py")
def pytest_funcarg___pytest(request):
return PytestArg(request)
@@ -313,16 +314,6 @@
result.extend(session.genitems(colitem))
return result
- def inline_genitems(self, *args):
- #config = self.parseconfig(*args)
- config = self.parseconfigure(*args)
- rec = self.getreportrecorder(config)
- session = Session(config)
- config.hook.pytest_sessionstart(session=session)
- session.perform_collect()
- config.hook.pytest_sessionfinish(session=session, exitstatus=EXIT_OK)
- return session.items, rec
-
def runitem(self, source):
# used from runner functional tests
item = self.getitem(source)
@@ -343,64 +334,57 @@
l = list(args) + [p]
reprec = self.inline_run(*l)
reports = reprec.getreports("pytest_runtest_logreport")
- assert len(reports) == 1, reports
- return reports[0]
+ assert len(reports) == 3, reports # setup/call/teardown
+ return reports[1]
+
+ def inline_genitems(self, *args):
+ return self.inprocess_run(list(args) + ['--collectonly'])
def inline_run(self, *args):
- args = ("-s", ) + args # otherwise FD leakage
- config = self.parseconfig(*args)
- reprec = self.getreportrecorder(config)
- #config.pluginmanager.do_configure(config)
- config.hook.pytest_cmdline_main(config=config)
- #config.pluginmanager.do_unconfigure(config)
- return reprec
+ items, rec = self.inprocess_run(args)
+ return rec
- def config_preparse(self):
- config = self.Config()
- for plugin in self.plugins:
- if isinstance(plugin, str):
- config.pluginmanager.import_plugin(plugin)
- else:
- if isinstance(plugin, dict):
- plugin = PseudoPlugin(plugin)
- if not config.pluginmanager.isregistered(plugin):
- config.pluginmanager.register(plugin)
- return config
+ def inprocess_run(self, args, plugins=None):
+ rec = []
+ items = []
+ class Collect:
+ def pytest_configure(x, config):
+ rec.append(self.getreportrecorder(config))
+ def pytest_itemcollected(self, item):
+ items.append(item)
+ if not plugins:
+ plugins = []
+ plugins.append(Collect())
+ ret = self.pytestmain(list(args), plugins=[Collect()])
+ reprec = rec[0]
+ reprec.ret = ret
+ assert len(rec) == 1
+ return items, reprec
def parseconfig(self, *args):
- if not args:
- args = (self.tmpdir,)
- config = self.config_preparse()
- args = list(args)
+ args = [str(x) for x in args]
for x in args:
if str(x).startswith('--basetemp'):
break
else:
args.append("--basetemp=%s" % self.tmpdir.dirpath('basetemp'))
- config.parse(args)
+ import _pytest.core
+ config = _pytest.core._prepareconfig(args, self.plugins)
+ # the in-process pytest invocation needs to avoid leaking FDs
+ # so we register a "reset_capturings" callmon the capturing manager
+ # and make sure it gets called
+ config._cleanup.append(
+ config.pluginmanager.getplugin("capturemanager").reset_capturings)
+ import _pytest.config
+ self.request.addfinalizer(
+ lambda: _pytest.config.pytest_unconfigure(config))
return config
- def reparseconfig(self, args=None):
- """ this is used from tests that want to re-invoke parse(). """
- if not args:
- args = [self.tmpdir]
- oldconfig = getattr(py.test, 'config', None)
- try:
- c = py.test.config = self.Config()
- c.basetemp = py.path.local.make_numbered_dir(prefix="reparse",
- keep=0, rootdir=self.tmpdir, lock_timeout=None)
- c.parse(args)
- c.pluginmanager.do_configure(c)
- self.request.addfinalizer(lambda: c.pluginmanager.do_unconfigure(c))
- return c
- finally:
- py.test.config = oldconfig
-
def parseconfigure(self, *args):
config = self.parseconfig(*args)
config.pluginmanager.do_configure(config)
self.request.addfinalizer(lambda:
- config.pluginmanager.do_unconfigure(config))
+ config.pluginmanager.do_unconfigure(config))
return config
def getitem(self, source, funcname="test_func"):
@@ -420,7 +404,6 @@
self.makepyfile(__init__ = "#")
self.config = config = self.parseconfigure(path, *configargs)
node = self.getnode(config, path)
- #config.pluginmanager.do_unconfigure(config)
return node
def collect_by_name(self, modcol, name):
@@ -437,9 +420,16 @@
return py.std.subprocess.Popen(cmdargs, stdout=stdout, stderr=stderr, **kw)
def pytestmain(self, *args, **kwargs):
- ret = pytest.main(*args, **kwargs)
- if ret == 2:
- raise KeyboardInterrupt()
+ class ResetCapturing:
+ @pytest.mark.trylast
+ def pytest_unconfigure(self, config):
+ capman = config.pluginmanager.getplugin("capturemanager")
+ capman.reset_capturings()
+ plugins = kwargs.setdefault("plugins", [])
+ rc = ResetCapturing()
+ plugins.append(rc)
+ return pytest.main(*args, **kwargs)
+
def run(self, *cmdargs):
return self._run(*cmdargs)
@@ -528,6 +518,8 @@
pexpect = py.test.importorskip("pexpect", "2.4")
if hasattr(sys, 'pypy_version_info') and '64' in py.std.platform.machine():
pytest.skip("pypy-64 bit not supported")
+ if sys.platform == "darwin":
+ pytest.xfail("pexpect does not work reliably on darwin?!")
logfile = self.tmpdir.join("spawn.out")
child = pexpect.spawn(cmd, logfile=logfile.open("w"))
child.timeout = expect_timeout
@@ -540,10 +532,6 @@
return "INTERNAL not-utf8-decodeable, truncated string:\n%s" % (
py.io.saferepr(out),)
-class PseudoPlugin:
- def __init__(self, vars):
- self.__dict__.update(vars)
-
class ReportRecorder(object):
def __init__(self, hook):
self.hook = hook
@@ -565,10 +553,17 @@
def getreports(self, names="pytest_runtest_logreport pytest_collectreport"):
return [x.report for x in self.getcalls(names)]
- def matchreport(self, inamepart="", names="pytest_runtest_logreport pytest_collectreport", when=None):
+ def matchreport(self, inamepart="",
+ names="pytest_runtest_logreport pytest_collectreport", when=None):
""" return a testreport whose dotted import path matches """
l = []
for rep in self.getreports(names=names):
+ try:
+ if not when and rep.when != "call" and rep.passed:
+ # setup/teardown passing reports - let's ignore those
+ continue
+ except AttributeError:
+ pass
if when and getattr(rep, 'when', None) != when:
continue
if not inamepart or inamepart in rep.nodeid.split("::"):
diff --git a/_pytest/python.py b/_pytest/python.py
--- a/_pytest/python.py
+++ b/_pytest/python.py
@@ -4,6 +4,7 @@
import sys
import pytest
from py._code.code import TerminalRepr
+from _pytest.monkeypatch import monkeypatch
import _pytest
cutdir = py.path.local(_pytest.__file__).dirpath()
@@ -26,6 +27,24 @@
showfuncargs(config)
return 0
+
+def pytest_generate_tests(metafunc):
+ try:
+ param = metafunc.function.parametrize
+ except AttributeError:
+ return
+ for p in param:
+ metafunc.parametrize(*p.args, **p.kwargs)
+
+def pytest_configure(config):
+ config.addinivalue_line("markers",
+ "parametrize(argnames, argvalues): call a test function multiple "
+ "times passing in multiple different argument value sets. Example: "
+ "@parametrize('arg1', [1,2]) would lead to two calls of the decorated "
+ "test function, one with arg1=1 and another with arg1=2."
+ )
+
+
@pytest.mark.trylast
def pytest_namespace():
raises.Exception = pytest.fail.Exception
@@ -138,6 +157,7 @@
obj = obj.place_as
self._fslineno = py.code.getfslineno(obj)
+ assert isinstance(self._fslineno[1], int), obj
return self._fslineno
def reportinfo(self):
@@ -155,6 +175,7 @@
else:
fspath, lineno = self._getfslineno()
modpath = self.getmodpath()
+ assert isinstance(lineno, int)
return fspath, lineno, modpath
class PyCollectorMixin(PyobjMixin, pytest.Collector):
@@ -200,6 +221,7 @@
module = self.getparent(Module).obj
clscol = self.getparent(Class)
cls = clscol and clscol.obj or None
+ transfer_markers(funcobj, cls, module)
metafunc = Metafunc(funcobj, config=self.config,
cls=cls, module=module)
gentesthook = self.config.hook.pytest_generate_tests
@@ -219,6 +241,19 @@
l.append(function)
return l
+def transfer_markers(funcobj, cls, mod):
+ # XXX this should rather be code in the mark plugin or the mark
+ # plugin should merge with the python plugin.
+ for holder in (cls, mod):
+ try:
+ pytestmark = holder.pytestmark
+ except AttributeError:
+ continue
+ if isinstance(pytestmark, list):
+ for mark in pytestmark:
+ mark(funcobj)
+ else:
+ pytestmark(funcobj)
class Module(pytest.File, PyCollectorMixin):
def _getobj(self):
@@ -226,13 +261,8 @@
def _importtestmodule(self):
# we assume we are only called once per module
- from _pytest import assertion
- assertion.before_module_import(self)
try:
- try:
- mod = self.fspath.pyimport(ensuresyspath=True)
- finally:
- assertion.after_module_import(self)
+ mod = self.fspath.pyimport(ensuresyspath=True)
except SyntaxError:
excinfo = py.code.ExceptionInfo()
raise self.CollectError(excinfo.getrepr(style="short"))
@@ -244,7 +274,8 @@
" %s\n"
"which is not the same as the test file we want to collect:\n"
" %s\n"
- "HINT: use a unique basename for your test file modules"
+ "HINT: remove __pycache__ / .pyc files and/or use a "
+ "unique basename for your test file modules"
% e.args
)
#print "imported test module", mod
@@ -374,6 +405,7 @@
tw.line()
tw.line("%s:%d" % (self.filename, self.firstlineno+1))
+
class Generator(FunctionMixin, PyCollectorMixin, pytest.Collector):
def collect(self):
# test generators are seen as collectors but they also
@@ -430,6 +462,7 @@
"yielded functions (deprecated) cannot have funcargs")
else:
if callspec is not None:
+ self.callspec = callspec
self.funcargs = callspec.funcargs or {}
self._genid = callspec.id
if hasattr(callspec, "param"):
@@ -506,15 +539,59 @@
request._fillfuncargs()
_notexists = object()
-class CallSpec:
- def __init__(self, funcargs, id, param):
- self.funcargs = funcargs
- self.id = id
+
+class CallSpec2(object):
+ def __init__(self, metafunc):
+ self.metafunc = metafunc
+ self.funcargs = {}
+ self._idlist = []
+ self.params = {}
+ self._globalid = _notexists
+ self._globalid_args = set()
+ self._globalparam = _notexists
+
+ def copy(self, metafunc):
+ cs = CallSpec2(self.metafunc)
+ cs.funcargs.update(self.funcargs)
+ cs.params.update(self.params)
+ cs._idlist = list(self._idlist)
+ cs._globalid = self._globalid
+ cs._globalid_args = self._globalid_args
+ cs._globalparam = self._globalparam
+ return cs
+
+ def _checkargnotcontained(self, arg):
+ if arg in self.params or arg in self.funcargs:
+ raise ValueError("duplicate %r" %(arg,))
+
+ def getparam(self, name):
+ try:
+ return self.params[name]
+ except KeyError:
+ if self._globalparam is _notexists:
+ raise ValueError(name)
+ return self._globalparam
+
+ @property
+ def id(self):
+ return "-".join(map(str, filter(None, self._idlist)))
+
+ def setmulti(self, valtype, argnames, valset, id):
+ for arg,val in zip(argnames, valset):
+ self._checkargnotcontained(arg)
+ getattr(self, valtype)[arg] = val
+ self._idlist.append(id)
+
+ def setall(self, funcargs, id, param):
+ for x in funcargs:
+ self._checkargnotcontained(x)
+ self.funcargs.update(funcargs)
+ if id is not _notexists:
+ self._idlist.append(id)
if param is not _notexists:
- self.param = param
- def __repr__(self):
- return "<CallSpec id=%r param=%r funcargs=%r>" %(
- self.id, getattr(self, 'param', '?'), self.funcargs)
+ assert self._globalparam is _notexists
+ self._globalparam = param
+
class Metafunc:
def __init__(self, function, config=None, cls=None, module=None):
@@ -528,31 +605,71 @@
self._calls = []
self._ids = py.builtin.set()
+ def parametrize(self, argnames, argvalues, indirect=False, ids=None):
+ """ Add new invocations to the underlying test function using the list
+ of argvalues for the given argnames. Parametrization is performed
+ during the collection phase. If you need to setup expensive resources
+ you may pass indirect=True and implement a funcarg factory which can
+ perform the expensive setup just before a test is actually run.
+
+ :arg argnames: an argument name or a list of argument names
+
+ :arg argvalues: a list of values for the argname or a list of tuples of
+ values for the list of argument names.
+
+ :arg indirect: if True each argvalue corresponding to an argument will
+ be passed as request.param to its respective funcarg factory so
+ that it can perform more expensive setups during the setup phase of
+ a test rather than at collection time.
+
+ :arg ids: list of string ids each corresponding to the argvalues so
+ that they are part of the test id. If no ids are provided they will
+ be generated automatically from the argvalues.
+ """
+ if not isinstance(argnames, (tuple, list)):
+ argnames = (argnames,)
+ argvalues = [(val,) for val in argvalues]
+ if not indirect:
+ #XXX should we also check for the opposite case?
+ for arg in argnames:
+ if arg not in self.funcargnames:
+ raise ValueError("%r has no argument %r" %(self.function, arg))
+ valtype = indirect and "params" or "funcargs"
+ if not ids:
+ idmaker = IDMaker()
+ ids = list(map(idmaker, argvalues))
+ newcalls = []
+ for callspec in self._calls or [CallSpec2(self)]:
+ for i, valset in enumerate(argvalues):
+ assert len(valset) == len(argnames)
+ newcallspec = callspec.copy(self)
+ newcallspec.setmulti(valtype, argnames, valset, ids[i])
+ newcalls.append(newcallspec)
+ self._calls = newcalls
+
def addcall(self, funcargs=None, id=_notexists, param=_notexists):
- """ add a new call to the underlying test function during the
- collection phase of a test run. Note that request.addcall() is
- called during the test collection phase prior and independently
- to actual test execution. Therefore you should perform setup
- of resources in a funcarg factory which can be instrumented
- with the ``param``.
+ """ (deprecated, use parametrize) Add a new call to the underlying
+ test function during the collection phase of a test run. Note that
+ request.addcall() is called during the test collection phase prior and
+ independently to actual test execution. You should only use addcall()
+ if you need to specify multiple arguments of a test function.
:arg funcargs: argument keyword dictionary used when invoking
the test function.
:arg id: used for reporting and identification purposes. If you
- don't supply an `id` the length of the currently
- list of calls to the test function will be used.
+ don't supply an `id` an automatic unique id will be generated.
- :arg param: will be exposed to a later funcarg factory invocation
- through the ``request.param`` attribute. It allows to
- defer test fixture setup activities to when an actual
- test is run.
+ :arg param: a parameter which will be exposed to a later funcarg factory
+ invocation through the ``request.param`` attribute.
"""
assert funcargs is None or isinstance(funcargs, dict)
if funcargs is not None:
for name in funcargs:
if name not in self.funcargnames:
pytest.fail("funcarg %r not used in this function." % name)
+ else:
+ funcargs = {}
if id is None:
raise ValueError("id=None not allowed")
if id is _notexists:
@@ -561,11 +678,26 @@
if id in self._ids:
raise ValueError("duplicate id %r" % id)
self._ids.add(id)
- self._calls.append(CallSpec(funcargs, id, param))
+
+ cs = CallSpec2(self)
+ cs.setall(funcargs, id, param)
+ self._calls.append(cs)
+
+class IDMaker:
+ def __init__(self):
+ self.counter = 0
+ def __call__(self, valset):
+ l = []
+ for val in valset:
+ if not isinstance(val, (int, str)):
+ val = "."+str(self.counter)
+ self.counter += 1
+ l.append(str(val))
+ return "-".join(l)
class FuncargRequest:
""" A request for function arguments from a test function.
-
+
Note that there is an optional ``param`` attribute in case
there was an invocation to metafunc.addcall(param=...).
If no such call was done in a ``pytest_generate_tests``
@@ -637,7 +769,7 @@
def applymarker(self, marker):
- """ apply a marker to a single test function invocation.
+ """ Apply a marker to a single test function invocation.
This method is useful if you don't want to have a keyword/marker
on all function invocations.
@@ -649,7 +781,7 @@
self._pyfuncitem.keywords[marker.markname] = marker
def cached_setup(self, setup, teardown=None, scope="module", extrakey=None):
- """ return a testing resource managed by ``setup`` &
+ """ Return a testing resource managed by ``setup`` &
``teardown`` calls. ``scope`` and ``extrakey`` determine when the
``teardown`` function will be called so that subsequent calls to
``setup`` would recreate the resource.
@@ -698,11 +830,18 @@
self._raiselookupfailed(argname)
funcargfactory = self._name2factory[argname].pop()
oldarg = self._currentarg
- self._currentarg = argname
+ mp = monkeypatch()
+ mp.setattr(self, '_currentarg', argname)
+ try:
+ param = self._pyfuncitem.callspec.getparam(argname)
+ except (AttributeError, ValueError):
+ pass
+ else:
+ mp.setattr(self, 'param', param, raising=False)
try:
self._funcargs[argname] = res = funcargfactory(request=self)
finally:
- self._currentarg = oldarg
+ mp.undo()
return res
def _getscopeitem(self, scope):
@@ -817,8 +956,7 @@
>>> raises(ZeroDivisionError, f, x=0)
<ExceptionInfo ...>
- A third possibility is to use a string which which will
- be executed::
+ A third possibility is to use a string to be executed::
>>> raises(ZeroDivisionError, "f(0)")
<ExceptionInfo ...>
diff --git a/_pytest/resultlog.py b/_pytest/resultlog.py
--- a/_pytest/resultlog.py
+++ b/_pytest/resultlog.py
@@ -63,6 +63,8 @@
self.write_log_entry(testpath, lettercode, longrepr)
def pytest_runtest_logreport(self, report):
+ if report.when != "call" and report.passed:
+ return
res = self.config.hook.pytest_report_teststatus(report=report)
code = res[1]
if code == 'x':
@@ -89,5 +91,8 @@
self.log_outcome(report, code, longrepr)
def pytest_internalerror(self, excrepr):
- path = excrepr.reprcrash.path
+ reprcrash = getattr(excrepr, 'reprcrash', None)
+ path = getattr(reprcrash, "path", None)
+ if path is None:
+ path = "cwd:%s" % py.path.local()
self.write_log_entry(path, '!', str(excrepr))
diff --git a/_pytest/runner.py b/_pytest/runner.py
--- a/_pytest/runner.py
+++ b/_pytest/runner.py
@@ -1,6 +1,6 @@
""" basic collect and runtest protocol implementations """
-import py, sys
+import py, sys, time
from py._code.code import TerminalRepr
def pytest_namespace():
@@ -14,33 +14,60 @@
#
# pytest plugin hooks
+def pytest_addoption(parser):
+ group = parser.getgroup("terminal reporting", "reporting", after="general")
+ group.addoption('--durations',
+ action="store", type="int", default=None, metavar="N",
+ help="show N slowest setup/test durations (N=0 for all)."),
+
+def pytest_terminal_summary(terminalreporter):
+ durations = terminalreporter.config.option.durations
+ if durations is None:
+ return
+ tr = terminalreporter
+ dlist = []
+ for replist in tr.stats.values():
+ for rep in replist:
+ if hasattr(rep, 'duration'):
+ dlist.append(rep)
+ if not dlist:
+ return
+ dlist.sort(key=lambda x: x.duration)
+ dlist.reverse()
+ if not durations:
+ tr.write_sep("=", "slowest test durations")
+ else:
+ tr.write_sep("=", "slowest %s test durations" % durations)
+ dlist = dlist[:durations]
+
+ for rep in dlist:
+ nodeid = rep.nodeid.replace("::()::", "::")
+ tr.write_line("%02.2fs %-8s %s" %
+ (rep.duration, rep.when, nodeid))
+
def pytest_sessionstart(session):
session._setupstate = SetupState()
-
-def pytest_sessionfinish(session, exitstatus):
- hook = session.config.hook
- rep = hook.pytest__teardown_final(session=session)
- if rep:
- hook.pytest__teardown_final_logerror(session=session, report=rep)
- session.exitstatus = 1
+def pytest_sessionfinish(session):
+ session._setupstate.teardown_all()
class NodeInfo:
def __init__(self, location):
self.location = location
-def pytest_runtest_protocol(item):
+def pytest_runtest_protocol(item, nextitem):
item.ihook.pytest_runtest_logstart(
nodeid=item.nodeid, location=item.location,
)
- runtestprotocol(item)
+ runtestprotocol(item, nextitem=nextitem)
return True
-def runtestprotocol(item, log=True):
+def runtestprotocol(item, log=True, nextitem=None):
rep = call_and_report(item, "setup", log)
reports = [rep]
if rep.passed:
reports.append(call_and_report(item, "call", log))
- reports.append(call_and_report(item, "teardown", log))
+ reports.append(call_and_report(item, "teardown", log,
+ nextitem=nextitem))
return reports
def pytest_runtest_setup(item):
@@ -49,16 +76,8 @@
def pytest_runtest_call(item):
item.runtest()
-def pytest_runtest_teardown(item):
- item.session._setupstate.teardown_exact(item)
-
-def pytest__teardown_final(session):
- call = CallInfo(session._setupstate.teardown_all, when="teardown")
- if call.excinfo:
- ntraceback = call.excinfo.traceback .cut(excludepath=py._pydir)
- call.excinfo.traceback = ntraceback.filter()
- longrepr = call.excinfo.getrepr(funcargs=True)
- return TeardownErrorReport(longrepr)
+def pytest_runtest_teardown(item, nextitem):
+ item.session._setupstate.teardown_exact(item, nextitem)
def pytest_report_teststatus(report):
if report.when in ("setup", "teardown"):
@@ -74,18 +93,18 @@
#
# Implementation
-def call_and_report(item, when, log=True):
- call = call_runtest_hook(item, when)
+def call_and_report(item, when, log=True, **kwds):
+ call = call_runtest_hook(item, when, **kwds)
hook = item.ihook
report = hook.pytest_runtest_makereport(item=item, call=call)
- if log and (when == "call" or not report.passed):
+ if log:
hook.pytest_runtest_logreport(report=report)
return report
-def call_runtest_hook(item, when):
+def call_runtest_hook(item, when, **kwds):
hookname = "pytest_runtest_" + when
ihook = getattr(item.ihook, hookname)
- return CallInfo(lambda: ihook(item=item), when=when)
+ return CallInfo(lambda: ihook(item=item, **kwds), when=when)
class CallInfo:
""" Result/Exception info a function invocation. """
@@ -95,12 +114,16 @@
#: context of invocation: one of "setup", "call",
#: "teardown", "memocollect"
self.when = when
+ self.start = time.time()
try:
- self.result = func()
- except KeyboardInterrupt:
- raise
- except:
- self.excinfo = py.code.ExceptionInfo()
+ try:
+ self.result = func()
+ except KeyboardInterrupt:
+ raise
+ except:
+ self.excinfo = py.code.ExceptionInfo()
+ finally:
+ self.stop = time.time()
def __repr__(self):
if self.excinfo:
@@ -120,6 +143,10 @@
return s
class BaseReport(object):
+
+ def __init__(self, **kw):
+ self.__dict__.update(kw)
+
def toterminal(self, out):
longrepr = self.longrepr
if hasattr(self, 'node'):
@@ -139,6 +166,7 @@
def pytest_runtest_makereport(item, call):
when = call.when
+ duration = call.stop-call.start
keywords = dict([(x,1) for x in item.keywords])
excinfo = call.excinfo
if not call.excinfo:
@@ -160,14 +188,15 @@
else: # exception in setup or teardown
longrepr = item._repr_failure_py(excinfo)
return TestReport(item.nodeid, item.location,
- keywords, outcome, longrepr, when)
+ keywords, outcome, longrepr, when,
+ duration=duration)
class TestReport(BaseReport):
""" Basic test report object (also used for setup and teardown calls if
they fail).
"""
def __init__(self, nodeid, location,
- keywords, outcome, longrepr, when):
+ keywords, outcome, longrepr, when, sections=(), duration=0, **extra):
#: normalized collection node id
self.nodeid = nodeid
@@ -179,16 +208,25 @@
#: a name -> value dictionary containing all keywords and
#: markers associated with a test invocation.
self.keywords = keywords
-
+
#: test outcome, always one of "passed", "failed", "skipped".
self.outcome = outcome
#: None or a failure representation.
self.longrepr = longrepr
-
+
#: one of 'setup', 'call', 'teardown' to indicate runtest phase.
self.when = when
+ #: list of (secname, data) extra information which needs to
+ #: marshallable
+ self.sections = list(sections)
+
+ #: time it took to run just the test
+ self.duration = duration
+
+ self.__dict__.update(extra)
+
def __repr__(self):
return "<TestReport %r when=%r outcome=%r>" % (
self.nodeid, self.when, self.outcome)
@@ -196,8 +234,10 @@
class TeardownErrorReport(BaseReport):
outcome = "failed"
when = "teardown"
- def __init__(self, longrepr):
+ def __init__(self, longrepr, **extra):
self.longrepr = longrepr
+ self.sections = []
+ self.__dict__.update(extra)
def pytest_make_collect_report(collector):
call = CallInfo(collector._memocollect, "memocollect")
@@ -219,11 +259,13 @@
getattr(call, 'result', None))
class CollectReport(BaseReport):
- def __init__(self, nodeid, outcome, longrepr, result):
+ def __init__(self, nodeid, outcome, longrepr, result, sections=(), **extra):
self.nodeid = nodeid
self.outcome = outcome
self.longrepr = longrepr
self.result = result or []
+ self.sections = list(sections)
+ self.__dict__.update(extra)
@property
def location(self):
@@ -277,20 +319,22 @@
self._teardown_with_finalization(None)
assert not self._finalizers
- def teardown_exact(self, item):
- if self.stack and item == self.stack[-1]:
+ def teardown_exact(self, item, nextitem):
+ needed_collectors = nextitem and nextitem.listchain() or []
+ self._teardown_towards(needed_collectors)
+
+ def _teardown_towards(self, needed_collectors):
+ while self.stack:
+ if self.stack == needed_collectors[:len(self.stack)]:
+ break
self._pop_and_teardown()
- else:
- self._callfinalizers(item)
def prepare(self, colitem):
""" setup objects along the collector chain to the test-method
and teardown previously setup objects."""
needed_collectors = colitem.listchain()
- while self.stack:
- if self.stack == needed_collectors[:len(self.stack)]:
- break
- self._pop_and_teardown()
+ self._teardown_towards(needed_collectors)
+
# check if the last collection node has raised an error
for col in self.stack:
if hasattr(col, '_prepare_exc'):
diff --git a/_pytest/skipping.py b/_pytest/skipping.py
--- a/_pytest/skipping.py
+++ b/_pytest/skipping.py
@@ -9,6 +9,21 @@
action="store_true", dest="runxfail", default=False,
help="run tests even if they are marked xfail")
+def pytest_configure(config):
+ config.addinivalue_line("markers",
+ "skipif(*conditions): skip the given test function if evaluation "
+ "of all conditions has a True value. Evaluation happens within the "
+ "module global context. Example: skipif('sys.platform == \"win32\"') "
+ "skips the test if we are on the win32 platform. "
+ )
+ config.addinivalue_line("markers",
+ "xfail(*conditions, reason=None, run=True): mark the the test function "
+ "as an expected failure. Optionally specify a reason and run=False "
+ "if you don't even want to execute the test function. Any positional "
+ "condition strings will be evaluated (like with skipif) and if one is "
+ "False the marker will not be applied."
+ )
+
def pytest_namespace():
return dict(xfail=xfail)
@@ -117,6 +132,14 @@
def pytest_runtest_makereport(__multicall__, item, call):
if not isinstance(item, pytest.Function):
return
+ # unitttest special case, see setting of _unexpectedsuccess
+ if hasattr(item, '_unexpectedsuccess'):
+ rep = __multicall__.execute()
+ if rep.when == "call":
+ # we need to translate into how py.test encodes xpass
+ rep.keywords['xfail'] = "reason: " + item._unexpectedsuccess
+ rep.outcome = "failed"
+ return rep
if not (call.excinfo and
call.excinfo.errisinstance(py.test.xfail.Exception)):
evalxfail = getattr(item, '_evalxfail', None)
@@ -169,21 +192,23 @@
elif char == "X":
show_xpassed(terminalreporter, lines)
elif char in "fF":
- show_failed(terminalreporter, lines)
+ show_simple(terminalreporter, lines, 'failed', "FAIL %s")
elif char in "sS":
show_skipped(terminalreporter, lines)
+ elif char == "E":
+ show_simple(terminalreporter, lines, 'error', "ERROR %s")
if lines:
tr._tw.sep("=", "short test summary info")
for line in lines:
tr._tw.line(line)
-def show_failed(terminalreporter, lines):
+def show_simple(terminalreporter, lines, stat, format):
tw = terminalreporter._tw
- failed = terminalreporter.stats.get("failed")
+ failed = terminalreporter.stats.get(stat)
if failed:
for rep in failed:
pos = rep.nodeid
- lines.append("FAIL %s" %(pos, ))
+ lines.append(format %(pos, ))
def show_xfailed(terminalreporter, lines):
xfailed = terminalreporter.stats.get("xfailed")
diff --git a/_pytest/terminal.py b/_pytest/terminal.py
--- a/_pytest/terminal.py
+++ b/_pytest/terminal.py
@@ -15,7 +15,7 @@
group._addoption('-r',
action="store", dest="reportchars", default=None, metavar="chars",
help="show extra test summary info as specified by chars (f)ailed, "
- "(s)skipped, (x)failed, (X)passed.")
+ "(E)error, (s)skipped, (x)failed, (X)passed.")
group._addoption('-l', '--showlocals',
action="store_true", dest="showlocals", default=False,
help="show locals in tracebacks (disabled by default).")
@@ -43,7 +43,8 @@
pass
else:
stdout = os.fdopen(newfd, stdout.mode, 1)
- config._toclose = stdout
+ config._cleanup.append(lambda: stdout.close())
+
reporter = TerminalReporter(config, stdout)
config.pluginmanager.register(reporter, 'terminalreporter')
if config.option.debug or config.option.traceconfig:
@@ -52,11 +53,6 @@
reporter.write_line("[traceconfig] " + msg)
config.trace.root.setprocessor("pytest:config", mywriter)
-def pytest_unconfigure(config):
- if hasattr(config, '_toclose'):
- #print "closing", config._toclose, config._toclose.fileno()
- config._toclose.close()
-
def getreportopt(config):
reportopts = ""
optvalue = config.option.report
@@ -165,9 +161,6 @@
def pytest_deselected(self, items):
self.stats.setdefault('deselected', []).extend(items)
- def pytest__teardown_final_logerror(self, report):
- self.stats.setdefault("error", []).append(report)
-
def pytest_runtest_logstart(self, nodeid, location):
# ensure that the path is printed before the
# 1st test of a module starts running
@@ -259,7 +252,7 @@
msg = "platform %s -- Python %s" % (sys.platform, verinfo)
if hasattr(sys, 'pypy_version_info'):
verinfo = ".".join(map(str, sys.pypy_version_info[:3]))
- msg += "[pypy-%s]" % verinfo
+ msg += "[pypy-%s-%s]" % (verinfo, sys.pypy_version_info[3])
msg += " -- pytest-%s" % (py.test.__version__)
if self.verbosity > 0 or self.config.option.debug or \
getattr(self.config.option, 'pastebin', None):
@@ -289,10 +282,18 @@
# we take care to leave out Instances aka ()
# because later versions are going to get rid of them anyway
if self.config.option.verbose < 0:
- for item in items:
- nodeid = item.nodeid
- nodeid = nodeid.replace("::()::", "::")
- self._tw.line(nodeid)
+ if self.config.option.verbose < -1:
+ counts = {}
+ for item in items:
+ name = item.nodeid.split('::', 1)[0]
+ counts[name] = counts.get(name, 0) + 1
+ for name, count in sorted(counts.items()):
+ self._tw.line("%s: %d" % (name, count))
+ else:
+ for item in items:
+ nodeid = item.nodeid
+ nodeid = nodeid.replace("::()::", "::")
+ self._tw.line(nodeid)
return
stack = []
indent = ""
@@ -318,12 +319,17 @@
self.config.hook.pytest_terminal_summary(terminalreporter=self)
if exitstatus == 2:
self._report_keyboardinterrupt()
+ del self._keyboardinterrupt_memo
self.summary_deselected()
self.summary_stats()
def pytest_keyboard_interrupt(self, excinfo):
self._keyboardinterrupt_memo = excinfo.getrepr(funcargs=True)
+ def pytest_unconfigure(self):
+ if hasattr(self, '_keyboardinterrupt_memo'):
+ self._report_keyboardinterrupt()
+
def _report_keyboardinterrupt(self):
excrepr = self._keyboardinterrupt_memo
msg = excrepr.reprcrash.message
@@ -388,7 +394,7 @@
else:
msg = self._getfailureheadline(rep)
self.write_sep("_", msg)
- rep.toterminal(self._tw)
+ self._outrep_summary(rep)
def summary_errors(self):
if self.config.option.tbstyle != "no":
@@ -406,7 +412,15 @@
elif rep.when == "teardown":
msg = "ERROR at teardown of " + msg
self.write_sep("_", msg)
- rep.toterminal(self._tw)
+ self._outrep_summary(rep)
+
+ def _outrep_summary(self, rep):
+ rep.toterminal(self._tw)
+ for secname, content in rep.sections:
+ self._tw.sep("-", secname)
+ if content[-1:] == "\n":
+ content = content[:-1]
+ self._tw.line(content)
def summary_stats(self):
session_duration = py.std.time.time() - self._sessionstarttime
@@ -417,9 +431,10 @@
keys.append(key)
parts = []
for key in keys:
- val = self.stats.get(key, None)
- if val:
- parts.append("%d %s" %(len(val), key))
+ if key: # setup/teardown reports have an empty key, ignore them
+ val = self.stats.get(key, None)
+ if val:
+ parts.append("%d %s" %(len(val), key))
line = ", ".join(parts)
# XXX coloring
msg = "%s in %.2f seconds" %(line, session_duration)
@@ -430,8 +445,15 @@
def summary_deselected(self):
if 'deselected' in self.stats:
+ l = []
+ k = self.config.option.keyword
+ if k:
+ l.append("-k%s" % k)
+ m = self.config.option.markexpr
+ if m:
+ l.append("-m %r" % m)
self.write_sep("=", "%d tests deselected by %r" %(
- len(self.stats['deselected']), self.config.option.keyword), bold=True)
+ len(self.stats['deselected']), " ".join(l)), bold=True)
def repr_pythonversion(v=None):
if v is None:
diff --git a/_pytest/tmpdir.py b/_pytest/tmpdir.py
--- a/_pytest/tmpdir.py
+++ b/_pytest/tmpdir.py
@@ -46,7 +46,7 @@
def finish(self):
self.trace("finish")
-
+
def pytest_configure(config):
mp = monkeypatch()
t = TempdirHandler(config)
@@ -64,5 +64,5 @@
name = request._pyfuncitem.name
name = py.std.re.sub("[\W]", "_", name)
x = request.config._tmpdirhandler.mktemp(name, numbered=True)
- return x.realpath()
+ return x
diff --git a/_pytest/unittest.py b/_pytest/unittest.py
--- a/_pytest/unittest.py
+++ b/_pytest/unittest.py
@@ -2,6 +2,9 @@
import pytest, py
import sys, pdb
+# for transfering markers
+from _pytest.python import transfer_markers
+
def pytest_pycollect_makeitem(collector, name, obj):
unittest = sys.modules.get('unittest')
if unittest is None:
@@ -19,7 +22,14 @@
class UnitTestCase(pytest.Class):
def collect(self):
loader = py.std.unittest.TestLoader()
+ module = self.getparent(pytest.Module).obj
+ cls = self.obj
for name in loader.getTestCaseNames(self.obj):
+ x = getattr(self.obj, name)
+ funcobj = getattr(x, 'im_func', x)
+ transfer_markers(funcobj, cls, module)
+ if hasattr(funcobj, 'todo'):
+ pytest.mark.xfail(reason=str(funcobj.todo))(funcobj)
yield TestCaseFunction(name, parent=self)
def setup(self):
@@ -37,15 +47,13 @@
class TestCaseFunction(pytest.Function):
_excinfo = None
- def __init__(self, name, parent):
- super(TestCaseFunction, self).__init__(name, parent)
- if hasattr(self._obj, 'todo'):
- getattr(self._obj, 'im_func', self._obj).xfail = \
- pytest.mark.xfail(reason=str(self._obj.todo))
-
def setup(self):
self._testcase = self.parent.obj(self.name)
self._obj = getattr(self._testcase, self.name)
+ if hasattr(self._testcase, 'skip'):
+ pytest.skip(self._testcase.skip)
+ if hasattr(self._obj, 'skip'):
+ pytest.skip(self._obj.skip)
if hasattr(self._testcase, 'setup_method'):
self._testcase.setup_method(self._obj)
@@ -83,28 +91,37 @@
self._addexcinfo(rawexcinfo)
def addFailure(self, testcase, rawexcinfo):
self._addexcinfo(rawexcinfo)
+
def addSkip(self, testcase, reason):
try:
pytest.skip(reason)
except pytest.skip.Exception:
self._addexcinfo(sys.exc_info())
- def addExpectedFailure(self, testcase, rawexcinfo, reason):
+
+ def addExpectedFailure(self, testcase, rawexcinfo, reason=""):
try:
pytest.xfail(str(reason))
except pytest.xfail.Exception:
self._addexcinfo(sys.exc_info())
- def addUnexpectedSuccess(self, testcase, reason):
- pass
+
+ def addUnexpectedSuccess(self, testcase, reason=""):
+ self._unexpectedsuccess = reason
+
def addSuccess(self, testcase):
pass
+
def stopTest(self, testcase):
pass
+
def runtest(self):
self._testcase(result=self)
def _prunetraceback(self, excinfo):
pytest.Function._prunetraceback(self, excinfo)
- excinfo.traceback = excinfo.traceback.filter(lambda x:not x.frame.f_globals.get('__unittest'))
+ traceback = excinfo.traceback.filter(
+ lambda x:not x.frame.f_globals.get('__unittest'))
+ if traceback:
+ excinfo.traceback = traceback
@pytest.mark.tryfirst
def pytest_runtest_makereport(item, call):
@@ -120,14 +137,19 @@
ut = sys.modules['twisted.python.failure']
Failure__init__ = ut.Failure.__init__.im_func
check_testcase_implements_trial_reporter()
- def excstore(self, exc_value=None, exc_type=None, exc_tb=None):
+ def excstore(self, exc_value=None, exc_type=None, exc_tb=None,
+ captureVars=None):
if exc_value is None:
self._rawexcinfo = sys.exc_info()
else:
if exc_type is None:
exc_type = type(exc_value)
self._rawexcinfo = (exc_type, exc_value, exc_tb)
- Failure__init__(self, exc_value, exc_type, exc_tb)
+ try:
+ Failure__init__(self, exc_value, exc_type, exc_tb,
+ captureVars=captureVars)
+ except TypeError:
+ Failure__init__(self, exc_value, exc_type, exc_tb)
ut.Failure.__init__ = excstore
try:
return __multicall__.execute()
diff --git a/dotviewer/graphparse.py b/dotviewer/graphparse.py
--- a/dotviewer/graphparse.py
+++ b/dotviewer/graphparse.py
@@ -93,6 +93,7 @@
return result
def parse_plain(graph_id, plaincontent, links={}, fixedfont=False):
+ plaincontent = plaincontent.replace('\r\n', '\n') # fix Windows EOL
lines = plaincontent.splitlines(True)
for i in range(len(lines)-2, -1, -1):
if lines[i].endswith('\\\n'): # line ending in '\'
diff --git a/lib_pypy/datetime.py b/lib_pypy/datetime.py
--- a/lib_pypy/datetime.py
+++ b/lib_pypy/datetime.py
@@ -968,8 +968,7 @@
self._checkOverflow(t.year)
result = date(t.year, t.month, t.day)
return result
- raise TypeError
- # XXX Should be 'return NotImplemented', but there's a bug in 2.2...
+ return NotImplemented # note that this doesn't work on CPython 2.2
__radd__ = __add__
diff --git a/py/__init__.py b/py/__init__.py
--- a/py/__init__.py
+++ b/py/__init__.py
@@ -8,7 +8,7 @@
(c) Holger Krekel and others, 2004-2010
"""
-__version__ = '1.4.4.dev1'
+__version__ = '1.4.7'
from py import _apipkg
@@ -70,6 +70,11 @@
'getrawcode' : '._code.code:getrawcode',
'patch_builtins' : '._code.code:patch_builtins',
'unpatch_builtins' : '._code.code:unpatch_builtins',
+ '_AssertionError' : '._code.assertion:AssertionError',
+ '_reinterpret_old' : '._code.assertion:reinterpret_old',
+ '_reinterpret' : '._code.assertion:reinterpret',
+ '_reprcompare' : '._code.assertion:_reprcompare',
+ '_format_explanation' : '._code.assertion:_format_explanation',
},
# backports and additions of builtins
diff --git a/py/_builtin.py b/py/_builtin.py
--- a/py/_builtin.py
+++ b/py/_builtin.py
@@ -113,9 +113,12 @@
# some backward compatibility helpers
_basestring = str
- def _totext(obj, encoding=None):
+ def _totext(obj, encoding=None, errors=None):
if isinstance(obj, bytes):
- obj = obj.decode(encoding)
+ if errors is None:
+ obj = obj.decode(encoding)
+ else:
+ obj = obj.decode(encoding, errors)
elif not isinstance(obj, str):
obj = str(obj)
return obj
@@ -142,7 +145,7 @@
del back
elif locs is None:
locs = globs
- fp = open(fn, "rb")
+ fp = open(fn, "r")
try:
source = fp.read()
finally:
diff --git a/py/_code/_assertionnew.py b/py/_code/_assertionnew.py
new file mode 100644
--- /dev/null
+++ b/py/_code/_assertionnew.py
@@ -0,0 +1,339 @@
+"""
+Find intermediate evaluation results in assert statements through builtin AST.
+This should replace _assertionold.py eventually.
+"""
+
+import sys
+import ast
+
+import py
+from py._code.assertion import _format_explanation, BuiltinAssertionError
+
+
+if sys.platform.startswith("java") and sys.version_info < (2, 5, 2):
+ # See http://bugs.jython.org/issue1497
+ _exprs = ("BoolOp", "BinOp", "UnaryOp", "Lambda", "IfExp", "Dict",
+ "ListComp", "GeneratorExp", "Yield", "Compare", "Call",
+ "Repr", "Num", "Str", "Attribute", "Subscript", "Name",
+ "List", "Tuple")
+ _stmts = ("FunctionDef", "ClassDef", "Return", "Delete", "Assign",
+ "AugAssign", "Print", "For", "While", "If", "With", "Raise",
+ "TryExcept", "TryFinally", "Assert", "Import", "ImportFrom",
+ "Exec", "Global", "Expr", "Pass", "Break", "Continue")
+ _expr_nodes = set(getattr(ast, name) for name in _exprs)
+ _stmt_nodes = set(getattr(ast, name) for name in _stmts)
+ def _is_ast_expr(node):
+ return node.__class__ in _expr_nodes
+ def _is_ast_stmt(node):
+ return node.__class__ in _stmt_nodes
+else:
+ def _is_ast_expr(node):
+ return isinstance(node, ast.expr)
+ def _is_ast_stmt(node):
+ return isinstance(node, ast.stmt)
+
+
+class Failure(Exception):
+ """Error found while interpreting AST."""
+
+ def __init__(self, explanation=""):
+ self.cause = sys.exc_info()
+ self.explanation = explanation
+
+
+def interpret(source, frame, should_fail=False):
+ mod = ast.parse(source)
+ visitor = DebugInterpreter(frame)
+ try:
+ visitor.visit(mod)
+ except Failure:
+ failure = sys.exc_info()[1]
+ return getfailure(failure)
+ if should_fail:
+ return ("(assertion failed, but when it was re-run for "
+ "printing intermediate values, it did not fail. Suggestions: "
+ "compute assert expression before the assert or use --no-assert)")
+
+def run(offending_line, frame=None):
+ if frame is None:
+ frame = py.code.Frame(sys._getframe(1))
+ return interpret(offending_line, frame)
+
+def getfailure(failure):
+ explanation = _format_explanation(failure.explanation)
+ value = failure.cause[1]
+ if str(value):
+ lines = explanation.splitlines()
+ if not lines:
+ lines.append("")
+ lines[0] += " << %s" % (value,)
+ explanation = "\n".join(lines)
+ text = "%s: %s" % (failure.cause[0].__name__, explanation)
+ if text.startswith("AssertionError: assert "):
+ text = text[16:]
+ return text
+
+
+operator_map = {
+ ast.BitOr : "|",
+ ast.BitXor : "^",
+ ast.BitAnd : "&",
+ ast.LShift : "<<",
+ ast.RShift : ">>",
+ ast.Add : "+",
+ ast.Sub : "-",
+ ast.Mult : "*",
+ ast.Div : "/",
+ ast.FloorDiv : "//",
+ ast.Mod : "%",
+ ast.Eq : "==",
+ ast.NotEq : "!=",
+ ast.Lt : "<",
+ ast.LtE : "<=",
+ ast.Gt : ">",
+ ast.GtE : ">=",
+ ast.Pow : "**",
+ ast.Is : "is",
+ ast.IsNot : "is not",
+ ast.In : "in",
+ ast.NotIn : "not in"
+}
+
+unary_map = {
+ ast.Not : "not %s",
+ ast.Invert : "~%s",
+ ast.USub : "-%s",
+ ast.UAdd : "+%s"
+}
+
+
+class DebugInterpreter(ast.NodeVisitor):
+ """Interpret AST nodes to glean useful debugging information. """
+
+ def __init__(self, frame):
+ self.frame = frame
+
+ def generic_visit(self, node):
+ # Fallback when we don't have a special implementation.
+ if _is_ast_expr(node):
+ mod = ast.Expression(node)
+ co = self._compile(mod)
+ try:
+ result = self.frame.eval(co)
+ except Exception:
+ raise Failure()
+ explanation = self.frame.repr(result)
+ return explanation, result
+ elif _is_ast_stmt(node):
+ mod = ast.Module([node])
+ co = self._compile(mod, "exec")
+ try:
+ self.frame.exec_(co)
+ except Exception:
+ raise Failure()
+ return None, None
+ else:
+ raise AssertionError("can't handle %s" %(node,))
+
+ def _compile(self, source, mode="eval"):
+ return compile(source, "<assertion interpretation>", mode)
+
+ def visit_Expr(self, expr):
+ return self.visit(expr.value)
+
+ def visit_Module(self, mod):
+ for stmt in mod.body:
+ self.visit(stmt)
+
+ def visit_Name(self, name):
+ explanation, result = self.generic_visit(name)
+ # See if the name is local.
+ source = "%r in locals() is not globals()" % (name.id,)
+ co = self._compile(source)
+ try:
+ local = self.frame.eval(co)
+ except Exception:
+ # have to assume it isn't
+ local = False
+ if not local:
+ return name.id, result
+ return explanation, result
+
+ def visit_Compare(self, comp):
+ left = comp.left
+ left_explanation, left_result = self.visit(left)
+ for op, next_op in zip(comp.ops, comp.comparators):
+ next_explanation, next_result = self.visit(next_op)
+ op_symbol = operator_map[op.__class__]
+ explanation = "%s %s %s" % (left_explanation, op_symbol,
+ next_explanation)
+ source = "__exprinfo_left %s __exprinfo_right" % (op_symbol,)
+ co = self._compile(source)
+ try:
+ result = self.frame.eval(co, __exprinfo_left=left_result,
+ __exprinfo_right=next_result)
+ except Exception:
+ raise Failure(explanation)
+ try:
+ if not result:
+ break
+ except KeyboardInterrupt:
+ raise
+ except:
+ break
+ left_explanation, left_result = next_explanation, next_result
+
+ rcomp = py.code._reprcompare
+ if rcomp:
+ res = rcomp(op_symbol, left_result, next_result)
+ if res:
+ explanation = res
+ return explanation, result
+
+ def visit_BoolOp(self, boolop):
+ is_or = isinstance(boolop.op, ast.Or)
+ explanations = []
+ for operand in boolop.values:
+ explanation, result = self.visit(operand)
+ explanations.append(explanation)
+ if result == is_or:
+ break
+ name = is_or and " or " or " and "
+ explanation = "(" + name.join(explanations) + ")"
+ return explanation, result
+
+ def visit_UnaryOp(self, unary):
+ pattern = unary_map[unary.op.__class__]
+ operand_explanation, operand_result = self.visit(unary.operand)
+ explanation = pattern % (operand_explanation,)
+ co = self._compile(pattern % ("__exprinfo_expr",))
+ try:
+ result = self.frame.eval(co, __exprinfo_expr=operand_result)
+ except Exception:
+ raise Failure(explanation)
+ return explanation, result
+
+ def visit_BinOp(self, binop):
+ left_explanation, left_result = self.visit(binop.left)
+ right_explanation, right_result = self.visit(binop.right)
+ symbol = operator_map[binop.op.__class__]
+ explanation = "(%s %s %s)" % (left_explanation, symbol,
+ right_explanation)
+ source = "__exprinfo_left %s __exprinfo_right" % (symbol,)
+ co = self._compile(source)
+ try:
+ result = self.frame.eval(co, __exprinfo_left=left_result,
+ __exprinfo_right=right_result)
+ except Exception:
+ raise Failure(explanation)
+ return explanation, result
+
+ def visit_Call(self, call):
+ func_explanation, func = self.visit(call.func)
+ arg_explanations = []
+ ns = {"__exprinfo_func" : func}
+ arguments = []
+ for arg in call.args:
+ arg_explanation, arg_result = self.visit(arg)
+ arg_name = "__exprinfo_%s" % (len(ns),)
+ ns[arg_name] = arg_result
+ arguments.append(arg_name)
+ arg_explanations.append(arg_explanation)
+ for keyword in call.keywords:
+ arg_explanation, arg_result = self.visit(keyword.value)
+ arg_name = "__exprinfo_%s" % (len(ns),)
+ ns[arg_name] = arg_result
+ keyword_source = "%s=%%s" % (keyword.arg)
+ arguments.append(keyword_source % (arg_name,))
+ arg_explanations.append(keyword_source % (arg_explanation,))
+ if call.starargs:
+ arg_explanation, arg_result = self.visit(call.starargs)
+ arg_name = "__exprinfo_star"
+ ns[arg_name] = arg_result
+ arguments.append("*%s" % (arg_name,))
+ arg_explanations.append("*%s" % (arg_explanation,))
+ if call.kwargs:
+ arg_explanation, arg_result = self.visit(call.kwargs)
+ arg_name = "__exprinfo_kwds"
+ ns[arg_name] = arg_result
+ arguments.append("**%s" % (arg_name,))
+ arg_explanations.append("**%s" % (arg_explanation,))
+ args_explained = ", ".join(arg_explanations)
+ explanation = "%s(%s)" % (func_explanation, args_explained)
+ args = ", ".join(arguments)
+ source = "__exprinfo_func(%s)" % (args,)
+ co = self._compile(source)
+ try:
+ result = self.frame.eval(co, **ns)
+ except Exception:
+ raise Failure(explanation)
+ pattern = "%s\n{%s = %s\n}"
+ rep = self.frame.repr(result)
+ explanation = pattern % (rep, rep, explanation)
+ return explanation, result
+
+ def _is_builtin_name(self, name):
+ pattern = "%r not in globals() and %r not in locals()"
+ source = pattern % (name.id, name.id)
+ co = self._compile(source)
+ try:
+ return self.frame.eval(co)
+ except Exception:
+ return False
+
+ def visit_Attribute(self, attr):
+ if not isinstance(attr.ctx, ast.Load):
+ return self.generic_visit(attr)
+ source_explanation, source_result = self.visit(attr.value)
+ explanation = "%s.%s" % (source_explanation, attr.attr)
+ source = "__exprinfo_expr.%s" % (attr.attr,)
+ co = self._compile(source)
+ try:
+ result = self.frame.eval(co, __exprinfo_expr=source_result)
+ except Exception:
+ raise Failure(explanation)
+ explanation = "%s\n{%s = %s.%s\n}" % (self.frame.repr(result),
+ self.frame.repr(result),
+ source_explanation, attr.attr)
+ # Check if the attr is from an instance.
+ source = "%r in getattr(__exprinfo_expr, '__dict__', {})"
+ source = source % (attr.attr,)
+ co = self._compile(source)
+ try:
+ from_instance = self.frame.eval(co, __exprinfo_expr=source_result)
+ except Exception:
+ from_instance = True
+ if from_instance:
+ rep = self.frame.repr(result)
+ pattern = "%s\n{%s = %s\n}"
+ explanation = pattern % (rep, rep, explanation)
+ return explanation, result
+
+ def visit_Assert(self, assrt):
+ test_explanation, test_result = self.visit(assrt.test)
+ if test_explanation.startswith("False\n{False =") and \
+ test_explanation.endswith("\n"):
+ test_explanation = test_explanation[15:-2]
+ explanation = "assert %s" % (test_explanation,)
+ if not test_result:
+ try:
+ raise BuiltinAssertionError
+ except Exception:
+ raise Failure(explanation)
+ return explanation, test_result
+
+ def visit_Assign(self, assign):
+ value_explanation, value_result = self.visit(assign.value)
+ explanation = "... = %s" % (value_explanation,)
+ name = ast.Name("__exprinfo_expr", ast.Load(),
+ lineno=assign.value.lineno,
+ col_offset=assign.value.col_offset)
+ new_assign = ast.Assign(assign.targets, name, lineno=assign.lineno,
+ col_offset=assign.col_offset)
+ mod = ast.Module([new_assign])
+ co = self._compile(mod, "exec")
+ try:
+ self.frame.exec_(co, __exprinfo_expr=value_result)
+ except Exception:
+ raise Failure(explanation)
+ return explanation, value_result
diff --git a/py/_code/_assertionold.py b/py/_code/_assertionold.py
new file mode 100644
--- /dev/null
+++ b/py/_code/_assertionold.py
@@ -0,0 +1,555 @@
+import py
+import sys, inspect
+from compiler import parse, ast, pycodegen
+from py._code.assertion import BuiltinAssertionError, _format_explanation
+
+passthroughex = py.builtin._sysex
+
+class Failure:
+ def __init__(self, node):
+ self.exc, self.value, self.tb = sys.exc_info()
+ self.node = node
+
+class View(object):
+ """View base class.
+
+ If C is a subclass of View, then C(x) creates a proxy object around
+ the object x. The actual class of the proxy is not C in general,
+ but a *subclass* of C determined by the rules below. To avoid confusion
+ we call view class the class of the proxy (a subclass of C, so of View)
+ and object class the class of x.
+
+ Attributes and methods not found in the proxy are automatically read on x.
+ Other operations like setting attributes are performed on the proxy, as
+ determined by its view class. The object x is available from the proxy
+ as its __obj__ attribute.
+
+ The view class selection is determined by the __view__ tuples and the
+ optional __viewkey__ method. By default, the selected view class is the
+ most specific subclass of C whose __view__ mentions the class of x.
+ If no such subclass is found, the search proceeds with the parent
+ object classes. For example, C(True) will first look for a subclass
+ of C with __view__ = (..., bool, ...) and only if it doesn't find any
+ look for one with __view__ = (..., int, ...), and then ..., object,...
+ If everything fails the class C itself is considered to be the default.
+
+ Alternatively, the view class selection can be driven by another aspect
+ of the object x, instead of the class of x, by overriding __viewkey__.
+ See last example at the end of this module.
+ """
+
+ _viewcache = {}
+ __view__ = ()
+
+ def __new__(rootclass, obj, *args, **kwds):
+ self = object.__new__(rootclass)
+ self.__obj__ = obj
+ self.__rootclass__ = rootclass
+ key = self.__viewkey__()
+ try:
+ self.__class__ = self._viewcache[key]
+ except KeyError:
+ self.__class__ = self._selectsubclass(key)
+ return self
+
+ def __getattr__(self, attr):
+ # attributes not found in the normal hierarchy rooted on View
+ # are looked up in the object's real class
+ return getattr(self.__obj__, attr)
+
+ def __viewkey__(self):
+ return self.__obj__.__class__
+
+ def __matchkey__(self, key, subclasses):
+ if inspect.isclass(key):
+ keys = inspect.getmro(key)
+ else:
+ keys = [key]
+ for key in keys:
+ result = [C for C in subclasses if key in C.__view__]
+ if result:
+ return result
+ return []
+
+ def _selectsubclass(self, key):
+ subclasses = list(enumsubclasses(self.__rootclass__))
+ for C in subclasses:
+ if not isinstance(C.__view__, tuple):
+ C.__view__ = (C.__view__,)
+ choices = self.__matchkey__(key, subclasses)
+ if not choices:
+ return self.__rootclass__
+ elif len(choices) == 1:
+ return choices[0]
+ else:
+ # combine the multiple choices
+ return type('?', tuple(choices), {})
+
+ def __repr__(self):
+ return '%s(%r)' % (self.__rootclass__.__name__, self.__obj__)
+
+
+def enumsubclasses(cls):
+ for subcls in cls.__subclasses__():
+ for subsubclass in enumsubclasses(subcls):
+ yield subsubclass
+ yield cls
+
+
+class Interpretable(View):
+ """A parse tree node with a few extra methods."""
+ explanation = None
+
+ def is_builtin(self, frame):
+ return False
+
+ def eval(self, frame):
+ # fall-back for unknown expression nodes
+ try:
+ expr = ast.Expression(self.__obj__)
+ expr.filename = '<eval>'
+ self.__obj__.filename = '<eval>'
+ co = pycodegen.ExpressionCodeGenerator(expr).getCode()
+ result = frame.eval(co)
+ except passthroughex:
+ raise
+ except:
+ raise Failure(self)
+ self.result = result
+ self.explanation = self.explanation or frame.repr(self.result)
+
+ def run(self, frame):
+ # fall-back for unknown statement nodes
+ try:
+ expr = ast.Module(None, ast.Stmt([self.__obj__]))
+ expr.filename = '<run>'
+ co = pycodegen.ModuleCodeGenerator(expr).getCode()
+ frame.exec_(co)
+ except passthroughex:
+ raise
+ except:
+ raise Failure(self)
+
+ def nice_explanation(self):
+ return _format_explanation(self.explanation)
+
+
+class Name(Interpretable):
+ __view__ = ast.Name
+
+ def is_local(self, frame):
+ source = '%r in locals() is not globals()' % self.name
+ try:
+ return frame.is_true(frame.eval(source))
+ except passthroughex:
+ raise
+ except:
+ return False
+
+ def is_global(self, frame):
+ source = '%r in globals()' % self.name
+ try:
+ return frame.is_true(frame.eval(source))
+ except passthroughex:
+ raise
+ except:
+ return False
+
+ def is_builtin(self, frame):
+ source = '%r not in locals() and %r not in globals()' % (
+ self.name, self.name)
+ try:
+ return frame.is_true(frame.eval(source))
+ except passthroughex:
+ raise
+ except:
+ return False
+
+ def eval(self, frame):
+ super(Name, self).eval(frame)
+ if not self.is_local(frame):
+ self.explanation = self.name
+
+class Compare(Interpretable):
+ __view__ = ast.Compare
+
+ def eval(self, frame):
+ expr = Interpretable(self.expr)
+ expr.eval(frame)
+ for operation, expr2 in self.ops:
+ if hasattr(self, 'result'):
+ # shortcutting in chained expressions
+ if not frame.is_true(self.result):
+ break
+ expr2 = Interpretable(expr2)
+ expr2.eval(frame)
+ self.explanation = "%s %s %s" % (
+ expr.explanation, operation, expr2.explanation)
+ source = "__exprinfo_left %s __exprinfo_right" % operation
+ try:
+ self.result = frame.eval(source,
+ __exprinfo_left=expr.result,
+ __exprinfo_right=expr2.result)
+ except passthroughex:
+ raise
+ except:
+ raise Failure(self)
+ expr = expr2
+
+class And(Interpretable):
+ __view__ = ast.And
+
+ def eval(self, frame):
+ explanations = []
+ for expr in self.nodes:
+ expr = Interpretable(expr)
+ expr.eval(frame)
+ explanations.append(expr.explanation)
+ self.result = expr.result
+ if not frame.is_true(expr.result):
+ break
+ self.explanation = '(' + ' and '.join(explanations) + ')'
+
+class Or(Interpretable):
+ __view__ = ast.Or
+
+ def eval(self, frame):
+ explanations = []
+ for expr in self.nodes:
+ expr = Interpretable(expr)
+ expr.eval(frame)
+ explanations.append(expr.explanation)
+ self.result = expr.result
+ if frame.is_true(expr.result):
+ break
+ self.explanation = '(' + ' or '.join(explanations) + ')'
+
+
+# == Unary operations ==
+keepalive = []
+for astclass, astpattern in {
+ ast.Not : 'not __exprinfo_expr',
+ ast.Invert : '(~__exprinfo_expr)',
+ }.items():
+
+ class UnaryArith(Interpretable):
+ __view__ = astclass
+
+ def eval(self, frame, astpattern=astpattern):
+ expr = Interpretable(self.expr)
+ expr.eval(frame)
+ self.explanation = astpattern.replace('__exprinfo_expr',
+ expr.explanation)
+ try:
+ self.result = frame.eval(astpattern,
+ __exprinfo_expr=expr.result)
+ except passthroughex:
+ raise
+ except:
+ raise Failure(self)
+
+ keepalive.append(UnaryArith)
+
+# == Binary operations ==
+for astclass, astpattern in {
+ ast.Add : '(__exprinfo_left + __exprinfo_right)',
+ ast.Sub : '(__exprinfo_left - __exprinfo_right)',
+ ast.Mul : '(__exprinfo_left * __exprinfo_right)',
+ ast.Div : '(__exprinfo_left / __exprinfo_right)',
+ ast.Mod : '(__exprinfo_left % __exprinfo_right)',
+ ast.Power : '(__exprinfo_left ** __exprinfo_right)',
+ }.items():
+
+ class BinaryArith(Interpretable):
+ __view__ = astclass
+
+ def eval(self, frame, astpattern=astpattern):
+ left = Interpretable(self.left)
+ left.eval(frame)
+ right = Interpretable(self.right)
+ right.eval(frame)
+ self.explanation = (astpattern
+ .replace('__exprinfo_left', left .explanation)
+ .replace('__exprinfo_right', right.explanation))
+ try:
+ self.result = frame.eval(astpattern,
+ __exprinfo_left=left.result,
+ __exprinfo_right=right.result)
+ except passthroughex:
+ raise
+ except:
+ raise Failure(self)
+
+ keepalive.append(BinaryArith)
+
+
+class CallFunc(Interpretable):
+ __view__ = ast.CallFunc
+
+ def is_bool(self, frame):
+ source = 'isinstance(__exprinfo_value, bool)'
+ try:
+ return frame.is_true(frame.eval(source,
+ __exprinfo_value=self.result))
+ except passthroughex:
+ raise
+ except:
+ return False
+
+ def eval(self, frame):
+ node = Interpretable(self.node)
+ node.eval(frame)
+ explanations = []
+ vars = {'__exprinfo_fn': node.result}
+ source = '__exprinfo_fn('
+ for a in self.args:
+ if isinstance(a, ast.Keyword):
+ keyword = a.name
+ a = a.expr
+ else:
+ keyword = None
+ a = Interpretable(a)
+ a.eval(frame)
+ argname = '__exprinfo_%d' % len(vars)
+ vars[argname] = a.result
+ if keyword is None:
+ source += argname + ','
+ explanations.append(a.explanation)
+ else:
+ source += '%s=%s,' % (keyword, argname)
+ explanations.append('%s=%s' % (keyword, a.explanation))
+ if self.star_args:
+ star_args = Interpretable(self.star_args)
+ star_args.eval(frame)
+ argname = '__exprinfo_star'
+ vars[argname] = star_args.result
+ source += '*' + argname + ','
+ explanations.append('*' + star_args.explanation)
+ if self.dstar_args:
+ dstar_args = Interpretable(self.dstar_args)
+ dstar_args.eval(frame)
+ argname = '__exprinfo_kwds'
+ vars[argname] = dstar_args.result
+ source += '**' + argname + ','
+ explanations.append('**' + dstar_args.explanation)
+ self.explanation = "%s(%s)" % (
+ node.explanation, ', '.join(explanations))
+ if source.endswith(','):
+ source = source[:-1]
+ source += ')'
+ try:
+ self.result = frame.eval(source, **vars)
+ except passthroughex:
+ raise
+ except:
+ raise Failure(self)
+ if not node.is_builtin(frame) or not self.is_bool(frame):
+ r = frame.repr(self.result)
+ self.explanation = '%s\n{%s = %s\n}' % (r, r, self.explanation)
+
+class Getattr(Interpretable):
+ __view__ = ast.Getattr
+
+ def eval(self, frame):
+ expr = Interpretable(self.expr)
+ expr.eval(frame)
+ source = '__exprinfo_expr.%s' % self.attrname
+ try:
+ self.result = frame.eval(source, __exprinfo_expr=expr.result)
+ except passthroughex:
+ raise
+ except:
+ raise Failure(self)
+ self.explanation = '%s.%s' % (expr.explanation, self.attrname)
+ # if the attribute comes from the instance, its value is interesting
+ source = ('hasattr(__exprinfo_expr, "__dict__") and '
+ '%r in __exprinfo_expr.__dict__' % self.attrname)
+ try:
+ from_instance = frame.is_true(
+ frame.eval(source, __exprinfo_expr=expr.result))
+ except passthroughex:
+ raise
+ except:
+ from_instance = True
+ if from_instance:
+ r = frame.repr(self.result)
+ self.explanation = '%s\n{%s = %s\n}' % (r, r, self.explanation)
+
+# == Re-interpretation of full statements ==
+
+class Assert(Interpretable):
+ __view__ = ast.Assert
+
+ def run(self, frame):
+ test = Interpretable(self.test)
+ test.eval(frame)
+ # simplify 'assert False where False = ...'
+ if (test.explanation.startswith('False\n{False = ') and
+ test.explanation.endswith('\n}')):
+ test.explanation = test.explanation[15:-2]
+ # print the result as 'assert <explanation>'
+ self.result = test.result
+ self.explanation = 'assert ' + test.explanation
+ if not frame.is_true(test.result):
+ try:
+ raise BuiltinAssertionError
+ except passthroughex:
+ raise
+ except:
+ raise Failure(self)
+
+class Assign(Interpretable):
+ __view__ = ast.Assign
+
+ def run(self, frame):
+ expr = Interpretable(self.expr)
+ expr.eval(frame)
+ self.result = expr.result
+ self.explanation = '... = ' + expr.explanation
+ # fall-back-run the rest of the assignment
+ ass = ast.Assign(self.nodes, ast.Name('__exprinfo_expr'))
+ mod = ast.Module(None, ast.Stmt([ass]))
+ mod.filename = '<run>'
+ co = pycodegen.ModuleCodeGenerator(mod).getCode()
+ try:
+ frame.exec_(co, __exprinfo_expr=expr.result)
+ except passthroughex:
+ raise
+ except:
+ raise Failure(self)
+
+class Discard(Interpretable):
+ __view__ = ast.Discard
+
+ def run(self, frame):
+ expr = Interpretable(self.expr)
+ expr.eval(frame)
+ self.result = expr.result
+ self.explanation = expr.explanation
+
+class Stmt(Interpretable):
+ __view__ = ast.Stmt
+
+ def run(self, frame):
+ for stmt in self.nodes:
+ stmt = Interpretable(stmt)
+ stmt.run(frame)
+
+
+def report_failure(e):
+ explanation = e.node.nice_explanation()
+ if explanation:
+ explanation = ", in: " + explanation
+ else:
+ explanation = ""
+ sys.stdout.write("%s: %s%s\n" % (e.exc.__name__, e.value, explanation))
+
+def check(s, frame=None):
+ if frame is None:
+ frame = sys._getframe(1)
+ frame = py.code.Frame(frame)
+ expr = parse(s, 'eval')
+ assert isinstance(expr, ast.Expression)
+ node = Interpretable(expr.node)
+ try:
+ node.eval(frame)
+ except passthroughex:
+ raise
+ except Failure:
+ e = sys.exc_info()[1]
+ report_failure(e)
+ else:
+ if not frame.is_true(node.result):
+ sys.stderr.write("assertion failed: %s\n" % node.nice_explanation())
+
+
+###########################################################
+# API / Entry points
+# #########################################################
+
+def interpret(source, frame, should_fail=False):
+ module = Interpretable(parse(source, 'exec').node)
+ #print "got module", module
+ if isinstance(frame, py.std.types.FrameType):
+ frame = py.code.Frame(frame)
+ try:
+ module.run(frame)
+ except Failure:
+ e = sys.exc_info()[1]
+ return getfailure(e)
+ except passthroughex:
+ raise
+ except:
+ import traceback
+ traceback.print_exc()
+ if should_fail:
+ return ("(assertion failed, but when it was re-run for "
+ "printing intermediate values, it did not fail. Suggestions: "
+ "compute assert expression before the assert or use --nomagic)")
+ else:
+ return None
+
+def getmsg(excinfo):
+ if isinstance(excinfo, tuple):
+ excinfo = py.code.ExceptionInfo(excinfo)
+ #frame, line = gettbline(tb)
+ #frame = py.code.Frame(frame)
+ #return interpret(line, frame)
+
+ tb = excinfo.traceback[-1]
+ source = str(tb.statement).strip()
+ x = interpret(source, tb.frame, should_fail=True)
+ if not isinstance(x, str):
+ raise TypeError("interpret returned non-string %r" % (x,))
+ return x
+
+def getfailure(e):
+ explanation = e.node.nice_explanation()
+ if str(e.value):
+ lines = explanation.split('\n')
+ lines[0] += " << %s" % (e.value,)
+ explanation = '\n'.join(lines)
+ text = "%s: %s" % (e.exc.__name__, explanation)
+ if text.startswith('AssertionError: assert '):
+ text = text[16:]
+ return text
+
+def run(s, frame=None):
+ if frame is None:
+ frame = sys._getframe(1)
+ frame = py.code.Frame(frame)
+ module = Interpretable(parse(s, 'exec').node)
+ try:
+ module.run(frame)
+ except Failure:
+ e = sys.exc_info()[1]
+ report_failure(e)
+
+
+if __name__ == '__main__':
+ # example:
+ def f():
+ return 5
+ def g():
+ return 3
+ def h(x):
+ return 'never'
+ check("f() * g() == 5")
+ check("not f()")
+ check("not (f() and g() or 0)")
+ check("f() == g()")
+ i = 4
+ check("i == f()")
+ check("len(f()) == 0")
+ check("isinstance(2+3+4, float)")
+
+ run("x = i")
+ check("x == 5")
+
+ run("assert not f(), 'oops'")
+ run("a, b, c = 1, 2")
+ run("a, b, c = f()")
+
+ check("max([f(),g()]) == 4")
+ check("'hello'[g()] == 'h'")
+ run("'guk%d' % h(f())")
diff --git a/py/_code/assertion.py b/py/_code/assertion.py
new file mode 100644
--- /dev/null
+++ b/py/_code/assertion.py
@@ -0,0 +1,94 @@
+import sys
+import py
+
+BuiltinAssertionError = py.builtin.builtins.AssertionError
+
+_reprcompare = None # if set, will be called by assert reinterp for comparison ops
+
+def _format_explanation(explanation):
+ """This formats an explanation
+
+ Normally all embedded newlines are escaped, however there are
+ three exceptions: \n{, \n} and \n~. The first two are intended
+ to cover nested explanations, see function and attribute explanations
+ for examples (.visit_Call(), visit_Attribute()). The last one is
+ for when one explanation needs to span multiple lines, e.g. when
+ displaying diffs.
+ """
+ raw_lines = (explanation or '').split('\n')
+ # escape newlines not followed by {, } and ~
+ lines = [raw_lines[0]]
+ for l in raw_lines[1:]:
+ if l.startswith('{') or l.startswith('}') or l.startswith('~'):
+ lines.append(l)
+ else:
+ lines[-1] += '\\n' + l
+
+ result = lines[:1]
+ stack = [0]
+ stackcnt = [0]
+ for line in lines[1:]:
+ if line.startswith('{'):
+ if stackcnt[-1]:
+ s = 'and '
+ else:
+ s = 'where '
+ stack.append(len(result))
+ stackcnt[-1] += 1
+ stackcnt.append(0)
+ result.append(' +' + ' '*(len(stack)-1) + s + line[1:])
+ elif line.startswith('}'):
+ assert line.startswith('}')
+ stack.pop()
+ stackcnt.pop()
+ result[stack[-1]] += line[1:]
+ else:
+ assert line.startswith('~')
+ result.append(' '*len(stack) + line[1:])
+ assert len(stack) == 1
+ return '\n'.join(result)
+
+
+class AssertionError(BuiltinAssertionError):
+ def __init__(self, *args):
+ BuiltinAssertionError.__init__(self, *args)
+ if args:
+ try:
+ self.msg = str(args[0])
+ except py.builtin._sysex:
+ raise
+ except:
+ self.msg = "<[broken __repr__] %s at %0xd>" %(
+ args[0].__class__, id(args[0]))
+ else:
+ f = py.code.Frame(sys._getframe(1))
+ try:
+ source = f.code.fullsource
+ if source is not None:
+ try:
+ source = source.getstatement(f.lineno, assertion=True)
+ except IndexError:
+ source = None
+ else:
+ source = str(source.deindent()).strip()
+ except py.error.ENOENT:
+ source = None
+ # this can also occur during reinterpretation, when the
+ # co_filename is set to "<run>".
+ if source:
+ self.msg = reinterpret(source, f, should_fail=True)
+ else:
+ self.msg = "<could not determine information>"
+ if not self.args:
+ self.args = (self.msg,)
+
+if sys.version_info > (3, 0):
+ AssertionError.__module__ = "builtins"
+ reinterpret_old = "old reinterpretation not available for py3"
+else:
+ from py._code._assertionold import interpret as reinterpret_old
+if sys.version_info >= (2, 6) or (sys.platform.startswith("java")):
+ from py._code._assertionnew import interpret as reinterpret
+else:
+ reinterpret = reinterpret_old
+
diff --git a/py/_code/code.py b/py/_code/code.py
--- a/py/_code/code.py
+++ b/py/_code/code.py
@@ -145,6 +145,17 @@
return self.frame.f_locals
locals = property(getlocals, None, None, "locals of underlaying frame")
+ def reinterpret(self):
+ """Reinterpret the failing statement and returns a detailed information
+ about what operations are performed."""
+ if self.exprinfo is None:
+ source = str(self.statement).strip()
+ x = py.code._reinterpret(source, self.frame, should_fail=True)
+ if not isinstance(x, str):
+ raise TypeError("interpret returned non-string %r" % (x,))
+ self.exprinfo = x
+ return self.exprinfo
+
def getfirstlinesource(self):
# on Jython this firstlineno can be -1 apparently
return max(self.frame.code.firstlineno, 0)
@@ -158,13 +169,12 @@
end = self.lineno
try:
_, end = source.getstatementrange(end)
- except IndexError:
+ except (IndexError, ValueError):
end = self.lineno + 1
# heuristic to stop displaying source on e.g.
# if something: # assume this causes a NameError
# # _this_ lines and the one
# below we don't want from entry.getsource()
- end = min(end, len(source))
for i in range(self.lineno, end):
if source[i].rstrip().endswith(':'):
end = i + 1
@@ -273,7 +283,11 @@
"""
cache = {}
for i, entry in enumerate(self):
- key = entry.frame.code.path, entry.lineno
+ # id for the code.raw is needed to work around
+ # the strange metaprogramming in the decorator lib from pypi
+ # which generates code objects that have hash/value equality
+ #XXX needs a test
+ key = entry.frame.code.path, id(entry.frame.code.raw), entry.lineno
#print "checking for recursion at", key
l = cache.setdefault(key, [])
if l:
@@ -308,7 +322,7 @@
self._striptext = 'AssertionError: '
self._excinfo = tup
self.type, self.value, tb = self._excinfo
- self.typename = getattr(self.type, "__name__", "???")
+ self.typename = self.type.__name__
self.traceback = py.code.Traceback(tb)
def __repr__(self):
@@ -347,14 +361,16 @@
showlocals: show locals per traceback entry
style: long|short|no|native traceback style
tbfilter: hide entries (where __tracebackhide__ is true)
+
+ in case of style==native, tbfilter and showlocals is ignored.
"""
if style == 'native':
- import traceback
- return ''.join(traceback.format_exception(
- self.type,
- self.value,
- self.traceback[0]._rawentry,
- ))
+ return ReprExceptionInfo(ReprTracebackNative(
+ py.std.traceback.format_exception(
+ self.type,
+ self.value,
+ self.traceback[0]._rawentry,
+ )), self._getreprcrash())
fmt = FormattedExcinfo(showlocals=showlocals, style=style,
abspath=abspath, tbfilter=tbfilter, funcargs=funcargs)
@@ -452,7 +468,7 @@
def repr_locals(self, locals):
if self.showlocals:
lines = []
- keys = list(locals)
+ keys = [loc for loc in locals if loc[0] != "@"]
keys.sort()
for name in keys:
value = locals[name]
@@ -506,7 +522,10 @@
def _makepath(self, path):
if not self.abspath:
- np = py.path.local().bestrelpath(path)
+ try:
+ np = py.path.local().bestrelpath(path)
+ except OSError:
+ return path
if len(np) < len(str(path)):
path = np
return path
@@ -595,6 +614,19 @@
if self.extraline:
tw.line(self.extraline)
+class ReprTracebackNative(ReprTraceback):
+ def __init__(self, tblines):
+ self.style = "native"
+ self.reprentries = [ReprEntryNative(tblines)]
+ self.extraline = None
+
+class ReprEntryNative(TerminalRepr):
+ def __init__(self, tblines):
+ self.lines = tblines
+
+ def toterminal(self, tw):
+ tw.write("".join(self.lines))
+
class ReprEntry(TerminalRepr):
localssep = "_ "
@@ -680,19 +712,26 @@
oldbuiltins = {}
-def patch_builtins(compile=True):
- """ put compile builtins to Python's builtins. """
+def patch_builtins(assertion=True, compile=True):
+ """ put compile and AssertionError builtins to Python's builtins. """
+ if assertion:
+ from py._code import assertion
+ l = oldbuiltins.setdefault('AssertionError', [])
+ l.append(py.builtin.builtins.AssertionError)
+ py.builtin.builtins.AssertionError = assertion.AssertionError
if compile:
l = oldbuiltins.setdefault('compile', [])
l.append(py.builtin.builtins.compile)
py.builtin.builtins.compile = py.code.compile
-def unpatch_builtins(compile=True):
+def unpatch_builtins(assertion=True, compile=True):
""" remove compile and AssertionError builtins from Python builtins. """
+ if assertion:
+ py.builtin.builtins.AssertionError = oldbuiltins['AssertionError'].pop()
if compile:
py.builtin.builtins.compile = oldbuiltins['compile'].pop()
-def getrawcode(obj):
+def getrawcode(obj, trycall=True):
""" return code object for given function. """
try:
return obj.__code__
@@ -701,5 +740,10 @@
obj = getattr(obj, 'func_code', obj)
obj = getattr(obj, 'f_code', obj)
obj = getattr(obj, '__code__', obj)
+ if trycall and not hasattr(obj, 'co_firstlineno'):
+ if hasattr(obj, '__call__') and not py.std.inspect.isclass(obj):
+ x = getrawcode(obj.__call__, trycall=False)
+ if hasattr(x, 'co_firstlineno'):
+ return x
return obj
diff --git a/py/_code/source.py b/py/_code/source.py
--- a/py/_code/source.py
+++ b/py/_code/source.py
@@ -108,6 +108,7 @@
def getstatementrange(self, lineno, assertion=False):
""" return (start, end) tuple which spans the minimal
statement region which containing the given lineno.
+ raise an IndexError if no such statementrange can be found.
"""
# XXX there must be a better than these heuristic ways ...
# XXX there may even be better heuristics :-)
@@ -116,6 +117,7 @@
# 1. find the start of the statement
from codeop import compile_command
+ end = None
for start in range(lineno, -1, -1):
if assertion:
line = self.lines[start]
@@ -139,7 +141,9 @@
trysource = self[start:end]
if trysource.isparseable():
return start, end
- return start, len(self)
+ if end is None:
+ raise IndexError("no valid source range around line %d " % (lineno,))
+ return start, end
def getblockend(self, lineno):
# XXX
@@ -257,23 +261,29 @@
def getfslineno(obj):
+ """ Return source location (path, lineno) for the given object.
+ If the source cannot be determined return ("", -1)
+ """
try:
code = py.code.Code(obj)
except TypeError:
- # fallback to
- fn = (py.std.inspect.getsourcefile(obj) or
- py.std.inspect.getfile(obj))
+ try:
+ fn = (py.std.inspect.getsourcefile(obj) or
+ py.std.inspect.getfile(obj))
+ except TypeError:
+ return "", -1
+
fspath = fn and py.path.local(fn) or None
+ lineno = -1
if fspath:
try:
_, lineno = findsource(obj)
except IOError:
- lineno = None
- else:
- lineno = None
+ pass
else:
fspath = code.path
lineno = code.firstlineno
+ assert isinstance(lineno, int)
return fspath, lineno
#
@@ -286,7 +296,7 @@
except py.builtin._sysex:
raise
except:
- return None, None
+ return None, -1
source = Source()
source.lines = [line.rstrip() for line in sourcelines]
return source, lineno
diff --git a/py/_error.py b/py/_error.py
--- a/py/_error.py
+++ b/py/_error.py
@@ -23,6 +23,7 @@
2: errno.ENOENT,
3: errno.ENOENT,
17: errno.EEXIST,
+ 13: errno.EBUSY, # empty cd drive, but ENOMEDIUM seems unavailable
22: errno.ENOTDIR,
267: errno.ENOTDIR,
5: errno.EACCES, # anything better?
diff --git a/py/_iniconfig.py b/py/_iniconfig.py
--- a/py/_iniconfig.py
+++ b/py/_iniconfig.py
@@ -103,6 +103,7 @@
def _parseline(self, line, lineno):
# comments
line = line.split('#')[0].rstrip()
+ line = line.split(';')[0].rstrip()
# blank lines
if not line:
return None, None
diff --git a/py/_io/capture.py b/py/_io/capture.py
--- a/py/_io/capture.py
+++ b/py/_io/capture.py
@@ -12,7 +12,7 @@
class TextIO(StringIO):
def write(self, data):
if not isinstance(data, unicode):
- data = unicode(data, getattr(self, '_encoding', 'UTF-8'))
+ data = unicode(data, getattr(self, '_encoding', 'UTF-8'), 'replace')
StringIO.write(self, data)
else:
TextIO = StringIO
@@ -258,6 +258,9 @@
f = getattr(self, name).tmpfile
f.seek(0)
res = f.read()
+ enc = getattr(f, 'encoding', None)
+ if enc:
+ res = py.builtin._totext(res, enc, 'replace')
f.truncate(0)
f.seek(0)
l.append(res)
diff --git a/py/_io/terminalwriter.py b/py/_io/terminalwriter.py
--- a/py/_io/terminalwriter.py
+++ b/py/_io/terminalwriter.py
@@ -105,6 +105,8 @@
Blue=44, Purple=45, Cyan=46, White=47,
bold=1, light=2, blink=5, invert=7)
+ _newline = None # the last line printed
+
# XXX deprecate stringio argument
def __init__(self, file=None, stringio=False, encoding=None):
if file is None:
@@ -112,11 +114,9 @@
self.stringio = file = py.io.TextIO()
else:
file = py.std.sys.stdout
- if hasattr(file, 'encoding'):
- encoding = file.encoding
elif hasattr(file, '__call__'):
file = WriteFile(file, encoding=encoding)
- self.encoding = encoding
+ self.encoding = encoding or getattr(file, 'encoding', "utf-8")
self._file = file
self.fullwidth = get_terminal_width()
self.hasmarkup = should_do_markup(file)
@@ -182,8 +182,31 @@
return s
def line(self, s='', **kw):
+ if self._newline == False:
+ self.write("\n")
self.write(s, **kw)
self.write('\n')
+ self._newline = True
+
+ def reline(self, line, **opts):
+ if not self.hasmarkup:
+ raise ValueError("cannot use rewrite-line without terminal")
+ if not self._newline:
+ self.write("\r")
+ self.write(line, **opts)
+ # see if we need to fill up some spaces at the end
+ # xxx have a more exact lastlinelen working from self.write?
+ lenline = len(line)
+ try:
+ lastlen = self._lastlinelen
+ except AttributeError:
+ pass
+ else:
+ if lenline < lastlen:
+ self.write(" " * (lastlen - lenline + 1))
+ self._lastlinelen = lenline
+ self._newline = False
+
class Win32ConsoleWriter(TerminalWriter):
def write(self, s, **kw):
@@ -280,10 +303,10 @@
SetConsoleTextAttribute = ctypes.windll.kernel32.SetConsoleTextAttribute
SetConsoleTextAttribute.argtypes = [wintypes.HANDLE, wintypes.WORD]
SetConsoleTextAttribute.restype = wintypes.BOOL
-
+
_GetConsoleScreenBufferInfo = \
ctypes.windll.kernel32.GetConsoleScreenBufferInfo
- _GetConsoleScreenBufferInfo.argtypes = [wintypes.HANDLE,
+ _GetConsoleScreenBufferInfo.argtypes = [wintypes.HANDLE,
ctypes.POINTER(CONSOLE_SCREEN_BUFFER_INFO)]
_GetConsoleScreenBufferInfo.restype = wintypes.BOOL
def GetConsoleInfo(handle):
diff --git a/py/_path/common.py b/py/_path/common.py
--- a/py/_path/common.py
+++ b/py/_path/common.py
@@ -64,7 +64,10 @@
else:
if bool(value) ^ bool(meth()) ^ invert:
return False
- except (py.error.ENOENT, py.error.ENOTDIR):
+ except (py.error.ENOENT, py.error.ENOTDIR, py.error.EBUSY):
+ # EBUSY feels not entirely correct,
+ # but it's kind of necessary since ENOMEDIUM
+ # is not accessible in python
for name in self._depend_on_existence:
if name in kw:
if kw.get(name):
@@ -368,6 +371,5 @@
else:
name = str(path) # path.strpath # XXX svn?
pattern = '*' + path.sep + pattern
- from fnmatch import fnmatch
- return fnmatch(name, pattern)
+ return py.std.fnmatch.fnmatch(name, pattern)
diff --git a/py/_path/local.py b/py/_path/local.py
--- a/py/_path/local.py
+++ b/py/_path/local.py
@@ -157,14 +157,16 @@
return str(self) < str(other)
def samefile(self, other):
- """ return True if 'other' references the same file as 'self'. """
- if not iswin32:
- return py.error.checked_call(
- os.path.samefile, str(self), str(other))
+ """ return True if 'other' references the same file as 'self'.
+ """
+ if not isinstance(other, py.path.local):
+ other = os.path.abspath(str(other))
if self == other:
return True
- other = os.path.abspath(str(other))
- return self == other
+ if iswin32:
+ return False # there is no samefile
+ return py.error.checked_call(
+ os.path.samefile, str(self), str(other))
def remove(self, rec=1, ignore_errors=False):
""" remove a file or directory (or a directory tree if rec=1).
@@ -539,7 +541,11 @@
if self.basename != "__init__.py":
modfile = modfile[:-12]
- if not self.samefile(modfile):
+ try:
+ issame = self.samefile(modfile)
+ except py.error.ENOENT:
+ issame = False
+ if not issame:
raise self.ImportMismatchError(modname, modfile, self)
return mod
else:
diff --git a/py/_path/svnurl.py b/py/_path/svnurl.py
--- a/py/_path/svnurl.py
+++ b/py/_path/svnurl.py
@@ -233,6 +233,8 @@
e = sys.exc_info()[1]
if e.err.find('non-existent in that revision') != -1:
raise py.error.ENOENT(self, e.err)
+ elif e.err.find("E200009:") != -1:
+ raise py.error.ENOENT(self, e.err)
elif e.err.find('File not found') != -1:
raise py.error.ENOENT(self, e.err)
elif e.err.find('not part of a repository')!=-1:
diff --git a/py/_path/svnwc.py b/py/_path/svnwc.py
--- a/py/_path/svnwc.py
+++ b/py/_path/svnwc.py
@@ -482,10 +482,13 @@
except py.process.cmdexec.Error:
e = sys.exc_info()[1]
strerr = e.err.lower()
- if strerr.find('file not found') != -1:
+ if strerr.find('not found') != -1:
+ raise py.error.ENOENT(self)
+ elif strerr.find("E200009:") != -1:
raise py.error.ENOENT(self)
if (strerr.find('file exists') != -1 or
strerr.find('file already exists') != -1 or
+ strerr.find('w150002:') != -1 or
strerr.find("can't create directory") != -1):
raise py.error.EEXIST(self)
raise
@@ -593,7 +596,7 @@
out = self._authsvn('lock').strip()
if not out:
# warning or error, raise exception
- raise Exception(out[4:])
+ raise ValueError("unknown error in svn lock command")
def unlock(self):
""" unset a previously set lock """
@@ -1066,6 +1069,8 @@
modrev = '?'
author = '?'
date = ''
+ elif itemstatus == "replaced":
+ pass
else:
#print entryel.toxml()
commitel = entryel.getElementsByTagName('commit')[0]
@@ -1148,7 +1153,11 @@
raise ValueError("Not a versioned resource")
#raise ValueError, "Not a versioned resource %r" % path
self.kind = d['nodekind'] == 'directory' and 'dir' or d['nodekind']
- self.rev = int(d['revision'])
+ try:
+ self.rev = int(d['revision'])
+ except KeyError:
+ self.rev = None
+
self.path = py.path.local(d['path'])
self.size = self.path.size()
if 'lastchangedrev' in d:
diff --git a/py/_xmlgen.py b/py/_xmlgen.py
--- a/py/_xmlgen.py
+++ b/py/_xmlgen.py
@@ -52,7 +52,7 @@
def unicode(self, indent=2):
l = []
SimpleUnicodeVisitor(l.append, indent).visit(self)
- return "".join(l)
+ return u("").join(l)
def __repr__(self):
name = self.__class__.__name__
@@ -122,11 +122,13 @@
if visitmethod is not None:
break
else:
- visitmethod = self.object
+ visitmethod = self.__object
self.cache[cls] = visitmethod
visitmethod(node)
- def object(self, obj):
+ # the default fallback handler is marked private
+ # to avoid clashes with the tag name object
+ def __object(self, obj):
#self.write(obj)
self.write(escape(unicode(obj)))
@@ -136,7 +138,8 @@
def list(self, obj):
assert id(obj) not in self.visited
self.visited[id(obj)] = 1
- map(self.visit, obj)
+ for elem in obj:
+ self.visit(elem)
def Tag(self, tag):
assert id(tag) not in self.visited
@@ -181,7 +184,11 @@
value = getattr(attrs, name)
if name.endswith('_'):
name = name[:-1]
- return ' %s="%s"' % (name, escape(unicode(value)))
+ if isinstance(value, raw):
+ insert = value.uniobj
+ else:
+ insert = escape(unicode(value))
+ return ' %s="%s"' % (name, insert)
def getstyle(self, tag):
""" return attribute list suitable for styling. """
diff --git a/py/bin/_findpy.py b/py/bin/_findpy.py
deleted file mode 100644
--- a/py/bin/_findpy.py
+++ /dev/null
@@ -1,38 +0,0 @@
-#!/usr/bin/env python
-
-#
-# find and import a version of 'py'
-#
-import sys
-import os
-from os.path import dirname as opd, exists, join, basename, abspath
-
-def searchpy(current):
- while 1:
- last = current
- initpy = join(current, '__init__.py')
- if not exists(initpy):
- pydir = join(current, 'py')
- # recognize py-package and ensure it is importable
- if exists(pydir) and exists(join(pydir, '__init__.py')):
- #for p in sys.path:
- # if p == current:
- # return True
- if current != sys.path[0]: # if we are already first, then ok
- sys.stderr.write("inserting into sys.path: %s\n" % current)
- sys.path.insert(0, current)
- return True
- current = opd(current)
- if last == current:
- return False
-
-if not searchpy(abspath(os.curdir)):
- if not searchpy(opd(abspath(sys.argv[0]))):
- if not searchpy(opd(__file__)):
- pass # let's hope it is just on sys.path
-
-import py
-import pytest
-
-if __name__ == '__main__':
- print ("py lib is at %s" % py.__file__)
diff --git a/py/bin/py.test b/py/bin/py.test
deleted file mode 100755
--- a/py/bin/py.test
+++ /dev/null
@@ -1,3 +0,0 @@
-#!/usr/bin/env python
-from _findpy import pytest
-raise SystemExit(pytest.main())
diff --git a/pypy/annotation/test/test_annrpython.py b/pypy/annotation/test/test_annrpython.py
--- a/pypy/annotation/test/test_annrpython.py
+++ b/pypy/annotation/test/test_annrpython.py
@@ -483,7 +483,7 @@
return a_str.strip(' ')
elif n == 1:
return a_str.rstrip(' ')
- else:
+ else:
return a_str.lstrip(' ')
s = a.build_types(f, [int, annmodel.SomeString(no_nul=True)])
assert s.no_nul
@@ -3737,6 +3737,25 @@
s = a.build_types(f, [int])
assert s.listdef.listitem.range_step == 0
+ def test_specialize_arg_memo(self):
+ @objectmodel.specialize.memo()
+ def g(n):
+ return n
+ @objectmodel.specialize.arg(0)
+ def f(i):
+ return g(i)
+ def main(i):
+ if i == 2:
+ return f(i)
+ elif i == 3:
+ return f(i)
+ else:
+ raise NotImplementedError
+
+ a = self.RPythonAnnotator()
+ s = a.build_types(main, [int])
+ assert isinstance(s, annmodel.SomeInteger)
+
def g(n):
return [0,1,2,n]
diff --git a/pypy/doc/project-ideas.rst b/pypy/doc/project-ideas.rst
--- a/pypy/doc/project-ideas.rst
+++ b/pypy/doc/project-ideas.rst
@@ -149,6 +149,22 @@
exported. This would give us a one-size-fits-all generic .so file to be
imported by any application that wants to load .so files :-)
+Optimising cpyext (CPython C-API compatibility layer)
+-----------------------------------------------------
+
+A lot of work has gone into PyPy's implementation of CPython's C-API over
+the last years to let it reach a practical level of compatibility, so that
+C extensions for CPython work on PyPy without major rewrites. However,
+there are still many edges and corner cases where it misbehaves, and it has
+not received any substantial optimisation so far.
+
+The objective of this project is to fix bugs in cpyext and to optimise
+several performance critical parts of it, such as the reference counting
+support and other heavily used C-API functions. The net result would be to
+have CPython extensions run much faster on PyPy than they currently do, or
+to make them work at all if they currently don't. A part of this work would
+be to get cpyext into a shape where it supports running Cython generated
+extensions.
.. _`issue tracker`: http://bugs.pypy.org
.. _`mailing list`: http://mail.python.org/mailman/listinfo/pypy-dev
diff --git a/pypy/doc/stackless.rst b/pypy/doc/stackless.rst
--- a/pypy/doc/stackless.rst
+++ b/pypy/doc/stackless.rst
@@ -199,17 +199,11 @@
The following features (present in some past Stackless version of PyPy)
are for the time being not supported any more:
-* Tasklets and channels (currently ``stackless.py`` seems to import,
- but you have tasklets on top of coroutines on top of greenlets on
- top of continulets on top of stacklets, and it's probably not too
- hard to cut two of these levels by adapting ``stackless.py`` to
- use directly continulets)
-
* Coroutines (could be rewritten at app-level)
-* Pickling and unpickling continulets (*)
-
-* Continuing execution of a continulet in a different thread (*)
+* Continuing execution of a continulet in a different thread
+ (but if it is "simple enough", you can pickle it and unpickle it
+ in the other thread).
* Automatic unlimited stack (must be emulated__ so far)
@@ -217,15 +211,6 @@
.. __: `recursion depth limit`_
-(*) Pickling, as well as changing threads, could be implemented by using
-a "soft" stack switching mode again. We would get either "hard" or
-"soft" switches, similarly to Stackless Python 3rd version: you get a
-"hard" switch (like now) when the C stack contains non-trivial C frames
-to save, and a "soft" switch (like previously) when it contains only
-simple calls from Python to Python. Soft-switched continulets would
-also consume a bit less RAM, and the switch might be a bit faster too
-(unsure about that; what is the Stackless Python experience?).
-
Recursion depth limit
+++++++++++++++++++++
diff --git a/pypy/doc/tool/makecontributor.py b/pypy/doc/tool/makecontributor.py
new file mode 100644
--- /dev/null
+++ b/pypy/doc/tool/makecontributor.py
@@ -0,0 +1,133 @@
+import py
+import sys
+from collections import defaultdict
+import operator
+import re
+import mercurial.localrepo
+import mercurial.ui
+
+ROOT = py.path.local(__file__).join('..', '..', '..', '..')
+author_re = re.compile('(.*) <.*>')
+pair_programming_re = re.compile(r'^\((.*?)\)')
+excluded = set(["pypy", "convert-repo"])
+
+alias = {
+ 'Anders Chrigstrom': ['arre'],
+ 'Antonio Cuni': ['antocuni', 'anto'],
+ 'Armin Rigo': ['arigo', 'arfigo', 'armin', 'arigato'],
+ 'Maciej Fijalkowski': ['fijal'],
+ 'Carl Friedrich Bolz': ['cfbolz', 'cf'],
+ 'Samuele Pedroni': ['pedronis', 'samuele', 'samule'],
+ 'Michael Hudson': ['mwh'],
+ 'Holger Krekel': ['hpk', 'holger krekel', 'holger', 'hufpk'],
+ "Amaury Forgeot d'Arc": ['afa'],
+ 'Alex Gaynor': ['alex', 'agaynor'],
+ 'David Schneider': ['bivab', 'david'],
+ 'Christian Tismer': ['chris', 'christian', 'tismer',
+ 'tismer at christia-wjtqxl.localdomain'],
+ 'Benjamin Peterson': ['benjamin'],
+ 'Hakan Ardo': ['hakan', 'hakanardo'],
+ 'Niklaus Haldimann': ['nik'],
+ 'Alexander Schremmer': ['xoraxax'],
+ 'Anders Hammarquist': ['iko'],
+ 'David Edelsohn': ['edelsoh', 'edelsohn'],
+ 'Niko Matsakis': ['niko'],
+ 'Jakub Gustak': ['jlg'],
+ 'Guido Wesdorp': ['guido'],
+ 'Michael Foord': ['mfoord'],
+ 'Mark Pearse': ['mwp'],
+ 'Toon Verwaest': ['tverwaes'],
+ 'Eric van Riet Paap': ['ericvrp'],
+ 'Jacob Hallen': ['jacob', 'jakob'],
+ 'Anders Lehmann': ['ale', 'anders'],
+ 'Bert Freudenberg': ['bert'],
+ 'Boris Feigin': ['boris', 'boria'],
+ 'Valentino Volonghi': ['valentino', 'dialtone'],
+ 'Aurelien Campeas': ['aurelien', 'aureliene'],
+ 'Adrien Di Mascio': ['adim'],
+ 'Jacek Generowicz': ['Jacek', 'jacek'],
+ 'Jim Hunziker': ['landtuna at gmail.com'],
+ 'Kristjan Valur Jonsson': ['kristjan at kristjan-lp.ccp.ad.local'],
+ 'Laura Creighton': ['lac'],
+ 'Aaron Iles': ['aliles'],
+ 'Ludovic Aubry': ['ludal', 'ludovic'],
+ 'Lukas Diekmann': ['l.diekmann', 'ldiekmann'],
+ 'Matti Picus': ['Matti Picus matti.picus at gmail.com',
+ 'matthp', 'mattip', 'mattip>'],
+ 'Michael Cheng': ['mikefc'],
+ 'Richard Emslie': ['rxe'],
+ 'Roberto De Ioris': ['roberto at goyle'],
+ 'Roberto De Ioris': ['roberto at mrspurr'],
+ 'Sven Hager': ['hager'],
+ 'Tomo Cocoa': ['cocoatomo'],
+ }
+
+alias_map = {}
+for name, nicks in alias.iteritems():
+ for nick in nicks:
+ alias_map[nick] = name
+
+def get_canonical_author(name):
+ match = author_re.match(name)
+ if match:
+ name = match.group(1)
+ return alias_map.get(name, name)
+
+ignored_nicknames = defaultdict(int)
+
+def get_more_authors(log):
+ match = pair_programming_re.match(log)
+ if not match:
+ return set()
+ ignore_words = ['around', 'consulting', 'yesterday', 'for a bit', 'thanks',
+ 'in-progress', 'bits of', 'even a little', 'floating',]
+ sep_words = ['and', ';', '+', '/', 'with special by']
+ nicknames = match.group(1)
+ for word in ignore_words:
+ nicknames = nicknames.replace(word, '')
+ for word in sep_words:
+ nicknames = nicknames.replace(word, ',')
+ nicknames = [nick.strip().lower() for nick in nicknames.split(',')]
+ authors = set()
+ for nickname in nicknames:
+ author = alias_map.get(nickname)
+ if not author:
+ ignored_nicknames[nickname] += 1
+ else:
+ authors.add(author)
+ return authors
+
+def main(show_numbers):
+ ui = mercurial.ui.ui()
+ repo = mercurial.localrepo.localrepository(ui, str(ROOT))
+ authors_count = defaultdict(int)
+ for i in repo:
+ ctx = repo[i]
+ authors = set()
+ authors.add(get_canonical_author(ctx.user()))
+ authors.update(get_more_authors(ctx.description()))
+ for author in authors:
+ if author not in excluded:
+ authors_count[author] += 1
+
+ # uncomment the next lines to get the list of nicknamed which could not be
+ # parsed from commit logs
+ ## items = ignored_nicknames.items()
+ ## items.sort(key=operator.itemgetter(1), reverse=True)
+ ## for name, n in items:
+ ## if show_numbers:
+ ## print '%5d %s' % (n, name)
+ ## else:
+ ## print name
+
+ items = authors_count.items()
+ items.sort(key=operator.itemgetter(1), reverse=True)
+ for name, n in items:
+ if show_numbers:
+ print '%5d %s' % (n, name)
+ else:
+ print name
+
+if __name__ == '__main__':
+ show_numbers = '-n' in sys.argv
+ main(show_numbers)
diff --git a/pypy/interpreter/baseobjspace.py b/pypy/interpreter/baseobjspace.py
--- a/pypy/interpreter/baseobjspace.py
+++ b/pypy/interpreter/baseobjspace.py
@@ -296,6 +296,7 @@
self.check_signal_action = None # changed by the signal module
self.user_del_action = UserDelAction(self)
self.frame_trace_action = FrameTraceAction(self)
+ self._code_of_sys_exc_info = None
from pypy.interpreter.pycode import cpython_magic, default_magic
self.our_magic = default_magic
@@ -467,9 +468,9 @@
if name not in modules:
modules.append(name)
- # a bit of custom logic: time2 or rctime take precedence over time
+ # a bit of custom logic: rctime take precedence over time
# XXX this could probably be done as a "requires" in the config
- if ('time2' in modules or 'rctime' in modules) and 'time' in modules:
+ if 'rctime' in modules and 'time' in modules:
modules.remove('time')
if not self.config.objspace.nofaking:
diff --git a/pypy/interpreter/executioncontext.py b/pypy/interpreter/executioncontext.py
--- a/pypy/interpreter/executioncontext.py
+++ b/pypy/interpreter/executioncontext.py
@@ -154,6 +154,7 @@
#operationerr.print_detailed_traceback(self.space)
def _convert_exc(self, operr):
+ # Only for the flow object space
return operr
def sys_exc_info(self): # attn: the result is not the wrapped sys.exc_info() !!!
@@ -166,6 +167,11 @@
frame = self.getnextframe_nohidden(frame)
return None
+ def set_sys_exc_info(self, operror):
+ frame = self.gettopframe_nohidden()
+ if frame: # else, the exception goes nowhere and is lost
+ frame.last_exception = operror
+
def settrace(self, w_func):
"""Set the global trace function."""
if self.space.is_w(w_func, self.space.w_None):
diff --git a/pypy/interpreter/function.py b/pypy/interpreter/function.py
--- a/pypy/interpreter/function.py
+++ b/pypy/interpreter/function.py
@@ -113,6 +113,12 @@
from pypy.interpreter.pycode import PyCode
code = self.getcode() # hook for the jit
+ #
+ if (jit.we_are_jitted() and code is self.space._code_of_sys_exc_info
+ and nargs == 0):
+ from pypy.module.sys.vm import exc_info_direct
+ return exc_info_direct(self.space, frame)
+ #
fast_natural_arity = code.fast_natural_arity
if nargs == fast_natural_arity:
if nargs == 0:
diff --git a/pypy/interpreter/gateway.py b/pypy/interpreter/gateway.py
--- a/pypy/interpreter/gateway.py
+++ b/pypy/interpreter/gateway.py
@@ -874,6 +874,12 @@
fn.add_to_table()
if gateway.as_classmethod:
fn = ClassMethod(space.wrap(fn))
+ #
+ from pypy.module.sys.vm import exc_info
+ if code._bltin is exc_info:
+ assert space._code_of_sys_exc_info is None
+ space._code_of_sys_exc_info = code
+ #
return fn
diff --git a/pypy/interpreter/pyparser/parsestring.py b/pypy/interpreter/pyparser/parsestring.py
--- a/pypy/interpreter/pyparser/parsestring.py
+++ b/pypy/interpreter/pyparser/parsestring.py
@@ -2,34 +2,39 @@
from pypy.interpreter import unicodehelper
from pypy.rlib.rstring import StringBuilder
-def parsestr(space, encoding, s, unicode_literals=False):
- # compiler.transformer.Transformer.decode_literal depends on what
- # might seem like minor details of this function -- changes here
- # must be reflected there.
+def parsestr(space, encoding, s, unicode_literal=False):
+ """Parses a string or unicode literal, and return a wrapped value.
+
+ If encoding=iso8859-1, the source string is also in this encoding.
+ If encoding=None, the source string is ascii only.
+ In other cases, the source string is in utf-8 encoding.
+
+ When a bytes string is returned, it will be encoded with the
+ original encoding.
+
+ Yes, it's very inefficient.
+ Yes, CPython has very similar code.
+ """
# we use ps as "pointer to s"
# q is the virtual last char index of the string
ps = 0
quote = s[ps]
rawmode = False
- unicode = unicode_literals
# string decoration handling
- o = ord(quote)
- isalpha = (o>=97 and o<=122) or (o>=65 and o<=90)
- if isalpha or quote == '_':
- if quote == 'b' or quote == 'B':
- ps += 1
- quote = s[ps]
- unicode = False
- elif quote == 'u' or quote == 'U':
- ps += 1
- quote = s[ps]
- unicode = True
- if quote == 'r' or quote == 'R':
- ps += 1
- quote = s[ps]
- rawmode = True
+ if quote == 'b' or quote == 'B':
+ ps += 1
+ quote = s[ps]
+ unicode_literal = False
+ elif quote == 'u' or quote == 'U':
+ ps += 1
+ quote = s[ps]
+ unicode_literal = True
+ if quote == 'r' or quote == 'R':
+ ps += 1
+ quote = s[ps]
+ rawmode = True
if quote != "'" and quote != '"':
raise_app_valueerror(space,
'Internal error: parser passed unquoted literal')
@@ -46,21 +51,28 @@
'unmatched triple quotes in literal')
q -= 2
- if unicode: # XXX Py_UnicodeFlag is ignored for now
+ if unicode_literal: # XXX Py_UnicodeFlag is ignored for now
if encoding is None or encoding == "iso-8859-1":
+ # 'unicode_escape' expects latin-1 bytes, string is ready.
buf = s
bufp = ps
bufq = q
u = None
else:
- # "\XX" may become "\u005c\uHHLL" (12 bytes)
+ # String is utf8-encoded, but 'unicode_escape' expects
+ # latin-1; So multibyte sequences must be escaped.
lis = [] # using a list to assemble the value
end = q
+ # Worst case: "\XX" may become "\u005c\uHHLL" (12 bytes)
while ps < end:
if s[ps] == '\\':
lis.append(s[ps])
ps += 1
if ord(s[ps]) & 0x80:
+ # A multibyte sequence will follow, it will be
+ # escaped like \u1234. To avoid confusion with
+ # the backslash we just wrote, we emit "\u005c"
+ # instead.
lis.append("u005c")
if ord(s[ps]) & 0x80: # XXX inefficient
w, ps = decode_utf8(space, s, ps, end, "utf-16-be")
@@ -86,13 +98,11 @@
need_encoding = (encoding is not None and
encoding != "utf-8" and encoding != "iso-8859-1")
- # XXX add strchr like interface to rtyper
assert 0 <= ps <= q
substr = s[ps : q]
if rawmode or '\\' not in s[ps:]:
if need_encoding:
w_u = space.wrap(unicodehelper.PyUnicode_DecodeUTF8(space, substr))
- #w_v = space.wrap(space.unwrap(w_u).encode(encoding)) this works
w_v = unicodehelper.PyUnicode_AsEncodedString(space, w_u, space.wrap(encoding))
return w_v
else:
diff --git a/pypy/interpreter/typedef.py b/pypy/interpreter/typedef.py
--- a/pypy/interpreter/typedef.py
+++ b/pypy/interpreter/typedef.py
@@ -321,7 +321,7 @@
def user_setup(self, space, w_subtype):
self.w__dict__ = space.newdict(
- instance=True, classofinstance=w_subtype)
+ instance=True)
base_user_setup(self, space, w_subtype)
def setclass(self, space, w_subtype):
diff --git a/pypy/jit/backend/llgraph/llimpl.py b/pypy/jit/backend/llgraph/llimpl.py
--- a/pypy/jit/backend/llgraph/llimpl.py
+++ b/pypy/jit/backend/llgraph/llimpl.py
@@ -1809,6 +1809,7 @@
if specialize_as_constant:
def specialize_call(self, hop):
llvalue = func(hop.args_s[0].const)
+ hop.exception_cannot_occur()
return hop.inputconst(lltype.typeOf(llvalue), llvalue)
else:
# specialize as direct_call
@@ -1825,6 +1826,7 @@
sm = ootype._static_meth(FUNCTYPE, _name=func.__name__, _callable=func)
cfunc = hop.inputconst(FUNCTYPE, sm)
args_v = hop.inputargs(*hop.args_r)
+ hop.exception_is_here()
return hop.genop('direct_call', [cfunc] + args_v, hop.r_result)
diff --git a/pypy/jit/backend/test/runner_test.py b/pypy/jit/backend/test/runner_test.py
--- a/pypy/jit/backend/test/runner_test.py
+++ b/pypy/jit/backend/test/runner_test.py
@@ -27,6 +27,12 @@
def constfloat(x):
return ConstFloat(longlong.getfloatstorage(x))
+def boxlonglong(ll):
+ if longlong.is_64_bit:
+ return BoxInt(ll)
+ else:
+ return BoxFloat(ll)
+
class Runner(object):
@@ -1782,6 +1788,11 @@
[boxfloat(2.5)], t).value
assert res == longlong2float.float2longlong(2.5)
+ bytes = longlong2float.float2longlong(2.5)
+ res = self.execute_operation(rop.CONVERT_LONGLONG_BYTES_TO_FLOAT,
+ [boxlonglong(res)], 'float').value
+ assert longlong.getrealfloat(res) == 2.5
+
def test_ooops_non_gc(self):
x = lltype.malloc(lltype.Struct('x'), flavor='raw')
v = heaptracker.adr2int(llmemory.cast_ptr_to_adr(x))
diff --git a/pypy/jit/backend/test/test_random.py b/pypy/jit/backend/test/test_random.py
--- a/pypy/jit/backend/test/test_random.py
+++ b/pypy/jit/backend/test/test_random.py
@@ -328,6 +328,15 @@
def produce_into(self, builder, r):
self.put(builder, [r.choice(builder.intvars)])
+class CastLongLongToFloatOperation(AbstractFloatOperation):
+ def produce_into(self, builder, r):
+ if longlong.is_64_bit:
+ self.put(builder, [r.choice(builder.intvars)])
+ else:
+ if not builder.floatvars:
+ raise CannotProduceOperation
+ self.put(builder, [r.choice(builder.floatvars)])
+
class CastFloatToIntOperation(AbstractFloatOperation):
def produce_into(self, builder, r):
if not builder.floatvars:
@@ -450,6 +459,7 @@
OPERATIONS.append(CastFloatToIntOperation(rop.CAST_FLOAT_TO_INT))
OPERATIONS.append(CastIntToFloatOperation(rop.CAST_INT_TO_FLOAT))
OPERATIONS.append(CastFloatToIntOperation(rop.CONVERT_FLOAT_BYTES_TO_LONGLONG))
+OPERATIONS.append(CastLongLongToFloatOperation(rop.CONVERT_LONGLONG_BYTES_TO_FLOAT))
OperationBuilder.OPERATIONS = OPERATIONS
diff --git a/pypy/jit/backend/x86/assembler.py b/pypy/jit/backend/x86/assembler.py
--- a/pypy/jit/backend/x86/assembler.py
+++ b/pypy/jit/backend/x86/assembler.py
@@ -1251,6 +1251,15 @@
else:
self.mov(loc0, resloc)
+ def genop_convert_longlong_bytes_to_float(self, op, arglocs, resloc):
+ loc0, = arglocs
+ if longlong.is_64_bit:
+ assert isinstance(resloc, RegLoc)
+ assert isinstance(loc0, RegLoc)
+ self.mc.MOVD(resloc, loc0)
+ else:
+ self.mov(loc0, resloc)
+
def genop_guard_int_is_true(self, op, guard_op, guard_token, arglocs, resloc):
guard_opnum = guard_op.getopnum()
self.mc.CMP(arglocs[0], imm0)
diff --git a/pypy/jit/backend/x86/regalloc.py b/pypy/jit/backend/x86/regalloc.py
--- a/pypy/jit/backend/x86/regalloc.py
+++ b/pypy/jit/backend/x86/regalloc.py
@@ -719,7 +719,20 @@
loc0 = self.xrm.loc(arg0)
loc1 = self.xrm.force_allocate_reg(op.result, forbidden_vars=[arg0])
self.Perform(op, [loc0], loc1)
- self.xrm.possibly_free_var(op.getarg(0))
+ self.xrm.possibly_free_var(arg0)
+
+ def consider_convert_longlong_bytes_to_float(self, op):
+ if longlong.is_64_bit:
+ loc0 = self.rm.make_sure_var_in_reg(op.getarg(0))
+ loc1 = self.xrm.force_allocate_reg(op.result)
+ self.Perform(op, [loc0], loc1)
+ self.rm.possibly_free_var(op.getarg(0))
+ else:
+ arg0 = op.getarg(0)
+ loc0 = self.xrm.make_sure_var_in_reg(arg0)
+ loc1 = self.xrm.force_allocate_reg(op.result, forbidden_vars=[arg0])
+ self.Perform(op, [loc0], loc1)
+ self.xrm.possibly_free_var(arg0)
def _consider_llong_binop_xx(self, op):
# must force both arguments into xmm registers, because we don't
diff --git a/pypy/jit/codewriter/jtransform.py b/pypy/jit/codewriter/jtransform.py
--- a/pypy/jit/codewriter/jtransform.py
+++ b/pypy/jit/codewriter/jtransform.py
@@ -295,6 +295,7 @@
return op
rewrite_op_convert_float_bytes_to_longlong = _noop_rewrite
+ rewrite_op_convert_longlong_bytes_to_float = _noop_rewrite
# ----------
# Various kinds of calls
diff --git a/pypy/jit/codewriter/test/test_flatten.py b/pypy/jit/codewriter/test/test_flatten.py
--- a/pypy/jit/codewriter/test/test_flatten.py
+++ b/pypy/jit/codewriter/test/test_flatten.py
@@ -968,20 +968,22 @@
int_return %i2
""", transform=True)
- def test_convert_float_bytes_to_int(self):
- from pypy.rlib.longlong2float import float2longlong
+ def test_convert_float_bytes(self):
+ from pypy.rlib.longlong2float import float2longlong, longlong2float
def f(x):
- return float2longlong(x)
+ ll = float2longlong(x)
+ return longlong2float(ll)
if longlong.is_64_bit:
- result_var = "%i0"
- return_op = "int_return"
+ tmp_var = "%i0"
+ result_var = "%f1"
else:
- result_var = "%f1"
- return_op = "float_return"
+ tmp_var = "%f1"
+ result_var = "%f2"
self.encoding_test(f, [25.0], """
- convert_float_bytes_to_longlong %%f0 -> %(result_var)s
- %(return_op)s %(result_var)s
- """ % {"result_var": result_var, "return_op": return_op})
+ convert_float_bytes_to_longlong %%f0 -> %(tmp_var)s
+ convert_longlong_bytes_to_float %(tmp_var)s -> %(result_var)s
+ float_return %(result_var)s
+ """ % {"result_var": result_var, "tmp_var": tmp_var}, transform=True)
def check_force_cast(FROM, TO, operations, value):
diff --git a/pypy/jit/metainterp/blackhole.py b/pypy/jit/metainterp/blackhole.py
--- a/pypy/jit/metainterp/blackhole.py
+++ b/pypy/jit/metainterp/blackhole.py
@@ -672,6 +672,11 @@
a = longlong.getrealfloat(a)
return longlong2float.float2longlong(a)
+ @arguments(LONGLONG_TYPECODE, returns="f")
+ def bhimpl_convert_longlong_bytes_to_float(a):
+ a = longlong2float.longlong2float(a)
+ return longlong.getfloatstorage(a)
+
# ----------
# control flow operations
diff --git a/pypy/jit/metainterp/pyjitpl.py b/pypy/jit/metainterp/pyjitpl.py
--- a/pypy/jit/metainterp/pyjitpl.py
+++ b/pypy/jit/metainterp/pyjitpl.py
@@ -224,6 +224,7 @@
'float_neg', 'float_abs',
'cast_ptr_to_int', 'cast_int_to_ptr',
'convert_float_bytes_to_longlong',
+ 'convert_longlong_bytes_to_float',
]:
exec py.code.Source('''
@arguments("box")
diff --git a/pypy/jit/metainterp/resoperation.py b/pypy/jit/metainterp/resoperation.py
--- a/pypy/jit/metainterp/resoperation.py
+++ b/pypy/jit/metainterp/resoperation.py
@@ -420,6 +420,7 @@
'CAST_FLOAT_TO_SINGLEFLOAT/1',
'CAST_SINGLEFLOAT_TO_FLOAT/1',
'CONVERT_FLOAT_BYTES_TO_LONGLONG/1',
+ 'CONVERT_LONGLONG_BYTES_TO_FLOAT/1',
#
'INT_LT/2b',
'INT_LE/2b',
diff --git a/pypy/jit/metainterp/test/test_ajit.py b/pypy/jit/metainterp/test/test_ajit.py
--- a/pypy/jit/metainterp/test/test_ajit.py
+++ b/pypy/jit/metainterp/test/test_ajit.py
@@ -1,3 +1,4 @@
+import math
import sys
import py
@@ -15,7 +16,7 @@
loop_invariant, elidable, promote, jit_debug, assert_green,
AssertGreenFailed, unroll_safe, current_trace_length, look_inside_iff,
isconstant, isvirtual, promote_string, set_param, record_known_class)
-from pypy.rlib.longlong2float import float2longlong
+from pypy.rlib.longlong2float import float2longlong, longlong2float
from pypy.rlib.rarithmetic import ovfcheck, is_valid_int
from pypy.rpython.lltypesystem import lltype, llmemory, rffi
from pypy.rpython.ootypesystem import ootype
@@ -3795,15 +3796,15 @@
res = self.interp_operations(g, [1])
assert res == 3
- def test_float2longlong(self):
+ def test_float_bytes(self):
def f(n):
- return float2longlong(n)
+ ll = float2longlong(n)
+ return longlong2float(ll)
for x in [2.5, float("nan"), -2.5, float("inf")]:
# There are tests elsewhere to verify the correctness of this.
- expected = float2longlong(x)
res = self.interp_operations(f, [x])
- assert longlong.getfloatstorage(res) == expected
+ assert res == x or math.isnan(x) and math.isnan(res)
class TestLLtype(BaseLLtypeTests, LLJitMixin):
diff --git a/pypy/module/_ssl/interp_ssl.py b/pypy/module/_ssl/interp_ssl.py
--- a/pypy/module/_ssl/interp_ssl.py
+++ b/pypy/module/_ssl/interp_ssl.py
@@ -432,7 +432,8 @@
raise _ssl_seterror(self.space, self, length)
try:
# this is actually an immutable bytes sequence
- return self.space.wrap(rffi.charp2str(buf_ptr[0]))
+ return self.space.wrap(rffi.charpsize2str(buf_ptr[0],
+ length))
finally:
libssl_OPENSSL_free(buf_ptr[0])
else:
diff --git a/pypy/module/array/interp_array.py b/pypy/module/array/interp_array.py
--- a/pypy/module/array/interp_array.py
+++ b/pypy/module/array/interp_array.py
@@ -11,7 +11,7 @@
from pypy.objspace.std.register_all import register_all
from pypy.rlib.rarithmetic import ovfcheck
from pypy.rlib.unroll import unrolling_iterable
-from pypy.rlib.objectmodel import specialize
+from pypy.rlib.objectmodel import specialize, keepalive_until_here
from pypy.rpython.lltypesystem import lltype, rffi
@@ -145,18 +145,24 @@
unroll_typecodes = unrolling_iterable(types.keys())
class ArrayBuffer(RWBuffer):
- def __init__(self, data, bytes):
- self.data = data
- self.len = bytes
+ def __init__(self, array):
+ self.array = array
def getlength(self):
- return self.len
+ return self.array.len * self.array.itemsize
def getitem(self, index):
- return self.data[index]
+ array = self.array
+ data = array._charbuf_start()
+ char = data[index]
+ array._charbuf_stop()
+ return char
def setitem(self, index, char):
- self.data[index] = char
+ array = self.array
+ data = array._charbuf_start()
+ data[index] = char
+ array._charbuf_stop()
def make_array(mytype):
@@ -278,9 +284,10 @@
oldlen = self.len
new = len(s) / mytype.bytes
self.setlen(oldlen + new)
- cbuf = self.charbuf()
+ cbuf = self._charbuf_start()
for i in range(len(s)):
cbuf[oldlen * mytype.bytes + i] = s[i]
+ self._charbuf_stop()
def fromlist(self, w_lst):
s = self.len
@@ -310,8 +317,11 @@
else:
self.fromsequence(w_iterable)
- def charbuf(self):
- return rffi.cast(rffi.CCHARP, self.buffer)
+ def _charbuf_start(self):
+ return rffi.cast(rffi.CCHARP, self.buffer)
+
+ def _charbuf_stop(self):
+ keepalive_until_here(self)
def w_getitem(self, space, idx):
item = self.buffer[idx]
@@ -530,8 +540,10 @@
self.fromstring(space.str_w(w_s))
def array_tostring__Array(space, self):
- cbuf = self.charbuf()
- return self.space.wrap(rffi.charpsize2str(cbuf, self.len * mytype.bytes))
+ cbuf = self._charbuf_start()
+ s = rffi.charpsize2str(cbuf, self.len * mytype.bytes)
+ self._charbuf_stop()
+ return self.space.wrap(s)
def array_fromfile__Array_ANY_ANY(space, self, w_f, w_n):
if not isinstance(w_f, W_File):
@@ -613,8 +625,7 @@
# Misc methods
def buffer__Array(space, self):
- b = ArrayBuffer(self.charbuf(), self.len * mytype.bytes)
- return space.wrap(b)
+ return space.wrap(ArrayBuffer(self))
def array_buffer_info__Array(space, self):
w_ptr = space.wrap(rffi.cast(lltype.Unsigned, self.buffer))
@@ -649,7 +660,7 @@
raise OperationError(space.w_RuntimeError, space.wrap(msg))
if self.len == 0:
return
- bytes = self.charbuf()
+ bytes = self._charbuf_start()
tmp = [bytes[0]] * mytype.bytes
for start in range(0, self.len * mytype.bytes, mytype.bytes):
stop = start + mytype.bytes - 1
@@ -657,6 +668,7 @@
tmp[i] = bytes[start + i]
for i in range(mytype.bytes):
bytes[stop - i] = tmp[i]
+ self._charbuf_stop()
def repr__Array(space, self):
if self.len == 0:
diff --git a/pypy/module/array/test/test_array.py b/pypy/module/array/test/test_array.py
--- a/pypy/module/array/test/test_array.py
+++ b/pypy/module/array/test/test_array.py
@@ -433,7 +433,25 @@
a = self.array('h', 'Hi')
buf = buffer(a)
assert buf[1] == 'i'
- #raises(TypeError, buf.__setitem__, 1, 'o')
+
+ def test_buffer_write(self):
+ a = self.array('c', 'hello')
+ buf = buffer(a)
+ print repr(buf)
+ try:
+ buf[3] = 'L'
+ except TypeError:
+ skip("buffer(array) returns a read-only buffer on CPython")
+ assert a.tostring() == 'helLo'
+
+ def test_buffer_keepalive(self):
+ buf = buffer(self.array('c', 'text'))
+ assert buf[2] == 'x'
+ #
+ a = self.array('c', 'foobarbaz')
+ buf = buffer(a)
+ a.fromstring('some extra text')
+ assert buf[:] == 'foobarbazsome extra text'
def test_list_methods(self):
assert repr(self.array('i')) == "array('i')"
diff --git a/pypy/module/cpyext/bufferobject.py b/pypy/module/cpyext/bufferobject.py
--- a/pypy/module/cpyext/bufferobject.py
+++ b/pypy/module/cpyext/bufferobject.py
@@ -2,8 +2,10 @@
from pypy.module.cpyext.api import (
cpython_api, Py_ssize_t, cpython_struct, bootstrap_function,
PyObjectFields, PyObject)
-from pypy.module.cpyext.pyobject import make_typedescr, Py_DecRef
+from pypy.module.cpyext.pyobject import make_typedescr, Py_DecRef, make_ref
from pypy.interpreter.buffer import Buffer, StringBuffer, SubBuffer
+from pypy.interpreter.error import OperationError
+from pypy.module.array.interp_array import ArrayBuffer
PyBufferObjectStruct = lltype.ForwardReference()
@@ -41,26 +43,38 @@
py_buf.c_b_offset = w_obj.offset
w_obj = w_obj.buffer
+ # If w_obj already allocated a fixed buffer, use it, and keep a
+ # reference to w_obj.
+ # Otherwise, b_base stays NULL, and we own the b_ptr.
+
if isinstance(w_obj, StringBuffer):
- py_buf.c_b_base = rffi.cast(PyObject, 0) # space.wrap(w_obj.value)
- py_buf.c_b_ptr = rffi.cast(rffi.VOIDP, rffi.str2charp(w_obj.as_str()))
+ py_buf.c_b_base = lltype.nullptr(PyObject.TO)
+ py_buf.c_b_ptr = rffi.cast(rffi.VOIDP, rffi.str2charp(w_obj.value))
+ py_buf.c_b_size = w_obj.getlength()
+ elif isinstance(w_obj, ArrayBuffer):
+ w_base = w_obj.array
+ py_buf.c_b_base = make_ref(space, w_base)
+ py_buf.c_b_ptr = rffi.cast(rffi.VOIDP, w_obj.array._charbuf_start())
py_buf.c_b_size = w_obj.getlength()
else:
- raise Exception("Fail fail fail fail fail")
+ raise OperationError(space.w_NotImplementedError, space.wrap(
+ "buffer flavor not supported"))
def buffer_realize(space, py_obj):
"""
Creates the buffer in the PyPy interpreter from a cpyext representation.
"""
- raise Exception("realize fail fail fail")
-
+ raise OperationError(space.w_NotImplementedError, space.wrap(
+ "Don't know how to realize a buffer"))
@cpython_api([PyObject], lltype.Void, external=False)
def buffer_dealloc(space, py_obj):
py_buf = rffi.cast(PyBufferObject, py_obj)
- Py_DecRef(space, py_buf.c_b_base)
- rffi.free_charp(rffi.cast(rffi.CCHARP, py_buf.c_b_ptr))
+ if py_buf.c_b_base:
+ Py_DecRef(space, py_buf.c_b_base)
+ else:
+ rffi.free_charp(rffi.cast(rffi.CCHARP, py_buf.c_b_ptr))
from pypy.module.cpyext.object import PyObject_dealloc
PyObject_dealloc(space, py_obj)
diff --git a/pypy/module/cpyext/include/patchlevel.h b/pypy/module/cpyext/include/patchlevel.h
--- a/pypy/module/cpyext/include/patchlevel.h
+++ b/pypy/module/cpyext/include/patchlevel.h
@@ -29,7 +29,7 @@
#define PY_VERSION "2.7.2"
/* PyPy version as a string */
-#define PYPY_VERSION "1.8.1"
+#define PYPY_VERSION "1.9.1"
/* Subversion Revision number of this file (not of the repository).
* Empty since Mercurial migration. */
diff --git a/pypy/module/cpyext/pyerrors.py b/pypy/module/cpyext/pyerrors.py
--- a/pypy/module/cpyext/pyerrors.py
+++ b/pypy/module/cpyext/pyerrors.py
@@ -2,6 +2,7 @@
from pypy.rpython.lltypesystem import rffi, lltype
from pypy.interpreter.error import OperationError
+from pypy.interpreter import pytraceback
from pypy.module.cpyext.api import cpython_api, CANNOT_FAIL, CONST_STRING
from pypy.module.exceptions.interp_exceptions import W_RuntimeWarning
from pypy.module.cpyext.pyobject import (
@@ -315,3 +316,65 @@
It may be called without holding the interpreter lock."""
space.check_signal_action.set_interrupt()
+@cpython_api([PyObjectP, PyObjectP, PyObjectP], lltype.Void)
+def PyErr_GetExcInfo(space, ptype, pvalue, ptraceback):
+ """---Cython extension---
+
+ Retrieve the exception info, as known from ``sys.exc_info()``. This
+ refers to an exception that was already caught, not to an exception
+ that was freshly raised. Returns new references for the three
+ objects, any of which may be *NULL*. Does not modify the exception
+ info state.
+
+ .. note::
+
+ This function is not normally used by code that wants to handle
+ exceptions. Rather, it can be used when code needs to save and
+ restore the exception state temporarily. Use
+ :c:func:`PyErr_SetExcInfo` to restore or clear the exception
+ state.
+ """
+ ec = space.getexecutioncontext()
+ operror = ec.sys_exc_info()
+ if operror:
+ ptype[0] = make_ref(space, operror.w_type)
+ pvalue[0] = make_ref(space, operror.get_w_value(space))
+ ptraceback[0] = make_ref(space, space.wrap(operror.get_traceback()))
+ else:
+ ptype[0] = lltype.nullptr(PyObject.TO)
+ pvalue[0] = lltype.nullptr(PyObject.TO)
+ ptraceback[0] = lltype.nullptr(PyObject.TO)
+
+@cpython_api([PyObject, PyObject, PyObject], lltype.Void)
+def PyErr_SetExcInfo(space, w_type, w_value, w_traceback):
+ """---Cython extension---
+
+ Set the exception info, as known from ``sys.exc_info()``. This refers
+ to an exception that was already caught, not to an exception that was
+ freshly raised. This function steals the references of the arguments.
+ To clear the exception state, pass *NULL* for all three arguments.
+ For general rules about the three arguments, see :c:func:`PyErr_Restore`.
+
+ .. note::
+
+ This function is not normally used by code that wants to handle
+ exceptions. Rather, it can be used when code needs to save and
+ restore the exception state temporarily. Use
+ :c:func:`PyErr_GetExcInfo` to read the exception state.
+ """
+ if w_value is None or space.is_w(w_value, space.w_None):
+ operror = None
+ else:
+ tb = None
+ if w_traceback is not None:
+ try:
+ tb = pytraceback.check_traceback(space, w_traceback, '?')
+ except OperationError: # catch and ignore bogus objects
+ pass
+ operror = OperationError(w_type, w_value, tb)
+ #
+ ec = space.getexecutioncontext()
+ ec.set_sys_exc_info(operror)
+ Py_DecRef(space, w_type)
+ Py_DecRef(space, w_value)
+ Py_DecRef(space, w_traceback)
diff --git a/pypy/module/cpyext/slotdefs.py b/pypy/module/cpyext/slotdefs.py
--- a/pypy/module/cpyext/slotdefs.py
+++ b/pypy/module/cpyext/slotdefs.py
@@ -167,14 +167,16 @@
if rffi.cast(lltype.Signed, res) == -1:
space.fromcache(State).check_and_raise_exception(always=True)
+# Warning, confusing function name (like CPython). Used only for sq_contains.
def wrap_objobjproc(space, w_self, w_args, func):
func_target = rffi.cast(objobjproc, func)
check_num_args(space, w_args, 1)
w_value, = space.fixedview(w_args)
res = generic_cpy_call(space, func_target, w_self, w_value)
- if rffi.cast(lltype.Signed, res) == -1:
+ res = rffi.cast(lltype.Signed, res)
+ if res == -1:
space.fromcache(State).check_and_raise_exception(always=True)
- return space.wrap(res)
+ return space.wrap(bool(res))
def wrap_objobjargproc(space, w_self, w_args, func):
func_target = rffi.cast(objobjargproc, func)
@@ -183,7 +185,7 @@
res = generic_cpy_call(space, func_target, w_self, w_key, w_value)
if rffi.cast(lltype.Signed, res) == -1:
space.fromcache(State).check_and_raise_exception(always=True)
- return space.wrap(res)
+ return space.w_None
def wrap_delitem(space, w_self, w_args, func):
func_target = rffi.cast(objobjargproc, func)
diff --git a/pypy/module/cpyext/test/foo.c b/pypy/module/cpyext/test/foo.c
--- a/pypy/module/cpyext/test/foo.c
+++ b/pypy/module/cpyext/test/foo.c
@@ -176,6 +176,8 @@
{NULL} /* Sentinel */
};
+PyDoc_STRVAR(foo_doc, "foo is for testing.");
+
static PyTypeObject footype = {
PyVarObject_HEAD_INIT(NULL, 0)
"foo.foo", /*tp_name*/
@@ -198,7 +200,7 @@
(setattrofunc)foo_setattro, /*tp_setattro*/
0, /*tp_as_buffer*/
Py_TPFLAGS_DEFAULT, /*tp_flags*/
- 0, /*tp_doc*/
+ foo_doc, /*tp_doc*/
0, /*tp_traverse*/
0, /*tp_clear*/
0, /*tp_richcompare*/
diff --git a/pypy/module/cpyext/test/test_bufferobject.py b/pypy/module/cpyext/test/test_bufferobject.py
--- a/pypy/module/cpyext/test/test_bufferobject.py
+++ b/pypy/module/cpyext/test/test_bufferobject.py
@@ -48,3 +48,17 @@
])
b = module.buffer_new()
raises(AttributeError, getattr, b, 'x')
+
+ def test_array_buffer(self):
+ module = self.import_extension('foo', [
+ ("roundtrip", "METH_O",
+ """
+ PyBufferObject *buf = (PyBufferObject *)args;
+ return PyString_FromStringAndSize(buf->b_ptr, buf->b_size);
+ """),
+ ])
+ import array
+ a = array.array('c', 'text')
+ b = buffer(a)
+ assert module.roundtrip(b) == 'text'
+
diff --git a/pypy/module/cpyext/test/test_pyerrors.py b/pypy/module/cpyext/test/test_pyerrors.py
--- a/pypy/module/cpyext/test/test_pyerrors.py
+++ b/pypy/module/cpyext/test/test_pyerrors.py
@@ -218,3 +218,51 @@
assert e.filename == "blyf"
assert e.errno == errno.EBADF
assert e.strerror == os.strerror(errno.EBADF)
+
+ def test_GetSetExcInfo(self):
+ import sys
+ module = self.import_extension('foo', [
+ ("getset_exc_info", "METH_VARARGS",
+ r'''
+ PyObject *type, *val, *tb;
+ PyObject *new_type, *new_val, *new_tb;
+ PyObject *result;
+
+ if (!PyArg_ParseTuple(args, "OOO", &new_type, &new_val, &new_tb))
+ return NULL;
+
+ PyErr_GetExcInfo(&type, &val, &tb);
+
+ Py_INCREF(new_type);
+ Py_INCREF(new_val);
+ Py_INCREF(new_tb);
+ PyErr_SetExcInfo(new_type, new_val, new_tb);
+
+ result = Py_BuildValue("OOO",
+ type ? type : Py_None,
+ val ? val : Py_None,
+ tb ? tb : Py_None);
+ Py_XDECREF(type);
+ Py_XDECREF(val);
+ Py_XDECREF(tb);
+ return result;
+ '''
+ ),
+ ])
+ try:
+ raise ValueError(5)
+ except ValueError, old_exc:
+ new_exc = TypeError("TEST")
+ orig_sys_exc_info = sys.exc_info()
+ orig_exc_info = module.getset_exc_info(new_exc.__class__,
+ new_exc, None)
+ new_sys_exc_info = sys.exc_info()
+ new_exc_info = module.getset_exc_info(*orig_exc_info)
+ reset_sys_exc_info = sys.exc_info()
+
+ assert orig_exc_info[0] is old_exc.__class__
+ assert orig_exc_info[1] is old_exc
+ assert orig_exc_info == orig_sys_exc_info
+ assert orig_exc_info == reset_sys_exc_info
+ assert new_exc_info == (new_exc.__class__, new_exc, None)
+ assert new_exc_info == new_sys_exc_info
diff --git a/pypy/module/cpyext/test/test_typeobject.py b/pypy/module/cpyext/test/test_typeobject.py
--- a/pypy/module/cpyext/test/test_typeobject.py
+++ b/pypy/module/cpyext/test/test_typeobject.py
@@ -20,6 +20,7 @@
assert type(obj) is module.fooType
print "type of obj has type", type(type(obj))
print "type of type of obj has type", type(type(type(obj)))
+ assert module.fooType.__doc__ == "foo is for testing."
def test_typeobject_method_descriptor(self):
module = self.import_module(name='foo')
@@ -414,8 +415,11 @@
static int
mp_ass_subscript(PyObject *self, PyObject *key, PyObject *value)
{
- PyErr_SetNone(PyExc_ZeroDivisionError);
- return -1;
+ if (PyInt_Check(key)) {
+ PyErr_SetNone(PyExc_ZeroDivisionError);
+ return -1;
+ }
+ return 0;
}
PyMappingMethods tp_as_mapping;
static PyTypeObject Foo_Type = {
@@ -425,6 +429,36 @@
''')
obj = module.new_obj()
raises(ZeroDivisionError, obj.__setitem__, 5, None)
+ res = obj.__setitem__('foo', None)
+ assert res is None
+
+ def test_sq_contains(self):
+ module = self.import_extension('foo', [
+ ("new_obj", "METH_NOARGS",
+ '''
+ PyObject *obj;
+ Foo_Type.tp_as_sequence = &tp_as_sequence;
+ tp_as_sequence.sq_contains = sq_contains;
+ if (PyType_Ready(&Foo_Type) < 0) return NULL;
+ obj = PyObject_New(PyObject, &Foo_Type);
+ return obj;
+ '''
+ )],
+ '''
+ static int
+ sq_contains(PyObject *self, PyObject *value)
+ {
+ return 42;
+ }
+ PySequenceMethods tp_as_sequence;
+ static PyTypeObject Foo_Type = {
+ PyVarObject_HEAD_INIT(NULL, 0)
+ "foo.foo",
+ };
+ ''')
+ obj = module.new_obj()
+ res = "foo" in obj
+ assert res is True
def test_tp_iter(self):
module = self.import_extension('foo', [
diff --git a/pypy/module/cpyext/typeobject.py b/pypy/module/cpyext/typeobject.py
--- a/pypy/module/cpyext/typeobject.py
+++ b/pypy/module/cpyext/typeobject.py
@@ -307,6 +307,8 @@
if not space.is_true(space.issubtype(self, space.w_type)):
self.flag_cpytype = True
self.flag_heaptype = False
+ if pto.c_tp_doc:
+ self.w_doc = space.wrap(rffi.charp2str(pto.c_tp_doc))
@bootstrap_function
def init_typeobject(space):
@@ -624,7 +626,6 @@
Creates an interpreter type from a PyTypeObject structure.
"""
# missing:
- # setting __doc__ if not defined and tp_doc defined
# inheriting tp_as_* slots
# unsupported:
# tp_mro, tp_subclasses
diff --git a/pypy/module/micronumpy/__init__.py b/pypy/module/micronumpy/__init__.py
--- a/pypy/module/micronumpy/__init__.py
+++ b/pypy/module/micronumpy/__init__.py
@@ -29,6 +29,7 @@
'flatiter': 'interp_numarray.W_FlatIterator',
'isna': 'interp_numarray.isna',
'concatenate': 'interp_numarray.concatenate',
+ 'repeat': 'interp_numarray.repeat',
'set_string_function': 'appbridge.set_string_function',
@@ -99,9 +100,12 @@
("exp2", "exp2"),
("expm1", "expm1"),
("fabs", "fabs"),
+ ("fmax", "fmax"),
+ ("fmin", "fmin"),
("fmod", "fmod"),
("floor", "floor"),
("ceil", "ceil"),
+ ("trunc", "trunc"),
("greater", "greater"),
("greater_equal", "greater_equal"),
("less", "less"),
@@ -122,12 +126,16 @@
("sinh", "sinh"),
("subtract", "subtract"),
('sqrt', 'sqrt'),
+ ('square', 'square'),
("tan", "tan"),
("tanh", "tanh"),
('bitwise_and', 'bitwise_and'),
('bitwise_or', 'bitwise_or'),
('bitwise_xor', 'bitwise_xor'),
('bitwise_not', 'invert'),
+ ('left_shift', 'left_shift'),
+ ('right_shift', 'right_shift'),
+ ('invert', 'invert'),
('isnan', 'isnan'),
('isinf', 'isinf'),
('isneginf', 'isneginf'),
diff --git a/pypy/module/micronumpy/interp_numarray.py b/pypy/module/micronumpy/interp_numarray.py
--- a/pypy/module/micronumpy/interp_numarray.py
+++ b/pypy/module/micronumpy/interp_numarray.py
@@ -673,6 +673,10 @@
def compute_first_step(self, sig, frame):
pass
+ @unwrap_spec(repeats=int)
+ def descr_repeat(self, space, repeats, w_axis=None):
+ return repeat(space, self, repeats, w_axis)
+
def convert_to_array(space, w_obj):
if isinstance(w_obj, BaseArray):
return w_obj
@@ -1261,6 +1265,31 @@
return convert_to_array(space, w_obj2).descr_dot(space, w_arr)
return w_arr.descr_dot(space, w_obj2)
+@unwrap_spec(repeats=int)
+def repeat(space, w_arr, repeats, w_axis=None):
+ arr = convert_to_array(space, w_arr)
+ if space.is_w(w_axis, space.w_None):
+ arr = arr.descr_flatten(space).get_concrete()
+ orig_size = arr.shape[0]
+ shape = [arr.shape[0] * repeats]
+ res = W_NDimArray(shape, arr.find_dtype())
+ for i in range(repeats):
+ Chunks([Chunk(i, shape[0] - repeats + i, repeats,
+ orig_size)]).apply(res).setslice(space, arr)
+ else:
+ arr = arr.get_concrete()
+ axis = space.int_w(w_axis)
+ shape = arr.shape[:]
+ chunks = [Chunk(0, i, 1, i) for i in shape]
+ orig_size = shape[axis]
+ shape[axis] *= repeats
+ res = W_NDimArray(shape, arr.find_dtype())
+ for i in range(repeats):
+ chunks[axis] = Chunk(i, shape[axis] - repeats + i, repeats,
+ orig_size)
+ Chunks(chunks).apply(res).setslice(space, arr)
+ return res
+
@unwrap_spec(axis=int)
def concatenate(space, w_args, axis=0):
args_w = space.listview(w_args)
@@ -1386,6 +1415,7 @@
tolist = interp2app(BaseArray.descr_tolist),
take = interp2app(BaseArray.descr_take),
compress = interp2app(BaseArray.descr_compress),
+ repeat = interp2app(BaseArray.descr_repeat),
)
diff --git a/pypy/module/micronumpy/interp_ufuncs.py b/pypy/module/micronumpy/interp_ufuncs.py
--- a/pypy/module/micronumpy/interp_ufuncs.py
+++ b/pypy/module/micronumpy/interp_ufuncs.py
@@ -3,9 +3,11 @@
from pypy.interpreter.gateway import interp2app, unwrap_spec, NoneNotWrapped
from pypy.interpreter.typedef import TypeDef, GetSetProperty, interp_attrproperty
from pypy.module.micronumpy import interp_boxes, interp_dtype, support, loop
+from pypy.rlib import jit
from pypy.rlib.rarithmetic import LONG_BIT
from pypy.tool.sourcetools import func_with_new_name
+
class W_Ufunc(Wrappable):
_attrs_ = ["name", "promote_to_float", "promote_bools", "identity"]
_immutable_fields_ = ["promote_to_float", "promote_bools", "name"]
@@ -28,7 +30,7 @@
return self.identity
def descr_call(self, space, __args__):
- from interp_numarray import BaseArray
+ from interp_numarray import BaseArray
args_w, kwds_w = __args__.unpack()
# it occurs to me that we don't support any datatypes that
# require casting, change it later when we do
@@ -179,7 +181,7 @@
elif out.shape != shape:
raise operationerrfmt(space.w_ValueError,
'output parameter shape mismatch, expecting [%s]' +
- ' , got [%s]',
+ ' , got [%s]',
",".join([str(x) for x in shape]),
",".join([str(x) for x in out.shape]),
)
@@ -204,7 +206,7 @@
else:
arr = ReduceArray(self.func, self.name, self.identity, obj, dtype)
val = loop.compute(arr)
- return val
+ return val
def do_axis_reduce(self, obj, dtype, axis, result):
from pypy.module.micronumpy.interp_numarray import AxisReduce
@@ -253,7 +255,7 @@
if isinstance(w_obj, Scalar):
arr = self.func(calc_dtype, w_obj.value.convert_to(calc_dtype))
if isinstance(out,Scalar):
- out.value=arr
+ out.value = arr
elif isinstance(out, BaseArray):
out.fill(space, arr)
else:
@@ -265,7 +267,7 @@
if not broadcast_shape or broadcast_shape != out.shape:
raise operationerrfmt(space.w_ValueError,
'output parameter shape mismatch, could not broadcast [%s]' +
- ' to [%s]',
+ ' to [%s]',
",".join([str(x) for x in w_obj.shape]),
",".join([str(x) for x in out.shape]),
)
@@ -292,10 +294,11 @@
self.func = func
self.comparison_func = comparison_func
+ @jit.unroll_safe
def call(self, space, args_w):
from pypy.module.micronumpy.interp_numarray import (Call2,
convert_to_array, Scalar, shape_agreement, BaseArray)
- if len(args_w)>2:
+ if len(args_w) > 2:
[w_lhs, w_rhs, w_out] = args_w
else:
[w_lhs, w_rhs] = args_w
@@ -326,7 +329,7 @@
w_rhs.value.convert_to(calc_dtype)
)
if isinstance(out,Scalar):
- out.value=arr
+ out.value = arr
elif isinstance(out, BaseArray):
out.fill(space, arr)
else:
@@ -337,7 +340,7 @@
if out and out.shape != shape_agreement(space, new_shape, out.shape):
raise operationerrfmt(space.w_ValueError,
'output parameter shape mismatch, could not broadcast [%s]' +
- ' to [%s]',
+ ' to [%s]',
",".join([str(x) for x in new_shape]),
",".join([str(x) for x in out.shape]),
)
@@ -347,7 +350,6 @@
w_lhs.add_invalidates(w_res)
w_rhs.add_invalidates(w_res)
if out:
- #out.add_invalidates(w_res) #causes a recursion loop
w_res.get_concrete()
return w_res
@@ -539,14 +541,18 @@
("reciprocal", "reciprocal", 1),
("fabs", "fabs", 1, {"promote_to_float": True}),
+ ("fmax", "fmax", 2, {"promote_to_float": True}),
+ ("fmin", "fmin", 2, {"promote_to_float": True}),
("fmod", "fmod", 2, {"promote_to_float": True}),
("floor", "floor", 1, {"promote_to_float": True}),
("ceil", "ceil", 1, {"promote_to_float": True}),
+ ("trunc", "trunc", 1, {"promote_to_float": True}),
("exp", "exp", 1, {"promote_to_float": True}),
("exp2", "exp2", 1, {"promote_to_float": True}),
("expm1", "expm1", 1, {"promote_to_float": True}),
('sqrt', 'sqrt', 1, {'promote_to_float': True}),
+ ('square', 'square', 1, {'promote_to_float': True}),
("sin", "sin", 1, {"promote_to_float": True}),
("cos", "cos", 1, {"promote_to_float": True}),
diff --git a/pypy/module/micronumpy/signature.py b/pypy/module/micronumpy/signature.py
--- a/pypy/module/micronumpy/signature.py
+++ b/pypy/module/micronumpy/signature.py
@@ -107,6 +107,10 @@
arr.compute_first_step(self, f)
return f
+ def debug_repr(self):
+ # should be overridden, but in case it isn't, provide a default
+ return str(self)
+
class ConcreteSignature(Signature):
_immutable_fields_ = ['dtype']
@@ -207,7 +211,7 @@
def _create_iter(self, iterlist, arraylist, arr, transforms):
from pypy.module.micronumpy.interp_numarray import VirtualSlice
assert isinstance(arr, VirtualSlice)
- transforms = transforms + [ViewTransform(arr.chunks)]
+ transforms = [ViewTransform(arr.chunks)] + transforms
self.child._create_iter(iterlist, arraylist, arr.child, transforms)
def eval(self, frame, arr):
@@ -215,6 +219,9 @@
assert isinstance(arr, VirtualSlice)
return self.child.eval(frame, arr.child)
+ def debug_repr(self):
+ return 'VirtualSlice(%s)' % self.child.debug_repr()
+
class Call1(Signature):
_immutable_fields_ = ['unfunc', 'name', 'child', 'res', 'dtype']
@@ -270,7 +277,7 @@
from pypy.module.micronumpy.interp_numarray import Call1
assert isinstance(arr, Call1)
- vtransforms = transforms + [BroadcastTransform(arr.values.shape)]
+ vtransforms = [BroadcastTransform(arr.values.shape)] + transforms
self.child._create_iter(iterlist, arraylist, arr.values, vtransforms)
self.res._create_iter(iterlist, arraylist, arr.res, transforms)
@@ -348,7 +355,7 @@
from pypy.module.micronumpy.interp_numarray import ResultArray
assert isinstance(arr, ResultArray)
- rtransforms = transforms + [BroadcastTransform(arr.left.shape)]
+ rtransforms = [BroadcastTransform(arr.left.shape)] + transforms
self.left._create_iter(iterlist, arraylist, arr.left, transforms)
self.right._create_iter(iterlist, arraylist, arr.right, rtransforms)
@@ -375,7 +382,7 @@
from pypy.module.micronumpy.interp_numarray import Call2
assert isinstance(arr, Call2)
- ltransforms = transforms + [BroadcastTransform(arr.shape)]
+ ltransforms = [BroadcastTransform(arr.shape)] + transforms
self.left._create_iter(iterlist, arraylist, arr.left, ltransforms)
self.right._create_iter(iterlist, arraylist, arr.right, transforms)
@@ -388,7 +395,7 @@
from pypy.module.micronumpy.interp_numarray import Call2
assert isinstance(arr, Call2)
- rtransforms = transforms + [BroadcastTransform(arr.shape)]
+ rtransforms = [BroadcastTransform(arr.shape)] + transforms
self.left._create_iter(iterlist, arraylist, arr.left, transforms)
self.right._create_iter(iterlist, arraylist, arr.right, rtransforms)
@@ -401,8 +408,8 @@
from pypy.module.micronumpy.interp_numarray import Call2
assert isinstance(arr, Call2)
- rtransforms = transforms + [BroadcastTransform(arr.shape)]
- ltransforms = transforms + [BroadcastTransform(arr.shape)]
+ rtransforms = [BroadcastTransform(arr.shape)] + transforms
+ ltransforms = [BroadcastTransform(arr.shape)] + transforms
self.left._create_iter(iterlist, arraylist, arr.left, ltransforms)
self.right._create_iter(iterlist, arraylist, arr.right, rtransforms)
@@ -424,7 +431,7 @@
frame.cur_value = self.binfunc(self.calc_dtype, frame.cur_value, rval)
def debug_repr(self):
- return 'ReduceSig(%s)' % (self.name, self.right.debug_repr())
+ return 'ReduceSig(%s, %s)' % (self.name, self.right.debug_repr())
class SliceloopSignature(Call2):
def eval(self, frame, arr):
@@ -448,7 +455,7 @@
from pypy.module.micronumpy.interp_numarray import SliceArray
assert isinstance(arr, SliceArray)
- rtransforms = transforms + [BroadcastTransform(arr.shape)]
+ rtransforms = [BroadcastTransform(arr.shape)] + transforms
self.left._create_iter(iterlist, arraylist, arr.left, transforms)
self.right._create_iter(iterlist, arraylist, arr.right, rtransforms)
diff --git a/pypy/module/micronumpy/strides.py b/pypy/module/micronumpy/strides.py
--- a/pypy/module/micronumpy/strides.py
+++ b/pypy/module/micronumpy/strides.py
@@ -1,6 +1,7 @@
from pypy.rlib import jit
from pypy.interpreter.error import OperationError
+@jit.look_inside_iff(lambda chunks: jit.isconstant(len(chunks)))
def enumerate_chunks(chunks):
result = []
i = -1
@@ -85,9 +86,9 @@
space.isinstance_w(w_item_or_slice, space.w_slice)):
raise OperationError(space.w_IndexError,
space.wrap('unsupported iterator index'))
-
+
start, stop, step, lngth = space.decode_index4(w_item_or_slice, size)
-
+
coords = [0] * len(shape)
i = start
if order == 'C':
diff --git a/pypy/module/micronumpy/support.py b/pypy/module/micronumpy/support.py
--- a/pypy/module/micronumpy/support.py
+++ b/pypy/module/micronumpy/support.py
@@ -1,5 +1,9 @@
+from pypy.rlib import jit
+
+
+@jit.unroll_safe
def product(s):
i = 1
for x in s:
i *= x
- return i
\ No newline at end of file
+ return i
diff --git a/pypy/module/micronumpy/test/test_numarray.py b/pypy/module/micronumpy/test/test_numarray.py
--- a/pypy/module/micronumpy/test/test_numarray.py
+++ b/pypy/module/micronumpy/test/test_numarray.py
@@ -395,11 +395,19 @@
assert a[3] == 0.
def test_newaxis(self):
- from _numpypy import array
+ import math
+ from _numpypy import array, cos, zeros
from numpypy.core.numeric import newaxis
a = array(range(5))
b = array([range(5)])
assert (a[newaxis] == b).all()
+ a = array(range(3))
+ b = array([1, 3])
+ expected = zeros((3, 2))
+ for x in range(3):
+ for y in range(2):
+ expected[x, y] = math.cos(a[x]) * math.cos(b[y])
+ assert ((cos(a)[:,newaxis] * cos(b).T) == expected).all()
def test_newaxis_slice(self):
from _numpypy import array
@@ -1338,6 +1346,10 @@
dims_disagree = raises(ValueError, concatenate, (a1, b1), axis=0)
assert str(dims_disagree.value) == \
"array dimensions must agree except for axis being concatenated"
+ a = array([1, 2, 3, 4, 5, 6])
+ a = (a + a)[::2]
+ b = concatenate((a[:3], a[-3:]))
+ assert (b == [2, 6, 10, 2, 6, 10]).all()
def test_std(self):
from _numpypy import array
@@ -1387,6 +1399,16 @@
assert (ones(1) + ones(1)).nbytes == 8
assert array(3.0).nbytes == 8
+ def test_repeat(self):
+ from _numpypy import repeat, array
+ assert (repeat([[1, 2], [3, 4]], 3) == [1, 1, 1, 2, 2, 2,
+ 3, 3, 3, 4, 4, 4]).all()
+ assert (repeat([[1, 2], [3, 4]], 2, axis=0) == [[1, 2], [1, 2], [3, 4],
+ [3, 4]]).all()
+ assert (repeat([[1, 2], [3, 4]], 2, axis=1) == [[1, 1, 2, 2], [3, 3,
+ 4, 4]]).all()
+ assert (array([1, 2]).repeat(2) == array([1, 1, 2, 2])).all()
+
class AppTestMultiDim(BaseNumpyAppTest):
def test_init(self):
diff --git a/pypy/module/micronumpy/test/test_ufuncs.py b/pypy/module/micronumpy/test/test_ufuncs.py
--- a/pypy/module/micronumpy/test/test_ufuncs.py
+++ b/pypy/module/micronumpy/test/test_ufuncs.py
@@ -135,6 +135,38 @@
assert fabs(float('-inf')) == float('inf')
assert isnan(fabs(float('nan')))
+ def test_fmax(self):
+ from _numpypy import fmax
+ import math
+
+ nnan, nan, inf, ninf = float('-nan'), float('nan'), float('inf'), float('-inf')
+
+ a = [ninf, -5, 0, 5, inf]
+ assert (fmax(a, [ninf]*5) == a).all()
+ assert (fmax(a, [inf]*5) == [inf]*5).all()
+ assert (fmax(a, [1]*5) == [1, 1, 1, 5, inf]).all()
+ assert math.isnan(fmax(nan, 0))
+ assert math.isnan(fmax(0, nan))
+ assert math.isnan(fmax(nan, nan))
+ # The numpy docs specify that the FIRST NaN should be used if both are NaN
+ assert math.copysign(1.0, fmax(nnan, nan)) == -1.0
+
+ def test_fmin(self):
+ from _numpypy import fmin
+ import math
+
+ nnan, nan, inf, ninf = float('-nan'), float('nan'), float('inf'), float('-inf')
+
+ a = [ninf, -5, 0, 5, inf]
+ assert (fmin(a, [ninf]*5) == [ninf]*5).all()
+ assert (fmin(a, [inf]*5) == a).all()
+ assert (fmin(a, [1]*5) == [ninf, -5, 0, 1, 1]).all()
+ assert math.isnan(fmin(nan, 0))
+ assert math.isnan(fmin(0, nan))
+ assert math.isnan(fmin(nan, nan))
+ # The numpy docs specify that the FIRST NaN should be used if both are NaN
+ assert math.copysign(1.0, fmin(nnan, nan)) == -1.0
+
def test_fmod(self):
from _numpypy import fmod
import math
@@ -221,24 +253,17 @@
for i in range(3):
assert c[i] == a[i] - b[i]
- def test_floorceil(self):
- from _numpypy import array, floor, ceil
+ def test_floorceiltrunc(self):
+ from _numpypy import array, floor, ceil, trunc
import math
- reference = [-2.0, -2.0, -1.0, 0.0, 1.0, 1.0, 0]
- a = array([-1.4, -1.5, -1.0, 0.0, 1.0, 1.4, 0.5])
- b = floor(a)
- for i in range(5):
- assert b[i] == reference[i]
- reference = [-1.0, -1.0, -1.0, 0.0, 1.0, 2.0, 1.0]
- a = array([-1.4, -1.5, -1.0, 0.0, 1.0, 1.4, 0.5])
- b = ceil(a)
- assert (reference == b).all()
- inf = float("inf")
- data = [1.5, 2.9999, -1.999, inf]
- results = [math.floor(x) for x in data]
- assert (floor(data) == results).all()
- results = [math.ceil(x) for x in data]
- assert (ceil(data) == results).all()
+ ninf, inf = float("-inf"), float("inf")
+ a = array([ninf, -1.4, -1.5, -1.0, 0.0, 1.0, 1.4, 0.5, inf])
+ assert ([ninf, -2.0, -2.0, -1.0, 0.0, 1.0, 1.0, 0.0, inf] == floor(a)).all()
+ assert ([ninf, -1.0, -1.0, -1.0, 0.0, 1.0, 2.0, 1.0, inf] == ceil(a)).all()
+ assert ([ninf, -1.0, -1.0, -1.0, 0.0, 1.0, 1.0, 0.0, inf] == trunc(a)).all()
+ assert all([math.isnan(f(float("nan"))) for f in floor, ceil, trunc])
+ assert all([math.copysign(1, f(float("nan"))) == 1 for f in floor, ceil, trunc])
+ assert all([math.copysign(1, f(float("-nan"))) == -1 for f in floor, ceil, trunc])
def test_copysign(self):
from _numpypy import array, copysign
@@ -455,6 +480,19 @@
assert math.isnan(sqrt(-1))
assert math.isnan(sqrt(nan))
+ def test_square(self):
+ import math
+ from _numpypy import square
+
+ nan, inf, ninf = float("nan"), float("inf"), float("-inf")
+
+ assert math.isnan(square(nan))
+ assert math.isinf(square(inf))
+ assert math.isinf(square(ninf))
+ assert square(ninf) > 0
+ assert [square(x) for x in range(-5, 5)] == [x*x for x in range(-5, 5)]
+ assert math.isinf(square(1e300))
+
def test_radians(self):
import math
from _numpypy import radians, array
@@ -546,10 +584,17 @@
raises(TypeError, 'array([1.0]) & 1')
def test_unary_bitops(self):
- from _numpypy import bitwise_not, array
+ from _numpypy import bitwise_not, invert, array
a = array([1, 2, 3, 4])
assert (~a == [-2, -3, -4, -5]).all()
assert (bitwise_not(a) == ~a).all()
+ assert (invert(a) == ~a).all()
+
+ def test_shift(self):
+ from _numpypy import left_shift, right_shift
+
+ assert (left_shift([5, 1], [2, 13]) == [20, 2**13]).all()
+ assert (right_shift(10, range(5)) == [10, 5, 2, 1, 0]).all()
def test_comparisons(self):
import operator
diff --git a/pypy/module/micronumpy/types.py b/pypy/module/micronumpy/types.py
--- a/pypy/module/micronumpy/types.py
+++ b/pypy/module/micronumpy/types.py
@@ -631,6 +631,22 @@
return math.fabs(v)
@simple_binary_op
+ def fmax(self, v1, v2):
+ if math.isnan(v1):
+ return v1
+ elif math.isnan(v2):
+ return v2
+ return max(v1, v2)
+
+ @simple_binary_op
+ def fmin(self, v1, v2):
+ if math.isnan(v1):
+ return v1
+ elif math.isnan(v2):
+ return v2
+ return min(v1, v2)
+
+ @simple_binary_op
def fmod(self, v1, v2):
try:
return math.fmod(v1, v2)
@@ -652,6 +668,13 @@
return math.ceil(v)
@simple_unary_op
+ def trunc(self, v):
+ if v < 0:
+ return math.ceil(v)
+ else:
+ return math.floor(v)
+
+ @simple_unary_op
def exp(self, v):
try:
return math.exp(v)
@@ -741,6 +764,10 @@
except ValueError:
return rfloat.NAN
+ @simple_unary_op
+ def square(self, v):
+ return v*v
+
@raw_unary_op
def isnan(self, v):
return rfloat.isnan(v)
diff --git a/pypy/module/pyexpat/test/__init__.py b/pypy/module/pyexpat/test/__init__.py
new file mode 100644
diff --git a/pypy/module/pypyjit/test_pypy_c/test_misc.py b/pypy/module/pypyjit/test_pypy_c/test_misc.py
--- a/pypy/module/pypyjit/test_pypy_c/test_misc.py
+++ b/pypy/module/pypyjit/test_pypy_c/test_misc.py
@@ -212,7 +212,7 @@
i19 = int_add(i12, 1)
setfield_gc(p9, i19, descr=<FieldS .*W_AbstractSeqIterObject.inst_index .*>)
guard_nonnull_class(p17, 146982464, descr=...)
- i21 = getfield_gc(p17, descr=<FieldS .*W_ArrayTypei.inst_len .*>)
+ i21 = getfield_gc(p17, descr=<FieldS .*W_Array.*.inst_len .*>)
i23 = int_lt(0, i21)
guard_true(i23, descr=...)
i24 = getfield_gc(p17, descr=<FieldU .*W_ArrayTypei.inst_buffer .*>)
@@ -351,3 +351,23 @@
# the following assertion fails if the loop was cancelled due
# to "abort: vable escape"
assert len(log.loops_by_id("eval")) == 1
+
+ def test_sys_exc_info(self):
+ def main():
+ i = 1
+ lst = [i]
+ while i < 1000:
+ try:
+ return lst[i]
+ except:
+ e = sys.exc_info()[1] # ID: exc_info
+ if not isinstance(e, IndexError):
+ raise
+ i += 1
+ return 42
+
+ log = self.run(main)
+ assert log.result == 42
+ # the following assertion fails if the loop was cancelled due
+ # to "abort: vable escape"
+ assert len(log.loops_by_id("exc_info")) == 1
diff --git a/pypy/module/pypyjit/test_pypy_c/test_string.py b/pypy/module/pypyjit/test_pypy_c/test_string.py
--- a/pypy/module/pypyjit/test_pypy_c/test_string.py
+++ b/pypy/module/pypyjit/test_pypy_c/test_string.py
@@ -198,3 +198,37 @@
i49 = call(ConstClass(_ll_2_str_eq_nonnull__rpy_stringPtr_rpy_stringPtr), p35, ConstPtr(ptr48), descr=<Calli [48] rr EF=0 OS=28>)
guard_value(i49, 1, descr=...)
''')
+
+ def test_remove_duplicate_method_calls(self):
+ def main(n):
+ lst = []
+ for i in range(n):
+ s = 'Hello %d' % i
+ t = s.lower() # ID: callone
+ u = s.lower() # ID: calltwo
+ lst.append(t)
+ lst.append(u)
+ return len(','.join(lst))
+ log = self.run(main, [1000])
+ assert log.result == main(1000)
+ loops = log.loops_by_filename(self.filepath)
+ loop, = loops
+ loop.match_by_id('callone', '''
+ p114 = call(ConstClass(ll_lower__rpy_stringPtr), p113, descr=<Callr . r EF=3>)
+ guard_no_exception(descr=...)
+ ''')
+ loop.match_by_id('calltwo', '') # nothing
+
+ def test_move_method_call_out_of_loop(self):
+ def main(n):
+ lst = []
+ s = 'Hello %d' % n
+ for i in range(n):
+ t = s.lower() # ID: callone
+ lst.append(t)
+ return len(','.join(lst))
+ log = self.run(main, [1000])
+ assert log.result == main(1000)
+ loops = log.loops_by_filename(self.filepath)
+ loop, = loops
+ loop.match_by_id('callone', '') # nothing
diff --git a/pypy/module/sys/test/test_sysmodule.py b/pypy/module/sys/test/test_sysmodule.py
--- a/pypy/module/sys/test/test_sysmodule.py
+++ b/pypy/module/sys/test/test_sysmodule.py
@@ -595,3 +595,124 @@
assert len(frames) == 1
_, other_frame = frames.popitem()
assert other_frame.f_code.co_name in ('other_thread', '?')
+
+
+class AppTestSysExcInfoDirect:
+
+ def setup_method(self, meth):
+ self.checking = not option.runappdirect
+ if self.checking:
+ self.seen = []
+ from pypy.module.sys import vm
+ def exc_info_with_tb(*args):
+ self.seen.append("n") # not optimized
+ return self.old[0](*args)
+ def exc_info_without_tb(*args):
+ self.seen.append("y") # optimized
+ return self.old[1](*args)
+ self.old = [vm.exc_info_with_tb, vm.exc_info_without_tb]
+ vm.exc_info_with_tb = exc_info_with_tb
+ vm.exc_info_without_tb = exc_info_without_tb
+ #
+ from pypy.rlib import jit
+ self.old2 = [jit.we_are_jitted]
+ jit.we_are_jitted = lambda: True
+
+ def teardown_method(self, meth):
+ if self.checking:
+ from pypy.module.sys import vm
+ from pypy.rlib import jit
+ vm.exc_info_with_tb = self.old[0]
+ vm.exc_info_without_tb = self.old[1]
+ jit.we_are_jitted = self.old2[0]
+ #
+ assert ''.join(self.seen) == meth.expected
+
+ def test_returns_none(self):
+ import sys
+ assert sys.exc_info() == (None, None, None)
+ assert sys.exc_info()[0] is None
+ assert sys.exc_info()[1] is None
+ assert sys.exc_info()[2] is None
+ assert sys.exc_info()[:2] == (None, None)
+ assert sys.exc_info()[:3] == (None, None, None)
+ assert sys.exc_info()[0:2] == (None, None)
+ assert sys.exc_info()[2:4] == (None,)
+ test_returns_none.expected = 'nnnnnnnn'
+
+ def test_returns_subscr(self):
+ import sys
+ e = KeyError("boom")
+ try:
+ raise e
+ except:
+ assert sys.exc_info()[0] is KeyError # y
+ assert sys.exc_info()[1] is e # y
+ assert sys.exc_info()[2] is not None # n
+ assert sys.exc_info()[-3] is KeyError # y
+ assert sys.exc_info()[-2] is e # y
+ assert sys.exc_info()[-1] is not None # n
+ test_returns_subscr.expected = 'yynyyn'
+
+ def test_returns_slice_2(self):
+ import sys
+ e = KeyError("boom")
+ try:
+ raise e
+ except:
+ foo = sys.exc_info() # n
+ assert sys.exc_info()[:0] == () # y
+ assert sys.exc_info()[:1] == foo[:1] # y
+ assert sys.exc_info()[:2] == foo[:2] # y
+ assert sys.exc_info()[:3] == foo # n
+ assert sys.exc_info()[:4] == foo # n
+ assert sys.exc_info()[:-1] == foo[:2] # y
+ assert sys.exc_info()[:-2] == foo[:1] # y
+ assert sys.exc_info()[:-3] == () # y
+ test_returns_slice_2.expected = 'nyyynnyyy'
+
+ def test_returns_slice_3(self):
+ import sys
+ e = KeyError("boom")
+ try:
+ raise e
+ except:
+ foo = sys.exc_info() # n
+ assert sys.exc_info()[2:2] == () # y
+ assert sys.exc_info()[0:1] == foo[:1] # y
+ assert sys.exc_info()[1:2] == foo[1:2] # y
+ assert sys.exc_info()[0:3] == foo # n
+ assert sys.exc_info()[2:4] == foo[2:] # n
+ assert sys.exc_info()[0:-1] == foo[:2] # y
+ assert sys.exc_info()[0:-2] == foo[:1] # y
+ assert sys.exc_info()[5:-3] == () # y
+ test_returns_slice_3.expected = 'nyyynnyyy'
+
+ def test_strange_invocation(self):
+ import sys
+ e = KeyError("boom")
+ try:
+ raise e
+ except:
+ a = []; k = {}
+ assert sys.exc_info(*a)[:0] == ()
+ assert sys.exc_info(**k)[:0] == ()
+ test_strange_invocation.expected = 'nn'
+
+ def test_call_in_subfunction(self):
+ import sys
+ def g():
+ # this case is not optimized, because we need to search the
+ # frame chain. it's probably not worth the complications
+ return sys.exc_info()[1]
+ e = KeyError("boom")
+ try:
+ raise e
+ except:
+ assert g() is e
+ test_call_in_subfunction.expected = 'n'
+
+
+class AppTestSysExcInfoDirectCallMethod(AppTestSysExcInfoDirect):
+ def setup_class(cls):
+ cls.space = gettestobjspace(**{"objspace.opcodes.CALL_METHOD": True})
diff --git a/pypy/module/sys/version.py b/pypy/module/sys/version.py
--- a/pypy/module/sys/version.py
+++ b/pypy/module/sys/version.py
@@ -10,7 +10,7 @@
CPYTHON_VERSION = (2, 7, 2, "final", 42) #XXX # sync patchlevel.h
CPYTHON_API_VERSION = 1013 #XXX # sync with include/modsupport.h
-PYPY_VERSION = (1, 8, 1, "dev", 0) #XXX # sync patchlevel.h
+PYPY_VERSION = (1, 9, 1, "dev", 0) #XXX # sync patchlevel.h
if platform.name == 'msvc':
COMPILER_INFO = 'MSC v.%d 32 bit' % (platform.version * 10 + 600)
diff --git a/pypy/module/sys/vm.py b/pypy/module/sys/vm.py
--- a/pypy/module/sys/vm.py
+++ b/pypy/module/sys/vm.py
@@ -89,6 +89,9 @@
"""Return the (type, value, traceback) of the most recent exception
caught by an except clause in the current stack frame or in an older stack
frame."""
+ return exc_info_with_tb(space) # indirection for the tests
+
+def exc_info_with_tb(space):
operror = space.getexecutioncontext().sys_exc_info()
if operror is None:
return space.newtuple([space.w_None,space.w_None,space.w_None])
@@ -96,6 +99,59 @@
return space.newtuple([operror.w_type, operror.get_w_value(space),
space.wrap(operror.get_traceback())])
+def exc_info_without_tb(space, frame):
+ operror = frame.last_exception
+ return space.newtuple([operror.w_type, operror.get_w_value(space),
+ space.w_None])
+
+def exc_info_direct(space, frame):
+ from pypy.tool import stdlib_opcode
+ # In order to make the JIT happy, we try to return (exc, val, None)
+ # instead of (exc, val, tb). We can do that only if we recognize
+ # the following pattern in the bytecode:
+ # CALL_FUNCTION/CALL_METHOD <-- invoking me
+ # LOAD_CONST 0, 1, -2 or -3
+ # BINARY_SUBSCR
+ # or:
+ # CALL_FUNCTION/CALL_METHOD
+ # LOAD_CONST <=2
+ # SLICE_2
+ # or:
+ # CALL_FUNCTION/CALL_METHOD
+ # LOAD_CONST any integer
+ # LOAD_CONST <=2
+ # SLICE_3
+ need_all_three_args = True
+ co = frame.getcode().co_code
+ p = frame.last_instr
+ if (ord(co[p]) == stdlib_opcode.CALL_FUNCTION or
+ ord(co[p]) == stdlib_opcode.CALL_METHOD):
+ if ord(co[p+3]) == stdlib_opcode.LOAD_CONST:
+ lo = ord(co[p+4])
+ hi = ord(co[p+5])
+ w_constant = frame.getconstant_w((hi * 256) | lo)
+ if space.isinstance_w(w_constant, space.w_int):
+ constant = space.int_w(w_constant)
+ if ord(co[p+6]) == stdlib_opcode.BINARY_SUBSCR:
+ if -3 <= constant <= 1 and constant != -1:
+ need_all_three_args = False
+ elif ord(co[p+6]) == stdlib_opcode.SLICE+2:
+ if constant <= 2:
+ need_all_three_args = False
+ elif (ord(co[p+6]) == stdlib_opcode.LOAD_CONST and
+ ord(co[p+9]) == stdlib_opcode.SLICE+3):
+ lo = ord(co[p+7])
+ hi = ord(co[p+8])
+ w_constant = frame.getconstant_w((hi * 256) | lo)
+ if space.isinstance_w(w_constant, space.w_int):
+ if space.int_w(w_constant) <= 2:
+ need_all_three_args = False
+ #
+ if need_all_three_args or frame.last_exception is None or frame.hide():
+ return exc_info_with_tb(space)
+ else:
+ return exc_info_without_tb(space, frame)
+
def exc_clear(space):
"""Clear global information on the current exception. Subsequent calls
to exc_info() will return (None,None,None) until another exception is
diff --git a/pypy/module/test_lib_pypy/test_datetime.py b/pypy/module/test_lib_pypy/test_datetime.py
--- a/pypy/module/test_lib_pypy/test_datetime.py
+++ b/pypy/module/test_lib_pypy/test_datetime.py
@@ -44,3 +44,9 @@
assert type(dt.microsecond) is int
copy.copy(dt)
+
+def test_radd():
+ class X(object):
+ def __radd__(self, other):
+ return "radd"
+ assert datetime.date(10, 10, 10) + X() == "radd"
diff --git a/pypy/module/thread/test/test_ll_thread.py b/pypy/module/thread/test/test_ll_thread.py
--- a/pypy/module/thread/test/test_ll_thread.py
+++ b/pypy/module/thread/test/test_ll_thread.py
@@ -66,7 +66,6 @@
def test_gc_locking(self):
import time
from pypy.rlib.objectmodel import invoke_around_extcall
- from pypy.rlib.objectmodel import we_are_translated
from pypy.rlib.debug import ll_assert
class State:
@@ -129,8 +128,6 @@
state.finished = 0
# the next line installs before_extcall() and after_extcall()
# to be called automatically around external function calls.
- # When not translated it does not work around time.sleep(),
- # so we have to call them manually for this test.
invoke_around_extcall(before_extcall, after_extcall)
g(10, 1)
@@ -142,13 +139,9 @@
willing_to_wait_more -= 1
done = len(state.answers) == expected
- if not we_are_translated(): before_extcall()
time.sleep(0.01)
- if not we_are_translated(): after_extcall()
- if not we_are_translated(): before_extcall()
time.sleep(0.1)
- if not we_are_translated(): after_extcall()
return len(state.answers)
@@ -160,12 +153,11 @@
answers = fn()
assert answers == expected
-class TestRunDirectly(AbstractThreadTests):
- def getcompiled(self, f, argtypes):
- return f
-
- def test_start_new_thread(self):
- py.test.skip("deadlocks occasionally -- why???")
+#class TestRunDirectly(AbstractThreadTests):
+# def getcompiled(self, f, argtypes):
+# return f
+# These are disabled because they crash occasionally for bad reasons
+# related to the fact that ll2ctypes is not at all thread-safe
class TestUsingBoehm(AbstractThreadTests):
gcpolicy = 'boehm'
diff --git a/pypy/objspace/descroperation.py b/pypy/objspace/descroperation.py
--- a/pypy/objspace/descroperation.py
+++ b/pypy/objspace/descroperation.py
@@ -43,6 +43,13 @@
return w_eq
type_eq._annspecialcase_ = 'specialize:memo'
+def list_iter(space):
+ "Utility that returns the app-level descriptor list.__iter__."
+ w_src, w_iter = space.lookup_in_type_where(space.w_list,
+ '__iter__')
+ return w_iter
+list_iter._annspecialcase_ = 'specialize:memo'
+
def raiseattrerror(space, w_obj, name, w_descr=None):
w_type = space.type(w_obj)
typename = w_type.getname(space)
diff --git a/pypy/objspace/fake/objspace.py b/pypy/objspace/fake/objspace.py
--- a/pypy/objspace/fake/objspace.py
+++ b/pypy/objspace/fake/objspace.py
@@ -86,6 +86,7 @@
return s_None
def specialize_call(self, hop):
+ hop.exception_cannot_occur()
return hop.inputconst(lltype.Void, None)
# ____________________________________________________________
@@ -109,7 +110,7 @@
"NOT_RPYTHON"
raise NotImplementedError
- def newdict(self, module=False, instance=False, classofinstance=None,
+ def newdict(self, module=False, instance=False,
strdict=False):
return w_some_obj()
diff --git a/pypy/objspace/flow/flowcontext.py b/pypy/objspace/flow/flowcontext.py
--- a/pypy/objspace/flow/flowcontext.py
+++ b/pypy/objspace/flow/flowcontext.py
@@ -434,6 +434,13 @@
self.lastblock = block
self.pushvalue(w_result)
+ def BUILD_LIST_FROM_ARG(self, _, next_instr):
+ # This opcode was added with pypy-1.8. Here is a simpler
+ # version, enough for annotation.
+ last_val = self.popvalue()
+ self.pushvalue(self.space.newlist([]))
+ self.pushvalue(last_val)
+
# XXX Unimplemented 2.7 opcodes ----------------
# Set literals, set comprehensions
diff --git a/pypy/objspace/std/celldict.py b/pypy/objspace/std/celldict.py
--- a/pypy/objspace/std/celldict.py
+++ b/pypy/objspace/std/celldict.py
@@ -163,7 +163,8 @@
class ModuleDictIteratorImplementation(IteratorImplementation):
def __init__(self, space, strategy, dictimplementation):
- IteratorImplementation.__init__(self, space, dictimplementation)
+ IteratorImplementation.__init__(
+ self, space, strategy, dictimplementation)
dict_w = strategy.unerase(dictimplementation.dstorage)
self.iterator = dict_w.iteritems()
diff --git a/pypy/objspace/std/dictmultiobject.py b/pypy/objspace/std/dictmultiobject.py
--- a/pypy/objspace/std/dictmultiobject.py
+++ b/pypy/objspace/std/dictmultiobject.py
@@ -33,8 +33,7 @@
@staticmethod
def allocate_and_init_instance(space, w_type=None, module=False,
- instance=False, classofinstance=None,
- strdict=False):
+ instance=False, strdict=False):
if space.config.objspace.std.withcelldict and module:
from pypy.objspace.std.celldict import ModuleDictStrategy
@@ -247,7 +246,7 @@
return 0
def iter(self, w_dict):
- return EmptyIteratorImplementation(self.space, w_dict)
+ return EmptyIteratorImplementation(self.space, self, w_dict)
def clear(self, w_dict):
return
@@ -263,8 +262,9 @@
# Iterator Implementation base classes
class IteratorImplementation(object):
- def __init__(self, space, implementation):
+ def __init__(self, space, strategy, implementation):
self.space = space
+ self.strategy = strategy
self.dictimplementation = implementation
self.len = implementation.length()
self.pos = 0
@@ -280,7 +280,20 @@
if self.pos < self.len:
result = self.next_entry()
self.pos += 1
- return result
+ if self.strategy is self.dictimplementation.strategy:
+ return result # common case
+ else:
+ # waaa, obscure case: the strategy changed, but not the
+ # length of the dict. The (key, value) pair in 'result'
+ # might be out-of-date. We try to explicitly look up
+ # the key in the dict.
+ w_key = result[0]
+ w_value = self.dictimplementation.getitem(w_key)
+ if w_value is None:
+ self.len = -1 # Make this error state sticky
+ raise OperationError(self.space.w_RuntimeError,
+ self.space.wrap("dictionary changed during iteration"))
+ return (w_key, w_value)
# no more entries
self.dictimplementation = None
return None, None
@@ -489,7 +502,7 @@
_mixin_ = True
def __init__(self, space, strategy, dictimplementation):
- IteratorImplementation.__init__(self, space, dictimplementation)
+ IteratorImplementation.__init__(self, space, strategy, dictimplementation)
self.iterator = strategy.unerase(dictimplementation.dstorage).iteritems()
def next_entry(self):
@@ -503,7 +516,7 @@
_mixin_ = True
def __init__(self, space, strategy, dictimplementation):
- IteratorImplementation.__init__(self, space, dictimplementation)
+ IteratorImplementation.__init__(self, space, strategy, dictimplementation)
self.iterator = strategy.unerase(dictimplementation.dstorage).iteritems()
def next_entry(self):
@@ -549,10 +562,7 @@
def listview_int(self, w_dict):
return self.unerase(w_dict.dstorage).keys()
- def w_keys(self, w_dict):
- # XXX there is no space.newlist_int yet
- space = self.space
- return space.call_function(space.w_list, w_dict)
+ # XXX there is no space.newlist_int yet to implement w_keys more efficiently
class IntIteratorImplementation(_WrappedIteratorMixin, IteratorImplementation):
pass
diff --git a/pypy/objspace/std/dictproxyobject.py b/pypy/objspace/std/dictproxyobject.py
--- a/pypy/objspace/std/dictproxyobject.py
+++ b/pypy/objspace/std/dictproxyobject.py
@@ -20,7 +20,17 @@
def getitem(self, w_dict, w_key):
space = self.space
w_lookup_type = space.type(w_key)
- if space.is_w(w_lookup_type, space.w_str):
+ if (space.is_w(w_lookup_type, space.w_str) or # Most common path first
+ space.abstract_issubclass_w(w_lookup_type, space.w_str)):
+ return self.getitem_str(w_dict, space.str_w(w_key))
+ elif space.abstract_issubclass_w(w_lookup_type, space.w_unicode):
+ try:
+ w_key = space.str(w_key)
+ except OperationError, e:
+ if not e.match(space, space.w_UnicodeEncodeError):
+ raise
+ # non-ascii unicode is never equal to a byte string
+ return None
return self.getitem_str(w_dict, space.str_w(w_key))
else:
return None
@@ -98,7 +108,8 @@
class DictProxyIteratorImplementation(IteratorImplementation):
def __init__(self, space, strategy, dictimplementation):
- IteratorImplementation.__init__(self, space, dictimplementation)
+ IteratorImplementation.__init__(
+ self, space, strategy, dictimplementation)
w_type = strategy.unerase(dictimplementation.dstorage)
self.iterator = w_type.dict_w.iteritems()
diff --git a/pypy/objspace/std/identitydict.py b/pypy/objspace/std/identitydict.py
--- a/pypy/objspace/std/identitydict.py
+++ b/pypy/objspace/std/identitydict.py
@@ -1,5 +1,5 @@
## ----------------------------------------------------------------------------
-## dict strategy (see dict_multiobject.py)
+## dict strategy (see dictmultiobject.py)
from pypy.rlib import rerased
from pypy.rlib.debug import mark_dict_non_null
@@ -80,8 +80,8 @@
def iter(self, w_dict):
return IdentityDictIteratorImplementation(self.space, self, w_dict)
- def keys(self, w_dict):
- return self.unerase(w_dict.dstorage).keys()
+ def w_keys(self, w_dict):
+ return self.space.newlist(self.unerase(w_dict.dstorage).keys())
class IdentityDictIteratorImplementation(_UnwrappedIteratorMixin, IteratorImplementation):
diff --git a/pypy/objspace/std/mapdict.py b/pypy/objspace/std/mapdict.py
--- a/pypy/objspace/std/mapdict.py
+++ b/pypy/objspace/std/mapdict.py
@@ -703,7 +703,8 @@
class MapDictIteratorImplementation(IteratorImplementation):
def __init__(self, space, strategy, dictimplementation):
- IteratorImplementation.__init__(self, space, dictimplementation)
+ IteratorImplementation.__init__(
+ self, space, strategy, dictimplementation)
w_obj = strategy.unerase(dictimplementation.dstorage)
self.w_obj = w_obj
self.orig_map = self.curr_map = w_obj._get_mapdict_map()
diff --git a/pypy/objspace/std/objspace.py b/pypy/objspace/std/objspace.py
--- a/pypy/objspace/std/objspace.py
+++ b/pypy/objspace/std/objspace.py
@@ -313,11 +313,10 @@
def newlist_str(self, list_s):
return W_ListObject.newlist_str(self, list_s)
- def newdict(self, module=False, instance=False, classofinstance=None,
+ def newdict(self, module=False, instance=False,
strdict=False):
return W_DictMultiObject.allocate_and_init_instance(
self, module=module, instance=instance,
- classofinstance=classofinstance,
strdict=strdict)
def newset(self):
@@ -439,6 +438,8 @@
t = w_obj.getitems()
elif isinstance(w_obj, W_AbstractTupleObject):
t = w_obj.getitems_copy()
+ elif isinstance(w_obj, W_ListObject) and self._uses_list_iter(w_obj):
+ t = w_obj.getitems()
else:
return ObjSpace.unpackiterable(self, w_obj, expected_length)
if expected_length != -1 and len(t) != expected_length:
@@ -456,6 +457,8 @@
return w_obj.listview_str()
if isinstance(w_obj, W_StringObject):
return w_obj.listview_str()
+ if isinstance(w_obj, W_ListObject) and self._uses_list_iter(w_obj):
+ return w_obj.getitems_str()
return None
def listview_int(self, w_obj):
@@ -465,8 +468,14 @@
return w_obj.listview_int()
if type(w_obj) is W_SetObject or type(w_obj) is W_FrozensetObject:
return w_obj.listview_int()
+ if isinstance(w_obj, W_ListObject) and self._uses_list_iter(w_obj):
+ return w_obj.getitems_int()
return None
+ def _uses_list_iter(self, w_obj):
+ from pypy.objspace.descroperation import list_iter
+ return self.lookup(w_obj, '__iter__') is list_iter(self)
+
def sliceindices(self, w_slice, w_length):
if isinstance(w_slice, W_SliceObject):
a, b, c = w_slice.indices3(self, self.int_w(w_length))
diff --git a/pypy/objspace/std/setobject.py b/pypy/objspace/std/setobject.py
--- a/pypy/objspace/std/setobject.py
+++ b/pypy/objspace/std/setobject.py
@@ -359,7 +359,7 @@
w_set.sstorage = w_other.get_storage_copy()
def iter(self, w_set):
- return EmptyIteratorImplementation(self.space, w_set)
+ return EmptyIteratorImplementation(self.space, self, w_set)
def popitem(self, w_set):
raise OperationError(self.space.w_KeyError,
@@ -784,8 +784,9 @@
d_obj[w_item] = None
class IteratorImplementation(object):
- def __init__(self, space, implementation):
+ def __init__(self, space, strategy, implementation):
self.space = space
+ self.strategy = strategy
self.setimplementation = implementation
self.len = implementation.length()
self.pos = 0
@@ -801,7 +802,17 @@
if self.pos < self.len:
result = self.next_entry()
self.pos += 1
- return result
+ if self.strategy is self.setimplementation.strategy:
+ return result # common case
+ else:
+ # waaa, obscure case: the strategy changed, but not the
+ # length of the set. The 'result' might be out-of-date.
+ # We try to explicitly look it up in the set.
+ if not self.setimplementation.has_key(result):
+ self.len = -1 # Make this error state sticky
+ raise OperationError(self.space.w_RuntimeError,
+ self.space.wrap("dictionary changed during iteration"))
+ return result
# no more entries
self.setimplementation = None
return None
@@ -823,7 +834,7 @@
class StringIteratorImplementation(IteratorImplementation):
def __init__(self, space, strategy, w_set):
- IteratorImplementation.__init__(self, space, w_set)
+ IteratorImplementation.__init__(self, space, strategy, w_set)
d = strategy.unerase(w_set.sstorage)
self.iterator = d.iterkeys()
@@ -835,9 +846,9 @@
class IntegerIteratorImplementation(IteratorImplementation):
#XXX same implementation in dictmultiobject on dictstrategy-branch
- def __init__(self, space, strategy, dictimplementation):
- IteratorImplementation.__init__(self, space, dictimplementation)
- d = strategy.unerase(dictimplementation.sstorage)
+ def __init__(self, space, strategy, w_set):
+ IteratorImplementation.__init__(self, space, strategy, w_set)
+ d = strategy.unerase(w_set.sstorage)
self.iterator = d.iterkeys()
def next_entry(self):
@@ -848,9 +859,9 @@
return None
class RDictIteratorImplementation(IteratorImplementation):
- def __init__(self, space, strategy, dictimplementation):
- IteratorImplementation.__init__(self, space, dictimplementation)
- d = strategy.unerase(dictimplementation.sstorage)
+ def __init__(self, space, strategy, w_set):
+ IteratorImplementation.__init__(self, space, strategy, w_set)
+ d = strategy.unerase(w_set.sstorage)
self.iterator = d.iterkeys()
def next_entry(self):
diff --git a/pypy/objspace/std/test/test_dictmultiobject.py b/pypy/objspace/std/test/test_dictmultiobject.py
--- a/pypy/objspace/std/test/test_dictmultiobject.py
+++ b/pypy/objspace/std/test/test_dictmultiobject.py
@@ -804,6 +804,33 @@
assert "IntDictStrategy" in self.get_strategy(d)
assert d[1L] == "hi"
+ def test_iter_dict_length_change(self):
+ d = {1: 2, 3: 4, 5: 6}
+ it = d.iteritems()
+ d[7] = 8
+ # 'd' is now length 4
+ raises(RuntimeError, it.next)
+
+ def test_iter_dict_strategy_only_change_1(self):
+ d = {1: 2, 3: 4, 5: 6}
+ it = d.iteritems()
+ class Foo(object):
+ def __eq__(self, other):
+ return False
+ assert d.get(Foo()) is None # this changes the strategy of 'd'
+ lst = list(it) # but iterating still works
+ assert sorted(lst) == [(1, 2), (3, 4), (5, 6)]
+
+ def test_iter_dict_strategy_only_change_2(self):
+ d = {1: 2, 3: 4, 5: 6}
+ it = d.iteritems()
+ d['foo'] = 'bar'
+ del d[1]
+ # 'd' is still length 3, but its strategy changed. we are
+ # getting a RuntimeError because iterating over the old storage
+ # gives us (1, 2), but 1 is not in the dict any longer.
+ raises(RuntimeError, list, it)
+
class FakeString(str):
hash_count = 0
@@ -858,10 +885,9 @@
def newtuple(self, l):
return tuple(l)
- def newdict(self, module=False, instance=False, classofinstance=None):
+ def newdict(self, module=False, instance=False):
return W_DictMultiObject.allocate_and_init_instance(
- self, module=module, instance=instance,
- classofinstance=classofinstance)
+ self, module=module, instance=instance)
def finditem_str(self, w_dict, s):
return w_dict.getitem_str(s) # assume it's a multidict
@@ -941,6 +967,20 @@
assert type(self.impl.strategy) is self.StrategyClass
#assert self.impl.r_dict_content is None
+ def test_popitem(self):
+ self.fill_impl()
+ assert self.impl.length() == 2
+ a, b = self.impl.popitem()
+ assert self.impl.length() == 1
+ if a == self.string:
+ assert b == 1000
+ assert self.impl.getitem(self.string2) == 2000
+ else:
+ assert a == self.string2
+ assert b == 2000
+ assert self.impl.getitem_str(self.string) == 1000
+ self.check_not_devolved()
+
def test_setitem(self):
self.impl.setitem(self.string, 1000)
assert self.impl.length() == 1
diff --git a/pypy/objspace/std/test/test_dictproxy.py b/pypy/objspace/std/test/test_dictproxy.py
--- a/pypy/objspace/std/test/test_dictproxy.py
+++ b/pypy/objspace/std/test/test_dictproxy.py
@@ -25,6 +25,16 @@
key, value = NotEmpty.__dict__.popitem()
assert (key == 'a' and value == 1) or (key == 'b' and value == 4)
+ def test_dictproxy_getitem(self):
+ class NotEmpty(object):
+ a = 1
+ assert 'a' in NotEmpty.__dict__
+ class substr(str): pass
+ assert substr('a') in NotEmpty.__dict__
+ assert u'a' in NotEmpty.__dict__
+ assert NotEmpty.__dict__[u'a'] == 1
+ assert u'\xe9' not in NotEmpty.__dict__
+
def test_dictproxyeq(self):
class a(object):
pass
diff --git a/pypy/objspace/std/test/test_listobject.py b/pypy/objspace/std/test/test_listobject.py
--- a/pypy/objspace/std/test/test_listobject.py
+++ b/pypy/objspace/std/test/test_listobject.py
@@ -1186,14 +1186,23 @@
# of dicts, because the OrderedDict in the stdlib relies on this.
# we extend the use case to lists and sets, i.e. all types that have
# strategies, to avoid surprizes depending on the strategy.
- for base, arg in [(list, []), (list, [5]), (list, ['x']),
- (set, []), (set, [5]), (set, ['x']),
- (dict, []), (dict, [(5,6)]), (dict, [('x',7)])]:
+ class X: pass
+ for base, arg in [
+ (list, []), (list, [5]), (list, ['x']), (list, [X]),
+ (set, []), (set, [5]), (set, ['x']), (set, [X]),
+ (dict, []), (dict, [(5,6)]), (dict, [('x',7)]), (dict, [(X,8)]),
+ ]:
print base, arg
class SubClass(base):
def __iter__(self):
return iter("foobar")
assert list(SubClass(arg)) == ['f', 'o', 'o', 'b', 'a', 'r']
+ class Sub2(base):
+ pass
+ assert list(Sub2(arg)) == list(base(arg))
+ s = set()
+ s.update(Sub2(arg))
+ assert s == set(base(arg))
class AppTestForRangeLists(AppTestW_ListObject):
diff --git a/pypy/objspace/std/test/test_setobject.py b/pypy/objspace/std/test/test_setobject.py
--- a/pypy/objspace/std/test/test_setobject.py
+++ b/pypy/objspace/std/test/test_setobject.py
@@ -907,3 +907,30 @@
return [5, 3, 4][i]
s = set([10,3,2]).intersection(Obj())
assert list(s) == [3]
+
+ def test_iter_set_length_change(self):
+ s = set([1, 3, 5])
+ it = iter(s)
+ s.add(7)
+ # 's' is now length 4
+ raises(RuntimeError, it.next)
+
+ def test_iter_set_strategy_only_change_1(self):
+ s = set([1, 3, 5])
+ it = iter(s)
+ class Foo(object):
+ def __eq__(self, other):
+ return False
+ assert Foo() not in s # this changes the strategy of 's'
+ lst = list(s) # but iterating still works
+ assert sorted(lst) == [1, 3, 5]
+
+ def test_iter_set_strategy_only_change_2(self):
+ s = set([1, 3, 5])
+ it = iter(s)
+ s.add('foo')
+ s.remove(1)
+ # 's' is still length 3, but its strategy changed. we are
+ # getting a RuntimeError because iterating over the old storage
+ # gives us 1, but 1 is not in the set any longer.
+ raises(RuntimeError, list, it)
diff --git a/pypy/pytest.ini b/pypy/pytest.ini
--- a/pypy/pytest.ini
+++ b/pypy/pytest.ini
@@ -1,2 +1,2 @@
[pytest]
-addopts = --assertmode=old -rf
+addopts = --assert=plain -rf
diff --git a/pypy/rlib/jit.py b/pypy/rlib/jit.py
--- a/pypy/rlib/jit.py
+++ b/pypy/rlib/jit.py
@@ -382,7 +382,7 @@
pass
def specialize_call(self, hop):
- pass
+ hop.exception_cannot_occur()
vref_None = non_virtual_ref(None)
diff --git a/pypy/rlib/jit_hooks.py b/pypy/rlib/jit_hooks.py
--- a/pypy/rlib/jit_hooks.py
+++ b/pypy/rlib/jit_hooks.py
@@ -22,6 +22,7 @@
c_name = hop.inputconst(lltype.Void, 'access_helper')
args_v = [hop.inputarg(arg, arg=i)
for i, arg in enumerate(hop.args_r)]
+ hop.exception_cannot_occur()
return hop.genop('jit_marker', [c_name, c_func] + args_v,
resulttype=hop.r_result)
return helper
diff --git a/pypy/rlib/longlong2float.py b/pypy/rlib/longlong2float.py
--- a/pypy/rlib/longlong2float.py
+++ b/pypy/rlib/longlong2float.py
@@ -21,7 +21,7 @@
FLOAT_ARRAY_PTR = lltype.Ptr(lltype.Array(rffi.FLOAT))
# these definitions are used only in tests, when not translated
-def longlong2float_emulator(llval):
+def longlong2float(llval):
with lltype.scoped_alloc(DOUBLE_ARRAY_PTR.TO, 1) as d_array:
ll_array = rffi.cast(LONGLONG_ARRAY_PTR, d_array)
ll_array[0] = llval
@@ -51,12 +51,6 @@
eci = ExternalCompilationInfo(includes=['string.h', 'assert.h'],
post_include_bits=["""
-static double pypy__longlong2float(long long x) {
- double dd;
- assert(sizeof(double) == 8 && sizeof(long long) == 8);
- memcpy(&dd, &x, 8);
- return dd;
-}
static float pypy__uint2singlefloat(unsigned int x) {
float ff;
assert(sizeof(float) == 4 && sizeof(unsigned int) == 4);
@@ -71,12 +65,6 @@
}
"""])
-longlong2float = rffi.llexternal(
- "pypy__longlong2float", [rffi.LONGLONG], rffi.DOUBLE,
- _callable=longlong2float_emulator, compilation_info=eci,
- _nowrapper=True, elidable_function=True, sandboxsafe=True,
- oo_primitive="pypy__longlong2float")
-
uint2singlefloat = rffi.llexternal(
"pypy__uint2singlefloat", [rffi.UINT], rffi.FLOAT,
_callable=uint2singlefloat_emulator, compilation_info=eci,
@@ -99,4 +87,17 @@
def specialize_call(self, hop):
[v_float] = hop.inputargs(lltype.Float)
- return hop.genop("convert_float_bytes_to_longlong", [v_float], resulttype=hop.r_result)
+ hop.exception_cannot_occur()
+ return hop.genop("convert_float_bytes_to_longlong", [v_float], resulttype=lltype.SignedLongLong)
+
+class LongLong2FloatEntry(ExtRegistryEntry):
+ _about_ = longlong2float
+
+ def compute_result_annotation(self, s_longlong):
+ assert annmodel.SomeInteger(knowntype=r_int64).contains(s_longlong)
+ return annmodel.SomeFloat()
+
+ def specialize_call(self, hop):
+ [v_longlong] = hop.inputargs(lltype.SignedLongLong)
+ hop.exception_cannot_occur()
+ return hop.genop("convert_longlong_bytes_to_float", [v_longlong], resulttype=lltype.Float)
diff --git a/pypy/rlib/objectmodel.py b/pypy/rlib/objectmodel.py
--- a/pypy/rlib/objectmodel.py
+++ b/pypy/rlib/objectmodel.py
@@ -215,6 +215,7 @@
def specialize_call(self, hop):
from pypy.rpython.lltypesystem import lltype
+ hop.exception_cannot_occur()
return hop.inputconst(lltype.Bool, hop.s_result.const)
# ____________________________________________________________
@@ -397,6 +398,7 @@
r_obj, = hop.args_r
v_obj, = hop.inputargs(r_obj)
ll_fn = r_obj.get_ll_hash_function()
+ hop.exception_is_here()
return hop.gendirectcall(ll_fn, v_obj)
class Entry(ExtRegistryEntry):
@@ -419,6 +421,7 @@
from pypy.rpython.error import TyperError
raise TyperError("compute_identity_hash() cannot be applied to"
" %r" % (vobj.concretetype,))
+ hop.exception_cannot_occur()
return hop.genop('gc_identityhash', [vobj], resulttype=lltype.Signed)
class Entry(ExtRegistryEntry):
@@ -441,6 +444,7 @@
from pypy.rpython.error import TyperError
raise TyperError("compute_unique_id() cannot be applied to"
" %r" % (vobj.concretetype,))
+ hop.exception_cannot_occur()
return hop.genop('gc_id', [vobj], resulttype=lltype.Signed)
class Entry(ExtRegistryEntry):
@@ -452,6 +456,7 @@
def specialize_call(self, hop):
vobj, = hop.inputargs(hop.args_r[0])
+ hop.exception_cannot_occur()
if hop.rtyper.type_system.name == 'lltypesystem':
from pypy.rpython.lltypesystem import lltype
if isinstance(vobj.concretetype, lltype.Ptr):
diff --git a/pypy/rlib/rbigint.py b/pypy/rlib/rbigint.py
--- a/pypy/rlib/rbigint.py
+++ b/pypy/rlib/rbigint.py
@@ -85,7 +85,7 @@
s_DIGIT = self.bookkeeper.valueoftype(type(NULLDIGIT))
assert s_DIGIT.contains(s_list.listdef.listitem.s_value)
def specialize_call(self, hop):
- pass
+ hop.exception_cannot_occur()
class rbigint(object):
diff --git a/pypy/rlib/rerased.py b/pypy/rlib/rerased.py
--- a/pypy/rlib/rerased.py
+++ b/pypy/rlib/rerased.py
@@ -100,6 +100,7 @@
def specialize_call(self, hop):
bk = hop.rtyper.annotator.bookkeeper
s_obj = identity.get_input_annotation(bk)
+ hop.exception_cannot_occur()
return hop.r_result.rtype_erase(hop, s_obj)
class Entry(ExtRegistryEntry):
@@ -110,6 +111,7 @@
return identity.leave_tunnel(self.bookkeeper)
def specialize_call(self, hop):
+ hop.exception_cannot_occur()
if hop.r_result.lowleveltype is lltype.Void:
return hop.inputconst(lltype.Void, None)
[v] = hop.inputargs(hop.args_r[0])
@@ -214,6 +216,7 @@
return hop.genop('cast_opaque_ptr', [v], resulttype=hop.r_result)
def rtype_unerase_int(self, hop, v):
+ hop.exception_cannot_occur()
return hop.gendirectcall(ll_unerase_int, v)
def rtype_erase_int(self, hop):
@@ -264,6 +267,7 @@
def rtype_unerase_int(self, hop, v):
c_one = hop.inputconst(lltype.Signed, 1)
+ hop.exception_cannot_occur()
v2 = hop.genop('oounbox_int', [v], resulttype=hop.r_result)
return hop.genop('int_rshift', [v2, c_one], resulttype=lltype.Signed)
diff --git a/pypy/rlib/rgc.py b/pypy/rlib/rgc.py
--- a/pypy/rlib/rgc.py
+++ b/pypy/rlib/rgc.py
@@ -382,6 +382,7 @@
def compute_result_annotation(self):
return s_list_of_gcrefs()
def specialize_call(self, hop):
+ hop.exception_cannot_occur()
return hop.genop('gc_get_rpy_roots', [], resulttype = hop.r_result)
class Entry(ExtRegistryEntry):
@@ -392,6 +393,7 @@
return s_list_of_gcrefs()
def specialize_call(self, hop):
vlist = hop.inputargs(hop.args_r[0])
+ hop.exception_cannot_occur()
return hop.genop('gc_get_rpy_referents', vlist,
resulttype = hop.r_result)
@@ -402,6 +404,7 @@
return annmodel.SomeInteger()
def specialize_call(self, hop):
vlist = hop.inputargs(hop.args_r[0])
+ hop.exception_cannot_occur()
return hop.genop('gc_get_rpy_memory_usage', vlist,
resulttype = hop.r_result)
@@ -412,6 +415,7 @@
return annmodel.SomeInteger()
def specialize_call(self, hop):
vlist = hop.inputargs(hop.args_r[0])
+ hop.exception_cannot_occur()
return hop.genop('gc_get_rpy_type_index', vlist,
resulttype = hop.r_result)
@@ -430,6 +434,7 @@
return annmodel.SomeBool()
def specialize_call(self, hop):
vlist = hop.inputargs(hop.args_r[0])
+ hop.exception_cannot_occur()
return hop.genop('gc_is_rpy_instance', vlist,
resulttype = hop.r_result)
@@ -449,6 +454,7 @@
classrepr = getclassrepr(hop.rtyper, classdef)
vtable = classrepr.getvtable()
assert lltype.typeOf(vtable) == rclass.CLASSTYPE
+ hop.exception_cannot_occur()
return Constant(vtable, concretetype=rclass.CLASSTYPE)
class Entry(ExtRegistryEntry):
diff --git a/pypy/rlib/rsre/rsre_re.py b/pypy/rlib/rsre/rsre_re.py
--- a/pypy/rlib/rsre/rsre_re.py
+++ b/pypy/rlib/rsre/rsre_re.py
@@ -172,8 +172,9 @@
self._ctx = ctx
def span(self, groupnum=0):
- if not isinstance(groupnum, (int, long)):
- groupnum = self.re.groupindex[groupnum]
+# if not isinstance(groupnum, (int, long)):
+# groupnum = self.re.groupindex[groupnum]
+
return self._ctx.span(groupnum)
def start(self, groupnum=0):
@@ -182,19 +183,25 @@
def end(self, groupnum=0):
return self.span(groupnum)[1]
- def group(self, *groups):
- groups = groups or (0,)
- result = []
- for group in groups:
- frm, to = self.span(group)
- if 0 <= frm <= to:
- result.append(self._ctx._string[frm:to])
- else:
- result.append(None)
- if len(result) > 1:
- return tuple(result)
+ def group(self, group=0):
+ frm, to = self.span(group)
+ if 0 <= frm <= to:
+ return self._ctx._string[frm:to]
else:
- return result[0]
+ return None
+
+# def group(self, *groups):
+# groups = groups or (0,)
+# result = []
+# for group in groups:
+# frm, to = self.span(group)
+# if 0 <= frm <= to:
+# result.append(self._ctx._string[frm:to])
+# else:
+# result.append(None)
+# if len(result) > 1:
+# return tuple(result)
+
def groups(self, default=None):
fmarks = self._ctx.flatten_marks()
diff --git a/pypy/rlib/rsre/test/test_re.py b/pypy/rlib/rsre/test/test_re.py
--- a/pypy/rlib/rsre/test/test_re.py
+++ b/pypy/rlib/rsre/test/test_re.py
@@ -204,7 +204,7 @@
assert re.match('(a)', 'a').groups() == ('a',)
assert re.match(r'(a)', 'a').group(0) == 'a'
assert re.match(r'(a)', 'a').group(1) == 'a'
- assert re.match(r'(a)', 'a').group(1, 1) == ('a', 'a')
+ #assert re.match(r'(a)', 'a').group(1, 1) == ('a', 'a')
pat = re.compile('((a)|(b))(c)?')
assert pat.match('a').groups() == ('a', 'a', None, None)
@@ -218,13 +218,13 @@
assert m.group(0) == 'a'
assert m.group(0) == 'a'
assert m.group(1) == 'a'
- assert m.group(1, 1) == ('a', 'a')
+ #assert m.group(1, 1) == ('a', 'a')
pat = re.compile('(?:(?P<a1>a)|(?P<b2>b))(?P<c3>c)?')
- assert pat.match('a').group(1, 2, 3) == ('a', None, None)
- assert pat.match('b').group('a1', 'b2', 'c3') == (
- (None, 'b', None))
- assert pat.match('ac').group(1, 'b2', 3) == ('a', None, 'c')
+ #assert pat.match('a').group(1, 2, 3) == ('a', None, None)
+ #assert pat.match('b').group('a1', 'b2', 'c3') == (
+ # (None, 'b', None))
+ #assert pat.match('ac').group(1, 'b2', 3) == ('a', None, 'c')
def test_bug_923(self):
# Issue923: grouping inside optional lookahead problem
diff --git a/pypy/rlib/rsre/test/test_zinterp.py b/pypy/rlib/rsre/test/test_zinterp.py
--- a/pypy/rlib/rsre/test/test_zinterp.py
+++ b/pypy/rlib/rsre/test/test_zinterp.py
@@ -1,7 +1,8 @@
# minimal test: just checks that (parts of) rsre can be translated
-from pypy.rpython.test.test_llinterp import gengraph
+from pypy.rpython.test.test_llinterp import gengraph, interpret
from pypy.rlib.rsre import rsre_core
+from pypy.rlib.rsre.rsre_re import compile
def main(n):
assert n >= 0
@@ -19,3 +20,18 @@
def test_gengraph():
t, typer, graph = gengraph(main, [int])
+
+m = compile("(a|b)aaaaa")
+
+def test_match():
+ def f(i):
+ if i:
+ s = "aaaaaa"
+ else:
+ s = "caaaaa"
+ g = m.match(s)
+ if g is None:
+ return 3
+ return int("aaaaaa" == g.group(0))
+ assert interpret(f, [3]) == 1
+ assert interpret(f, [0]) == 3
diff --git a/pypy/rlib/rstring.py b/pypy/rlib/rstring.py
--- a/pypy/rlib/rstring.py
+++ b/pypy/rlib/rstring.py
@@ -245,5 +245,5 @@
raise ValueError("Value is not no_nul")
def specialize_call(self, hop):
- pass
+ hop.exception_cannot_occur()
diff --git a/pypy/rlib/test/test_longlong2float.py b/pypy/rlib/test/test_longlong2float.py
--- a/pypy/rlib/test/test_longlong2float.py
+++ b/pypy/rlib/test/test_longlong2float.py
@@ -2,6 +2,7 @@
from pypy.rlib.longlong2float import longlong2float, float2longlong
from pypy.rlib.longlong2float import uint2singlefloat, singlefloat2uint
from pypy.rlib.rarithmetic import r_singlefloat
+from pypy.rpython.test.test_llinterp import interpret
def fn(f1):
@@ -31,6 +32,18 @@
res = fn2(x)
assert repr(res) == repr(x)
+def test_interpreted():
+ def f(f1):
+ try:
+ ll = float2longlong(f1)
+ return longlong2float(ll)
+ except Exception:
+ return 500
+
+ for x in enum_floats():
+ res = interpret(f, [x])
+ assert repr(res) == repr(x)
+
# ____________________________________________________________
def fnsingle(f1):
diff --git a/pypy/rpython/annlowlevel.py b/pypy/rpython/annlowlevel.py
--- a/pypy/rpython/annlowlevel.py
+++ b/pypy/rpython/annlowlevel.py
@@ -543,11 +543,11 @@
else:
assert False
+ hop.exception_cannot_occur()
if isinstance(hop.args_r[1], rpbc.NoneFrozenPBCRepr):
return hop.inputconst(PTR, null)
v_arg = hop.inputarg(hop.args_r[1], arg=1)
assert isinstance(v_arg.concretetype, T)
- hop.exception_cannot_occur()
return hop.genop(opname, [v_arg], resulttype = PTR)
diff --git a/pypy/rpython/controllerentry.py b/pypy/rpython/controllerentry.py
--- a/pypy/rpython/controllerentry.py
+++ b/pypy/rpython/controllerentry.py
@@ -201,6 +201,7 @@
def specialize_call(self, hop):
from pypy.rpython.lltypesystem import lltype
assert hop.s_result.is_constant()
+ hop.exception_cannot_occur()
return hop.inputconst(lltype.Bool, hop.s_result.const)
# ____________________________________________________________
diff --git a/pypy/rpython/lltypesystem/lloperation.py b/pypy/rpython/lltypesystem/lloperation.py
--- a/pypy/rpython/lltypesystem/lloperation.py
+++ b/pypy/rpython/lltypesystem/lloperation.py
@@ -130,6 +130,7 @@
def specialize_call(self, hop):
from pypy.rpython.lltypesystem import lltype
+ hop.exception_cannot_occur()
return hop.inputconst(lltype.Void, None)
def enum_ops_without_sideeffects(raising_is_ok=False):
@@ -350,6 +351,7 @@
'truncate_longlong_to_int':LLOp(canfold=True),
'force_cast': LLOp(sideeffects=False), # only for rffi.cast()
'convert_float_bytes_to_longlong': LLOp(canfold=True),
+ 'convert_longlong_bytes_to_float': LLOp(canfold=True),
# __________ pointer operations __________
diff --git a/pypy/rpython/lltypesystem/opimpl.py b/pypy/rpython/lltypesystem/opimpl.py
--- a/pypy/rpython/lltypesystem/opimpl.py
+++ b/pypy/rpython/lltypesystem/opimpl.py
@@ -431,6 +431,10 @@
from pypy.rlib.longlong2float import float2longlong
return float2longlong(a)
+def op_convert_longlong_bytes_to_float(a):
+ from pypy.rlib.longlong2float import longlong2float
+ return longlong2float(a)
+
def op_unichar_eq(x, y):
assert isinstance(x, unicode) and len(x) == 1
diff --git a/pypy/rpython/lltypesystem/rbuiltin.py b/pypy/rpython/lltypesystem/rbuiltin.py
--- a/pypy/rpython/lltypesystem/rbuiltin.py
+++ b/pypy/rpython/lltypesystem/rbuiltin.py
@@ -9,6 +9,7 @@
from pypy.rpython.rbool import bool_repr
def rtype_builtin_isinstance(hop):
+ hop.exception_cannot_occur()
if hop.s_result.is_constant():
return hop.inputconst(lltype.Bool, hop.s_result.const)
if hop.args_r[0] == pyobj_repr or hop.args_r[1] == pyobj_repr:
@@ -33,6 +34,7 @@
return my_instantiate()
def rtype_instantiate(hop):
+ hop.exception_cannot_occur()
s_class = hop.args_s[0]
assert isinstance(s_class, annmodel.SomePBC)
if len(s_class.descriptions) != 1:
@@ -46,6 +48,7 @@
return rclass.rtype_new_instance(hop.rtyper, classdef, hop.llops)
def rtype_builtin_hasattr(hop):
+ hop.exception_cannot_occur()
if hop.s_result.is_constant():
return hop.inputconst(lltype.Bool, hop.s_result.const)
if hop.args_r[0] == pyobj_repr:
@@ -56,6 +59,7 @@
raise TyperError("hasattr is only suported on a constant or on PyObject")
def rtype_builtin___import__(hop):
+ xxx # should not be used any more
args_v = hop.inputargs(*[pyobj_repr for ign in hop.args_r])
c = hop.inputconst(pyobj_repr, __import__)
return hop.genop('simple_call', [c] + args_v, resulttype = pyobj_repr)
diff --git a/pypy/rpython/lltypesystem/rclass.py b/pypy/rpython/lltypesystem/rclass.py
--- a/pypy/rpython/lltypesystem/rclass.py
+++ b/pypy/rpython/lltypesystem/rclass.py
@@ -746,4 +746,5 @@
assert isinstance(TYPE, GcStruct)
assert lltype._castdepth(TYPE, OBJECT) > 0
hop.rtyper.set_type_for_typeptr(vtable, TYPE)
+ hop.exception_cannot_occur()
return hop.inputconst(lltype.Void, None)
diff --git a/pypy/rpython/lltypesystem/rstr.py b/pypy/rpython/lltypesystem/rstr.py
--- a/pypy/rpython/lltypesystem/rstr.py
+++ b/pypy/rpython/lltypesystem/rstr.py
@@ -765,7 +765,11 @@
def _ll_stringslice(s1, start, stop):
lgt = stop - start
assert start >= 0
- assert lgt >= 0
+ # If start > stop, return an empty string. This can happen if the start
+ # is greater than the length of the string. Use < instead of <= to avoid
+ # creating another path for the JIT when start == stop.
+ if lgt < 0:
+ return s1.empty()
newstr = s1.malloc(lgt)
s1.copy_contents(s1, newstr, start, 0, lgt)
return newstr
diff --git a/pypy/rpython/lltypesystem/rtuple.py b/pypy/rpython/lltypesystem/rtuple.py
--- a/pypy/rpython/lltypesystem/rtuple.py
+++ b/pypy/rpython/lltypesystem/rtuple.py
@@ -55,6 +55,7 @@
vtup = hop.inputarg(self, 0)
LIST = hop.r_result.lowleveltype.TO
cno = inputconst(Signed, nitems)
+ hop.exception_is_here()
vlist = hop.gendirectcall(LIST.ll_newlist, cno)
v_func = hop.inputconst(Void, rlist.dum_nocheck)
for index in range(nitems):
diff --git a/pypy/rpython/memory/gc/minimark.py b/pypy/rpython/memory/gc/minimark.py
--- a/pypy/rpython/memory/gc/minimark.py
+++ b/pypy/rpython/memory/gc/minimark.py
@@ -1426,23 +1426,25 @@
self._visit_young_rawmalloced_object(obj)
return
#
- # If 'obj' was already forwarded, change it to its forwarding address.
- if self.is_forwarded(obj):
+ size_gc_header = self.gcheaderbuilder.size_gc_header
+ if self.header(obj).tid & GCFLAG_HAS_SHADOW == 0:
+ #
+ # Common case: 'obj' was not already forwarded (otherwise
+ # tid == -42, containing all flags), and it doesn't have the
+ # HAS_SHADOW flag either. We must move it out of the nursery,
+ # into a new nonmovable location.
+ totalsize = size_gc_header + self.get_size(obj)
+ newhdr = self._malloc_out_of_nursery(totalsize)
+ #
+ elif self.is_forwarded(obj):
+ #
+ # 'obj' was already forwarded. Change the original reference
+ # to point to its forwarding address, and we're done.
root.address[0] = self.get_forwarding_address(obj)
return
- #
- # First visit to 'obj': we must move it out of the nursery.
- size_gc_header = self.gcheaderbuilder.size_gc_header
- size = self.get_size(obj)
- totalsize = size_gc_header + size
- #
- if self.header(obj).tid & GCFLAG_HAS_SHADOW == 0:
- #
- # Common case: allocate a new nonmovable location for it.
- newhdr = self._malloc_out_of_nursery(totalsize)
#
else:
- # The object has already a shadow.
+ # First visit to an object that has already a shadow.
newobj = self.nursery_objects_shadows.get(obj)
ll_assert(newobj != NULL, "GCFLAG_HAS_SHADOW but no shadow found")
newhdr = newobj - size_gc_header
@@ -1450,6 +1452,8 @@
# Remove the flag GCFLAG_HAS_SHADOW, so that it doesn't get
# copied to the shadow itself.
self.header(obj).tid &= ~GCFLAG_HAS_SHADOW
+ #
+ totalsize = size_gc_header + self.get_size(obj)
#
# Copy it. Note that references to other objects in the
# nursery are kept unchanged in this step.
diff --git a/pypy/rpython/module/ll_os_stat.py b/pypy/rpython/module/ll_os_stat.py
--- a/pypy/rpython/module/ll_os_stat.py
+++ b/pypy/rpython/module/ll_os_stat.py
@@ -455,6 +455,6 @@
return intmask(time), intmask(nsec)
def time_t_to_FILE_TIME(time, filetime):
- ft = (rffi.r_longlong(time) + secs_between_epochs) * 10000000
+ ft = rffi.r_longlong((time + secs_between_epochs) * 10000000)
filetime.c_dwHighDateTime = rffi.r_uint(ft >> 32)
filetime.c_dwLowDateTime = rffi.r_uint(ft) # masking off high bits
diff --git a/pypy/rpython/module/r_os_stat.py b/pypy/rpython/module/r_os_stat.py
--- a/pypy/rpython/module/r_os_stat.py
+++ b/pypy/rpython/module/r_os_stat.py
@@ -65,4 +65,5 @@
r_StatResult = hop.rtyper.getrepr(ll_os_stat.s_StatResult)
[v_result] = hop.inputargs(r_StatResult.r_tuple)
# no-op conversion from r_StatResult.r_tuple to r_StatResult
+ hop.exception_cannot_occur()
return v_result
diff --git a/pypy/rpython/ootypesystem/ooregistry.py b/pypy/rpython/ootypesystem/ooregistry.py
--- a/pypy/rpython/ootypesystem/ooregistry.py
+++ b/pypy/rpython/ootypesystem/ooregistry.py
@@ -22,6 +22,7 @@
annmodel.SomeOOInstance,
annmodel.SomeString))
vlist = hop.inputargs(hop.args_r[0], ootype.Signed)
+ hop.exception_cannot_occur()
return hop.genop('oostring', vlist, resulttype = ootype.String)
class Entry_oounicode(ExtRegistryEntry):
@@ -38,6 +39,7 @@
assert isinstance(hop.args_s[0], (annmodel.SomeUnicodeCodePoint,
annmodel.SomeOOInstance))
vlist = hop.inputargs(hop.args_r[0], ootype.Signed)
+ hop.exception_cannot_occur()
return hop.genop('oounicode', vlist, resulttype = ootype.Unicode)
diff --git a/pypy/rpython/ootypesystem/rbuiltin.py b/pypy/rpython/ootypesystem/rbuiltin.py
--- a/pypy/rpython/ootypesystem/rbuiltin.py
+++ b/pypy/rpython/ootypesystem/rbuiltin.py
@@ -7,12 +7,14 @@
from pypy.rpython.error import TyperError
def rtype_new(hop):
+ hop.exception_cannot_occur()
assert hop.args_s[0].is_constant()
vlist = hop.inputargs(ootype.Void)
return hop.genop('new', vlist,
resulttype = hop.r_result.lowleveltype)
def rtype_oonewarray(hop):
+ hop.exception_cannot_occur()
assert hop.args_s[0].is_constant()
vlist = hop.inputarg(ootype.Void, arg=0)
vlength = hop.inputarg(ootype.Signed, arg=1)
@@ -20,23 +22,27 @@
resulttype = hop.r_result.lowleveltype)
def rtype_null(hop):
+ hop.exception_cannot_occur()
assert hop.args_s[0].is_constant()
TYPE = hop.args_s[0].const
nullvalue = ootype.null(TYPE)
return hop.inputconst(TYPE, nullvalue)
def rtype_classof(hop):
+ hop.exception_cannot_occur()
assert isinstance(hop.args_s[0], annmodel.SomeOOInstance)
vlist = hop.inputargs(hop.args_r[0])
return hop.genop('classof', vlist,
resulttype = ootype.Class)
def rtype_subclassof(hop):
+ hop.exception_cannot_occur()
vlist = hop.inputargs(rootype.ooclass_repr, rootype.ooclass_repr)
return hop.genop('subclassof', vlist,
resulttype = ootype.Bool)
def rtype_instanceof(hop):
+ hop.exception_cannot_occur()
INSTANCE = hop.args_v[1].value
v_inst = hop.inputarg(hop.args_r[0], arg=0)
c_cls = hop.inputconst(ootype.Void, INSTANCE)
@@ -44,23 +50,27 @@
resulttype=ootype.Bool)
def rtype_runtimenew(hop):
+ hop.exception_cannot_occur()
vlist = hop.inputargs(rootype.ooclass_repr)
return hop.genop('runtimenew', vlist,
resulttype = hop.r_result.lowleveltype)
def rtype_ooupcast(hop):
+ hop.exception_cannot_occur()
assert isinstance(hop.args_s[0].const, ootype.Instance)
assert isinstance(hop.args_s[1], annmodel.SomeOOInstance)
v_inst = hop.inputarg(hop.args_r[1], arg=1)
return hop.genop('ooupcast', [v_inst], resulttype = hop.r_result.lowleveltype)
def rtype_oodowncast(hop):
+ hop.exception_cannot_occur()
assert isinstance(hop.args_s[0].const, ootype.Instance)
assert isinstance(hop.args_s[1], annmodel.SomeOOInstance)
v_inst = hop.inputarg(hop.args_r[1], arg=1)
return hop.genop('oodowncast', [v_inst], resulttype = hop.r_result.lowleveltype)
def rtype_cast_to_object(hop):
+ hop.exception_cannot_occur()
assert isinstance(hop.args_s[0], annmodel.SomeOOStaticMeth) or \
isinstance(hop.args_s[0], annmodel.SomeOOClass) or \
isinstance(hop.args_s[0].ootype, ootype.OOType)
@@ -68,12 +78,14 @@
return hop.genop('cast_to_object', [v_inst], resulttype = hop.r_result.lowleveltype)
def rtype_cast_from_object(hop):
+ hop.exception_cannot_occur()
assert isinstance(hop.args_s[0].const, ootype.OOType)
assert isinstance(hop.args_s[1], annmodel.SomeOOObject)
v_inst = hop.inputarg(hop.args_r[1], arg=1)
return hop.genop('cast_from_object', [v_inst], resulttype = hop.r_result.lowleveltype)
def rtype_builtin_isinstance(hop):
+ hop.exception_cannot_occur()
if hop.s_result.is_constant():
return hop.inputconst(ootype.Bool, hop.s_result.const)
@@ -99,6 +111,7 @@
return ootype.subclassof(c1, class_)
def rtype_instantiate(hop):
+ hop.exception_cannot_occur()
if hop.args_s[0].is_constant():
## INSTANCE = hop.s_result.rtyper_makerepr(hop.rtyper).lowleveltype
## v_instance = hop.inputconst(ootype.Void, INSTANCE)
diff --git a/pypy/rpython/ootypesystem/rstr.py b/pypy/rpython/ootypesystem/rstr.py
--- a/pypy/rpython/ootypesystem/rstr.py
+++ b/pypy/rpython/ootypesystem/rstr.py
@@ -222,6 +222,10 @@
length = s.ll_strlen()
if stop > length:
stop = length
+ # If start > stop, return an empty string. This can happen if the start
+ # is greater than the length of the string.
+ if start > stop:
+ start = stop
return s.ll_substring(start, stop-start)
def ll_stringslice_minusone(s):
diff --git a/pypy/rpython/ootypesystem/rtuple.py b/pypy/rpython/ootypesystem/rtuple.py
--- a/pypy/rpython/ootypesystem/rtuple.py
+++ b/pypy/rpython/ootypesystem/rtuple.py
@@ -39,6 +39,7 @@
RESULT = hop.r_result.lowleveltype
c_resulttype = inputconst(ootype.Void, RESULT)
c_length = inputconst(ootype.Signed, len(self.items_r))
+ hop.exception_is_here()
if isinstance(RESULT, ootype.Array):
v_list = hop.genop('oonewarray', [c_resulttype, c_length], resulttype=RESULT)
else:
diff --git a/pypy/rpython/rbool.py b/pypy/rpython/rbool.py
--- a/pypy/rpython/rbool.py
+++ b/pypy/rpython/rbool.py
@@ -34,6 +34,7 @@
def rtype_float(_, hop):
vlist = hop.inputargs(Float)
+ hop.exception_cannot_occur()
return vlist[0]
#
diff --git a/pypy/rpython/rbuiltin.py b/pypy/rpython/rbuiltin.py
--- a/pypy/rpython/rbuiltin.py
+++ b/pypy/rpython/rbuiltin.py
@@ -111,25 +111,32 @@
raise TyperError("don't know about built-in function %r" % (
self.builtinfunc,))
+ def _call(self, hop2, **kwds_i):
+ bltintyper = self.findbltintyper(hop2.rtyper)
+ hop2.llops._called_exception_is_here_or_cannot_occur = False
+ v_result = bltintyper(hop2, **kwds_i)
+ if not hop2.llops._called_exception_is_here_or_cannot_occur:
+ raise TyperError("missing hop.exception_cannot_occur() or "
+ "hop.exception_is_here() in %s" % bltintyper)
+ return v_result
+
def rtype_simple_call(self, hop):
- bltintyper = self.findbltintyper(hop.rtyper)
hop2 = hop.copy()
hop2.r_s_popfirstarg()
- return bltintyper(hop2)
+ return self._call(hop2)
def rtype_call_args(self, hop):
# calling a built-in function with keyword arguments:
# mostly for rpython.objectmodel.hint()
hop, kwds_i = call_args_expand(hop)
- bltintyper = self.findbltintyper(hop.rtyper)
hop2 = hop.copy()
hop2.r_s_popfirstarg()
hop2.r_s_popfirstarg()
# the RPython-level keyword args are passed with an 'i_' prefix and
# the corresponding value is an *index* in the hop2 arguments,
# to be used with hop.inputarg(arg=..)
- return bltintyper(hop2, **kwds_i)
+ return self._call(hop2, **kwds_i)
class BuiltinMethodRepr(Repr):
@@ -198,6 +205,7 @@
# ____________________________________________________________
def rtype_builtin_bool(hop):
+ # not called any more?
assert hop.nb_args == 1
return hop.args_r[0].rtype_is_true(hop)
@@ -241,6 +249,7 @@
def rtype_builtin_min(hop):
v1, v2 = hop.inputargs(hop.r_result, hop.r_result)
+ hop.exception_cannot_occur()
return hop.gendirectcall(ll_min, v1, v2)
def ll_min(i1, i2):
@@ -250,6 +259,7 @@
def rtype_builtin_max(hop):
v1, v2 = hop.inputargs(hop.r_result, hop.r_result)
+ hop.exception_cannot_occur()
return hop.gendirectcall(ll_max, v1, v2)
def ll_max(i1, i2):
@@ -264,6 +274,7 @@
pass
def rtype_OSError__init__(hop):
+ hop.exception_cannot_occur()
if hop.nb_args == 2:
raise TyperError("OSError() should not be called with "
"a single argument")
@@ -274,6 +285,7 @@
r_self.setfield(v_self, 'errno', v_errno, hop.llops)
def rtype_WindowsError__init__(hop):
+ hop.exception_cannot_occur()
if hop.nb_args == 2:
raise TyperError("WindowsError() should not be called with "
"a single argument")
@@ -442,6 +454,7 @@
assert hop.args_s[0].is_constant()
TGT = hop.args_s[0].const
v_type, v_value = hop.inputargs(lltype.Void, hop.args_r[1])
+ hop.exception_cannot_occur()
return gen_cast(hop.llops, TGT, v_value)
_cast_to_Signed = {
@@ -523,11 +536,13 @@
def rtype_identity_hash(hop):
vlist = hop.inputargs(hop.args_r[0])
+ hop.exception_cannot_occur()
return hop.genop('gc_identityhash', vlist, resulttype=lltype.Signed)
def rtype_runtime_type_info(hop):
assert isinstance(hop.args_r[0], rptr.PtrRepr)
vlist = hop.inputargs(hop.args_r[0])
+ hop.exception_cannot_occur()
return hop.genop('runtime_type_info', vlist,
resulttype = hop.r_result.lowleveltype)
@@ -558,6 +573,7 @@
def rtype_raw_malloc(hop):
v_size, = hop.inputargs(lltype.Signed)
+ hop.exception_cannot_occur()
return hop.genop('raw_malloc', [v_size], resulttype=llmemory.Address)
def rtype_raw_malloc_usage(hop):
@@ -586,6 +602,7 @@
if s_addr.is_null_address():
raise TyperError("raw_memclear(x, n) where x is the constant NULL")
v_list = hop.inputargs(llmemory.Address, lltype.Signed)
+ hop.exception_cannot_occur()
return hop.genop('raw_memclear', v_list)
BUILTIN_TYPER[llmemory.raw_malloc] = rtype_raw_malloc
@@ -596,6 +613,7 @@
def rtype_offsetof(hop):
TYPE, field = hop.inputargs(lltype.Void, lltype.Void)
+ hop.exception_cannot_occur()
return hop.inputconst(lltype.Signed,
llmemory.offsetof(TYPE.value, field.value))
@@ -605,6 +623,7 @@
# non-gc objects
def rtype_free_non_gc_object(hop):
+ hop.exception_cannot_occur()
vinst, = hop.inputargs(hop.args_r[0])
flavor = hop.args_r[0].gcflavor
assert flavor != 'gc'
@@ -617,6 +636,7 @@
# keepalive_until_here
def rtype_keepalive_until_here(hop):
+ hop.exception_cannot_occur()
for v in hop.args_v:
hop.genop('keepalive', [v], resulttype=lltype.Void)
return hop.inputconst(lltype.Void, None)
diff --git a/pypy/rpython/rfloat.py b/pypy/rpython/rfloat.py
--- a/pypy/rpython/rfloat.py
+++ b/pypy/rpython/rfloat.py
@@ -136,7 +136,10 @@
hop.exception_cannot_occur()
return hop.genop('cast_float_to_int', vlist, resulttype=Signed)
- rtype_float = rtype_pos
+ def rtype_float(_, hop):
+ vlist = hop.inputargs(Float)
+ hop.exception_cannot_occur()
+ return vlist[0]
# version picked by specialisation based on which
# type system rtyping is using, from <type_system>.ll_str module
diff --git a/pypy/rpython/rint.py b/pypy/rpython/rint.py
--- a/pypy/rpython/rint.py
+++ b/pypy/rpython/rint.py
@@ -310,6 +310,8 @@
if hop.has_implicit_exception(ValueError):
hop.exception_is_here()
hop.gendirectcall(ll_check_chr, vlist[0])
+ else:
+ hop.exception_cannot_occur()
return hop.genop('cast_int_to_char', vlist, resulttype=Char)
def rtype_unichr(_, hop):
@@ -317,6 +319,8 @@
if hop.has_implicit_exception(ValueError):
hop.exception_is_here()
hop.gendirectcall(ll_check_unichr, vlist[0])
+ else:
+ hop.exception_cannot_occur()
return hop.genop('cast_int_to_unichar', vlist, resulttype=UniChar)
def rtype_is_true(self, hop):
diff --git a/pypy/rpython/rlist.py b/pypy/rpython/rlist.py
--- a/pypy/rpython/rlist.py
+++ b/pypy/rpython/rlist.py
@@ -115,6 +115,7 @@
def rtype_bltn_list(self, hop):
v_lst = hop.inputarg(self, 0)
cRESLIST = hop.inputconst(Void, hop.r_result.LIST)
+ hop.exception_is_here()
return hop.gendirectcall(ll_copy, cRESLIST, v_lst)
def rtype_len(self, hop):
diff --git a/pypy/rpython/rrange.py b/pypy/rpython/rrange.py
--- a/pypy/rpython/rrange.py
+++ b/pypy/rpython/rrange.py
@@ -107,8 +107,10 @@
if isinstance(hop.r_result, AbstractRangeRepr):
if hop.r_result.step != 0:
c_rng = hop.inputconst(Void, hop.r_result.RANGE)
+ hop.exception_is_here()
return hop.gendirectcall(hop.r_result.ll_newrange, c_rng, vstart, vstop)
else:
+ hop.exception_is_here()
return hop.gendirectcall(hop.r_result.ll_newrangest, vstart, vstop, vstep)
else:
# cannot build a RANGE object, needs a real list
@@ -117,6 +119,7 @@
if isinstance(ITEMTYPE, Ptr):
ITEMTYPE = ITEMTYPE.TO
cLIST = hop.inputconst(Void, ITEMTYPE)
+ hop.exception_is_here()
return hop.gendirectcall(ll_range2list, cLIST, vstart, vstop, vstep)
rtype_builtin_xrange = rtype_builtin_range
@@ -212,4 +215,5 @@
[v_index, v_item])
def rtype_builtin_enumerate(hop):
+ hop.exception_cannot_occur()
return hop.r_result.r_baseiter.newiter(hop)
diff --git a/pypy/rpython/rstr.py b/pypy/rpython/rstr.py
--- a/pypy/rpython/rstr.py
+++ b/pypy/rpython/rstr.py
@@ -288,6 +288,8 @@
def rtype_unicode(self, hop):
if hop.args_s[0].is_constant():
+ # conversion errors occur during annotation, so they cannot occur any more:
+ hop.exception_cannot_occur()
return hop.inputconst(hop.r_result, hop.s_result.const)
repr = hop.args_r[0].repr
v_str = hop.inputarg(repr, 0)
diff --git a/pypy/rpython/rtyper.py b/pypy/rpython/rtyper.py
--- a/pypy/rpython/rtyper.py
+++ b/pypy/rpython/rtyper.py
@@ -846,6 +846,7 @@
return result
def exception_is_here(self):
+ self.llops._called_exception_is_here_or_cannot_occur = True
if self.llops.llop_raising_exceptions is not None:
raise TyperError("cannot catch an exception at more than one llop")
if not self.exceptionlinks:
@@ -861,6 +862,7 @@
self.llops.llop_raising_exceptions = len(self.llops)
def exception_cannot_occur(self):
+ self.llops._called_exception_is_here_or_cannot_occur = True
if self.llops.llop_raising_exceptions is not None:
raise TyperError("cannot catch an exception at more than one llop")
if not self.exceptionlinks:
diff --git a/pypy/rpython/test/test_extregistry.py b/pypy/rpython/test/test_extregistry.py
--- a/pypy/rpython/test/test_extregistry.py
+++ b/pypy/rpython/test/test_extregistry.py
@@ -114,6 +114,7 @@
_about_ = dummy_func
s_result_annotation = annmodel.SomeInteger()
def specialize_call(self, hop):
+ hop.exception_cannot_occur()
return hop.inputconst(lltype.Signed, 42)
def func():
diff --git a/pypy/rpython/test/test_rclass.py b/pypy/rpython/test/test_rclass.py
--- a/pypy/rpython/test/test_rclass.py
+++ b/pypy/rpython/test/test_rclass.py
@@ -1085,6 +1085,7 @@
return annmodel.SomeInteger()
def specialize_call(self, hop):
[v_instance] = hop.inputargs(*hop.args_r)
+ hop.exception_is_here()
return hop.gendirectcall(ll_my_gethash, v_instance)
def f(n):
diff --git a/pypy/rpython/test/test_rstr.py b/pypy/rpython/test/test_rstr.py
--- a/pypy/rpython/test/test_rstr.py
+++ b/pypy/rpython/test/test_rstr.py
@@ -477,7 +477,11 @@
s1 = s[:3]
s2 = s[3:]
s3 = s[3:10]
- return s1+s2 == s and s2+s1 == const('lohel') and s1+s3 == s
+ s4 = s[42:44]
+ return (s1+s2 == s and
+ s2+s1 == const('lohel') and
+ s1+s3 == s and
+ s4 == const(''))
res = self.interpret(fn, [0])
assert res
diff --git a/pypy/test_all.py b/pypy/test_all.py
old mode 100755
new mode 100644
--- a/pypy/test_all.py
+++ b/pypy/test_all.py
@@ -11,11 +11,12 @@
"""
import sys, os
-if len(sys.argv) == 1 and os.path.dirname(sys.argv[0]) in '.':
- print >> sys.stderr, __doc__
- sys.exit(2)
if __name__ == '__main__':
+ if len(sys.argv) == 1 and os.path.dirname(sys.argv[0]) in '.':
+ print >> sys.stderr, __doc__
+ sys.exit(2)
+
import tool.autopath
import pytest
import pytest_cov
diff --git a/pypy/tool/clean_old_branches.py b/pypy/tool/clean_old_branches.py
--- a/pypy/tool/clean_old_branches.py
+++ b/pypy/tool/clean_old_branches.py
@@ -38,7 +38,7 @@
closed_heads.reverse()
for head, branch in closed_heads:
- print '\t', branch
+ print '\t', head, '\t', branch
print
print 'The branches listed above will be merged to "closed-branches".'
print 'You need to run this script in a clean working copy where you'
diff --git a/pypy/tool/jitlogparser/test/__init__.py b/pypy/tool/jitlogparser/test/__init__.py
new file mode 100644
diff --git a/pypy/tool/pytest/test/test_pytestsupport.py b/pypy/tool/pytest/test/test_pytestsupport.py
--- a/pypy/tool/pytest/test/test_pytestsupport.py
+++ b/pypy/tool/pytest/test/test_pytestsupport.py
@@ -165,7 +165,10 @@
def test_one(self): exec 'blow'
""")
- ev, = sorter.getreports("pytest_runtest_logreport")
+ reports = sorter.getreports("pytest_runtest_logreport")
+ setup, ev, teardown = reports
assert ev.failed
+ assert setup.passed
+ assert teardown.passed
assert 'NameError' in ev.longrepr.reprcrash.message
assert 'blow' in ev.longrepr.reprcrash.message
diff --git a/pypy/translator/c/gcc/test/test_asmgcroot.py b/pypy/translator/c/gcc/test/test_asmgcroot.py
--- a/pypy/translator/c/gcc/test/test_asmgcroot.py
+++ b/pypy/translator/c/gcc/test/test_asmgcroot.py
@@ -7,10 +7,17 @@
from pypy import conftest
from pypy.translator.tool.cbuild import ExternalCompilationInfo
from pypy.translator.platform import platform as compiler
+from pypy.rlib.rarithmetic import is_emulated_long
from pypy.rpython.lltypesystem import lltype, rffi
from pypy.rlib.entrypoint import entrypoint, secondary_entrypoints
from pypy.rpython.lltypesystem.lloperation import llop
+_MSVC = compiler.name == "msvc"
+_MINGW = compiler.name == "mingw32"
+_WIN32 = _MSVC or _MINGW
+_WIN64 = _WIN32 and is_emulated_long
+# XXX get rid of 'is_emulated_long' and have a real config here.
+
class AbstractTestAsmGCRoot:
# the asmgcroot gc transformer doesn't generate gc_reload_possibly_moved
# instructions:
@@ -18,8 +25,8 @@
@classmethod
def make_config(cls):
- if compiler.name == "msvc":
- py.test.skip("all asmgcroot tests disabled for MSVC")
+ if _MSVC and _WIN64:
+ py.test.skip("all asmgcroot tests disabled for MSVC X64")
from pypy.config.pypyoption import get_pypy_config
config = get_pypy_config(translating=True)
config.translation.gc = cls.gcpolicy
diff --git a/pypy/translator/c/gcc/trackgcroot.py b/pypy/translator/c/gcc/trackgcroot.py
--- a/pypy/translator/c/gcc/trackgcroot.py
+++ b/pypy/translator/c/gcc/trackgcroot.py
@@ -847,6 +847,10 @@
if sources:
target, = sources
+ if target.endswith('@PLT'):
+ # In -fPIC mode, all functions calls have this suffix
+ target = target[:-4]
+
if target in self.FUNCTIONS_NOT_RETURNING:
return [InsnStop(target)]
if self.format == 'mingw32' and target == '__alloca':
@@ -1137,7 +1141,7 @@
r_jump_rel_label = re.compile(r"\tj\w+\s+"+"(\d+)f"+"\s*$")
r_unaryinsn_star= re.compile(r"\t[a-z]\w*\s+[*]("+OPERAND+")\s*$")
- r_jmptable_item = re.compile(r"\t.quad\t"+LABEL+"(-\"[A-Za-z0-9$]+\")?\s*$")
+ r_jmptable_item = re.compile(r"\t.(?:quad|long)\t"+LABEL+"(-\"[A-Za-z0-9$]+\"|-"+LABEL+")?\s*$")
r_jmptable_end = re.compile(r"\t.text|\t.section\s+.text|\t\.align|"+LABEL)
r_gcroot_marker = re.compile(r"\t/[*] GCROOT ("+LOCALVARFP+") [*]/")
diff --git a/pypy/translator/c/src/float.h b/pypy/translator/c/src/float.h
--- a/pypy/translator/c/src/float.h
+++ b/pypy/translator/c/src/float.h
@@ -43,5 +43,6 @@
#define OP_CAST_FLOAT_TO_LONGLONG(x,r) r = (long long)(x)
#define OP_CAST_FLOAT_TO_ULONGLONG(x,r) r = (unsigned long long)(x)
#define OP_CONVERT_FLOAT_BYTES_TO_LONGLONG(x,r) memcpy(&r, &x, sizeof(double))
+#define OP_CONVERT_LONGLONG_BYTES_TO_FLOAT(x,r) memcpy(&r, &x, sizeof(long long))
#endif
diff --git a/pypy/translator/c/test/test_extfunc.py b/pypy/translator/c/test/test_extfunc.py
--- a/pypy/translator/c/test/test_extfunc.py
+++ b/pypy/translator/c/test/test_extfunc.py
@@ -919,4 +919,5 @@
t, cbuilder = self.compile(does_stuff)
data = cbuilder.cmdexec('')
res = os.nice(0) + 3
+ if res > 19: res = 19 # xxx Linux specific, probably
assert data.startswith('os.nice returned %d\n' % res)
diff --git a/pypy/translator/cli/dotnet.py b/pypy/translator/cli/dotnet.py
--- a/pypy/translator/cli/dotnet.py
+++ b/pypy/translator/cli/dotnet.py
@@ -459,6 +459,7 @@
def specialize_call(self, hop):
+ hop.exception_cannot_occur()
assert hop.args_s[1].is_constant()
TYPE = hop.args_s[1].const
v_obj = hop.inputarg(hop.args_r[0], arg=0)
@@ -507,6 +508,7 @@
def specialize_call(self, hop):
v_obj, = hop.inputargs(*hop.args_r)
+ hop.exception_cannot_occur()
return hop.genop('same_as', [v_obj], hop.r_result.lowleveltype)
def new_array(type, length):
@@ -608,6 +610,7 @@
def specialize_call(self, hop):
v_type, = hop.inputargs(*hop.args_r)
+ hop.exception_cannot_occur()
return hop.genop('cli_typeof', [v_type], hop.r_result.lowleveltype)
@@ -626,6 +629,7 @@
v_obj, = hop.inputargs(*hop.args_r)
methodname = hop.args_r[0].methodname
c_methodname = hop.inputconst(ootype.Void, methodname)
+ hop.exception_cannot_occur()
return hop.genop('cli_eventhandler', [v_obj, c_methodname], hop.r_result.lowleveltype)
@@ -647,6 +651,7 @@
def specialize_call(self, hop):
assert isinstance(hop.args_s[0], annmodel.SomeOOInstance)
v_inst = hop.inputarg(hop.args_r[0], arg=0)
+ hop.exception_cannot_occur()
return hop.genop('oodowncast', [v_inst], resulttype = hop.r_result.lowleveltype)
@@ -668,6 +673,7 @@
def specialize_call(self, hop):
assert isinstance(hop.args_s[0], annmodel.SomeOOInstance)
v_inst = hop.inputarg(hop.args_r[0], arg=0)
+ hop.exception_cannot_occur()
return hop.genop('ooupcast', [v_inst], resulttype = hop.r_result.lowleveltype)
@@ -701,6 +707,7 @@
def specialize_call(self, hop):
v_obj = hop.inputarg(hop.args_r[0], arg=0)
+ hop.exception_cannot_occur()
return hop.genop('oodowncast', [v_obj], hop.r_result.lowleveltype)
diff --git a/pypy/translator/jvm/opcodes.py b/pypy/translator/jvm/opcodes.py
--- a/pypy/translator/jvm/opcodes.py
+++ b/pypy/translator/jvm/opcodes.py
@@ -243,4 +243,5 @@
'force_cast': [PushAllArgs, CastPrimitive, StoreResult],
'convert_float_bytes_to_longlong': jvm.PYPYDOUBLEBYTESTOLONG,
+ 'convert_longlong_bytes_to_float': jvm.PYPYLONGBYTESTODOUBLE,
})
diff --git a/pypy/translator/jvm/typesystem.py b/pypy/translator/jvm/typesystem.py
--- a/pypy/translator/jvm/typesystem.py
+++ b/pypy/translator/jvm/typesystem.py
@@ -942,6 +942,7 @@
PYPYULONGTODOUBLE = Method.s(jPyPy, 'ulong_to_double', (jLong,), jDouble)
PYPYLONGBITWISENEGATE = Method.v(jPyPy, 'long_bitwise_negate', (jLong,), jLong)
PYPYDOUBLEBYTESTOLONG = Method.v(jPyPy, 'pypy__float2longlong', (jDouble,), jLong)
+PYPYLONGBYTESTODOUBLE = Method.v(jPyPy, 'pypy__longlong2float', (jLong,), jDouble)
PYPYSTRTOINT = Method.v(jPyPy, 'str_to_int', (jString,), jInt)
PYPYSTRTOUINT = Method.v(jPyPy, 'str_to_uint', (jString,), jInt)
PYPYSTRTOLONG = Method.v(jPyPy, 'str_to_long', (jString,), jLong)
diff --git a/testrunner/runner.py b/testrunner/runner.py
--- a/testrunner/runner.py
+++ b/testrunner/runner.py
@@ -110,7 +110,10 @@
do_dry_run=False, timeout=None,
_win32=(sys.platform=='win32')):
args = interp + test_driver
- args += ['-p', 'resultlog', '--resultlog=%s' % logfname, test]
+ args += ['-p', 'resultlog',
+ '--resultlog=%s' % logfname,
+ '--junitxml=%s.junit' % logfname,
+ test]
args = map(str, args)
interp0 = args[0]
diff --git a/testrunner/scratchbox_runner.py b/testrunner/scratchbox_runner.py
--- a/testrunner/scratchbox_runner.py
+++ b/testrunner/scratchbox_runner.py
@@ -14,14 +14,14 @@
def dry_run_scratchbox(args, cwd, out, timeout=None):
return dry_run(args_for_scratchbox(cwd, args), cwd, out, timeout)
-import runner
-# XXX hack hack hack
-dry_run = runner.dry_run
-run = runner.run
+if __name__ == '__main__':
+ import runner
+ # XXX hack hack hack
+ dry_run = runner.dry_run
+ run = runner.run
-runner.dry_run = dry_run_scratchbox
-runner.run = run_scratchbox
+ runner.dry_run = dry_run_scratchbox
+ runner.run = run_scratchbox
-if __name__ == '__main__':
import sys
runner.main(sys.argv)
diff --git a/testrunner/test/conftest.py b/testrunner/test/conftest.py
new file mode 100644
--- /dev/null
+++ b/testrunner/test/conftest.py
@@ -0,0 +1,6 @@
+
+def pytest_runtest_makereport(__multicall__, item):
+ report = __multicall__.execute()
+ if 'out' in item.funcargs:
+ report.sections.append(('out', item.funcargs['out'].read()))
+ return report
diff --git a/testrunner/test/test_runner.py b/testrunner/test/test_runner.py
--- a/testrunner/test/test_runner.py
+++ b/testrunner/test/test_runner.py
@@ -53,49 +53,44 @@
assert not should_report_failure("F Def\n. Ghi\n. Jkl\n")
+
class TestRunHelper(object):
+ def pytest_funcarg__out(self, request):
+ tmpdir = request.getfuncargvalue('tmpdir')
+ return tmpdir.ensure('out')
- def setup_method(self, meth):
- h, self.fn = tempfile.mkstemp()
- os.close(h)
+ def test_run(self, out):
+ res = runner.run([sys.executable, "-c", "print 42"], '.', out)
+ assert res == 0
+ assert out.read() == "42\n"
- def teardown_method(self, meth):
- os.unlink(self.fn)
-
- def test_run(self):
- res = runner.run([sys.executable, "-c", "print 42"], '.',
- py.path.local(self.fn))
- assert res == 0
- out = py.path.local(self.fn).read('r')
- assert out == "42\n"
-
- def test_error(self):
- res = runner.run([sys.executable, "-c", "import sys; sys.exit(3)"], '.', py.path.local(self.fn))
+ def test_error(self, out):
+ res = runner.run([sys.executable, "-c", "import sys; sys.exit(3)"], '.', out)
assert res == 3
- def test_signal(self):
+ def test_signal(self, out):
if sys.platform == 'win32':
py.test.skip("no death by signal on windows")
- res = runner.run([sys.executable, "-c", "import os; os.kill(os.getpid(), 9)"], '.', py.path.local(self.fn))
+ res = runner.run([sys.executable, "-c", "import os; os.kill(os.getpid(), 9)"], '.', out)
assert res == -9
- def test_timeout(self):
- res = runner.run([sys.executable, "-c", "while True: pass"], '.', py.path.local(self.fn), timeout=3)
+ def test_timeout(self, out):
+ res = runner.run([sys.executable, "-c", "while True: pass"], '.', out, timeout=3)
assert res == -999
- def test_timeout_lock(self):
- res = runner.run([sys.executable, "-c", "import threading; l=threading.Lock(); l.acquire(); l.acquire()"], '.', py.path.local(self.fn), timeout=3)
+ def test_timeout_lock(self, out):
+ res = runner.run([sys.executable, "-c", "import threading; l=threading.Lock(); l.acquire(); l.acquire()"], '.', out, timeout=3)
assert res == -999
- def test_timeout_syscall(self):
- res = runner.run([sys.executable, "-c", "import socket; s=s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM); s.bind(('', 0)); s.recv(1000)"], '.', py.path.local(self.fn), timeout=3)
+ def test_timeout_syscall(self, out):
+ res = runner.run([sys.executable, "-c", "import socket; s=s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM); s.bind(('', 0)); s.recv(1000)"], '.', out, timeout=3)
assert res == -999
- def test_timeout_success(self):
+ def test_timeout_success(self, out):
res = runner.run([sys.executable, "-c", "print 42"], '.',
- py.path.local(self.fn), timeout=2)
+ out, timeout=2)
assert res == 0
- out = py.path.local(self.fn).read('r')
+ out = out.read()
assert out == "42\n"
@@ -122,7 +117,10 @@
expected = ['INTERP', 'IARG',
'driver', 'darg',
+ '-p', 'resultlog',
'--resultlog=LOGFILE',
+ '--junitxml=LOGFILE.junit',
+
'test_one']
assert self.called == (expected, '/wd', 'out', 'secs')
@@ -138,9 +136,11 @@
expected = ['/wd' + os.sep + './INTERP', 'IARG',
'driver', 'darg',
+ '-p', 'resultlog',
'--resultlog=LOGFILE',
+ '--junitxml=LOGFILE.junit',
'test_one']
-
+ assert self.called[0] == expected
assert self.called == (expected, '/wd', 'out', 'secs')
assert res == 0
@@ -251,7 +251,7 @@
assert '\n' in log
log_lines = log.splitlines()
- assert log_lines[0] == ". test_normal/test_example.py:test_one"
+ assert ". test_normal/test_example.py::test_one" in log_lines
nfailures = 0
noutcomes = 0
for line in log_lines:
More information about the pypy-commit
mailing list