[pypy-commit] pypy fix-result-types: hg merge default
rlamy
noreply at buildbot.pypy.org
Sat May 9 19:16:04 CEST 2015
Author: Ronan Lamy <ronan.lamy at gmail.com>
Branch: fix-result-types
Changeset: r77255:50cf56895737
Date: 2015-05-09 18:16 +0100
http://bitbucket.org/pypy/pypy/changeset/50cf56895737/
Log: hg merge default
diff --git a/lib_pypy/_functools.py b/lib_pypy/_functools.py
--- a/lib_pypy/_functools.py
+++ b/lib_pypy/_functools.py
@@ -8,16 +8,16 @@
partial(func, *args, **keywords) - new function with partial application
of the given arguments and keywords.
"""
-
- def __init__(self, *args, **keywords):
- if not args:
- raise TypeError('__init__() takes at least 2 arguments (1 given)')
- func, args = args[0], args[1:]
+ def __init__(*args, **keywords):
+ if len(args) < 2:
+ raise TypeError('__init__() takes at least 2 arguments (%d given)'
+ % len(args))
+ self, func, args = args[0], args[1], args[2:]
if not callable(func):
raise TypeError("the first argument must be callable")
self._func = func
self._args = args
- self._keywords = keywords or None
+ self._keywords = keywords
def __delattr__(self, key):
if key == '__dict__':
@@ -37,19 +37,22 @@
return self._keywords
def __call__(self, *fargs, **fkeywords):
- if self.keywords is not None:
- fkeywords = dict(self.keywords, **fkeywords)
- return self.func(*(self.args + fargs), **fkeywords)
+ if self._keywords:
+ fkeywords = dict(self._keywords, **fkeywords)
+ return self._func(*(self._args + fargs), **fkeywords)
def __reduce__(self):
d = dict((k, v) for k, v in self.__dict__.iteritems() if k not in
('_func', '_args', '_keywords'))
if len(d) == 0:
d = None
- return (type(self), (self.func,),
- (self.func, self.args, self.keywords, d))
+ return (type(self), (self._func,),
+ (self._func, self._args, self._keywords, d))
def __setstate__(self, state):
- self._func, self._args, self._keywords, d = state
+ func, args, keywords, d = state
if d is not None:
self.__dict__.update(d)
+ self._func = func
+ self._args = args
+ self._keywords = keywords
diff --git a/lib_pypy/gdbm.py b/lib_pypy/gdbm.py
--- a/lib_pypy/gdbm.py
+++ b/lib_pypy/gdbm.py
@@ -1,4 +1,6 @@
import cffi, os, sys
+import thread
+_lock = thread.allocate_lock()
ffi = cffi.FFI()
ffi.cdef('''
@@ -40,6 +42,7 @@
try:
verify_code = '''
+ #include <stdlib.h>
#include "gdbm.h"
static datum pygdbm_fetch(GDBM_FILE gdbm_file, char *dptr, int dsize) {
@@ -86,101 +89,121 @@
return {'dptr': ffi.new("char[]", key), 'dsize': len(key)}
class gdbm(object):
- ll_dbm = None
+ __ll_dbm = None
+
+ # All public methods need to acquire the lock; all private methods
+ # assume the lock is already held. Thus public methods cannot call
+ # other public methods.
def __init__(self, filename, iflags, mode):
- res = lib.gdbm_open(filename, 0, iflags, mode, ffi.NULL)
- self.size = -1
- if not res:
- self._raise_from_errno()
- self.ll_dbm = res
+ with _lock:
+ res = lib.gdbm_open(filename, 0, iflags, mode, ffi.NULL)
+ self.__size = -1
+ if not res:
+ self.__raise_from_errno()
+ self.__ll_dbm = res
def close(self):
- if self.ll_dbm:
- lib.gdbm_close(self.ll_dbm)
- self.ll_dbm = None
+ with _lock:
+ if self.__ll_dbm:
+ lib.gdbm_close(self.__ll_dbm)
+ self.__ll_dbm = None
- def _raise_from_errno(self):
+ def __raise_from_errno(self):
if ffi.errno:
raise error(ffi.errno, os.strerror(ffi.errno))
raise error(lib.gdbm_errno, lib.gdbm_strerror(lib.gdbm_errno))
def __len__(self):
- if self.size < 0:
- self.size = len(self.keys())
- return self.size
+ with _lock:
+ if self.__size < 0:
+ self.__size = len(self.__keys())
+ return self.__size
def __setitem__(self, key, value):
- self._check_closed()
- self._size = -1
- r = lib.gdbm_store(self.ll_dbm, _fromstr(key), _fromstr(value),
- lib.GDBM_REPLACE)
- if r < 0:
- self._raise_from_errno()
+ with _lock:
+ self.__check_closed()
+ self.__size = -1
+ r = lib.gdbm_store(self.__ll_dbm, _fromstr(key), _fromstr(value),
+ lib.GDBM_REPLACE)
+ if r < 0:
+ self.__raise_from_errno()
def __delitem__(self, key):
- self._check_closed()
- res = lib.gdbm_delete(self.ll_dbm, _fromstr(key))
- if res < 0:
- raise KeyError(key)
+ with _lock:
+ self.__check_closed()
+ self.__size = -1
+ res = lib.gdbm_delete(self.__ll_dbm, _fromstr(key))
+ if res < 0:
+ raise KeyError(key)
def __contains__(self, key):
- self._check_closed()
- key = _checkstr(key)
- return lib.pygdbm_exists(self.ll_dbm, key, len(key))
+ with _lock:
+ self.__check_closed()
+ key = _checkstr(key)
+ return lib.pygdbm_exists(self.__ll_dbm, key, len(key))
has_key = __contains__
def __getitem__(self, key):
- self._check_closed()
- key = _checkstr(key)
- drec = lib.pygdbm_fetch(self.ll_dbm, key, len(key))
- if not drec.dptr:
- raise KeyError(key)
- res = str(ffi.buffer(drec.dptr, drec.dsize))
- lib.free(drec.dptr)
- return res
+ with _lock:
+ self.__check_closed()
+ key = _checkstr(key)
+ drec = lib.pygdbm_fetch(self.__ll_dbm, key, len(key))
+ if not drec.dptr:
+ raise KeyError(key)
+ res = str(ffi.buffer(drec.dptr, drec.dsize))
+ lib.free(drec.dptr)
+ return res
- def keys(self):
- self._check_closed()
+ def __keys(self):
+ self.__check_closed()
l = []
- key = lib.gdbm_firstkey(self.ll_dbm)
+ key = lib.gdbm_firstkey(self.__ll_dbm)
while key.dptr:
l.append(str(ffi.buffer(key.dptr, key.dsize)))
- nextkey = lib.gdbm_nextkey(self.ll_dbm, key)
+ nextkey = lib.gdbm_nextkey(self.__ll_dbm, key)
lib.free(key.dptr)
key = nextkey
return l
+ def keys(self):
+ with _lock:
+ return self.__keys()
+
def firstkey(self):
- self._check_closed()
- key = lib.gdbm_firstkey(self.ll_dbm)
- if key.dptr:
- res = str(ffi.buffer(key.dptr, key.dsize))
- lib.free(key.dptr)
- return res
+ with _lock:
+ self.__check_closed()
+ key = lib.gdbm_firstkey(self.__ll_dbm)
+ if key.dptr:
+ res = str(ffi.buffer(key.dptr, key.dsize))
+ lib.free(key.dptr)
+ return res
def nextkey(self, key):
- self._check_closed()
- key = lib.gdbm_nextkey(self.ll_dbm, _fromstr(key))
- if key.dptr:
- res = str(ffi.buffer(key.dptr, key.dsize))
- lib.free(key.dptr)
- return res
+ with _lock:
+ self.__check_closed()
+ key = lib.gdbm_nextkey(self.__ll_dbm, _fromstr(key))
+ if key.dptr:
+ res = str(ffi.buffer(key.dptr, key.dsize))
+ lib.free(key.dptr)
+ return res
def reorganize(self):
- self._check_closed()
- if lib.gdbm_reorganize(self.ll_dbm) < 0:
- self._raise_from_errno()
+ with _lock:
+ self.__check_closed()
+ if lib.gdbm_reorganize(self.__ll_dbm) < 0:
+ self.__raise_from_errno()
- def _check_closed(self):
- if not self.ll_dbm:
+ def __check_closed(self):
+ if not self.__ll_dbm:
raise error(0, "GDBM object has already been closed")
__del__ = close
def sync(self):
- self._check_closed()
- lib.gdbm_sync(self.ll_dbm)
+ with _lock:
+ self.__check_closed()
+ lib.gdbm_sync(self.__ll_dbm)
def open(filename, flags='r', mode=0666):
if flags[0] == 'r':
diff --git a/pypy/doc/whatsnew-head.rst b/pypy/doc/whatsnew-head.rst
--- a/pypy/doc/whatsnew-head.rst
+++ b/pypy/doc/whatsnew-head.rst
@@ -74,3 +74,10 @@
.. branch: jit_hint_docs
Add more detail to @jit.elidable and @jit.promote in rpython/rlib/jit.py
+
+.. branch: remove-frame-debug-attrs
+Remove the debug attributes from frames only used for tracing and replace
+them with a debug object that is created on-demand
+
+.. branch: can_cast
+Implement np.can_cast, np.min_scalar_type and missing dtype comparison operations.
diff --git a/pypy/interpreter/executioncontext.py b/pypy/interpreter/executioncontext.py
--- a/pypy/interpreter/executioncontext.py
+++ b/pypy/interpreter/executioncontext.py
@@ -288,7 +288,6 @@
# field of all frames, during the loop below.)
frame = self.gettopframe_nohidden()
while frame:
- frame.getorcreatedebug().f_lineno = frame.get_last_lineno()
if is_being_profiled:
frame.getorcreatedebug().is_being_profiled = True
frame = self.getnextframe_nohidden(frame)
diff --git a/pypy/interpreter/generator.py b/pypy/interpreter/generator.py
--- a/pypy/interpreter/generator.py
+++ b/pypy/interpreter/generator.py
@@ -97,7 +97,7 @@
self.frame = None
raise
# if the frame is now marked as finished, it was RETURNed from
- if frame.frame_finished_execution():
+ if frame.frame_finished_execution:
self.frame = None
raise OperationError(space.w_StopIteration, space.w_None)
else:
@@ -149,7 +149,7 @@
raise OperationError(space.w_RuntimeError, space.wrap(msg))
def descr_gi_frame(self, space):
- if self.frame is not None and not self.frame.frame_finished_execution():
+ if self.frame is not None and not self.frame.frame_finished_execution:
return self.frame
else:
return space.w_None
@@ -193,7 +193,7 @@
raise
break
# if the frame is now marked as finished, it was RETURNed from
- if frame.frame_finished_execution():
+ if frame.frame_finished_execution:
break
results.append(w_result) # YIELDed
finally:
diff --git a/pypy/interpreter/pyframe.py b/pypy/interpreter/pyframe.py
--- a/pypy/interpreter/pyframe.py
+++ b/pypy/interpreter/pyframe.py
@@ -34,6 +34,9 @@
is_being_profiled = False
w_locals = None
+ def __init__(self, pycode):
+ self.f_lineno = pycode.co_firstlineno
+
class PyFrame(W_Root):
"""Represents a frame for a regular Python function
that needs to be interpreted.
@@ -56,6 +59,7 @@
__metaclass__ = extendabletype
+ frame_finished_execution = False
last_instr = -1
last_exception = None
f_backref = jit.vref_None
@@ -105,7 +109,7 @@
def getorcreatedebug(self):
if self.debugdata is None:
- self.debugdata = FrameDebugData()
+ self.debugdata = FrameDebugData(self.pycode)
return self.debugdata
def get_w_f_trace(self):
@@ -126,9 +130,6 @@
return None
return d.w_locals
- def frame_finished_execution(self):
- return self.last_instr == -2
-
def __repr__(self):
# NOT_RPYTHON: useful in tracebacks
return "<%s.%s executing %s at line %s" % (
@@ -446,6 +447,7 @@
w_tb, #
self.w_globals,
w(self.last_instr),
+ w(self.frame_finished_execution),
w(f_lineno),
w_fastlocals,
space.w_None, #XXX placeholder for f_locals
@@ -465,9 +467,9 @@
from pypy.module._pickle_support import maker # helper fns
from pypy.interpreter.pycode import PyCode
from pypy.interpreter.module import Module
- args_w = space.unpackiterable(w_args, 17)
+ args_w = space.unpackiterable(w_args, 18)
w_f_back, w_builtin, w_pycode, w_valuestack, w_blockstack, w_exc_value, w_tb,\
- w_globals, w_last_instr, w_f_lineno, w_fastlocals, w_f_locals, \
+ w_globals, w_last_instr, w_finished, w_f_lineno, w_fastlocals, w_f_locals, \
w_f_trace, w_instr_lb, w_instr_ub, w_instr_prev_plus_one, w_cells = args_w
new_frame = self
@@ -512,6 +514,7 @@
w_exc_value, tb
)
new_frame.last_instr = space.int_w(w_last_instr)
+ new_frame.frame_finished_execution = space.is_true(w_finished)
d = new_frame.getorcreatedebug()
d.f_lineno = space.int_w(w_f_lineno)
fastlocals_w = maker.slp_from_tuple_with_nulls(space, w_fastlocals)
@@ -822,7 +825,7 @@
else:
d = self.getorcreatedebug()
d.w_f_trace = w_trace
- d = self.get_last_lineno()
+ d.f_lineno = self.get_last_lineno()
def fdel_f_trace(self, space):
self.getorcreatedebug().w_f_trace = None
diff --git a/pypy/interpreter/pyopcode.py b/pypy/interpreter/pyopcode.py
--- a/pypy/interpreter/pyopcode.py
+++ b/pypy/interpreter/pyopcode.py
@@ -449,7 +449,7 @@
if (block.handling_mask & unroller_kind) != 0:
return block
block.cleanupstack(self)
- self.last_instr = -2 # makes frame_finished_execution return True
+ self.frame_finished_execution = True # for generators
return None
def unrollstack_and_jump(self, unroller):
@@ -1015,7 +1015,11 @@
if w_import is None:
raise OperationError(space.w_ImportError,
space.wrap("__import__ not found"))
- w_locals = self.getorcreatedebug().w_locals
+ d = self.getdebug()
+ if d is None:
+ w_locals = None
+ else:
+ w_locals = d.w_locals
if w_locals is None: # CPython does this
w_locals = space.w_None
w_modulename = space.wrap(modulename)
diff --git a/pypy/interpreter/typedef.py b/pypy/interpreter/typedef.py
--- a/pypy/interpreter/typedef.py
+++ b/pypy/interpreter/typedef.py
@@ -536,7 +536,7 @@
__objclass__ = GetSetProperty(GetSetProperty.descr_get_objclass),
__doc__ = interp_attrproperty('doc', cls=GetSetProperty),
)
-GetSetProperty.typedef.acceptable_as_base_class = False
+assert not GetSetProperty.typedef.acceptable_as_base_class # no __new__
class Member(W_Root):
@@ -590,7 +590,7 @@
__name__ = interp_attrproperty('name', cls=Member),
__objclass__ = interp_attrproperty_w('w_cls', cls=Member),
)
-Member.typedef.acceptable_as_base_class = False
+assert not Member.typedef.acceptable_as_base_class # no __new__
# ____________________________________________________________
@@ -706,7 +706,7 @@
co_flags = GetSetProperty(fget_co_flags, cls=Code),
co_consts = GetSetProperty(fget_co_consts, cls=Code),
)
-Code.typedef.acceptable_as_base_class = False
+assert not Code.typedef.acceptable_as_base_class # no __new__
BuiltinCode.typedef = TypeDef('builtin-code',
__reduce__ = interp2app(BuiltinCode.descr__reduce__),
@@ -716,7 +716,7 @@
co_flags = GetSetProperty(fget_co_flags, cls=BuiltinCode),
co_consts = GetSetProperty(fget_co_consts, cls=BuiltinCode),
)
-BuiltinCode.typedef.acceptable_as_base_class = False
+assert not BuiltinCode.typedef.acceptable_as_base_class # no __new__
PyCode.typedef = TypeDef('code',
@@ -761,7 +761,7 @@
f_locals = GetSetProperty(PyFrame.fget_getdictscope),
f_globals = interp_attrproperty_w('w_globals', cls=PyFrame),
)
-PyFrame.typedef.acceptable_as_base_class = False
+assert not PyFrame.typedef.acceptable_as_base_class # no __new__
Module.typedef = TypeDef("module",
__new__ = interp2app(Module.descr_module__new__.im_func),
@@ -907,7 +907,7 @@
tb_lineno = GetSetProperty(PyTraceback.descr_tb_lineno),
tb_next = interp_attrproperty('next', cls=PyTraceback),
)
-PyTraceback.typedef.acceptable_as_base_class = False
+assert not PyTraceback.typedef.acceptable_as_base_class # no __new__
GeneratorIterator.typedef = TypeDef("generator",
__repr__ = interp2app(GeneratorIterator.descr__repr__),
@@ -929,7 +929,7 @@
__name__ = GetSetProperty(GeneratorIterator.descr__name__),
__weakref__ = make_weakref_descr(GeneratorIterator),
)
-GeneratorIterator.typedef.acceptable_as_base_class = False
+assert not GeneratorIterator.typedef.acceptable_as_base_class # no __new__
Cell.typedef = TypeDef("cell",
__cmp__ = interp2app(Cell.descr__cmp__),
@@ -939,17 +939,17 @@
__setstate__ = interp2app(Cell.descr__setstate__),
cell_contents= GetSetProperty(Cell.descr__cell_contents, cls=Cell),
)
-Cell.typedef.acceptable_as_base_class = False
+assert not Cell.typedef.acceptable_as_base_class # no __new__
Ellipsis.typedef = TypeDef("Ellipsis",
__repr__ = interp2app(Ellipsis.descr__repr__),
)
-Ellipsis.typedef.acceptable_as_base_class = False
+assert not Ellipsis.typedef.acceptable_as_base_class # no __new__
NotImplemented.typedef = TypeDef("NotImplemented",
__repr__ = interp2app(NotImplemented.descr__repr__),
)
-NotImplemented.typedef.acceptable_as_base_class = False
+assert not NotImplemented.typedef.acceptable_as_base_class # no __new__
SuspendedUnroller.typedef = TypeDef("SuspendedUnroller")
-SuspendedUnroller.typedef.acceptable_as_base_class = False
+assert not SuspendedUnroller.typedef.acceptable_as_base_class # no __new__
diff --git a/pypy/module/_cffi_backend/libraryobj.py b/pypy/module/_cffi_backend/libraryobj.py
--- a/pypy/module/_cffi_backend/libraryobj.py
+++ b/pypy/module/_cffi_backend/libraryobj.py
@@ -91,7 +91,7 @@
read_variable = interp2app(W_Library.read_variable),
write_variable = interp2app(W_Library.write_variable),
)
-W_Library.acceptable_as_base_class = False
+W_Library.typedef.acceptable_as_base_class = False
@unwrap_spec(filename="str_or_None", flags=int)
diff --git a/pypy/module/_hashlib/interp_hashlib.py b/pypy/module/_hashlib/interp_hashlib.py
--- a/pypy/module/_hashlib/interp_hashlib.py
+++ b/pypy/module/_hashlib/interp_hashlib.py
@@ -156,7 +156,7 @@
block_size=GetSetProperty(W_Hash.get_block_size),
name=GetSetProperty(W_Hash.get_name),
)
-W_Hash.acceptable_as_base_class = False
+W_Hash.typedef.acceptable_as_base_class = False
@unwrap_spec(name=str, string='bufferstr')
def new(space, name, string=''):
diff --git a/pypy/module/pypyjit/interp_resop.py b/pypy/module/pypyjit/interp_resop.py
--- a/pypy/module/pypyjit/interp_resop.py
+++ b/pypy/module/pypyjit/interp_resop.py
@@ -245,7 +245,7 @@
WrappedOp.descr_setresult),
offset = interp_attrproperty("offset", cls=WrappedOp),
)
-WrappedOp.acceptable_as_base_class = False
+WrappedOp.typedef.acceptable_as_base_class = False
DebugMergePoint.typedef = TypeDef(
'DebugMergePoint', WrappedOp.typedef,
@@ -266,7 +266,7 @@
doc="Name of the jitdriver 'pypyjit' in the case "
"of the main interpreter loop"),
)
-DebugMergePoint.acceptable_as_base_class = False
+DebugMergePoint.typedef.acceptable_as_base_class = False
class W_JitLoopInfo(W_Root):
@@ -359,7 +359,7 @@
doc="Length of machine code"),
__repr__ = interp2app(W_JitLoopInfo.descr_repr),
)
-W_JitLoopInfo.acceptable_as_base_class = False
+W_JitLoopInfo.typedef.acceptable_as_base_class = False
class W_JitInfoSnapshot(W_Root):
@@ -379,7 +379,7 @@
cls=W_JitInfoSnapshot,
doc="various JIT timers")
)
-W_JitInfoSnapshot.acceptable_as_base_class = False
+W_JitInfoSnapshot.typedef.acceptable_as_base_class = False
def get_stats_snapshot(space):
""" Get the jit status in the specific moment in time. Note that this
diff --git a/pypy/module/test_lib_pypy/test_functools.py b/pypy/module/test_lib_pypy/test_functools.py
--- a/pypy/module/test_lib_pypy/test_functools.py
+++ b/pypy/module/test_lib_pypy/test_functools.py
@@ -6,8 +6,10 @@
def test_partial_reduce():
partial = _functools.partial(test_partial_reduce)
state = partial.__reduce__()
+ d = state[2][2]
assert state == (type(partial), (test_partial_reduce,),
- (test_partial_reduce, (), None, None))
+ (test_partial_reduce, (), d, None))
+ assert d is None or d == {} # both are acceptable
def test_partial_setstate():
partial = _functools.partial(object)
@@ -30,3 +32,15 @@
assert str(exc.value) == "a partial object's dictionary may not be deleted"
with pytest.raises(AttributeError):
del partial.zzz
+
+def test_self_keyword():
+ partial = _functools.partial(dict, self=42)
+ assert partial(other=43) == {'self': 42, 'other': 43}
+
+def test_no_keywords():
+ kw1 = _functools.partial(dict).keywords
+ kw2 = _functools.partial(dict, **{}).keywords
+ # CPython gives different results for these two cases, which is not
+ # possible to emulate in pure Python; see issue #2043
+ assert kw1 == {} or kw1 is None
+ assert kw2 == {}
diff --git a/pypy/module/test_lib_pypy/test_gdbm_extra.py b/pypy/module/test_lib_pypy/test_gdbm_extra.py
new file mode 100644
--- /dev/null
+++ b/pypy/module/test_lib_pypy/test_gdbm_extra.py
@@ -0,0 +1,17 @@
+from __future__ import absolute_import
+import py
+from rpython.tool.udir import udir
+try:
+ from lib_pypy import gdbm
+except ImportError, e:
+ py.test.skip(e)
+
+def test_len():
+ path = str(udir.join('test_gdbm_extra'))
+ g = gdbm.open(path, 'c')
+ g['abc'] = 'def'
+ assert len(g) == 1
+ g['bcd'] = 'efg'
+ assert len(g) == 2
+ del g['abc']
+ assert len(g) == 1
diff --git a/rpython/jit/metainterp/blackhole.py b/rpython/jit/metainterp/blackhole.py
--- a/rpython/jit/metainterp/blackhole.py
+++ b/rpython/jit/metainterp/blackhole.py
@@ -1225,32 +1225,39 @@
@arguments("cpu", "r", "i", "d", "d", returns="i")
def bhimpl_getarrayitem_vable_i(cpu, vable, index, fielddescr, arraydescr):
+ fielddescr.get_vinfo().clear_vable_token(vable)
array = cpu.bh_getfield_gc_r(vable, fielddescr)
return cpu.bh_getarrayitem_gc_i(array, index, arraydescr)
@arguments("cpu", "r", "i", "d", "d", returns="r")
def bhimpl_getarrayitem_vable_r(cpu, vable, index, fielddescr, arraydescr):
+ fielddescr.get_vinfo().clear_vable_token(vable)
array = cpu.bh_getfield_gc_r(vable, fielddescr)
return cpu.bh_getarrayitem_gc_r(array, index, arraydescr)
@arguments("cpu", "r", "i", "d", "d", returns="f")
def bhimpl_getarrayitem_vable_f(cpu, vable, index, fielddescr, arraydescr):
+ fielddescr.get_vinfo().clear_vable_token(vable)
array = cpu.bh_getfield_gc_r(vable, fielddescr)
return cpu.bh_getarrayitem_gc_f(array, index, arraydescr)
@arguments("cpu", "r", "i", "i", "d", "d")
def bhimpl_setarrayitem_vable_i(cpu, vable, index, newval, fdescr, adescr):
+ fdescr.get_vinfo().clear_vable_token(vable)
array = cpu.bh_getfield_gc_r(vable, fdescr)
cpu.bh_setarrayitem_gc_i(array, index, newval, adescr)
@arguments("cpu", "r", "i", "r", "d", "d")
def bhimpl_setarrayitem_vable_r(cpu, vable, index, newval, fdescr, adescr):
+ fdescr.get_vinfo().clear_vable_token(vable)
array = cpu.bh_getfield_gc_r(vable, fdescr)
cpu.bh_setarrayitem_gc_r(array, index, newval, adescr)
@arguments("cpu", "r", "i", "f", "d", "d")
def bhimpl_setarrayitem_vable_f(cpu, vable, index, newval, fdescr, adescr):
+ fdescr.get_vinfo().clear_vable_token(vable)
array = cpu.bh_getfield_gc_r(vable, fdescr)
cpu.bh_setarrayitem_gc_f(array, index, newval, adescr)
@arguments("cpu", "r", "d", "d", returns="i")
def bhimpl_arraylen_vable(cpu, vable, fdescr, adescr):
+ fdescr.get_vinfo().clear_vable_token(vable)
array = cpu.bh_getfield_gc_r(vable, fdescr)
return cpu.bh_arraylen_gc(array, adescr)
@@ -1288,9 +1295,20 @@
bhimpl_getfield_gc_r_pure = bhimpl_getfield_gc_r
bhimpl_getfield_gc_f_pure = bhimpl_getfield_gc_f
- bhimpl_getfield_vable_i = bhimpl_getfield_gc_i
- bhimpl_getfield_vable_r = bhimpl_getfield_gc_r
- bhimpl_getfield_vable_f = bhimpl_getfield_gc_f
+ @arguments("cpu", "r", "d", returns="i")
+ def bhimpl_getfield_vable_i(cpu, struct, fielddescr):
+ fielddescr.get_vinfo().clear_vable_token(struct)
+ return cpu.bh_getfield_gc_i(struct, fielddescr)
+
+ @arguments("cpu", "r", "d", returns="r")
+ def bhimpl_getfield_vable_r(cpu, struct, fielddescr):
+ fielddescr.get_vinfo().clear_vable_token(struct)
+ return cpu.bh_getfield_gc_r(struct, fielddescr)
+
+ @arguments("cpu", "r", "d", returns="f")
+ def bhimpl_getfield_vable_f(cpu, struct, fielddescr):
+ fielddescr.get_vinfo().clear_vable_token(struct)
+ return cpu.bh_getfield_gc_f(struct, fielddescr)
bhimpl_getfield_gc_i_greenfield = bhimpl_getfield_gc_i
bhimpl_getfield_gc_r_greenfield = bhimpl_getfield_gc_r
@@ -1321,9 +1339,18 @@
def bhimpl_setfield_gc_f(cpu, struct, newvalue, fielddescr):
cpu.bh_setfield_gc_f(struct, newvalue, fielddescr)
- bhimpl_setfield_vable_i = bhimpl_setfield_gc_i
- bhimpl_setfield_vable_r = bhimpl_setfield_gc_r
- bhimpl_setfield_vable_f = bhimpl_setfield_gc_f
+ @arguments("cpu", "r", "i", "d")
+ def bhimpl_setfield_vable_i(cpu, struct, newvalue, fielddescr):
+ fielddescr.get_vinfo().clear_vable_token(struct)
+ cpu.bh_setfield_gc_i(struct, newvalue, fielddescr)
+ @arguments("cpu", "r", "r", "d")
+ def bhimpl_setfield_vable_r(cpu, struct, newvalue, fielddescr):
+ fielddescr.get_vinfo().clear_vable_token(struct)
+ cpu.bh_setfield_gc_r(struct, newvalue, fielddescr)
+ @arguments("cpu", "r", "f", "d")
+ def bhimpl_setfield_vable_f(cpu, struct, newvalue, fielddescr):
+ fielddescr.get_vinfo().clear_vable_token(struct)
+ cpu.bh_setfield_gc_f(struct, newvalue, fielddescr)
@arguments("cpu", "i", "i", "d")
def bhimpl_setfield_raw_i(cpu, struct, newvalue, fielddescr):
diff --git a/rpython/jit/metainterp/test/test_virtualizable.py b/rpython/jit/metainterp/test/test_virtualizable.py
--- a/rpython/jit/metainterp/test/test_virtualizable.py
+++ b/rpython/jit/metainterp/test/test_virtualizable.py
@@ -1701,6 +1701,78 @@
res = self.meta_interp(f, [], listops=True)
assert res == 0
+ def test_tracing_sees_nonstandard_vable_twice(self):
+    # This test might fail if we try to remove heapcache.clear_caches()'s
+ # call to reset_keep_likely_virtuals() for CALL_MAY_FORCE, and doing
+ # so, we forget to clean up the "nonstandard_virtualizable" fields.
+
+ class A:
+ _virtualizable_ = ['x']
+ @dont_look_inside
+ def __init__(self, x):
+ self.x = x
+ def check(self, expected_x):
+ if self.x != expected_x:
+ raise ValueError
+
+ driver1 = JitDriver(greens=[], reds=['a'], virtualizables=['a'])
+ driver2 = JitDriver(greens=[], reds=['i'])
+
+ def f(a):
+ while a.x > 0:
+ driver1.jit_merge_point(a=a)
+ a.x -= 1
+
+ def main():
+ i = 10
+ while i > 0:
+ driver2.jit_merge_point(i=i)
+ a = A(10)
+ a.check(10) # first time, 'a' has got no vable_token
+ f(a)
+ a.check(0) # second time, the same 'a' has got one!
+ i -= 1
+ return 42
+
+ res = self.meta_interp(main, [], listops=True)
+ assert res == 42
+
+ def test_blackhole_should_also_force_virtualizables(self):
+ class A:
+ _virtualizable_ = ['x']
+ def __init__(self, x):
+ self.x = x
+
+ driver1 = JitDriver(greens=[], reds=['a'], virtualizables=['a'])
+ driver2 = JitDriver(greens=[], reds=['i'])
+
+ def f(a):
+ while a.x > 0:
+ driver1.jit_merge_point(a=a)
+ a.x -= 1
+
+ def main():
+ i = 10
+ while i > 0:
+ driver2.jit_merge_point(i=i)
+ a = A(10)
+ f(a)
+ # The interesting case is i==2. We're running the rest of
+ # this function in the blackhole interp, because of this:
+ if i == 2:
+ pass
+                # Here, 'a' has got a non-null vable_token because f()
+ # is already completely JITted. But the blackhole interp
+ # ignores it and reads the bogus value currently physically
+ # stored in a.x...
+ if a.x != 0:
+ raise ValueError
+ i -= 1
+ return 42
+
+ res = self.meta_interp(main, [], listops=True, repeat=7)
+ assert res == 42
+
class TestLLtype(ExplicitVirtualizableTests,
ImplicitVirtualizableTests,
More information about the pypy-commit
mailing list