[pypy-svn] r70389 - in pypy/branch/virtual-forcing/pypy/jit/metainterp: . test
arigo at codespeak.net
arigo at codespeak.net
Sun Jan 3 17:47:15 CET 2010
Author: arigo
Date: Sun Jan 3 17:47:14 2010
New Revision: 70389
Modified:
pypy/branch/virtual-forcing/pypy/jit/metainterp/codewriter.py
pypy/branch/virtual-forcing/pypy/jit/metainterp/compile.py
pypy/branch/virtual-forcing/pypy/jit/metainterp/optimizeopt.py
pypy/branch/virtual-forcing/pypy/jit/metainterp/pyjitpl.py
pypy/branch/virtual-forcing/pypy/jit/metainterp/test/test_basic.py
pypy/branch/virtual-forcing/pypy/jit/metainterp/test/test_virtualref.py
pypy/branch/virtual-forcing/pypy/jit/metainterp/virtualref.py
pypy/branch/virtual-forcing/pypy/jit/metainterp/warmspot.py
Log:
Rewrite virtualref.py to have everything in a class instead
of globally, like virtualizable.py. Fixes an issue with
running many tests in the same process: the JIT_VIRTUAL_REF
was a global GcStruct that was transformed by ll2ctypes and
later that was passed to the C backend.
Modified: pypy/branch/virtual-forcing/pypy/jit/metainterp/codewriter.py
==============================================================================
--- pypy/branch/virtual-forcing/pypy/jit/metainterp/codewriter.py (original)
+++ pypy/branch/virtual-forcing/pypy/jit/metainterp/codewriter.py Sun Jan 3 17:47:14 2010
@@ -1308,10 +1308,9 @@
self.emit(self.var_position(args[0]))
self.register_var(op.result)
#
- from pypy.jit.metainterp.virtualref import jit_virtual_ref_vtable
- from pypy.jit.metainterp.virtualref import JIT_VIRTUAL_REF
- self.codewriter.register_known_gctype(jit_virtual_ref_vtable,
- JIT_VIRTUAL_REF)
+ vrefinfo = self.codewriter.metainterp_sd.virtualref_info
+ self.codewriter.register_known_gctype(vrefinfo.jit_virtual_ref_vtable,
+ vrefinfo.JIT_VIRTUAL_REF)
def _array_of_voids(self, ARRAY):
if isinstance(ARRAY, ootype.Array):
Modified: pypy/branch/virtual-forcing/pypy/jit/metainterp/compile.py
==============================================================================
--- pypy/branch/virtual-forcing/pypy/jit/metainterp/compile.py (original)
+++ pypy/branch/virtual-forcing/pypy/jit/metainterp/compile.py Sun Jan 3 17:47:14 2010
@@ -256,7 +256,6 @@
def handle_async_forcing(self, force_token):
from pypy.jit.metainterp.pyjitpl import MetaInterp
from pypy.jit.metainterp.resume import force_from_resumedata
- from pypy.jit.metainterp.virtualref import forced_single_vref
# To handle the forcing itself, we create a temporary MetaInterp
# as a convenience to move the various data to its proper place.
metainterp_sd = self.metainterp_sd
@@ -270,10 +269,12 @@
virtualizable_boxes, virtualref_boxes, all_virtuals = forced_data
#
# Handle virtualref_boxes: mark each JIT_VIRTUAL_REF as forced
+ vrefinfo = metainterp_sd.virtualref_info
for i in range(0, len(virtualref_boxes), 2):
virtualbox = virtualref_boxes[i]
vrefbox = virtualref_boxes[i+1]
- forced_single_vref(vrefbox.getref_base(), virtualbox.getref_base())
+ vrefinfo.forced_single_vref(vrefbox.getref_base(),
+ virtualbox.getref_base())
# Handle virtualizable_boxes: store them on the real virtualizable now
if expect_virtualizable:
metainterp_sd.virtualizable_info.forced_vable(virtualizable_boxes)
Modified: pypy/branch/virtual-forcing/pypy/jit/metainterp/optimizeopt.py
==============================================================================
--- pypy/branch/virtual-forcing/pypy/jit/metainterp/optimizeopt.py (original)
+++ pypy/branch/virtual-forcing/pypy/jit/metainterp/optimizeopt.py Sun Jan 3 17:47:14 2010
@@ -740,12 +740,11 @@
def optimize_VIRTUAL_REF(self, op):
indexbox = op.args[1]
#
- # get some constants (these calls are all 'memo')
- from pypy.jit.metainterp import virtualref
- cpu = self.cpu
- c_cls = virtualref.get_jit_virtual_ref_const_class(cpu)
- descr_virtual_token = virtualref.get_descr_virtual_token(cpu)
- descr_virtualref_index = virtualref.get_descr_virtualref_index(cpu)
+ # get some constants
+ vrefinfo = self.metainterp_sd.virtualref_info
+ c_cls = vrefinfo.jit_virtual_ref_const_class
+ descr_virtual_token = vrefinfo.descr_virtual_token
+ descr_virtualref_index = vrefinfo.descr_virtualref_index
#
# Replace the VIRTUAL_REF operation with a virtual structure of type
# 'jit_virtual_ref'. The jit_virtual_ref structure may be forced soon,
@@ -764,15 +763,15 @@
# opposed to much earlier. This is important because the object is
# typically a PyPy PyFrame, and now is the end of its execution, so
# forcing it now does not have catastrophic effects.
- from pypy.jit.metainterp import virtualref
+ vrefinfo = self.metainterp_sd.virtualref_info
# - set 'forced' to point to the real object
op1 = ResOperation(rop.SETFIELD_GC, op.args, None,
- descr = virtualref.get_descr_forced(self.cpu))
+ descr = vrefinfo.descr_forced)
self.optimize_SETFIELD_GC(op1)
# - set 'virtual_token' to TOKEN_NONE
args = [op.args[0], ConstInt(0)]
op1 = ResOperation(rop.SETFIELD_GC, args, None,
- descr = virtualref.get_descr_virtual_token(self.cpu))
+ descr = vrefinfo.descr_virtual_token)
self.optimize_SETFIELD_GC(op1)
# Note that in some cases the virtual in op.args[1] has been forced
# already. This is fine. In that case, and *if* a residual
Modified: pypy/branch/virtual-forcing/pypy/jit/metainterp/pyjitpl.py
==============================================================================
--- pypy/branch/virtual-forcing/pypy/jit/metainterp/pyjitpl.py (original)
+++ pypy/branch/virtual-forcing/pypy/jit/metainterp/pyjitpl.py Sun Jan 3 17:47:14 2010
@@ -5,7 +5,7 @@
from pypy.rlib.unroll import unrolling_iterable
from pypy.rlib.debug import debug_start, debug_stop, debug_print
-from pypy.jit.metainterp import history, compile, resume, virtualref
+from pypy.jit.metainterp import history, compile, resume
from pypy.jit.metainterp.history import Const, ConstInt, Box
from pypy.jit.metainterp.resoperation import rop
from pypy.jit.metainterp import codewriter, executor
@@ -903,8 +903,9 @@
if metainterp.is_blackholing():
resbox = box # good enough when blackholing
else:
+ vrefinfo = metainterp.staticdata.virtualref_info
obj = box.getref_base()
- vref = virtualref.virtual_ref_during_tracing(obj)
+ vref = vrefinfo.virtual_ref_during_tracing(obj)
resbox = history.BoxPtr(vref)
cindex = history.ConstInt(len(metainterp.virtualref_boxes) // 2)
metainterp.history.record(rop.VIRTUAL_REF, [box, cindex], resbox)
@@ -928,8 +929,9 @@
lastbox = metainterp.virtualref_boxes.pop()
assert box.getref_base() == lastbox.getref_base()
if not metainterp.is_blackholing():
+ vrefinfo = metainterp.staticdata.virtualref_info
vref = vrefbox.getref_base()
- if virtualref.is_virtual_ref(vref):
+ if vrefinfo.is_virtual_ref(vref):
metainterp.history.record(rop.VIRTUAL_REF_FINISH,
[vrefbox, lastbox], None)
@@ -1775,10 +1777,11 @@
if self.is_blackholing():
return
#
+ vrefinfo = self.staticdata.virtualref_info
for i in range(1, len(self.virtualref_boxes), 2):
vrefbox = self.virtualref_boxes[i]
vref = vrefbox.getref_base()
- virtualref.tracing_before_residual_call(vref)
+ vrefinfo.tracing_before_residual_call(vref)
# the FORCE_TOKEN is already set at runtime in each vref when
# it is created, by optimizeopt.py.
#
@@ -1800,11 +1803,12 @@
else:
escapes = False
#
+ vrefinfo = self.staticdata.virtualref_info
for i in range(0, len(self.virtualref_boxes), 2):
virtualbox = self.virtualref_boxes[i]
vrefbox = self.virtualref_boxes[i+1]
vref = vrefbox.getref_base()
- if virtualref.tracing_after_residual_call(vref):
+ if vrefinfo.tracing_after_residual_call(vref):
# this vref was really a virtual_ref, but it escaped
# during this CALL_MAY_FORCE. Mark this fact by
# generating a VIRTUAL_REF_FINISH on it and replacing
@@ -1874,11 +1878,12 @@
#
# virtual refs: make the vrefs point to the freshly allocated virtuals
self.virtualref_boxes = virtualref_boxes
+ vrefinfo = self.staticdata.virtualref_info
for i in range(0, len(virtualref_boxes), 2):
virtualbox = virtualref_boxes[i]
vrefbox = virtualref_boxes[i+1]
- virtualref.continue_tracing(vrefbox.getref_base(),
- virtualbox.getref_base())
+ vrefinfo.continue_tracing(vrefbox.getref_base(),
+ virtualbox.getref_base())
#
# virtualizable: synchronize the real virtualizable and the local
# boxes, in whichever direction is appropriate
Modified: pypy/branch/virtual-forcing/pypy/jit/metainterp/test/test_basic.py
==============================================================================
--- pypy/branch/virtual-forcing/pypy/jit/metainterp/test/test_basic.py (original)
+++ pypy/branch/virtual-forcing/pypy/jit/metainterp/test/test_basic.py Sun Jan 3 17:47:14 2010
@@ -86,6 +86,9 @@
metainterp, rtyper = _get_bare_metainterp(f, args, self.CPUClass,
self.type_system,
**kwds)
+ metainterp.staticdata.state = FakeWarmRunnerState()
+ metainterp.staticdata.state.cpu = metainterp.staticdata.cpu
+ self.finish_metainterp_for_interp_operations(metainterp)
portal_graph = rtyper.annotator.translator.graphs[0]
cw = codewriter.CodeWriter(rtyper)
@@ -97,7 +100,6 @@
cw.finish_making_bytecodes()
metainterp.staticdata.portal_code = maingraph
metainterp.staticdata._class_sizes = cw.class_sizes
- metainterp.staticdata.state = FakeWarmRunnerState()
metainterp.staticdata.DoneWithThisFrameInt = DoneWithThisFrame
metainterp.staticdata.DoneWithThisFrameRef = DoneWithThisFrameRef
metainterp.staticdata.DoneWithThisFrameFloat = DoneWithThisFrame
@@ -111,6 +113,9 @@
else:
raise Exception("FAILED")
+ def finish_metainterp_for_interp_operations(self, metainterp):
+ pass
+
def check_history(self, expected=None, **isns):
# this can be used after calling meta_interp
get_stats().check_history(expected, **isns)
Modified: pypy/branch/virtual-forcing/pypy/jit/metainterp/test/test_virtualref.py
==============================================================================
--- pypy/branch/virtual-forcing/pypy/jit/metainterp/test/test_virtualref.py (original)
+++ pypy/branch/virtual-forcing/pypy/jit/metainterp/test/test_virtualref.py Sun Jan 3 17:47:14 2010
@@ -5,13 +5,17 @@
from pypy.rlib.objectmodel import compute_unique_id
from pypy.jit.metainterp.test.test_basic import LLJitMixin, OOJitMixin
from pypy.jit.metainterp.resoperation import rop
-from pypy.jit.metainterp.virtualref import JIT_VIRTUAL_REF
+from pypy.jit.metainterp.virtualref import VirtualRefInfo
debug_print = lloperation.llop.debug_print
class VRefTests:
+ def finish_metainterp_for_interp_operations(self, metainterp):
+ self.vrefinfo = VirtualRefInfo(metainterp.staticdata.state)
+ metainterp.staticdata.virtualref_info = self.vrefinfo
+
def test_make_vref_simple(self):
class X:
pass
@@ -73,6 +77,7 @@
bxs2 = [box for box in guard_op.fail_args
if str(box._getrepr_()).endswith('JitVirtualRef')]
assert len(bxs2) == 1
+ JIT_VIRTUAL_REF = self.vrefinfo.JIT_VIRTUAL_REF
bxs2[0].getref(lltype.Ptr(JIT_VIRTUAL_REF)).virtual_token = 1234567
#
self.metainterp.rebuild_state_after_failure(guard_op.descr,
Modified: pypy/branch/virtual-forcing/pypy/jit/metainterp/virtualref.py
==============================================================================
--- pypy/branch/virtual-forcing/pypy/jit/metainterp/virtualref.py (original)
+++ pypy/branch/virtual-forcing/pypy/jit/metainterp/virtualref.py Sun Jan 3 17:47:14 2010
@@ -1,157 +1,155 @@
from pypy.rpython.rmodel import inputconst, log
from pypy.rpython.lltypesystem import lltype, llmemory, rffi, rclass
-from pypy.rlib.objectmodel import specialize
from pypy.jit.metainterp import history
-def replace_force_virtual_with_call(warmrunnerdesc, graphs):
- # similar to rvirtualizable2.replace_force_virtualizable_with_call().
- c_funcptr = None
- count = 0
- for graph in graphs:
- for block in graph.iterblocks():
- for op in block.operations:
- if op.opname == 'jit_force_virtual':
- # first compute c_funcptr, but only if there is any
- # 'jit_force_virtual' around
- if c_funcptr is None:
- c_funcptr = get_force_virtual_fnptr(warmrunnerdesc)
- #
- op.opname = 'direct_call'
- op.args = [c_funcptr, op.args[0]]
- count += 1
- if c_funcptr is not None:
- log("replaced %d 'jit_force_virtual' with %r" % (count,
- c_funcptr.value))
- #
- # record the type JIT_VIRTUAL_REF explicitly in the rtyper, too
- warmrunnerdesc.rtyper.set_type_for_typeptr(jit_virtual_ref_vtable,
- JIT_VIRTUAL_REF)
-
-# ____________________________________________________________
-
-
-# we make the low-level type of an RPython class directly
-JIT_VIRTUAL_REF = lltype.GcStruct('JitVirtualRef',
- ('super', rclass.OBJECT),
- ('virtual_token', lltype.Signed),
- ('virtualref_index', lltype.Signed),
- ('forced', rclass.OBJECTPTR))
-jit_virtual_ref_vtable = lltype.malloc(rclass.OBJECT_VTABLE, zero=True,
- flavor='raw')
-jit_virtual_ref_vtable.name = rclass.alloc_array_name('jit_virtual_ref')
-
-# The 'virtual_token' field has the same meaning as the 'vable_token' field
-# of a virtualizable. It is equal to:
-# * 0 (TOKEN_NONE) when tracing, except as described below;
-# * -1 (TOKEN_TRACING_RESCALL) during tracing when we do a residual call;
-# * addr in the CPU stack (set by FORCE_TOKEN) when running the assembler;
-# * 0 (TOKEN_NONE) after the virtual is forced, if it is forced at all.
-TOKEN_NONE = 0
-TOKEN_TRACING_RESCALL = -1
-
-@specialize.memo()
-def get_jit_virtual_ref_const_class(cpu):
- adr = llmemory.cast_ptr_to_adr(jit_virtual_ref_vtable)
- return history.ConstAddr(adr, cpu)
-
-@specialize.memo()
-def get_descr_virtual_token(cpu):
- return cpu.fielddescrof(JIT_VIRTUAL_REF, 'virtual_token')
-
-@specialize.memo()
-def get_descr_virtualref_index(cpu):
- return cpu.fielddescrof(JIT_VIRTUAL_REF, 'virtualref_index')
-
-@specialize.memo()
-def get_descr_forced(cpu):
- return cpu.fielddescrof(JIT_VIRTUAL_REF, 'forced')
-
-def virtual_ref_during_tracing(real_object):
- assert real_object
- vref = lltype.malloc(JIT_VIRTUAL_REF)
- p = lltype.cast_pointer(rclass.OBJECTPTR, vref)
- p.typeptr = jit_virtual_ref_vtable
- vref.virtual_token = TOKEN_NONE
- vref.forced = lltype.cast_opaque_ptr(rclass.OBJECTPTR, real_object)
- return lltype.cast_opaque_ptr(llmemory.GCREF, vref)
-
-def is_virtual_ref(gcref):
- if not gcref:
- return False
- inst = lltype.cast_opaque_ptr(rclass.OBJECTPTR, gcref)
- return inst.typeptr == jit_virtual_ref_vtable
-
-def tracing_before_residual_call(gcref):
- if not is_virtual_ref(gcref):
- return
- vref = lltype.cast_opaque_ptr(lltype.Ptr(JIT_VIRTUAL_REF), gcref)
- assert not vref.virtual_token
- vref.virtual_token = TOKEN_TRACING_RESCALL
-
-def tracing_after_residual_call(gcref):
- if not is_virtual_ref(gcref):
- return False
- vref = lltype.cast_opaque_ptr(lltype.Ptr(JIT_VIRTUAL_REF), gcref)
- if vref.virtual_token:
- # not modified by the residual call; assert that it is still
- # set to TOKEN_TRACING_RESCALL and clear it.
- assert vref.virtual_token == TOKEN_TRACING_RESCALL
- vref.virtual_token = TOKEN_NONE
- return False
- else:
- # marker "modified during residual call" set.
+class VirtualRefInfo:
+
+ def __init__(self, warmrunnerdesc):
+ self.warmrunnerdesc = warmrunnerdesc
+ self.cpu = warmrunnerdesc.cpu
+ # we make the low-level type of an RPython class directly
+ self.JIT_VIRTUAL_REF = lltype.GcStruct('JitVirtualRef',
+ ('super', rclass.OBJECT),
+ ('virtual_token', lltype.Signed),
+ ('virtualref_index', lltype.Signed),
+ ('forced', rclass.OBJECTPTR))
+ self.jit_virtual_ref_vtable = lltype.malloc(rclass.OBJECT_VTABLE,
+ zero=True, flavor='raw')
+ self.jit_virtual_ref_vtable.name = rclass.alloc_array_name(
+ 'jit_virtual_ref')
+ # build some constants
+ adr = llmemory.cast_ptr_to_adr(self.jit_virtual_ref_vtable)
+ self.jit_virtual_ref_const_class = history.ConstAddr(adr, self.cpu)
+ fielddescrof = self.cpu.fielddescrof
+ self.descr_virtual_token = fielddescrof(self.JIT_VIRTUAL_REF,
+ 'virtual_token')
+ self.descr_virtualref_index = fielddescrof(self.JIT_VIRTUAL_REF,
+ 'virtualref_index')
+ self.descr_forced = fielddescrof(self.JIT_VIRTUAL_REF, 'forced')
+
+ def _freeze_(self):
return True
-def forced_single_vref(gcref, real_object):
- if not is_virtual_ref(gcref):
- return
- assert real_object
- vref = lltype.cast_opaque_ptr(lltype.Ptr(JIT_VIRTUAL_REF), gcref)
- assert (vref.virtual_token != TOKEN_NONE and
- vref.virtual_token != TOKEN_TRACING_RESCALL)
- vref.virtual_token = TOKEN_NONE
- vref.forced = lltype.cast_opaque_ptr(rclass.OBJECTPTR, real_object)
-
-def continue_tracing(gcref, real_object):
- if not is_virtual_ref(gcref):
- return
- vref = lltype.cast_opaque_ptr(lltype.Ptr(JIT_VIRTUAL_REF), gcref)
- assert vref.virtual_token != TOKEN_TRACING_RESCALL
- vref.virtual_token = TOKEN_NONE
- vref.forced = lltype.cast_opaque_ptr(rclass.OBJECTPTR, real_object)
-
-# ____________________________________________________________
-
-def get_force_virtual_fnptr(warmrunnerdesc):
- cpu = warmrunnerdesc.cpu
- #
- def force_virtual_if_necessary(inst):
- if not inst or inst.typeptr != jit_virtual_ref_vtable:
- return inst # common, fast case
- return force_virtual(cpu, inst)
- #
- FUNC = lltype.FuncType([rclass.OBJECTPTR], rclass.OBJECTPTR)
- funcptr = warmrunnerdesc.helper_func(
- lltype.Ptr(FUNC),
- force_virtual_if_necessary)
- return inputconst(lltype.typeOf(funcptr), funcptr)
-
-def force_virtual(cpu, inst):
- vref = lltype.cast_pointer(lltype.Ptr(JIT_VIRTUAL_REF), inst)
- token = vref.virtual_token
- if token != TOKEN_NONE:
- if token == TOKEN_TRACING_RESCALL:
- # The "virtual" is not a virtual at all during tracing.
- # We only need to reset virtual_token to TOKEN_NONE
- # as a marker for the tracing, to tell it that this
- # "virtual" escapes.
- vref.virtual_token = TOKEN_NONE
+ def replace_force_virtual_with_call(self, graphs):
+ # similar to rvirtualizable2.replace_force_virtualizable_with_call().
+ c_funcptr = None
+ count = 0
+ for graph in graphs:
+ for block in graph.iterblocks():
+ for op in block.operations:
+ if op.opname == 'jit_force_virtual':
+ # first compute c_funcptr, but only if there is any
+ # 'jit_force_virtual' around
+ if c_funcptr is None:
+ c_funcptr = self.get_force_virtual_fnptr()
+ #
+ op.opname = 'direct_call'
+ op.args = [c_funcptr, op.args[0]]
+ count += 1
+ if c_funcptr is not None:
+ log("replaced %d 'jit_force_virtual' with %r" % (count,
+ c_funcptr.value))
+ #
+ # record the type JIT_VIRTUAL_REF explicitly in the rtyper, too
+ self.warmrunnerdesc.rtyper.set_type_for_typeptr(
+ self.jit_virtual_ref_vtable, self.JIT_VIRTUAL_REF)
+
+ # ____________________________________________________________
+
+ # The 'virtual_token' field has the same meaning as the 'vable_token' field
+ # of a virtualizable. It is equal to:
+ # * 0 (TOKEN_NONE) when tracing, except as described below;
+ # * -1 (TOKEN_TRACING_RESCALL) during tracing when we do a residual call;
+ # * addr in the CPU stack (set by FORCE_TOKEN) when running the assembler;
+ # * 0 (TOKEN_NONE) after the virtual is forced, if it is forced at all.
+ TOKEN_NONE = 0
+ TOKEN_TRACING_RESCALL = -1
+
+ def virtual_ref_during_tracing(self, real_object):
+ assert real_object
+ vref = lltype.malloc(self.JIT_VIRTUAL_REF)
+ p = lltype.cast_pointer(rclass.OBJECTPTR, vref)
+ p.typeptr = self.jit_virtual_ref_vtable
+ vref.virtual_token = self.TOKEN_NONE
+ vref.forced = lltype.cast_opaque_ptr(rclass.OBJECTPTR, real_object)
+ return lltype.cast_opaque_ptr(llmemory.GCREF, vref)
+
+ def is_virtual_ref(self, gcref):
+ if not gcref:
+ return False
+ inst = lltype.cast_opaque_ptr(rclass.OBJECTPTR, gcref)
+ return inst.typeptr == self.jit_virtual_ref_vtable
+
+ def tracing_before_residual_call(self, gcref):
+ if not self.is_virtual_ref(gcref):
+ return
+ vref = lltype.cast_opaque_ptr(lltype.Ptr(self.JIT_VIRTUAL_REF), gcref)
+ assert not vref.virtual_token
+ vref.virtual_token = self.TOKEN_TRACING_RESCALL
+
+ def tracing_after_residual_call(self, gcref):
+ if not self.is_virtual_ref(gcref):
+ return False
+ vref = lltype.cast_opaque_ptr(lltype.Ptr(self.JIT_VIRTUAL_REF), gcref)
+ if vref.virtual_token:
+ # not modified by the residual call; assert that it is still
+ # set to TOKEN_TRACING_RESCALL and clear it.
+ assert vref.virtual_token == self.TOKEN_TRACING_RESCALL
+ vref.virtual_token = self.TOKEN_NONE
+ return False
else:
- assert not vref.forced
- from pypy.jit.metainterp.compile import ResumeGuardForcedDescr
- ResumeGuardForcedDescr.force_now(cpu, token)
- assert vref.virtual_token == TOKEN_NONE
- assert vref.forced
- return vref.forced
-force_virtual._dont_inline_ = True
+ # marker "modified during residual call" set.
+ return True
+
+ def forced_single_vref(self, gcref, real_object):
+ if not self.is_virtual_ref(gcref):
+ return
+ assert real_object
+ vref = lltype.cast_opaque_ptr(lltype.Ptr(self.JIT_VIRTUAL_REF), gcref)
+ assert (vref.virtual_token != self.TOKEN_NONE and
+ vref.virtual_token != self.TOKEN_TRACING_RESCALL)
+ vref.virtual_token = self.TOKEN_NONE
+ vref.forced = lltype.cast_opaque_ptr(rclass.OBJECTPTR, real_object)
+
+ def continue_tracing(self, gcref, real_object):
+ if not self.is_virtual_ref(gcref):
+ return
+ vref = lltype.cast_opaque_ptr(lltype.Ptr(self.JIT_VIRTUAL_REF), gcref)
+ assert vref.virtual_token != self.TOKEN_TRACING_RESCALL
+ vref.virtual_token = self.TOKEN_NONE
+ vref.forced = lltype.cast_opaque_ptr(rclass.OBJECTPTR, real_object)
+
+ # ____________________________________________________________
+
+ def get_force_virtual_fnptr(self):
+ #
+ def force_virtual_if_necessary(inst):
+ if not inst or inst.typeptr != self.jit_virtual_ref_vtable:
+ return inst # common, fast case
+ return self.force_virtual(inst)
+ #
+ FUNC = lltype.FuncType([rclass.OBJECTPTR], rclass.OBJECTPTR)
+ funcptr = self.warmrunnerdesc.helper_func(
+ lltype.Ptr(FUNC),
+ force_virtual_if_necessary)
+ return inputconst(lltype.typeOf(funcptr), funcptr)
+
+ def force_virtual(self, inst):
+ vref = lltype.cast_pointer(lltype.Ptr(self.JIT_VIRTUAL_REF), inst)
+ token = vref.virtual_token
+ if token != self.TOKEN_NONE:
+ if token == self.TOKEN_TRACING_RESCALL:
+ # The "virtual" is not a virtual at all during tracing.
+ # We only need to reset virtual_token to TOKEN_NONE
+ # as a marker for the tracing, to tell it that this
+ # "virtual" escapes.
+ vref.virtual_token = self.TOKEN_NONE
+ else:
+ assert not vref.forced
+ from pypy.jit.metainterp.compile import ResumeGuardForcedDescr
+ ResumeGuardForcedDescr.force_now(self.cpu, token)
+ assert vref.virtual_token == self.TOKEN_NONE
+ assert vref.forced
+ return vref.forced
+ force_virtual._dont_inline_ = True
Modified: pypy/branch/virtual-forcing/pypy/jit/metainterp/warmspot.py
==============================================================================
--- pypy/branch/virtual-forcing/pypy/jit/metainterp/warmspot.py (original)
+++ pypy/branch/virtual-forcing/pypy/jit/metainterp/warmspot.py Sun Jan 3 17:47:14 2010
@@ -17,7 +17,7 @@
from pypy.translator.unsimplify import call_final_function
from pypy.jit.metainterp import codewriter
-from pypy.jit.metainterp import support, history, pyjitpl, gc, virtualref
+from pypy.jit.metainterp import support, history, pyjitpl, gc
from pypy.jit.metainterp.pyjitpl import MetaInterpStaticData, MetaInterp
from pypy.jit.metainterp.policy import JitPolicy
from pypy.jit.metainterp.typesystem import LLTypeHelper, OOTypeHelper
@@ -162,9 +162,13 @@
self.build_meta_interp(CPUClass, **kwds)
self.make_args_specification()
+ #
+ from pypy.jit.metainterp.virtualref import VirtualRefInfo
+ self.metainterp_sd.virtualref_info = VirtualRefInfo(self)
if self.jitdriver.virtualizables:
from pypy.jit.metainterp.virtualizable import VirtualizableInfo
self.metainterp_sd.virtualizable_info = VirtualizableInfo(self)
+ #
self.make_exception_classes()
self.make_driverhook_graphs()
self.make_enter_function()
@@ -604,7 +608,8 @@
if self.cpu.ts.name != 'lltype':
py.test.skip("rewrite_force_virtual: port it to ootype")
all_graphs = self.translator.graphs
- virtualref.replace_force_virtual_with_call(self, all_graphs)
+ vrefinfo = self.metainterp_sd.virtualref_info
+ vrefinfo.replace_force_virtual_with_call(all_graphs)
def decode_hp_hint_args(op):
More information about the Pypy-commit
mailing list