[pypy-commit] pypy stmgc-c8: fixes to dataflow.py and use in gctransform.framework
Raemi
noreply at buildbot.pypy.org
Thu Nov 26 10:26:41 EST 2015
Author: Remi Meier <remi.meier at gmail.com>
Branch: stmgc-c8
Changeset: r80977:fce7bdeba033
Date: 2015-11-26 16:19 +0100
http://bitbucket.org/pypy/pypy/changeset/fce7bdeba033/
Log: fixes to dataflow.py and use in gctransform.framework
diff --git a/rpython/memory/gctransform/framework.py b/rpython/memory/gctransform/framework.py
--- a/rpython/memory/gctransform/framework.py
+++ b/rpython/memory/gctransform/framework.py
@@ -13,6 +13,7 @@
from rpython.memory.gctypelayout import ll_weakref_deref, WEAKREF, WEAKREFPTR
from rpython.tool.sourcetools import func_with_new_name
from rpython.translator.backendopt import graphanalyze
+from rpython.translator.backendopt.dataflow import AbstractForwardDataFlowAnalysis
from rpython.translator.backendopt.finalizer import FinalizerAnalyzer
from rpython.translator.backendopt.support import var_needsgc
import types
@@ -53,7 +54,8 @@
LL_OPERATIONS[op.opname].canmallocgc)
-class WriteBarrierCollector(object):
+
+class WriteBarrierCollector(AbstractForwardDataFlowAnalysis):
"""
This class implements a forward data flow analysis to find all
set/write-operations in the graph that do *not* require a
@@ -65,18 +67,38 @@
self.graph = graph
self.collect_analyzer = collect_analyzer
self.clean_ops = set()
- self._in_states = {} # block->state of each inputarg
- self._out_states = {} # block->writable dict
self.in_stm_ignored = False
- def _merge_out_states(self, out_states):
- assert out_states
- # True: writeable or fresh, otherwise False
- result = [True] * len(out_states[0])
- for outset in out_states:
- for i, state in enumerate(outset):
- result[i] = result[i] and state
- return result
+ def entry_state(self, block):
+ assert block is self.graph.startblock
+ return {} # no variable writeable
+
+ def initialize_block(self, block):
+ # the initial out_state of a block is that
+ # all variables are "writable". This is the
+ # "neutral element" for the join_operation
+ out_state = set()
+ for link in block.exits:
+ for arg in link.args:
+ out_state.add(arg)
+ #
+ # in_state will be newly calculated in forward analysis
+ return out_state
+
+ def join_operation(self, preds_outs, inputargs, pred_out_args):
+ # collect the (renamed) variables / inputargs that are
+ # writeable coming from all predecessors.
+ result = set(inputargs)
+ for i, iarg in enumerate(inputargs):
+ # for input arg, go through all predecessor out_sets
+ # and see if the renamed arg was writable there:
+ for pidx, pouts in enumerate(preds_outs):
+ name_in_pred = pred_out_args[i][pidx]
+ if name_in_pred not in pouts:
+ # was not writable in pred[pidx]
+ result.remove(iarg)
+ break
+ return frozenset(result)
def _set_into_gc_array_part(self, op):
if op.opname == 'setarrayitem':
@@ -87,24 +109,19 @@
return v
return None
- def _flow_through_block(self, block):
- # get all out_state of predecessors and recalculate
- # the in_state of our block (except for startblock)
- insets = self._in_states
- #
- # get input variables and their states:
- assert len(insets[block]) == len(block.inputargs)
- writeable = {}
- for v, state in zip(block.inputargs, insets[block]):
- writeable[v] = state
+ def transfer_function(self, block, in_state):
+ # the set of variables that are writable at the
+ # start of 'block':
+ writeable = set(in_state)
#
# flow through block and inspect operations that
# need a write barrier, and those that create new
# objects
- for i, op in enumerate(block.operations):
+ print writeable, block.operations
+ for op in block.operations:
if self.collect_analyzer.analyze(op): # incl. malloc, obviously
# clear all writeable status
- writeable = {}
+ writeable = set()
#
if op.opname == "stm_ignored_start":
assert not self.in_stm_ignored
@@ -114,7 +131,7 @@
self.in_stm_ignored = False
elif op.opname == "gc_writebarrier":
assert not self.in_stm_ignored
- writeable[op.args[0]] = True
+ writeable.add(op.args[0])
elif op.opname == "malloc":
rtti = get_rtti(op.args[0].value)
if rtti is not None and hasattr(rtti._obj, 'destructor_funcptr'):
@@ -123,10 +140,11 @@
assert op not in self.clean_ops
else:
# freshly allocated object
- writeable[op.result] = True
+ writeable.add(op.result)
#
elif op.opname in ("cast_pointer", "same_as"):
- writeable[op.result] = writeable.get(op.args[0], False)
+ if op.args[0] in writeable:
+ writeable.add(op.result)
#
elif op.opname in ('setfield', 'setarrayitem', 'setinteriorfield', 'raw_store'):
# generic_set case
@@ -148,7 +166,7 @@
self.clean_ops.add(op)
else:
# we need a (partial) write barrier if arg0 is not writeable
- if writeable.get(op.args[0], False):
+ if op.args[0] in writeable:
self.clean_ops.add(op)
elif op in self.clean_ops:
self.clean_ops.remove(op)
@@ -156,42 +174,16 @@
if self._set_into_gc_array_part(op) is None:
# this will do a full write barrier, not card marking
# arg0 is always writeable afterwards
- writeable[op.args[0]] = True
+ writeable.add(op.args[0])
#
- # update in_states of all successors
- to_do = set()
- for link in block.exits:
- succ = link.target
- outset = [writeable.get(v, False) for v in link.args]
- if succ in insets:
- old = insets[succ]
- new = self._merge_out_states([old, outset])
- if new != old:
- to_do.add(succ)
- insets[succ] = new
- else:
- # block not processed yet
- insets[succ] = outset
- to_do.add(succ)
- return to_do
+ return frozenset(writeable)
def collect(self):
assert not self.clean_ops
assert not self.in_stm_ignored
- # we implement a forward-flow analysis that determines the
- # operations that do NOT need a write barrier
- graph = self.graph
- #
- # initialize blocks
- self._in_states = {graph.startblock: [False] * len(graph.startblock.inputargs)}
- #
- # fixpoint iteration
- # XXX: reverse postorder traversal
- pending = {graph.startblock,}
- while pending:
- block = pending.pop()
- pending |= self._flow_through_block(block)
+ self.calculate(self.graph)
+
diff --git a/rpython/translator/backendopt/dataflow.py b/rpython/translator/backendopt/dataflow.py
--- a/rpython/translator/backendopt/dataflow.py
+++ b/rpython/translator/backendopt/dataflow.py
@@ -1,5 +1,5 @@
from rpython.flowspace.model import mkentrymap
-
+import collections
class AbstractDataFlowAnalysis(object):
@@ -17,16 +17,16 @@
raise NotImplementedError("abstract base class")
def initialize_block(self, block):
- """Return the (default) in_state, out_state for 'block'
- used to initialize all blocks before starting the analysis."""
+ """Return the (default) out_state for 'block' used to
+ initialize all blocks before starting the analysis."""
raise NotImplementedError("abstract base class")
def join_operation(self, preds_outs, inputargs, pred_out_args):
"""Joins all preds_outs to generate a new in_state for the
current block.
- 'inputargs' is the list of input arguments to the block;
+ 'inputargs' is the list of input arguments to the block
'pred_out_args' is a list of lists of arguments given to
- the link by a certain predecessor.
+ the link by a certain predecessor - one list per inputarg.
"""
raise NotImplementedError("abstract base class")
@@ -59,13 +59,15 @@
pred = link.prevblock
if pred is None:
# block == startblock
+ in_states[block] = self.entry_state(block)
return True
preds_outs.append(out_states[pred])
for i in range(len(inputargs)):
preds_out_args[i].append(link.args[i])
# join predecessor out_states for updated in_state:
block_in = self.join_operation(preds_outs, inputargs, preds_out_args)
- if block_in != in_states[block]:
+ #
+ if block not in in_states or block_in != in_states[block]:
# in_state changed
in_states[block] = block_in
return True
@@ -79,18 +81,24 @@
in_states = {}
out_states = {}
#
- for block in graph.iterblocks():
- in_states[block], out_states[block] = self.initialize_block(block)
- in_states[graph.startblock] = self.entry_state(graph.startblock)
+ blocks = set(graph.iterblocks())
+ for block in blocks:
+ out_states[block] = self.initialize_block(block)
+
#
# iterate:
- pending = {graph.startblock,}
+ # todo: ordered set? reverse post-order?
+ blocks.remove(graph.startblock)
+ pending = collections.deque(blocks)
+ pending.append(graph.startblock)
while pending:
block = pending.pop()
if self._update_in_state_of(block, entrymap, in_states, out_states):
block_out = self.transfer_function(block, in_states[block])
if block_out != out_states[block]:
out_states[block] = block_out
- pending |= {link.target for link in block.exits}
+ for link in block.exits:
+ if link.target not in pending:
+ pending.appendleft(link.target)
#
return in_states, out_states
diff --git a/rpython/translator/backendopt/test/test_dataflow.py b/rpython/translator/backendopt/test/test_dataflow.py
--- a/rpython/translator/backendopt/test/test_dataflow.py
+++ b/rpython/translator/backendopt/test/test_dataflow.py
@@ -2,7 +2,7 @@
from rpython.tool.algo.unionfind import UnionFind
from rpython.translator.backendopt.dataflow import AbstractForwardDataFlowAnalysis
-
+import pytest
from rpython.translator.translator import TranslationContext, graphof
@@ -19,13 +19,31 @@
return True
def initialize_block(self, block):
- return False, False
+ return False
def join_operation(self, preds_outs, inputargs, pred_out_args):
return any(preds_outs)
+class NotSimpleForwardAnalysis(AbstractForwardDataFlowAnalysis):
+ def __init__(self):
+ self.seen = set()
-def test_simple_forward_flow():
+ def transfer_function(self, block, in_state):
+ self.seen.add(block)
+ return in_state
+
+ def entry_state(self, block):
+ return False
+
+ def initialize_block(self, block):
+ return True
+
+ def join_operation(self, preds_outs, inputargs, pred_out_args):
+ return all(preds_outs)
+
+
+@pytest.mark.parametrize("flow", [SimpleForwardAnalysis, NotSimpleForwardAnalysis])
+def test_simple_forward_flow(flow):
def f(x):
if x < 0:
if x == -1:
@@ -39,13 +57,14 @@
return x-2
t = TranslationContext()
g = t.buildflowgraph(f)
- sfa = SimpleForwardAnalysis()
+ sfa = flow()
ins, outs = sfa.calculate(g)
assert len(sfa.seen) == 8
assert ins[g.startblock] == sfa.entry_state(None)
assert outs[g.returnblock] == sfa.entry_state(None)
-def test_loopy_forward_flow():
+@pytest.mark.parametrize("flow", [SimpleForwardAnalysis, NotSimpleForwardAnalysis])
+def test_loopy_forward_flow(flow):
def f(x):
if x < 0:
while x:
@@ -57,7 +76,7 @@
return x
t = TranslationContext()
g = t.buildflowgraph(f)
- sfa = SimpleForwardAnalysis()
+ sfa = flow()
ins, outs = sfa.calculate(g)
assert len(sfa.seen) == 5
assert ins[g.startblock] == sfa.entry_state(None)
More information about the pypy-commit
mailing list