[pypy-commit] pypy jit-leaner-frontend: invoke optimize_vector instead of _do_optimize_loop; the additional optimizations are not necessary

plan_rich pypy.commits at gmail.com
Fri Mar 25 07:12:08 EDT 2016


Author: Richard Plangger <planrichi at gmail.com>
Branch: jit-leaner-frontend
Changeset: r83344:e5c868861aa2
Date: 2016-03-25 12:09 +0100
http://bitbucket.org/pypy/pypy/changeset/e5c868861aa2/

Log:	invoke optimize_vector instead of _do_optimize_loop; the
	additional optimizations are not necessary

diff --git a/rpython/jit/metainterp/optimizeopt/test/test_vecopt.py b/rpython/jit/metainterp/optimizeopt/test/test_vecopt.py
--- a/rpython/jit/metainterp/optimizeopt/test/test_vecopt.py
+++ b/rpython/jit/metainterp/optimizeopt/test/test_vecopt.py
@@ -14,9 +14,10 @@
 from rpython.jit.metainterp.optimizeopt.vector import (VectorizingOptimizer,
         MemoryRef, isomorphic, Pair, NotAVectorizeableLoop, VectorLoop,
         NotAProfitableLoop, GuardStrengthenOpt, CostModel, X86_CostModel,
-        PackSet)
+        PackSet, optimize_vector)
 from rpython.jit.metainterp.optimizeopt.schedule import (Scheduler,
         SchedulerState, VecScheduleState, Pack)
+from rpython.jit.metainterp.optimizeopt.optimizer import BasicLoopInfo
 from rpython.jit.metainterp.optimize import InvalidLoop
 from rpython.jit.metainterp import compile
 from rpython.jit.metainterp.resoperation import rop, ResOperation
@@ -74,6 +75,12 @@
 
 ARCH_VEC_REG_SIZE = 16
 
+class FakeWarmState(object):
+    vec_all = False
+    vec_cost = 0
+
+
+
 class VecTestHelper(DependencyBaseTest):
 
     enable_opts = "intbounds:rewrite:virtualize:string:earlyforce:pure:heap"
@@ -81,18 +88,17 @@
     jitdriver_sd = FakeJitDriverStaticData()
 
     def assert_vectorize(self, loop, expected_loop, call_pure_results=None):
-        jump = ResOperation(rop.LABEL, loop.jump.getarglist(), loop.jump.getdescr())
-        # convert_loop_to_trace assumes that there are no descriptors
-        # but because this optimization pass is after the normal optimization pass
-        # parse_loop already set artificial resume descr!
-        for op in loop.operations:
-            if op.is_guard():
-                op.setdescr(None)
-        trace = convert_loop_to_trace(loop)
-        compile_data = compile.LoopCompileData(trace, loop.jump.getarglist())
-        state = self._do_optimize_loop(compile_data)
-        loop.label = state[0].label_op
-        loop.operations = state[1]
+        jump = ResOperation(rop.JUMP, loop.jump.getarglist(), loop.jump.getdescr())
+        metainterp_sd = FakeMetaInterpStaticData(self.cpu)
+        warmstate = FakeWarmState()
+        loop.operations += [loop.jump]
+        loop_info = BasicLoopInfo(loop.jump.getarglist(), None, jump)
+        loop_info.label_op = ResOperation(rop.LABEL, loop.jump.getarglist(), loop.jump.getdescr())
+        optimize_vector(None, metainterp_sd, self.jitdriver_sd, warmstate,
+                        loop_info, loop.operations)
+        loop.operations = loop.operations[:-1]
+        #loop.label = state[0].label_op
+        #loop.operations = state[1]
         self.assert_equal(loop, expected_loop)
 
     def vectoroptimizer(self, loop):
diff --git a/rpython/jit/metainterp/optimizeopt/vector.py b/rpython/jit/metainterp/optimizeopt/vector.py
--- a/rpython/jit/metainterp/optimizeopt/vector.py
+++ b/rpython/jit/metainterp/optimizeopt/vector.py
@@ -117,7 +117,7 @@
     user_code = not jitdriver_sd.vec and warmstate.vec_all
     e = len(loop_ops)-1
     assert e > 0
-    assert loop_ops[e].is_final()
+    assert rop.is_final(loop_ops[e].getopnum())
     loop = VectorLoop(loop_info.label_op, loop_ops[:e], loop_ops[-1])
     if user_code and user_loop_bail_fast_path(loop, warmstate):
         return loop_info, loop_ops


More information about the pypy-commit mailing list