[pypy-svn] r14041 - in pypy/branch/pycompiler/translator: . c c/test genc goal llvm llvm/test llvm2 llvm2/test pickle pickle/attic test tool tool/pygame

adim at codespeak.net
Fri Jul 1 18:29:13 CEST 2005


Author: adim
Date: Fri Jul  1 18:28:54 2005
New Revision: 14041

Added:
   pypy/branch/pycompiler/translator/backendoptimization.py
      - copied unchanged from r14039, pypy/dist/pypy/translator/backendoptimization.py
   pypy/branch/pycompiler/translator/c/char_include.h
      - copied unchanged from r14039, pypy/dist/pypy/translator/c/char_include.h
   pypy/branch/pycompiler/translator/c/external.py
      - copied unchanged from r14039, pypy/dist/pypy/translator/c/external.py
   pypy/branch/pycompiler/translator/c/float_include.h
      - copied unchanged from r14039, pypy/dist/pypy/translator/c/float_include.h
   pypy/branch/pycompiler/translator/c/g_debuginfo.h
      - copied unchanged from r14039, pypy/dist/pypy/translator/c/g_debuginfo.h
   pypy/branch/pycompiler/translator/c/g_exception.h
      - copied unchanged from r14039, pypy/dist/pypy/translator/c/g_exception.h
   pypy/branch/pycompiler/translator/c/mkuint.py
      - copied unchanged from r14039, pypy/dist/pypy/translator/c/mkuint.py
   pypy/branch/pycompiler/translator/c/symboltable.py
      - copied unchanged from r14039, pypy/dist/pypy/translator/c/symboltable.py
   pypy/branch/pycompiler/translator/c/test/test_annotated.py
      - copied unchanged from r14039, pypy/dist/pypy/translator/c/test/test_annotated.py
   pypy/branch/pycompiler/translator/c/test/test_backendoptimized.py
      - copied unchanged from r14039, pypy/dist/pypy/translator/c/test/test_backendoptimized.py
   pypy/branch/pycompiler/translator/c/test/test_exception.py
      - copied unchanged from r14039, pypy/dist/pypy/translator/c/test/test_exception.py
   pypy/branch/pycompiler/translator/c/test/test_lltyped.py
      - copied unchanged from r14039, pypy/dist/pypy/translator/c/test/test_lltyped.py
   pypy/branch/pycompiler/translator/c/test/test_notype.py
      - copied unchanged from r14039, pypy/dist/pypy/translator/c/test/test_notype.py
   pypy/branch/pycompiler/translator/c/test/test_operation.py
      - copied unchanged from r14039, pypy/dist/pypy/translator/c/test/test_operation.py
   pypy/branch/pycompiler/translator/c/test/test_support.py
      - copied unchanged from r14039, pypy/dist/pypy/translator/c/test/test_support.py
   pypy/branch/pycompiler/translator/c/test/test_symboltable.py
      - copied unchanged from r14039, pypy/dist/pypy/translator/c/test/test_symboltable.py
   pypy/branch/pycompiler/translator/c/test/test_typed.py
      - copied unchanged from r14039, pypy/dist/pypy/translator/c/test/test_typed.py
   pypy/branch/pycompiler/translator/c/unichar_include.h
      - copied unchanged from r14039, pypy/dist/pypy/translator/c/unichar_include.h
   pypy/branch/pycompiler/translator/goal/richards.py
      - copied unchanged from r14039, pypy/dist/pypy/translator/goal/richards.py
   pypy/branch/pycompiler/translator/goal/targetrichards.py
      - copied unchanged from r14039, pypy/dist/pypy/translator/goal/targetrichards.py
   pypy/branch/pycompiler/translator/goal/targetrpystonex.py
      - copied unchanged from r14039, pypy/dist/pypy/translator/goal/targetrpystonex.py
   pypy/branch/pycompiler/translator/goal/unixcheckpoint.py
      - copied unchanged from r14039, pypy/dist/pypy/translator/goal/unixcheckpoint.py
   pypy/branch/pycompiler/translator/llvm2/   (props changed)
      - copied from r14039, pypy/dist/pypy/translator/llvm2/
   pypy/branch/pycompiler/translator/llvm2/__init__.py
      - copied unchanged from r14039, pypy/dist/pypy/translator/llvm2/__init__.py
   pypy/branch/pycompiler/translator/llvm2/build_llvm_module.py
      - copied unchanged from r14039, pypy/dist/pypy/translator/llvm2/build_llvm_module.py
   pypy/branch/pycompiler/translator/llvm2/cfgtransform.py
      - copied unchanged from r14039, pypy/dist/pypy/translator/llvm2/cfgtransform.py
   pypy/branch/pycompiler/translator/llvm2/codewriter.py
      - copied unchanged from r14039, pypy/dist/pypy/translator/llvm2/codewriter.py
   pypy/branch/pycompiler/translator/llvm2/database.py
      - copied unchanged from r14039, pypy/dist/pypy/translator/llvm2/database.py
   pypy/branch/pycompiler/translator/llvm2/funcnode.py
      - copied unchanged from r14039, pypy/dist/pypy/translator/llvm2/funcnode.py
   pypy/branch/pycompiler/translator/llvm2/genllvm.py
      - copied unchanged from r14039, pypy/dist/pypy/translator/llvm2/genllvm.py
   pypy/branch/pycompiler/translator/llvm2/log.py
      - copied unchanged from r14039, pypy/dist/pypy/translator/llvm2/log.py
   pypy/branch/pycompiler/translator/llvm2/pyxwrapper.py
      - copied unchanged from r14039, pypy/dist/pypy/translator/llvm2/pyxwrapper.py
   pypy/branch/pycompiler/translator/llvm2/structnode.py
      - copied unchanged from r14039, pypy/dist/pypy/translator/llvm2/structnode.py
   pypy/branch/pycompiler/translator/llvm2/test/   (props changed)
      - copied from r14039, pypy/dist/pypy/translator/llvm2/test/
   pypy/branch/pycompiler/translator/llvm2/test/__init__.py
      - copied unchanged from r14039, pypy/dist/pypy/translator/llvm2/test/__init__.py
   pypy/branch/pycompiler/translator/llvm2/test/llvmsnippet.py
      - copied unchanged from r14039, pypy/dist/pypy/translator/llvm2/test/llvmsnippet.py
   pypy/branch/pycompiler/translator/llvm2/test/test_genllvm.py
      - copied unchanged from r14039, pypy/dist/pypy/translator/llvm2/test/test_genllvm.py
Removed:
   pypy/branch/pycompiler/translator/genc/
   pypy/branch/pycompiler/translator/goal/targetpypy0.py
   pypy/branch/pycompiler/translator/goal/targetpypy1.py
   pypy/branch/pycompiler/translator/goal/targetrpystone2.py
   pypy/branch/pycompiler/translator/tool/flowtrace.py
   pypy/branch/pycompiler/translator/typer.py
Modified:
   pypy/branch/pycompiler/translator/ann_override.py
   pypy/branch/pycompiler/translator/annrpython.py
   pypy/branch/pycompiler/translator/c/database.py
   pypy/branch/pycompiler/translator/c/funcgen.py
   pypy/branch/pycompiler/translator/c/g_include.h
   pypy/branch/pycompiler/translator/c/g_module.h
   pypy/branch/pycompiler/translator/c/g_support.h
   pypy/branch/pycompiler/translator/c/genc.py
   pypy/branch/pycompiler/translator/c/int_include.h
   pypy/branch/pycompiler/translator/c/ll_include.h
   pypy/branch/pycompiler/translator/c/node.py
   pypy/branch/pycompiler/translator/c/primitive.py
   pypy/branch/pycompiler/translator/c/pyobj_include.h
   pypy/branch/pycompiler/translator/c/support.py
   pypy/branch/pycompiler/translator/c/test/test_database.py
   pypy/branch/pycompiler/translator/c/test/test_genc.py
   pypy/branch/pycompiler/translator/c/wrapper.py
   pypy/branch/pycompiler/translator/geninterplevel.py
   pypy/branch/pycompiler/translator/goal/query.py
   pypy/branch/pycompiler/translator/goal/targetpypymain.py
   pypy/branch/pycompiler/translator/goal/targetrpystone.py
   pypy/branch/pycompiler/translator/goal/translate_pypy.py
   pypy/branch/pycompiler/translator/llvm/build_llvm_module.py
   pypy/branch/pycompiler/translator/llvm/llvmbc.py
   pypy/branch/pycompiler/translator/llvm/pointerrepr.py   (props changed)
   pypy/branch/pycompiler/translator/llvm/reprmap.py   (props changed)
   pypy/branch/pycompiler/translator/llvm/test/llvmsnippet.py
   pypy/branch/pycompiler/translator/llvm/test/test_genllvm.py
   pypy/branch/pycompiler/translator/llvm/typerepr.py
   pypy/branch/pycompiler/translator/pickle/__init__.py   (props changed)
   pypy/branch/pycompiler/translator/pickle/attic/   (props changed)
   pypy/branch/pycompiler/translator/pickle/genpickle.py
   pypy/branch/pycompiler/translator/pickle/loader.py
   pypy/branch/pycompiler/translator/pickle/main.py
   pypy/branch/pycompiler/translator/pickle/writer.py
   pypy/branch/pycompiler/translator/simplify.py
   pypy/branch/pycompiler/translator/test/rpystone.py
   pypy/branch/pycompiler/translator/test/snippet.py
   pypy/branch/pycompiler/translator/test/test_annrpython.py
   pypy/branch/pycompiler/translator/test/test_backends.py   (props changed)
   pypy/branch/pycompiler/translator/test/test_cltrans.py
   pypy/branch/pycompiler/translator/tool/graphpage.py
   pypy/branch/pycompiler/translator/tool/pygame/drawgraph.py
   pypy/branch/pycompiler/translator/tool/pygame/graphclient.py
   pypy/branch/pycompiler/translator/transform.py
   pypy/branch/pycompiler/translator/translator.py
Log:
updated the branch with the latest version of trunk

svn merge -r14039:140409 http://codespeak.net/svn/pypy/dist/pypy


Modified: pypy/branch/pycompiler/translator/ann_override.py
==============================================================================
--- pypy/branch/pycompiler/translator/ann_override.py	(original)
+++ pypy/branch/pycompiler/translator/ann_override.py	Fri Jul  1 18:28:54 2005
@@ -1,15 +1,13 @@
 # overrides for annotation specific to PyPy codebase
 from pypy.annotation.policy import AnnotatorPolicy
-from pypy.annotation.bookkeeper import getbookkeeper
+# for some reason, model must be imported first,
+# or we create a cycle.
 from pypy.annotation import model as annmodel
+from pypy.annotation.bookkeeper import getbookkeeper
 from pypy.annotation import specialize
 
 class PyPyAnnotatorPolicy(AnnotatorPolicy):
 
-    def override__ignore(pol, *args):
-        bk = getbookkeeper()
-        return bk.immutablevalue(None)
-
     def override__instantiate(pol, clspbc):
         assert isinstance(clspbc, annmodel.SomePBC)
         clsdef = None
@@ -34,6 +32,3 @@
         from pypy.interpreter import pycode
         clsdef = getbookkeeper().getclassdef(pycode.PyCode)
         return annmodel.SomeInstance(clsdef)    
-
-    specialize__arg1 = staticmethod(specialize.argvalue(1))
-    specialize__argtype1 = staticmethod(specialize.argtype(1))

Modified: pypy/branch/pycompiler/translator/annrpython.py
==============================================================================
--- pypy/branch/pycompiler/translator/annrpython.py	(original)
+++ pypy/branch/pycompiler/translator/annrpython.py	Fri Jul  1 18:28:54 2005
@@ -1,25 +1,20 @@
 from __future__ import generators
 
 from types import FunctionType, ClassType
-from pypy.tool.ansi_print import ansi_print
+from pypy.tool.ansi_print import ansi_log 
 from pypy.annotation import model as annmodel
 from pypy.annotation.model import pair
 from pypy.annotation.bookkeeper import Bookkeeper
 from pypy.objspace.flow.model import Variable, Constant
 from pypy.objspace.flow.model import SpaceOperation, FunctionGraph
 from pypy.objspace.flow.model import last_exception, checkgraph
+import py
+log = py.log.Producer("annrpython") 
+py.log.setconsumer("annrpython", ansi_log) 
 
 class AnnotatorError(Exception):
     pass
 
-class BasicAnnotatorPolicy:
-    
-    def specialize(pol, bookkeeper, spaceop, func, args, mono):
-        return None, None
-
-    def compute_at_fixpoint(pol, annotator):
-        annotator.bookkeeper.compute_at_fixpoint()
-
 
 class RPythonAnnotator:
     """Block annotator for RPython.
@@ -52,7 +47,8 @@
         self.frozen = False
         # user-supplied annotation logic for functions we don't want to flow into
         if policy is None:
-            self.policy = BasicAnnotatorPolicy()
+            from pypy.annotation.policy import AnnotatorPolicy
+            self.policy = AnnotatorPolicy()
         else:
             self.policy = policy
 
@@ -96,8 +92,9 @@
         # make input arguments and set their type
         input_arg_types = list(input_arg_types)
         nbarg = len(flowgraph.getargs())
-        while len(input_arg_types) < nbarg:
-            input_arg_types.append(object)
+        if len(input_arg_types) != nbarg: 
+            raise TypeError("flowgraph %s expects %d args, got %d" %(       
+                            flowgraph.name, nbarg, len(input_arg_types)))
         inputcells = []
         for t in input_arg_types:
             if not isinstance(t, annmodel.SomeObject):
@@ -233,15 +230,13 @@
         self.bindings[arg] = s_value
         if annmodel.DEBUG:
             if arg in self.return_bindings:
-                ansi_print("%s -> %s" % (self.whereami((self.return_bindings[arg],
-                                                         None, None)),
-                                         s_value),
-                           esc="1") # bold
+                log.bold("%s -> %s" % 
+                    (self.whereami((self.return_bindings[arg], None, None)), 
+                     s_value)) 
 
             if arg in self.return_bindings and s_value == annmodel.SomeObject():
-                ansi_print("*** WARNING: %s result degenerated to SomeObject" %
-                           self.whereami((self.return_bindings[arg],None, None)),
-                           esc="31") # RED
+                log.red("*** WARNING: %s result degenerated to SomeObject" %
+                     self.whereami((self.return_bindings[arg],None, None))) 
                 
             self.binding_caused_by[arg] = called_from
 
@@ -595,6 +590,7 @@
 
     # XXX "contains" clash with SomeObject method
     def consider_op_contains(self, seq, elem):
+        self.bookkeeper.count("contains", seq)
         return annmodel.SomeBool()
 
     def consider_op_newtuple(self, *args):
@@ -611,6 +607,7 @@
         return self.bookkeeper.newdict(*items_s)
 
     def consider_op_newslice(self, start, stop, step):
+        self.bookkeeper.count('newslice', start, stop, step)
         return annmodel.SomeSlice(start, stop, step)
 
 

Modified: pypy/branch/pycompiler/translator/c/database.py
==============================================================================
--- pypy/branch/pycompiler/translator/c/database.py	(original)
+++ pypy/branch/pycompiler/translator/c/database.py	Fri Jul  1 18:28:54 2005
@@ -1,6 +1,7 @@
 from pypy.rpython.lltype import Primitive, Ptr, typeOf
 from pypy.rpython.lltype import Struct, Array, FuncType, PyObject, Void
-from pypy.rpython.lltype import ContainerType
+from pypy.rpython.lltype import ContainerType, pyobjectptr, OpaqueType, GcStruct
+from pypy.rpython.rmodel import getfunctionptr
 from pypy.objspace.flow.model import Constant
 from pypy.translator.c.primitive import PrimitiveName, PrimitiveType
 from pypy.translator.c.primitive import PrimitiveErrorValue
@@ -14,6 +15,7 @@
 class LowLevelDatabase:
 
     def __init__(self, translator=None):
+        self.translator = translator
         self.structdefnodes = {}
         self.structdeflist = []
         self.containernodes = {}
@@ -67,6 +69,11 @@
                     argtypes.append(cdecl(argtype, argname))
             argtypes = ', '.join(argtypes) or 'void'
             return resulttype.replace('@', '(@)(%s)' % argtypes)
+        elif isinstance(T, OpaqueType):
+            if T.tag == 'RuntimeTypeInfo':
+                return 'void (@)(void *)'   # void dealloc_xx(struct xx *)
+            else:
+                raise Exception("don't know about opaque type %r" % (T,))
         else:
             raise Exception("don't know about type %r" % (T,))
 
@@ -105,6 +112,8 @@
 
     def cincrefstmt(self, expr, T):
         if isinstance(T, Ptr) and T._needsgc():
+            if expr == 'NULL':    # hum
+                return ''
             if T.TO == PyObject:
                 return 'Py_XINCREF(%s);' % expr
             else:
@@ -144,3 +153,51 @@
         for node in self.containerlist:
             if node.globalcontainer:
                 yield node
+
+    def get_lltype_of_exception_value(self):
+        if self.translator is not None and self.translator.rtyper is not None:
+            exceptiondata = self.translator.rtyper.getexceptiondata()
+            return exceptiondata.lltype_of_exception_value
+        else:
+            return Ptr(PyObject)
+
+    def pre_include_code_lines(self):
+        # generate some #defines that go before the #include to control
+        # what g_exception.h does
+        if self.translator is not None and self.translator.rtyper is not None:
+            exceptiondata = self.translator.rtyper.getexceptiondata()
+
+            TYPE = exceptiondata.lltype_of_exception_type
+            assert isinstance(TYPE, Ptr)
+            typename = self.gettype(TYPE)
+            yield '#define RPYTHON_EXCEPTION_VTABLE %s' % cdecl(typename, '')
+
+            TYPE = exceptiondata.lltype_of_exception_value
+            assert isinstance(TYPE, Ptr)
+            typename = self.gettype(TYPE)
+            yield '#define RPYTHON_EXCEPTION        %s' % cdecl(typename, '')
+
+            fnptr = getfunctionptr(self.translator,
+                                   exceptiondata.ll_exception_match)
+            fnname = self.get(fnptr)
+            yield '#define RPYTHON_EXCEPTION_MATCH  %s' % (fnname,)
+
+            fnptr = getfunctionptr(self.translator,
+                                   exceptiondata.ll_type_of_exc_inst)
+            fnname = self.get(fnptr)
+            yield '#define RPYTHON_TYPE_OF_EXC_INST %s' % (fnname,)
+
+            fnptr = getfunctionptr(self.translator,
+                                   exceptiondata.ll_pyexcclass2exc)
+            fnname = self.get(fnptr)
+            yield '#define RPYTHON_PYEXCCLASS2EXC   %s' % (fnname,)
+
+            for pyexccls in exceptiondata.standardexceptions:
+                exc_llvalue = exceptiondata.ll_pyexcclass2exc(
+                    pyobjectptr(pyexccls))
+                # strange naming here because the macro name must be
+                # a substring of PyExc_%s
+                yield '#define Exc_%s\t%s' % (
+                    pyexccls.__name__, self.get(exc_llvalue))
+
+            self.complete()   # because of the get() and gettype() above
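
The #defines yielded by pre_include_code_lines() are what the new g_exception.h keys on: when a rtyper is available they switch the exception machinery from the default PyObject*-based one to the RPython-level representation.  They are emitted just before the '#include "g_include.h"' line (see the genc.py changes below).  As a rough illustration of the generated block -- only the macro names come from the code above, the struct and ll_* names on the right-hand side are invented:

    #define RPYTHON_EXCEPTION_VTABLE struct object_vtable0 *
    #define RPYTHON_EXCEPTION        struct object0 *
    #define RPYTHON_EXCEPTION_MATCH  ll_exception_match__object_vtable0Ptr
    #define RPYTHON_TYPE_OF_EXC_INST ll_type_of_exc_inst__object0Ptr
    #define RPYTHON_PYEXCCLASS2EXC   ll_pyexcclass2exc__PyObjectPtr
    #define Exc_OverflowError        g_exc_instance_OverflowError
    #define Exc_ValueError           g_exc_instance_ValueError
    #define Exc_ZeroDivisionError    g_exc_instance_ZeroDivisionError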

Modified: pypy/branch/pycompiler/translator/c/funcgen.py
==============================================================================
--- pypy/branch/pycompiler/translator/c/funcgen.py	(original)
+++ pypy/branch/pycompiler/translator/c/funcgen.py	Fri Jul  1 18:28:54 2005
@@ -1,11 +1,10 @@
 from __future__ import generators
 from pypy.translator.c.support import cdecl, ErrorValue
-from pypy.translator.c.support import llvalue_from_constant
+from pypy.translator.c.support import llvalue_from_constant, gen_assignments
 from pypy.objspace.flow.model import Variable, Constant, Block
-from pypy.objspace.flow.model import traverse, uniqueitems, last_exception
+from pypy.objspace.flow.model import traverse, last_exception
 from pypy.rpython.lltype import Ptr, PyObject, Void, Bool
 from pypy.rpython.lltype import pyobjectptr, Struct, Array
-from pypy.translator.unsimplify import remove_direct_loops
 
 
 PyObjPtr = Ptr(PyObject)
@@ -16,55 +15,71 @@
     from a flow graph.
     """
 
-    def __init__(self, graph, db):
+    def __init__(self, graph, db, cpython_exc=False):
         self.graph = graph
-        remove_direct_loops(None, graph)
         self.db = db
-        self.lltypemap = self.collecttypes()
-        self.typemap = {}
-        for v, T in self.lltypemap.items():
-            self.typemap[v] = db.gettype(T)
-
-    def collecttypes(self):
+        self.cpython_exc = cpython_exc
+        #
         # collect all variables and constants used in the body,
         # and get their types now
-        result = []
+        #
+        # NOTE: cannot use dictionaries with Constants as keys, because
+        #       Constants may hash and compare equal but have different lltypes
+        mix = []
+        self.more_ll_values = []
         def visit(block):
             if isinstance(block, Block):
-                result.extend(block.inputargs)
+                mix.extend(block.inputargs)
                 for op in block.operations:
-                    result.extend(op.args)
-                    result.append(op.result)
+                    mix.extend(op.args)
+                    mix.append(op.result)
                 for link in block.exits:
-                    result.extend(link.getextravars())
-                    result.extend(link.args)
-                    result.append(Constant(link.exitcase))
-        traverse(visit, self.graph)
-        resultvar = self.graph.getreturnvar()
-        lltypemap = {resultvar: Void}   # default value, normally overridden
-        for v in uniqueitems(result):
-            # xxx what kind of pointer for constants?
-            T = getattr(v, 'concretetype', PyObjPtr)           
-            lltypemap[v] = T
-        return lltypemap
+                    mix.extend(link.getextravars())
+                    mix.extend(link.args)
+                    mix.append(Constant(link.exitcase))
+                    if hasattr(link, 'llexitcase'):
+                        self.more_ll_values.append(link.llexitcase)
+        traverse(visit, graph)
+        resultvar = graph.getreturnvar()
+
+        self.lltypes = {
+            # default, normally overridden:
+            id(resultvar): (resultvar, Void, db.gettype(Void)),
+            }
+        for v in mix:
+            T = getattr(v, 'concretetype', PyObjPtr)
+            typename = db.gettype(T)
+            self.lltypes[id(v)] = v, T, typename
 
     def argnames(self):
         return [v.name for v in self.graph.getargs()]
 
     def allvariables(self):
-        return [v for v in self.typemap if isinstance(v, Variable)]
+        return [v for v, T, typename in self.lltypes.values()
+                  if isinstance(v, Variable)]
 
     def allconstants(self):
-        return [v for v in self.typemap if isinstance(v, Constant)]
+        return [c for c, T, typename in self.lltypes.values()
+                  if isinstance(c, Constant)]
 
     def allconstantvalues(self):
-        for v in self.typemap:
-            if isinstance(v, Constant):
-                yield llvalue_from_constant(v)
+        for c, T, typename in self.lltypes.values():
+            if isinstance(c, Constant):
+                yield llvalue_from_constant(c)
+        for llvalue in self.more_ll_values:
+            yield llvalue
+
+    def lltypemap(self, v):
+        v, T, typename = self.lltypes[id(v)]
+        return T
+
+    def lltypename(self, v):
+        v, T, typename = self.lltypes[id(v)]
+        return typename
 
-    def expr(self, v):
+    def expr(self, v, special_case_void=True):
         if isinstance(v, Variable):
-            if self.lltypemap[v] == Void:
+            if self.lltypemap(v) == Void and special_case_void:
                 return '/* nothing */'
             else:
                 return v.name
@@ -74,22 +89,36 @@
             raise TypeError, "expr(%r)" % (v,)
 
     def error_return_value(self):
-        returnlltype = self.lltypemap[self.graph.getreturnvar()]
+        returnlltype = self.lltypemap(self.graph.getreturnvar())
         return self.db.get(ErrorValue(returnlltype))
 
+    def return_with_error(self):
+        if self.cpython_exc:
+            lltype_of_exception_value = self.db.get_lltype_of_exception_value()
+            exc_value_typename = self.db.gettype(lltype_of_exception_value)
+            assert self.lltypemap(self.graph.getreturnvar()) == PyObjPtr
+            yield '{'
+            yield '\t%s;' % cdecl(exc_value_typename, 'vanishing_exc_value')
+            yield '\tConvertExceptionToCPython(vanishing_exc_value);'
+            yield '\t%s' % self.db.cdecrefstmt('vanishing_exc_value', lltype_of_exception_value)
+            yield '}'
+        yield 'return %s; ' % self.error_return_value()
+
     # ____________________________________________________________
 
     def cfunction_declarations(self):
         # declare the local variables, excluding the function arguments
-        inputargset = {}
+        seen = {}
         for a in self.graph.getargs():
-            inputargset[a] = True
+            seen[a.name] = True
 
         result_by_name = []
         for v in self.allvariables():
-            if v not in inputargset:
-                result = cdecl(self.typemap[v], v.name) + ';'
-                if self.lltypemap[v] == Void:
+            name = v.name
+            if name not in seen:
+                seen[name] = True
+                result = cdecl(self.lltypename(v), name) + ';'
+                if self.lltypemap(v) == Void:
                     result = '/*%s*/' % result
                 result_by_name.append((v._name, result))
         result_by_name.sort()
@@ -116,24 +145,36 @@
             for v in to_release:
                 linklocalvars[v] = self.expr(v)
             has_ref = linklocalvars.copy()
+            assignments = []
+            increfs = []
             for a1, a2 in zip(link.args, link.target.inputargs):
-                if self.lltypemap[a2] == Void:
+                if self.lltypemap(a2) == Void:
                     continue
                 if a1 in linklocalvars:
                     src = linklocalvars[a1]
                 else:
                     src = self.expr(a1)
-                line = '%s = %s;' % (self.expr(a2), src)
+                dest = self.expr(a2)
+                assignments.append((self.lltypename(a2), dest, src))
                 if a1 in has_ref:
                     del has_ref[a1]
                 else:
-                    assert self.lltypemap[a1] == self.lltypemap[a2]
-                    line += '\t' + self.cincref(a2)
-                yield line
+                    assert self.lltypemap(a1) == self.lltypemap(a2)
+                    line = self.cincref(a2)
+                    if line:
+                        increfs.append(line)
+            # warning, the order below is delicate to get right:
+            # 1. decref the old variables that are not passed over
             for v in has_ref:
                 line = self.cdecref(v, linklocalvars[v])
                 if line:
                     yield line
+            # 2. perform the assignments with collision-avoidance
+            for line in gen_assignments(assignments):
+                yield line
+            # 3. incref the new variables if needed
+            for line in increfs:
+                yield line
             yield 'goto block%d;' % blocknum[link.target]
 
         # collect all blocks
@@ -169,8 +210,9 @@
                     # exceptional return block
                     exc_cls   = self.expr(block.inputargs[0])
                     exc_value = self.expr(block.inputargs[1])
-                    yield 'PyErr_Restore(%s, %s, NULL);' % (exc_cls, exc_value)
-                    yield 'return %s;' % self.error_return_value()
+                    yield 'RaiseException(%s, %s);' % (exc_cls, exc_value)
+                    for line in self.return_with_error():
+                        yield line 
                 else:
                     # regular return block
                     retval = self.expr(block.inputargs[0])
@@ -197,31 +239,40 @@
                 yield ''
                 for link in block.exits[1:]:
                     assert issubclass(link.exitcase, Exception)
-                    yield 'if (PyErr_ExceptionMatches(%s)) {' % (
-                        self.db.get(pyobjectptr(link.exitcase)),)
-                    yield '\tPyObject *exc_cls, *exc_value, *exc_tb;'
-                    yield '\tPyErr_Fetch(&exc_cls, &exc_value, &exc_tb);'
-                    yield '\tif (exc_value == NULL) {'
-                    yield '\t\texc_value = Py_None;'
-                    yield '\t\tPy_INCREF(Py_None);'
-                    yield '\t}'
-                    yield '\tPy_XDECREF(exc_tb);'
+                    try:
+                        etype = link.llexitcase
+                    except AttributeError:
+                        etype = pyobjectptr(link.exitcase)
+                        T1 = PyObjPtr
+                        T2 = PyObjPtr
+                    else:
+                        assert hasattr(link.last_exception, 'concretetype')
+                        assert hasattr(link.last_exc_value, 'concretetype')
+                        T1 = link.last_exception.concretetype
+                        T2 = link.last_exc_value.concretetype
+                    typ1 = self.db.gettype(T1)
+                    typ2 = self.db.gettype(T2)
+                    yield 'if (MatchException(%s)) {' % (self.db.get(etype),)
+                    yield '\t%s;' % cdecl(typ1, 'exc_cls')
+                    yield '\t%s;' % cdecl(typ2, 'exc_value')
+                    yield '\tFetchException(exc_cls, exc_value, %s);' % (
+                        cdecl(typ2, ''))
                     d = {}
                     if isinstance(link.last_exception, Variable):
                         d[link.last_exception] = 'exc_cls'
                     else:
-                        yield '\tPy_XDECREF(exc_cls);'
+                        yield '\t' + self.db.cdecrefstmt('exc_cls', T1)
                     if isinstance(link.last_exc_value, Variable):
                         d[link.last_exc_value] = 'exc_value'
                     else:
-                        yield '\tPy_XDECREF(exc_value);'
+                        yield '\t' + self.db.cdecrefstmt('exc_value', T2)
                     for op in gen_link(link, d):
                         yield '\t' + op
                     yield '}'
                 err_reachable = True
             else:
                 # block ending in a switch on a value
-                TYPE = self.lltypemap[block.exitswitch]
+                TYPE = self.lltypemap(block.exitswitch)
                 for link in block.exits[:-1]:
                     assert link.exitcase in (False, True)
                     expr = self.expr(block.exitswitch)
@@ -257,7 +308,8 @@
                 yield 'err%d_%d:' % (blocknum[block], len(to_release))
                 err_reachable = True
             if err_reachable:
-                yield 'return %s;' % self.error_return_value()
+                for line in self.return_with_error():
+                    yield line
 
     # ____________________________________________________________
 
@@ -302,22 +354,23 @@
 
     def OP_DIRECT_CALL(self, op, err):
         # skip 'void' arguments
-        args = [self.expr(v) for v in op.args if self.lltypemap[v] != Void]
-        if self.lltypemap[op.result] == Void:
+        args = [self.expr(v) for v in op.args if self.lltypemap(v) != Void]
+        if self.lltypemap(op.result) == Void:
             # skip assignment of 'void' return value
-            return '%s(%s); if (PyErr_Occurred()) FAIL(%s)' % (
+            return '%s(%s); if (ExceptionOccurred()) FAIL(%s)' % (
                 args[0], ', '.join(args[1:]), err)
         else:
             r = self.expr(op.result)
-            return '%s = %s(%s); if (PyErr_Occurred()) FAIL(%s)' % (
+            return '%s = %s(%s); if (ExceptionOccurred()) FAIL(%s)' % (
                 r, args[0], ', '.join(args[1:]), err)
 
     # low-level operations
     def generic_get(self, op, sourceexpr):
-        newvalue = self.expr(op.result)
+        T = self.lltypemap(op.result)
+        newvalue = self.expr(op.result, special_case_void=False)
         result = ['%s = %s;' % (newvalue, sourceexpr)]
         # need to adjust the refcount of the result
-        T = self.lltypemap[op.result]
+
         increfstmt = self.db.cincrefstmt(newvalue, T)
         if increfstmt:
             result.append(increfstmt)
@@ -327,17 +380,17 @@
         return result
 
     def generic_set(self, op, targetexpr):
-        newvalue = self.expr(op.args[2])
+        newvalue = self.expr(op.args[2], special_case_void=False)
         result = ['%s = %s;' % (targetexpr, newvalue)]
         # need to adjust some refcounts
-        T = self.lltypemap[op.args[2]]
+        T = self.lltypemap(op.args[2])
         decrefstmt = self.db.cdecrefstmt('prev', T)
         increfstmt = self.db.cincrefstmt(newvalue, T)
         if increfstmt:
             result.append(increfstmt)
         if decrefstmt:
             result.insert(0, '{ %s = %s;' % (
-                cdecl(self.typemap[op.args[2]], 'prev'),
+                cdecl(self.lltypename(op.args[2]), 'prev'),
                 targetexpr))
             result.append(decrefstmt)
             result.append('}')
@@ -348,7 +401,7 @@
 
     def OP_GETFIELD(self, op, err, ampersand=''):
         assert isinstance(op.args[1], Constant)
-        STRUCT = self.lltypemap[op.args[0]].TO
+        STRUCT = self.lltypemap(op.args[0]).TO
         structdef = self.db.gettypedefnode(STRUCT)
         fieldname = structdef.c_struct_field_name(op.args[1].value)
         return self.generic_get(op, '%s%s->%s' % (ampersand,
@@ -357,7 +410,7 @@
 
     def OP_SETFIELD(self, op, err):
         assert isinstance(op.args[1], Constant)
-        STRUCT = self.lltypemap[op.args[0]].TO
+        STRUCT = self.lltypemap(op.args[0]).TO
         structdef = self.db.gettypedefnode(STRUCT)
         fieldname = structdef.c_struct_field_name(op.args[1].value)
         return self.generic_set(op, '%s->%s' % (self.expr(op.args[0]),
@@ -386,7 +439,10 @@
     def OP_PTR_NONZERO(self, op, err):
         return '%s = (%s != NULL);' % (self.expr(op.result),
                                        self.expr(op.args[0]))
-
+    def OP_PTR_ISZERO(self, op, err):
+        return '%s = (%s == NULL);' % (self.expr(op.result),
+                                       self.expr(op.args[0]))
+    
     def OP_PTR_EQ(self, op, err):
         return '%s = (%s == %s);' % (self.expr(op.result),
                                      self.expr(op.args[0]),
@@ -398,7 +454,7 @@
                                      self.expr(op.args[1]))
 
     def OP_MALLOC(self, op, err):
-        TYPE = self.lltypemap[op.result].TO
+        TYPE = self.lltypemap(op.result).TO
         typename = self.db.gettype(TYPE)
         eresult = self.expr(op.result)
         result = ['OP_ZERO_MALLOC(sizeof(%s), %s, %s)' % (cdecl(typename, ''),
@@ -410,7 +466,7 @@
         return '\t'.join(result)
 
     def OP_MALLOC_VARSIZE(self, op, err):
-        TYPE = self.lltypemap[op.result].TO
+        TYPE = self.lltypemap(op.result).TO
         typename = self.db.gettype(TYPE)
         lenfld = 'length'
         nodedef = self.db.gettypedefnode(TYPE)
@@ -439,7 +495,7 @@
         return '\t'.join(result)
 
     def OP_CAST_POINTER(self, op, err):
-        TYPE = self.lltypemap[op.result]
+        TYPE = self.lltypemap(op.result)
         typename = self.db.gettype(TYPE)
         result = []
         result.append('%s = (%s)%s;' % (self.expr(op.result),
@@ -452,8 +508,8 @@
 
     def OP_SAME_AS(self, op, err):
         result = []
-        assert self.lltypemap[op.args[0]] == self.lltypemap[op.result]
-        if self.lltypemap[op.result] != Void:
+        assert self.lltypemap(op.args[0]) == self.lltypemap(op.result)
+        if self.lltypemap(op.result) != Void:
             result.append('%s = %s;' % (self.expr(op.result),
                                         self.expr(op.args[0])))
             line = self.cincref(op.result)
@@ -462,9 +518,9 @@
         return '\t'.join(result)
 
     def cincref(self, v):
-        T = self.lltypemap[v]
+        T = self.lltypemap(v)
         return self.db.cincrefstmt(v.name, T)
 
     def cdecref(self, v, expr=None):
-        T = self.lltypemap[v]
+        T = self.lltypemap(v)
         return self.db.cdecrefstmt(expr or v.name, T)
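
The reason step 2 above goes through gen_assignments() (imported from support.py, whose implementation is not part of this diff) is that a link's source and target variables can overlap: a link mapping (v1, v2) to (v2, v1) must not be translated into two naive in-order assignments.  A small C sketch of the problem, with made-up variable names, showing the kind of collision-free code the generator has to produce:

    #include <stdio.h>

    int main(void)
    {
        long v1 = 1, v2 = 2, tmp;
        /* naive translation of the link (v1, v2) -> (v2, v1):
               v2 = v1;  v1 = v2;     -- both variables end up as 1 */
        /* collision-free translation, using a temporary: */
        tmp = v1;
        v1 = v2;
        v2 = tmp;
        printf("v1=%ld v2=%ld\n", v1, v2);    /* prints v1=2 v2=1 */
        return 0;
    }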

Modified: pypy/branch/pycompiler/translator/c/g_include.h
==============================================================================
--- pypy/branch/pycompiler/translator/c/g_include.h	(original)
+++ pypy/branch/pycompiler/translator/c/g_include.h	Fri Jul  1 18:28:54 2005
@@ -10,10 +10,14 @@
 #include "marshal.h"
 #include "eval.h"
 
+#include "g_exception.h"
 #include "g_trace.h"
 #include "g_support.h"
 #include "g_module.h"
 
 #include "int_include.h"
+#include "char_include.h"
+#include "unichar_include.h"
+#include "float_include.h"
 #include "ll_include.h"
 #include "pyobj_include.h"

Modified: pypy/branch/pycompiler/translator/c/g_module.h
==============================================================================
--- pypy/branch/pycompiler/translator/c/g_module.h	(original)
+++ pypy/branch/pycompiler/translator/c/g_module.h	Fri Jul  1 18:28:54 2005
@@ -2,26 +2,34 @@
 /************************************************************/
  /***  C header subsection: CPython-extension-module-ness  ***/
 
-
-#ifndef COUNT_OP_MALLOCS
-# define MODULE_INITFUNC(modname) \
-	static PyMethodDef my_methods[] = { (char *)NULL, (PyCFunction)NULL }; \
-	PyMODINIT_FUNC init##modname(void)
+#ifdef COUNT_OP_MALLOCS
+# define METHODDEF_MALLOC_COUNTERS	\
+		{ "malloc_counters", malloc_counters, METH_VARARGS },
 #else
-# define MODULE_INITFUNC(modname) \
-	static PyMethodDef my_methods[] = { \
-		{ "malloc_counters", malloc_counters }, \
-		{ (char *)NULL, (PyCFunction)NULL } }; \
-	PyMODINIT_FUNC init##modname(void)
+# define METHODDEF_MALLOC_COUNTERS	/* nothing */
 #endif
 
-#define SETUP_MODULE(modname)					\
+#define METHODDEF_DEBUGINFO    /* nothing, unless overridden by g_debuginfo.h */
+
+#define MODULE_INITFUNC(modname)                        \
+	static PyMethodDef my_methods[] = {             \
+		METHODDEF_MALLOC_COUNTERS               \
+		METHODDEF_DEBUGINFO                     \
+		{ (char *)NULL, (PyCFunction)NULL } };  \
+	PyMODINIT_FUNC init##modname(void)
+
+#define SETUP_MODULE(modname)	\
 	PyObject *m = Py_InitModule(#modname, my_methods); \
 	PyModule_AddStringConstant(m, "__sourcefile__", __FILE__); \
 	this_module_globals = PyModule_GetDict(m); \
 	PyGenCFunction_Type.tp_base = &PyCFunction_Type;	\
 	PyType_Ready(&PyGenCFunction_Type);	\
-	if (setup_globalfunctions(globalfunctiondefs) < 0) \
+	RPythonError = PyErr_NewException(#modname ".RPythonError", \
+					  NULL, NULL); \
+	if (RPythonError == NULL) \
+		return; \
+	PyModule_AddObject(m, "RPythonError", RPythonError); \
+	if (setup_globalfunctions(globalfunctiondefs, #modname) < 0) \
 		return;	\
 	if (setup_initcode(frozen_initcode, FROZEN_INITCODE_SIZE) < 0) \
 		return;	\
@@ -62,12 +70,15 @@
 	return 0;
 }
 
-static int setup_globalfunctions(globalfunctiondef_t* def)
+static int setup_globalfunctions(globalfunctiondef_t* def, char* modname)
 {
 	PyObject* fn;
+	PyObject* modname_o = PyString_FromString(modname);
+	if (modname_o == NULL)
+		return -1;
 
 	for (; def->p != NULL; def++) {
-		fn = PyCFunction_New(&def->ml, NULL);
+		fn = PyCFunction_NewEx(&def->ml, NULL, modname_o);
 		if (fn == NULL)
 			return -1;
 		fn->ob_type = &PyGenCFunction_Type;
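
To see what the reworked module macros above amount to: for a hypothetical module called demo, with COUNT_OP_MALLOCS undefined and no g_debuginfo.h override, both METHODDEF_* placeholders expand to nothing and MODULE_INITFUNC(demo) boils down to roughly:

    static PyMethodDef my_methods[] = {
            { (char *)NULL, (PyCFunction)NULL } };
    PyMODINIT_FUNC initdemo(void)

Defining COUNT_OP_MALLOCS adds the { "malloc_counters", malloc_counters, METH_VARARGS } entry to the table, and g_debuginfo.h can contribute further entries by redefining METHODDEF_DEBUGINFO, without either case having to duplicate the whole macro as before.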

Modified: pypy/branch/pycompiler/translator/c/g_support.h
==============================================================================
--- pypy/branch/pycompiler/translator/c/g_support.h	(original)
+++ pypy/branch/pycompiler/translator/c/g_support.h	Fri Jul  1 18:28:54 2005
@@ -11,12 +11,13 @@
 
 #define FAIL_EXCEPTION(err, exc, msg) \
 	{ \
-		PyErr_SetString(exc, msg); \
+		RaiseSimpleException(exc, msg); \
 		FAIL(err) \
 	}
-#define FAIL_OVF(err, msg) FAIL_EXCEPTION(err, PyExc_OverflowError, msg)
-#define FAIL_VAL(err, msg) FAIL_EXCEPTION(err, PyExc_ValueError, msg)
-#define FAIL_ZER(err, msg) FAIL_EXCEPTION(err, PyExc_ZeroDivisionError, msg)
+#define FAIL_OVF(err, msg) FAIL_EXCEPTION(err, Exc_OverflowError, msg)
+#define FAIL_VAL(err, msg) FAIL_EXCEPTION(err, Exc_ValueError, msg)
+#define FAIL_ZER(err, msg) FAIL_EXCEPTION(err, Exc_ZeroDivisionError, msg)
+#define CFAIL(err)         { ConvertExceptionFromCPython(); FAIL(err) }
 
 /* we need a subclass of 'builtin_function_or_method' which can be used
    as methods: builtin function objects that can be bound on instances */
@@ -368,5 +369,9 @@
 	return PyTuple_SetItem(tuple, index, o);
 }
 
-#define PyString_FromStringAndSize_Hack(s, size) \
-		PyString_FromStringAndSize((char*)(s), size)
+#define PyString_FromLLCharArrayAndSize(itemsarray, size) \
+		PyString_FromStringAndSize(itemsarray->items, size)
+
+#define PyString_ToLLCharArray(s, itemsarray)                           \
+		memcpy(itemsarray->items, PyString_AS_STRING(s),        \
+                       itemsarray->length)
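
With the Exc_* names now supplied by the generated #defines (the Exc_%s lines emitted by database.py above), an overflow failure inside a generated function goes through the RPython exception machinery instead of PyErr_SetString.  For example, FAIL_OVF(err3_1, "integer addition") -- err3_1 being a made-up error label, and FAIL() a macro defined elsewhere and unchanged by this patch -- expands to roughly:

    /* FAIL_OVF(err3_1, "integer addition")
       -> FAIL_EXCEPTION(err3_1, Exc_OverflowError, "integer addition") -> */
    {
            RaiseSimpleException(Exc_OverflowError, "integer addition");
            FAIL(err3_1)
    }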

Modified: pypy/branch/pycompiler/translator/c/genc.py
==============================================================================
--- pypy/branch/pycompiler/translator/c/genc.py	(original)
+++ pypy/branch/pycompiler/translator/c/genc.py	Fri Jul  1 18:28:54 2005
@@ -16,7 +16,8 @@
     return db, pf
 
 
-def genc(translator, targetdir=None, modulename=None, compile=True):
+def genc(translator, targetdir=None, modulename=None, compile=True,
+                                                      symtable=True):
     """Generate C code starting at the translator's entry point.
     The files are written to the targetdir if specified.
     If 'compile' is True, compile and return the new module.
@@ -31,18 +32,26 @@
     elif isinstance(targetdir, str):
         targetdir = py.path.local(targetdir)
     targetdir.ensure(dir=1)
+    if symtable:
+        from pypy.translator.c.symboltable import SymbolTable
+        symboltable = SymbolTable()
+    else:
+        symboltable = None
     cfile = gen_source(db, modulename, targetdir,
                        # defines={'COUNT_OP_MALLOCS': 1},
-                       exports = {translator.entrypoint.func_name: pf})
+                       exports = {translator.entrypoint.func_name: pf},
+                       symboltable = symboltable)
     if not compile:
         return cfile
     m = make_module_from_c(cfile, include_dirs = [autopath.this_dir])
+    symboltable.attach(m)   # hopefully temporary hack
     return m
 
 
 # ____________________________________________________________
 
 def gen_readable_parts_of_main_c_file(f, database):
+    lines = list(database.pre_include_code_lines())
     #
     # All declarations
     #
@@ -53,9 +62,6 @@
     for node in database.structdeflist:
         for line in node.definition(phase=1):
             print >> f, line
-    for node in database.structdeflist:
-        for line in node.definition(phase=2):
-            print >> f, line
     print >> f
     print >> f, '/***********************************************************/'
     print >> f, '/***  Forward declarations                               ***/'
@@ -63,13 +69,23 @@
     for node in database.globalcontainers():
         for line in node.forward_declaration():
             print >> f, line
+
     #
     # Implementation of functions and global structures and arrays
     #
     print >> f
     print >> f, '/***********************************************************/'
     print >> f, '/***  Implementations                                    ***/'
-    blank = True
+    print >> f
+    for line in lines:
+        print >> f, line
+    print >> f, '#include "g_include.h"'
+    print >> f
+    blank = False
+    for node in database.structdeflist:
+        for line in node.definition(phase=2):
+            print >> f, line
+        blank = True
     for node in database.globalcontainers():
         if blank:
             print >> f
@@ -79,7 +95,8 @@
             blank = True
 
 
-def gen_source(database, modulename, targetdir, defines={}, exports={}):
+def gen_source(database, modulename, targetdir, defines={}, exports={},
+                                                symboltable=None):
     if isinstance(targetdir, str):
         targetdir = py.path.local(targetdir)
     filename = targetdir.join(modulename + '.c')
@@ -90,7 +107,15 @@
     #
     for key, value in defines.items():
         print >> f, '#define %s %s' % (key, value)
-    print >> f, '#include "g_include.h"'
+    print >> f, '#include "Python.h"'
+    includes = {}
+    for node in database.globalcontainers():
+        for include in node.includes:
+            includes[include] = True
+    includes = includes.keys()
+    includes.sort()
+    for include in includes:
+        print >> f, '#include <%s>' % (include,)
 
     #
     # 1) All declarations
@@ -99,6 +124,27 @@
     gen_readable_parts_of_main_c_file(f, database)
 
     #
+    # Debugging info
+    #
+    if symboltable:
+        print >> f
+        print >> f, '/*******************************************************/'
+        print >> f, '/***  Debugging info                                 ***/'
+        print >> f
+        print >> f, 'static int debuginfo_offsets[] = {'
+        for node in database.structdeflist:
+            for expr in symboltable.generate_type_info(database, node):
+                print >> f, '\t%s,' % expr
+        print >> f, '\t0 };'
+        print >> f, 'static void *debuginfo_globals[] = {'
+        for node in database.globalcontainers():
+            if not isinstance(node, PyObjectNode):
+                result = symboltable.generate_global_info(database, node)
+                print >> f, '\t%s,' % (result,)
+        print >> f, '\tNULL };'
+        print >> f, '#include "g_debuginfo.h"'
+
+    #
     # PyObject support (strange) code
     #
     pyobjmaker = database.pyobjmaker
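
When a SymbolTable is passed in, the debugging-info section emitted above reduces to two flat, sentinel-terminated C arrays that g_debuginfo.h can walk at run time.  The actual entries come from SymbolTable.generate_type_info() and generate_global_info(), which are not part of this diff; the sketch below only shows the overall shape, with invented struct and global names as placeholders:

    static int debuginfo_offsets[] = {
            /* per-field information for each node in structdeflist, e.g. */
            offsetof(struct point0, x0),
            offsetof(struct point0, y0),
            0 };
    static void *debuginfo_globals[] = {
            /* one entry per global container that is not a PyObjectNode */
            &g_point_instance,
            NULL };
    #include "g_debuginfo.h"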

Modified: pypy/branch/pycompiler/translator/c/int_include.h
==============================================================================
--- pypy/branch/pycompiler/translator/c/int_include.h	(original)
+++ pypy/branch/pycompiler/translator/c/int_include.h	Fri Jul  1 18:28:54 2005
@@ -4,60 +4,56 @@
 
 /*** unary operations ***/
 
-#define OP_INCREF_int(x)          /* nothing */
-#define OP_DECREF_int(x)          /* nothing */
-#define CONV_TO_OBJ_int           PyInt_FromLong
-#define CONV_FROM_OBJ_int         PyInt_AS_LONG
-
 #define OP_INT_IS_TRUE(x,r,err)   OP_INT_NE(x,0,r,err)
 
-#define OP_INT_INVERT(x,r,err)    r = ~((long)(x));
+#define OP_INT_INVERT(x,r,err)    r = ~((x));
 
 #define OP_INT_POS(x,r,err)    r = x;
 
-#define OP_INT_NEG(x,r,err)    r = -((long)x);
+#define OP_INT_NEG(x,r,err)    r = -(x);
 
 #define OP_INT_NEG_OVF(x,r,err) \
 	OP_INT_NEG(x,r,err) \
-	if ((long)(x) >= 0 || (long)(x) != -(long)(x)); \
+	if ((x) >= 0 || (x) != -(x)); \
 	else FAIL_OVF(err, "integer negate")
 
-#define OP_INT_ABS(x,r,err)    r = (long)(x) >= 0 ? x : -((long)x);
+#define OP_INT_ABS(x,r,err)    r = (x) >= 0 ? x : -(x);
+#define OP_UINT_ABS(x,r,err)   r = (x);
 
 #define OP_INT_ABS_OVF(x,r,err) \
 	OP_INT_ABS(x,r,err) \
-	if ((long)(x) >= 0 || (long)(x) != -(long)(x)); \
+	if ((x) >= 0 || (x) != -(x)); \
 	else FAIL_OVF(err, "integer absolute")
 
 /***  binary operations ***/
 
-#define OP_INT_EQ(x,y,r,err)	  r = ((long)(x) == (long)(y));
-#define OP_INT_NE(x,y,r,err)	  r = ((long)(x) != (long)(y));
-#define OP_INT_LE(x,y,r,err)	  r = ((long)(x) <= (long)(y));
-#define OP_INT_GT(x,y,r,err)	  r = ((long)(x) >  (long)(y));
-#define OP_INT_LT(x,y,r,err)	  r = ((long)(x) <  (long)(y));
-#define OP_INT_GE(x,y,r,err)	  r = ((long)(x) >= (long)(y));
+#define OP_INT_EQ(x,y,r,err)	  r = ((x) == (y));
+#define OP_INT_NE(x,y,r,err)	  r = ((x) != (y));
+#define OP_INT_LE(x,y,r,err)	  r = ((x) <= (y));
+#define OP_INT_GT(x,y,r,err)	  r = ((x) >  (y));
+#define OP_INT_LT(x,y,r,err)	  r = ((x) <  (y));
+#define OP_INT_GE(x,y,r,err)	  r = ((x) >= (y));
 
 #define OP_INT_CMP(x,y,r,err) \
-	r = (((long)(x) > (long)(y)) - ((long)(x) < (long)(y)))
+	r = (((x) > (y)) - ((x) < (y)))
 
 /* addition, subtraction */
 
-#define OP_INT_ADD(x,y,r,err)     r = (long)(x) + (long)(y);
+#define OP_INT_ADD(x,y,r,err)     r = (x) + (y);
 
 #define OP_INT_ADD_OVF(x,y,r,err) \
 	OP_INT_ADD(x,y,r,err) \
-	if ((r^((long)x)) >= 0 || (r^((long)y)) >= 0); \
+	if ((r^(x)) >= 0 || (r^(y)) >= 0); \
 	else FAIL_OVF(err, "integer addition")
 
-#define OP_INT_SUB(x,y,r,err)     r = (long)(x) - (long)(y);
+#define OP_INT_SUB(x,y,r,err)     r = (x) - (y);
 
 #define OP_INT_SUB_OVF(x,y,r,err) \
 	OP_INT_SUB(x,y,r,err) \
-	if ((r^(long)(x)) >= 0 || (r^~(long)(y)) >= 0); \
+	if ((r^(x)) >= 0 || (r^~(y)) >= 0); \
 	else FAIL_OVF(err, "integer subtraction")
 
-#define OP_INT_MUL(x,y,r,err)     r = (long)(x) * (long)(y);
+#define OP_INT_MUL(x,y,r,err)     r = (x) * (y);
 
 #ifndef HAVE_LONG_LONG
 
@@ -70,7 +66,7 @@
 #define OP_INT_MUL_OVF(x,y,r,err) \
 	{ \
 		PY_LONG_LONG lr = (PY_LONG_LONG)(x) * (PY_LONG_LONG)(y); \
-		r = (long)lr; \
+		r = lr; \
 		if ((PY_LONG_LONG)r == lr); \
 		else FAIL_OVF(err, "integer multiplication") \
 	}
@@ -78,68 +74,112 @@
 
 /* shifting */
 
-#define OP_INT_RSHIFT(x,y,r,err) \
-	if ((long)(y) < LONG_BIT) \
-		r = Py_ARITHMETIC_RIGHT_SHIFT(long, (long)(x), (long)(y)); \
-	else r = (long)(x) < 0 ? -1 : 0;
-
-#define OP_INT_RSHIFT_VAL(x,y,r,err) \
-	if ((long)(y) >= 0) { OP_INT_RSHIFT(x,y,r,err) } \
-	else FAIL_VAL(err, "negative shift count")
-
-#define OP_INT_LSHIFT(x,y,r,err) \
-	if ((long)(y) < LONG_BIT) \
-		r = (long)(x) << (long)(y); \
-	else r = 0;
-
-#define OP_INT_LSHIFT_VAL(x,y,r,err) \
-	if ((long)(y) >= 0) { OP_INT_LSHIFT(x,y,r,err) } \
-	else FAIL_VAL(err, "negative shift count")
+/* NB. shifting has same limitations as C: the shift count must be
+       >= 0 and < LONG_BITS. */
+#define OP_INT_RSHIFT(x,y,r,err)    r = Py_ARITHMETIC_RIGHT_SHIFT(long, x, y);
+#define OP_UINT_RSHIFT(x,y,r,err)   r = (x) >> (y);
+
+#define OP_INT_LSHIFT(x,y,r,err)    r = (x) << (y);
+#define OP_UINT_LSHIFT(x,y,r,err)   r = (x) << (y);
 
 #define OP_INT_LSHIFT_OVF(x,y,r,err) \
 	OP_INT_LSHIFT(x,y,r,err) \
-	if ((long)(x) != Py_ARITHMETIC_RIGHT_SHIFT(long, r, (long)(y))) \
+	if ((x) != Py_ARITHMETIC_RIGHT_SHIFT(long, r, (y))) \
 		FAIL_OVF(err, "x<<y loosing bits or changing sign")
 
-#define OP_INT_LSHIFT_OVF_VAL(x,y,r,err) \
-	if ((long)(y) >= 0) { OP_INT_LSHIFT_OVF(x,y,r,err) } \
-	else FAIL_VAL(err, "negative shift count")
+
+/* for reference, the safe value-checking version of the above macros
+   (not really used at the moment) */
+
+/* #define OP_INT_RSHIFT(x,y,r,err) \ */
+/* 	if ((y) < LONG_BIT) \ */
+/* 		r = Py_ARITHMETIC_RIGHT_SHIFT(long, (x), (y)); \ */
+/* 	else r = (x) < 0 ? -1 : 0; */
+
+/* #define OP_INT_RSHIFT_VAL(x,y,r,err) \ */
+/* 	if ((y) >= 0) { OP_INT_RSHIFT(x,y,r,err) } \ */
+/* 	else FAIL_VAL(err, "negative shift count") */
+
+/* #define OP_INT_LSHIFT(x,y,r,err) \ */
+/* 	if ((y) < LONG_BIT) \ */
+/* 		r = (x) << (y); \ */
+/* 	else r = 0; */
+
+/* #define OP_INT_LSHIFT_VAL(x,y,r,err) \ */
+/* 	if ((y) >= 0) { OP_INT_LSHIFT(x,y,r,err) } \ */
+/* 	else FAIL_VAL(err, "negative shift count") */
+
+/* #define OP_INT_LSHIFT_OVF(x,y,r,err) \ */
+/* 	OP_INT_LSHIFT(x,y,r,err) \ */
+/* 	if ((x) != Py_ARITHMETIC_RIGHT_SHIFT(long, r, (y))) \ */
+/* 		FAIL_OVF(err, "x<<y loosing bits or changing sign") */
+
+/* #define OP_INT_LSHIFT_OVF_VAL(x,y,r,err) \ */
+/* 	if ((y) >= 0) { OP_INT_LSHIFT_OVF(x,y,r,err) } \ */
+/* 	else FAIL_VAL(err, "negative shift count") */
 
 
 /* floor division */
 
-#define OP_INT_FLOORDIV(x,y,r,err) r = op_divmod_adj(x, y, NULL);
+#define OP_INT_FLOORDIV(x,y,r,err)    r = op_divmod_adj(x, y, NULL);
+#define OP_UINT_FLOORDIV(x,y,r,err)   & Is_Unsigned_Division_Really_Useful;
 
 #define OP_INT_FLOORDIV_OVF(x,y,r,err) \
-	if ((long)(y) == -1 && (long)(x) < 0 && (long)(x) == -(long)(x)) \
+	if ((y) == -1 && (x) < 0 && ((unsigned long)(x) << 1) == 0) \
 		FAIL_OVF(err, "integer division") \
 	OP_INT_FLOORDIV(x,y,r,err)
 
 #define OP_INT_FLOORDIV_ZER(x,y,r,err) \
-	if ((long)(y)) { OP_INT_FLOORDIV(x,y,r,err) } \
+	if ((y)) { OP_INT_FLOORDIV(x,y,r,err) } \
 	else FAIL_ZER(err, "integer division")
-		
+#define OP_UINT_FLOORDIV_ZER(x,y,r,err)   & Is_Unsigned_Division_Really_Useful;
+
 #define OP_INT_FLOORDIV_OVF_ZER(x,y,r,err) \
-	if ((long)(y)) { OP_INT_FLOORDIV_OVF(x,y,r,err) } \
+	if ((y)) { OP_INT_FLOORDIV_OVF(x,y,r,err) } \
 	else FAIL_ZER(err, "integer division")
 
 /* modulus */
 
 #define OP_INT_MOD(x,y,r,err)     op_divmod_adj(x, y, &r);
+#define OP_UINT_MOD(x,y,r,err)    & Is_Unsigned_Division_Really_Useful;
 
 #define OP_INT_MOD_OVF(x,y,r,err) \
-	if ((long)(y) == -1 && (long)(x) < 0 && (long)x == -(long)(x)) \
+	if ((y) == -1 && (x) < 0 && ((unsigned long)(x) << 1) == 0) \
 		FAIL_OVF(err, "integer modulo") \
 	OP_INT_MOD(x,y,r,err);
 
 #define OP_INT_MOD_ZER(x,y,r,err) \
-	if ((long)(y)) { OP_INT_MOD(x,y,r,err) } \
+	if ((y)) { OP_INT_MOD(x,y,r,err) } \
 	else FAIL_ZER(err, "integer modulo")
-		
+#define OP_UINT_MOD_ZER(x,y,r,err)    & Is_Unsigned_Division_Really_Useful;
+
 #define OP_INT_MOD_OVF_ZER(x,y,r,err) \
-	if ((long)(y)) { OP_INT_MOD_OVF(x,y,r,err) } \
+	if ((y)) { OP_INT_MOD_OVF(x,y,r,err) } \
 	else FAIL_ZER(err, "integer modulo")
 
+/* bit operations */
+
+#define OP_INT_AND(x,y,r,err)     r = (x) & (y);
+#define OP_INT_OR( x,y,r,err)     r = (x) | (y);
+#define OP_INT_XOR(x,y,r,err)     r = (x) ^ (y);
+
+/*** conversions ***/
+
+#define OP_CAST_BOOL_TO_INT(x,r,err)    r = (long)(x);
+#define OP_CAST_BOOL_TO_UINT(x,r,err)   r = (unsigned long)(x);
+#define OP_CAST_UINT_TO_INT(x,r,err)    r = (long)(x);
+#define OP_CAST_INT_TO_UINT(x,r,err)    r = (unsigned long)(x);
+#define OP_CAST_CHAR_TO_INT(x,r,err)    r = (long)(x);
+#define OP_CAST_INT_TO_CHAR(x,r,err)    r = (char)(x);
+#define OP_CAST_PTR_TO_INT(x,r,err)     r = (long)(x);    /* XXX */
+
+#define OP_CAST_UNICHAR_TO_INT(x,r,err)    r = (x);
+#define OP_CAST_INT_TO_UNICHAR(x,r,err)    r = (Py_UCS4)(x);
+
+/* bool operations */
+
+#define OP_BOOL_NOT(x, r, err) r = !(x);
+
 /* _________________ certain implementations __________________ */
 
 #ifndef HAVE_LONG_LONG
@@ -197,4 +237,31 @@
 	if (p_rem)
 		*p_rem = xmody;
 	return xdivy;
-}
\ No newline at end of file
+}
+/* no editing below this point */
+/* following lines are generated by mkuint.py */
+
+#define OP_UINT_IS_TRUE OP_INT_IS_TRUE
+#define OP_UINT_INVERT OP_INT_INVERT
+#define OP_UINT_POS OP_INT_POS
+#define OP_UINT_NEG OP_INT_NEG
+/* skipping OP_UINT_ABS */
+#define OP_UINT_EQ OP_INT_EQ
+#define OP_UINT_NE OP_INT_NE
+#define OP_UINT_LE OP_INT_LE
+#define OP_UINT_GT OP_INT_GT
+#define OP_UINT_LT OP_INT_LT
+#define OP_UINT_GE OP_INT_GE
+#define OP_UINT_CMP OP_INT_CMP
+#define OP_UINT_ADD OP_INT_ADD
+#define OP_UINT_SUB OP_INT_SUB
+#define OP_UINT_MUL OP_INT_MUL
+/* skipping OP_UINT_RSHIFT */
+/* skipping OP_UINT_LSHIFT */
+/* skipping OP_UINT_FLOORDIV */
+/* skipping OP_UINT_FLOORDIV_ZER */
+/* skipping OP_UINT_MOD */
+/* skipping OP_UINT_MOD_ZER */
+#define OP_UINT_AND OP_INT_AND
+#define OP_UINT_OR OP_INT_OR
+#define OP_UINT_XOR OP_INT_XOR
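
For reference, the _OVF macros above catch wrap-around after the fact with a sign-bit trick: a signed addition overflowed exactly when both operands have the same sign and the result's sign differs, which makes (r^x) and (r^y) both negative.  A minimal standalone sketch of the same check (a made-up test program, assuming the two's-complement wrap-around behaviour that the macros themselves rely on):

    #include <stdio.h>
    #include <limits.h>

    int main(void)
    {
        long x = LONG_MAX, y = 1, r;
        r = x + y;                            /* what OP_INT_ADD(x,y,r,err) does */
        if ((r ^ x) >= 0 || (r ^ y) >= 0)
            printf("no overflow: %ld\n", r);
        else
            printf("overflow detected\n");    /* here the macro would FAIL_OVF */
        return 0;
    }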

Modified: pypy/branch/pycompiler/translator/c/ll_include.h
==============================================================================
--- pypy/branch/pycompiler/translator/c/ll_include.h	(original)
+++ pypy/branch/pycompiler/translator/c/ll_include.h	Fri Jul  1 18:28:54 2005
@@ -3,11 +3,16 @@
  /***  C header subsection: operations on LowLevelTypes    ***/
 
 
-#define OP_ZERO_MALLOC(size, r, err)  {                 \
-    r = (void*) PyObject_Malloc(size);                  \
-    if (r == NULL) { PyErr_NoMemory(); FAIL(err) }      \
-    memset((void*) r, 0, size);                         \
-    COUNT_MALLOC                                        \
+/* XXX hack to initialize the refcount of global structures: officially,
+   we need a value equal to the number of references to this global from
+   other globals, plus one.  This upper bound "approximation" will do... */
+#define REFCOUNT_IMMORTAL  (INT_MAX/2)
+
+#define OP_ZERO_MALLOC(size, r, err)  {                                 \
+    r = (void*) PyObject_Malloc(size);                                  \
+    if (r == NULL) FAIL_EXCEPTION(err, Exc_MemoryError, "out of memory")\
+    memset((void*) r, 0, size);                                         \
+    COUNT_MALLOC                                                        \
   }
 
 #define OP_FREE(p)	{ PyObject_Free(p); COUNT_FREE }
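
REFCOUNT_IMMORTAL is intended for the static initializers of prebuilt (global) structures: priming the refcount field with INT_MAX/2 is a cheap upper bound that, in practice, can never be decremented down to zero, so the deallocator is never invoked on a non-heap object.  A hypothetical initializer using it, with invented struct and field names, would look like:

    /* prebuilt global instance: effectively never deallocated */
    struct point0 g_static_point = {
            REFCOUNT_IMMORTAL,      /* refcount field comes first */
            /* ... remaining fields ... */
    };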

Modified: pypy/branch/pycompiler/translator/c/node.py
==============================================================================
--- pypy/branch/pycompiler/translator/c/node.py	(original)
+++ pypy/branch/pycompiler/translator/c/node.py	Fri Jul  1 18:28:54 2005
@@ -1,8 +1,10 @@
 from __future__ import generators
 from pypy.rpython.lltype import Struct, Array, FuncType, PyObjectType, typeOf
 from pypy.rpython.lltype import GcStruct, GcArray, GC_CONTAINER, ContainerType
-from pypy.rpython.lltype import parentlink, Ptr, PyObject, Void
+from pypy.rpython.lltype import parentlink, Ptr, PyObject, Void, OpaqueType
+from pypy.rpython.lltype import RuntimeTypeInfo, getRuntimeTypeInfo
 from pypy.translator.c.funcgen import FunctionCodeGenerator
+from pypy.translator.c.external import CExternalFunctionCodeGenerator
 from pypy.translator.c.support import cdecl, somelettersfrom
 from pypy.translator.c.primitive import PrimitiveType
 
@@ -19,10 +21,12 @@
 class StructDefNode:
     refcount = None
     deallocator = None
+    static_deallocator = None
 
     def __init__(self, db, STRUCT, varlength=1):
         self.db = db
         self.STRUCT = STRUCT
+        self.LLTYPE = STRUCT
         self.varlength = varlength
 
     def setup(self):
@@ -56,11 +60,40 @@
             firstfieldname, firstfieldtype = self.fields[0]
             firstdefnode = db.gettypedefnode(T)
             self.refcount = '%s.%s' % (firstfieldname, firstdefnode.refcount)
+            # check here that there is enough run-time type information to
+            # handle this case
+            getRuntimeTypeInfo(STRUCT)
+            getRuntimeTypeInfo(T)
 
-        # is a specific deallocator needed?
-        if self.refcount and varlength == 1 and list(self.deallocator_lines('')):
+        # do we need deallocator(s)?
+        if self.refcount and varlength == 1:
             self.deallocator = db.namespace.uniquename('dealloc_'+self.name)
 
+            # are two deallocators needed (a dynamic one for DECREF, which checks
+            # the real type of the structure and calls the static deallocator) ?
+            rtti = None
+            if isinstance(STRUCT, GcStruct):
+                try:
+                    rtti = getRuntimeTypeInfo(STRUCT)
+                except ValueError:
+                    pass
+            if rtti is not None:
+                self.static_deallocator = db.namespace.uniquename(
+                    'staticdealloc_'+self.name)
+                fnptr = rtti._obj.query_funcptr
+                if fnptr is None:
+                    raise NotImplementedError(
+                        "attachRuntimeTypeInfo(): please provide a function")
+                self.rtti_query_funcptr = db.get(fnptr)
+                T = typeOf(fnptr).TO.ARGS[0]
+                self.rtti_query_funcptr_argtype = db.gettype(T)
+            else:
+                # is a deallocator really needed, or would it be empty?
+                if list(self.deallocator_lines('')):
+                    self.static_deallocator = self.deallocator
+                else:
+                    self.deallocator = None
+
     def c_struct_field_name(self, name):
         return self.prefix + name
 
@@ -82,12 +115,29 @@
                     line = '/* %s */' % line
                 yield '\t' + line
             yield '};'
-        elif phase == 2 and self.deallocator:
-            yield 'void %s(struct %s *p) {' % (self.deallocator, self.name)
-            for line in self.deallocator_lines('p->'):
-                yield '\t' + line
-            yield '\tOP_FREE(p);'
-            yield '}'
+            if self.deallocator:
+                yield 'void %s(struct %s *);' % (self.deallocator, self.name)
+
+        elif phase == 2:
+            if self.static_deallocator:
+                yield 'void %s(struct %s *p) {' % (self.static_deallocator,
+                                                   self.name)
+                for line in self.deallocator_lines('p->'):
+                    yield '\t' + line
+                yield '\tOP_FREE(p);'
+                yield '}'
+            if self.deallocator and self.deallocator != self.static_deallocator:
+                yield 'void %s(struct %s *p) {' % (self.deallocator, self.name)
+                yield '\tvoid (*staticdealloc) (void *);'
+                # the refcount should be 0; temporarily bump it to 1
+                yield '\tp->%s = 1;' % (self.refcount,)
+                # cast 'p' to the type expected by the rtti_query function
+                yield '\tstaticdealloc = %s((%s) p);' % (
+                    self.rtti_query_funcptr,
+                    cdecl(self.rtti_query_funcptr_argtype, ''))
+                yield '\tif (!--p->%s)' % (self.refcount,)
+                yield '\t\tstaticdealloc(p);'
+                yield '}'
 
     def deallocator_lines(self, prefix):
         STRUCT = self.STRUCT
@@ -99,6 +149,17 @@
                                         FIELD_T):
                 yield line
 
+    def debug_offsets(self):
+        # generate number exprs giving the offset of the elements in the struct
+        STRUCT = self.STRUCT
+        for name in STRUCT._names:
+            FIELD_T = self.c_struct_field_type(name)
+            if FIELD_T == Void:
+                yield '-1'
+            else:
+                cname = self.c_struct_field_name(name)
+                yield 'offsetof(struct %s, %s)' % (self.name, cname)
+
 
 class ArrayDefNode:
     refcount = None
@@ -107,6 +168,7 @@
     def __init__(self, db, ARRAY, varlength=1):
         self.db = db
         self.ARRAY = ARRAY
+        self.LLTYPE = ARRAY
         self.varlength = varlength
 
     def setup(self):
@@ -176,6 +238,16 @@
             yield '\t}'
             yield '}'
 
+    def debug_offsets(self):
+        # generate three offsets for debugging inspection
+        yield 'offsetof(struct %s, length)' % (self.name,)
+        if self.ARRAY.OF != Void:
+            yield 'offsetof(struct %s, items[0])' % (self.name,)
+            yield 'offsetof(struct %s, items[1])' % (self.name,)
+        else:
+            yield '-1'
+            yield '-1'
+
 
 def generic_dealloc(db, expr, T):
     if isinstance(T, Ptr) and T._needsgc():
@@ -191,6 +263,7 @@
 
 
 class ContainerNode:
+    includes = ()
 
     def __init__(self, db, T, obj):
         self.db = db
@@ -248,7 +321,7 @@
     def initializationexpr(self, decoration=''):
         yield '{'
         if needs_refcount(self.T):
-            yield '\t1,'
+            yield '\tREFCOUNT_IMMORTAL,'
         defnode = self.db.gettypedefnode(self.T)
         for name in self.T._names:
             value = getattr(self.obj, name)
@@ -274,8 +347,8 @@
     def initializationexpr(self, decoration=''):
         yield '{'
         if needs_refcount(self.T):
-            yield '\t1,'
-        if self.T.OF == Void:
+            yield '\tREFCOUNT_IMMORTAL,'
+        if self.T.OF == Void or len(self.obj.items) == 0:
             yield '\t%d' % len(self.obj.items)
             yield '}'
         else:
@@ -318,26 +391,40 @@
     globalcontainer = True
 
     def __init__(self, db, T, obj):
-        graph = obj.graph # only user-defined functions with graphs for now
-        self.funcgen = FunctionCodeGenerator(graph, db)
+        self.funcgen = select_function_code_generator(obj, db)
         self.db = db
         self.T = T
         self.obj = obj
         #self.dependencies = {}
         self.typename = db.gettype(T)  #, who_asks=self)
-        argnames = self.funcgen.argnames()
-        self.implementationtypename = db.gettype(T, argnames=argnames)
-        self.name = db.namespace.uniquename('g_' + self.basename())
+        if self.funcgen:
+            argnames = self.funcgen.argnames()
+            self.implementationtypename = db.gettype(T, argnames=argnames)
+        if hasattr(obj, 'includes'):
+            self.includes = obj.includes
+            self.name = self.basename()
+        else:
+            self.name = db.namespace.uniquename('g_' + self.basename())
         self.ptrname = self.name
 
     def basename(self):
         return self.obj._name
 
     def enum_dependencies(self):
+        if self.funcgen is None:
+            return []
         return self.funcgen.allconstantvalues()
 
+    def forward_declaration(self):
+        if self.funcgen:
+            return ContainerNode.forward_declaration(self)
+        else:
+            return []
+
     def implementation(self):
         funcgen = self.funcgen
+        if funcgen is None:
+            return
         yield '%s {' % cdecl(self.implementationtypename, self.name)
         #
         # declare the local variables
@@ -380,17 +467,34 @@
         yield '}'
 
 
-class CExternalFuncNode(ContainerNode):
+def select_function_code_generator(fnptr, db):
+    if hasattr(fnptr, 'graph'):
+        cpython_exc = getattr(fnptr, 'exception_policy', None) == "CPython"
+        return FunctionCodeGenerator(fnptr.graph, db, cpython_exc)
+    elif getattr(fnptr, 'external', None) == 'C':
+        if getattr(fnptr, 'includes', None):
+            return None   # assume no wrapper needed
+        else:
+            return CExternalFunctionCodeGenerator(fnptr, db)
+    else:
+        raise ValueError, "don't know how to generate code for %r" % (fnptr,)
+
+
+class OpaqueNode(ContainerNode):
     globalcontainer = True
+    typename = 'void (@)(void *)'
 
     def __init__(self, db, T, obj):
+        assert T == RuntimeTypeInfo
+        assert isinstance(obj.about, GcStruct)
         self.db = db
         self.T = T
         self.obj = obj
-        #self.dependencies = {}
-        self.typename = db.gettype(T)  #, who_asks=self)
-        self.name = obj._name
-        self.ptrname = self.name
+        defnode = db.gettypedefnode(obj.about)
+        self.implementationtypename = 'void (@)(struct %s *)' % (
+            defnode.name,)
+        self.name = defnode.static_deallocator
+        self.ptrname = '((void (*)(void *)) %s)' % (self.name,)
 
     def enum_dependencies(self):
         return []
@@ -398,22 +502,6 @@
     def implementation(self):
         return []
 
-    def forward_declaration(self):
-        return []
-
-    def implementation(self):
-        return []
-
-
-def funcnodemaker(db, T, obj):
-    if hasattr(obj, 'graph'):
-        cls = FuncNode
-    elif getattr(obj, 'external', None) == 'C':
-        cls = CExternalFuncNode
-    else:
-        raise ValueError, "don't know about %r" % (obj,)
-    return cls(db, T, obj)
-
 
 class PyObjectNode(ContainerNode):
     globalcontainer = True
@@ -446,6 +534,7 @@
     GcStruct:     StructNode,
     Array:        ArrayNode,
     GcArray:      ArrayNode,
-    FuncType:     funcnodemaker,
+    FuncType:     FuncNode,
+    OpaqueType:   OpaqueNode,
     PyObjectType: PyObjectNode,
     }
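
The central node.py change is the split into a static and a dynamic deallocator for GcStructs that carry RuntimeTypeInfo: the static one frees an object of exactly that type, while the dynamic one (the one reached from DECREF) first calls the user-provided rtti query function to obtain the static deallocator of the object's actual, most-derived type, temporarily bumping the refcount to 1 around the call, presumably so the query itself cannot trigger a recursive free. A Python stand-in for the C emitted in phase 2 (an illustration under those assumptions, not the real generated source):

    def static_dealloc_S(obj):
        obj['freed_as'] = 'S'           # would free the fields and the struct

    def static_dealloc_S1(obj):
        obj['freed_as'] = 'S1'

    def rtti_query_S(obj):
        # user-supplied query function: look at the object and return the
        # static deallocator matching its most-derived type
        if obj['is_actually_s1']:
            return static_dealloc_S1
        return static_dealloc_S

    def dynamic_dealloc_S(obj):
        obj['refcount'] = 1             # keep the object alive while querying
        staticdealloc = rtti_query_S(obj)
        obj['refcount'] -= 1
        if not obj['refcount']:
            staticdealloc(obj)

    obj = {'refcount': 0, 'is_actually_s1': True}
    dynamic_dealloc_S(obj)              # what DECREF does when the count hits 0
    assert obj['freed_as'] == 'S1'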

Modified: pypy/branch/pycompiler/translator/c/primitive.py
==============================================================================
--- pypy/branch/pycompiler/translator/c/primitive.py	(original)
+++ pypy/branch/pycompiler/translator/c/primitive.py	Fri Jul  1 18:28:54 2005
@@ -1,3 +1,4 @@
+import sys
 from pypy.rpython.lltype import *
 
 # ____________________________________________________________
@@ -5,17 +6,22 @@
 # Primitives
 
 def name_signed(value):
-    return '%d' % value
+    if value == -sys.maxint-1:   # blame C
+        return '(-%dL-1L)' % sys.maxint
+    else:
+        return '%dL' % value
 
 def name_unsigned(value):
     assert value >= 0
-    return '%d' % value
+    return '%dUL' % value
+
+def name_float(value):
+    return repr(value)
 
 def name_char(value):
-    value = value
     assert type(value) is str and len(value) == 1
     if ' ' <= value < '\x7f':
-        return "'%s'" % (value.replace("'", r"\'"),)
+        return "'%s'" % (value.replace("\\", r"\\").replace("'", r"\'"),)
     else:
         return '%d' % ord(value)
 
@@ -25,10 +31,17 @@
 def name_void(value):
     return '/* nothing */'
 
+def name_unichar(value):
+    assert type(value) is unicode and len(value) == 1
+    return '%d' % ord(value)
+    
+
 PrimitiveName = {
     Signed:   name_signed,
     Unsigned: name_unsigned,
+    Float:    name_float,
     Char:     name_char,
+    UniChar:  name_unichar,
     Bool:     name_bool,
     Void:     name_void,
     }
@@ -36,7 +49,9 @@
 PrimitiveType = {
     Signed:   'long @',
     Unsigned: 'unsigned long @',
+    Float:    'double @',
     Char:     'char @',
+    UniChar:  'Py_UCS4 @',
     Bool:     'char @',
     Void:     'void @',
     }
@@ -44,7 +59,9 @@
 PrimitiveErrorValue = {
     Signed:   '-1',
     Unsigned: '((unsigned) -1)',
+    Float:    '-1.0',
     Char:     '((char) -1)',
+    UniChar:  '((Py_UCS4) -1)',
     Bool:     '((char) -1)',
     Void:     '/* error */',
     }
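
Two details above are worth spelling out. name_signed now emits an expression rather than a literal for the most negative long, because C has no negative integer literals: -2147483648 is parsed as unary minus applied to 2147483648, which does not fit a 32-bit long, so the constant is written (-2147483647L-1L) instead. name_char additionally escapes backslashes before quoting, so that the backslash character becomes a valid C char literal. A condensed, self-contained restatement with the boundary cases spelled out (assumes a 32-bit long for the example; the real code uses sys.maxint):

    maxint = 2 ** 31 - 1                        # sys.maxint on a 32-bit host

    def name_signed(value):
        if value == -maxint - 1:                # no C literal for this value
            return '(-%dL-1L)' % maxint
        return '%dL' % value

    def name_char(value):
        assert isinstance(value, str) and len(value) == 1
        if ' ' <= value < '\x7f':
            # escape the backslash first, then the quote
            return "'%s'" % value.replace("\\", r"\\").replace("'", r"\'")
        return '%d' % ord(value)

    assert name_signed(-maxint - 1) == '(-2147483647L-1L)'
    assert name_signed(5) == '5L'               # cf. the updated test_database
    assert name_char('\\') == r"'\\'"
    assert name_char('\n') == '10'              # non-printable: plain ordinal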

Modified: pypy/branch/pycompiler/translator/c/pyobj_include.h
==============================================================================
--- pypy/branch/pycompiler/translator/c/pyobj_include.h	(original)
+++ pypy/branch/pycompiler/translator/c/pyobj_include.h	Fri Jul  1 18:28:54 2005
@@ -7,13 +7,13 @@
 #define op_bool(r,err,what) { \
 		int _retval = what; \
 		if (_retval < 0) { \
-			FAIL(err) \
+			CFAIL(err) \
 		} \
 		r = PyBool_FromLong(_retval); \
 	}
 
 #define op_richcmp(x,y,r,err,dir) \
-					if (!(r=PyObject_RichCompare(x,y,dir))) FAIL(err)
+					if (!(r=PyObject_RichCompare(x,y,dir))) CFAIL(err)
 #define OP_LT(x,y,r,err)  op_richcmp(x,y,r,err, Py_LT)
 #define OP_LE(x,y,r,err)  op_richcmp(x,y,r,err, Py_LE)
 #define OP_EQ(x,y,r,err)  op_richcmp(x,y,r,err, Py_EQ)
@@ -29,71 +29,71 @@
 #define OP_LEN(x,r,err) { \
 		int _retval = PyObject_Size(x); \
 		if (_retval < 0) { \
-			FAIL(err) \
+			CFAIL(err) \
 		} \
 		r = PyInt_FromLong(_retval); \
 	}
-#define OP_NEG(x,r,err)           if (!(r=PyNumber_Negative(x)))     FAIL(err)
-#define OP_POS(x,r,err)           if (!(r=PyNumber_Positive(x)))     FAIL(err)
-#define OP_INVERT(x,r,err)        if (!(r=PyNumber_Invert(x)))       FAIL(err)
-#define OP_ABS(x,r,err)           if (!(r=PyNumber_Absolute(x)))     FAIL(err)
-
-#define OP_ADD(x,y,r,err)         if (!(r=PyNumber_Add(x,y)))        FAIL(err)
-#define OP_SUB(x,y,r,err)         if (!(r=PyNumber_Subtract(x,y)))   FAIL(err)
-#define OP_MUL(x,y,r,err)         if (!(r=PyNumber_Multiply(x,y)))   FAIL(err)
-#define OP_TRUEDIV(x,y,r,err)     if (!(r=PyNumber_TrueDivide(x,y))) FAIL(err)
-#define OP_FLOORDIV(x,y,r,err)    if (!(r=PyNumber_FloorDivide(x,y)))FAIL(err)
-#define OP_DIV(x,y,r,err)         if (!(r=PyNumber_Divide(x,y)))     FAIL(err)
-#define OP_MOD(x,y,r,err)         if (!(r=PyNumber_Remainder(x,y)))  FAIL(err)
-#define OP_DIVMOD(x,y,r,err)      if (!(r=PyNumber_Divmod(x,y)))     FAIL(err)
-#define OP_POW(x,y,z,r,err)       if (!(r=PyNumber_Power(x,y,z)))    FAIL(err)
-#define OP_LSHIFT(x,y,r,err)      if (!(r=PyNumber_Lshift(x,y)))     FAIL(err)
-#define OP_RSHIFT(x,y,r,err)      if (!(r=PyNumber_Rshift(x,y)))     FAIL(err)
-#define OP_AND_(x,y,r,err)        if (!(r=PyNumber_And(x,y)))        FAIL(err)
-#define OP_OR_(x,y,r,err)         if (!(r=PyNumber_Or(x,y)))         FAIL(err)
-#define OP_XOR(x,y,r,err)         if (!(r=PyNumber_Xor(x,y)))        FAIL(err)
+#define OP_NEG(x,r,err)           if (!(r=PyNumber_Negative(x)))     CFAIL(err)
+#define OP_POS(x,r,err)           if (!(r=PyNumber_Positive(x)))     CFAIL(err)
+#define OP_INVERT(x,r,err)        if (!(r=PyNumber_Invert(x)))       CFAIL(err)
+#define OP_ABS(x,r,err)           if (!(r=PyNumber_Absolute(x)))     CFAIL(err)
+
+#define OP_ADD(x,y,r,err)         if (!(r=PyNumber_Add(x,y)))        CFAIL(err)
+#define OP_SUB(x,y,r,err)         if (!(r=PyNumber_Subtract(x,y)))   CFAIL(err)
+#define OP_MUL(x,y,r,err)         if (!(r=PyNumber_Multiply(x,y)))   CFAIL(err)
+#define OP_TRUEDIV(x,y,r,err)     if (!(r=PyNumber_TrueDivide(x,y))) CFAIL(err)
+#define OP_FLOORDIV(x,y,r,err)    if (!(r=PyNumber_FloorDivide(x,y)))CFAIL(err)
+#define OP_DIV(x,y,r,err)         if (!(r=PyNumber_Divide(x,y)))     CFAIL(err)
+#define OP_MOD(x,y,r,err)         if (!(r=PyNumber_Remainder(x,y)))  CFAIL(err)
+#define OP_DIVMOD(x,y,r,err)      if (!(r=PyNumber_Divmod(x,y)))     CFAIL(err)
+#define OP_POW(x,y,z,r,err)       if (!(r=PyNumber_Power(x,y,z)))    CFAIL(err)
+#define OP_LSHIFT(x,y,r,err)      if (!(r=PyNumber_Lshift(x,y)))     CFAIL(err)
+#define OP_RSHIFT(x,y,r,err)      if (!(r=PyNumber_Rshift(x,y)))     CFAIL(err)
+#define OP_AND_(x,y,r,err)        if (!(r=PyNumber_And(x,y)))        CFAIL(err)
+#define OP_OR_(x,y,r,err)         if (!(r=PyNumber_Or(x,y)))         CFAIL(err)
+#define OP_XOR(x,y,r,err)         if (!(r=PyNumber_Xor(x,y)))        CFAIL(err)
 
 #define OP_INPLACE_ADD(x,y,r,err) if (!(r=PyNumber_InPlaceAdd(x,y)))           \
-								     FAIL(err)
+								     CFAIL(err)
 #define OP_INPLACE_SUB(x,y,r,err) if (!(r=PyNumber_InPlaceSubtract(x,y)))      \
-								     FAIL(err)
+								     CFAIL(err)
 #define OP_INPLACE_MUL(x,y,r,err) if (!(r=PyNumber_InPlaceMultiply(x,y)))      \
-								     FAIL(err)
+								     CFAIL(err)
 #define OP_INPLACE_TRUEDIV(x,y,r,err) if (!(r=PyNumber_InPlaceTrueDivide(x,y)))\
-								     FAIL(err)
+								     CFAIL(err)
 #define OP_INPLACE_FLOORDIV(x,y,r,err)if(!(r=PyNumber_InPlaceFloorDivide(x,y)))\
-								     FAIL(err)
+								     CFAIL(err)
 #define OP_INPLACE_DIV(x,y,r,err) if (!(r=PyNumber_InPlaceDivide(x,y)))        \
-								     FAIL(err)
+								     CFAIL(err)
 #define OP_INPLACE_MOD(x,y,r,err) if (!(r=PyNumber_InPlaceRemainder(x,y)))     \
-								     FAIL(err)
+								     CFAIL(err)
 #define OP_INPLACE_POW(x,y,r,err) if (!(r=PyNumber_InPlacePower(x,y,Py_None))) \
-								     FAIL(err)
+								     CFAIL(err)
 #define OP_INPLACE_LSHIFT(x,y,r,err) if (!(r=PyNumber_InPlaceLshift(x,y)))     \
-								     FAIL(err)
+								     CFAIL(err)
 #define OP_INPLACE_RSHIFT(x,y,r,err) if (!(r=PyNumber_InPlaceRshift(x,y)))     \
-								     FAIL(err)
+								     CFAIL(err)
 #define OP_INPLACE_AND(x,y,r,err)    if (!(r=PyNumber_InPlaceAnd(x,y)))        \
-								     FAIL(err)
+								     CFAIL(err)
 #define OP_INPLACE_OR(x,y,r,err)     if (!(r=PyNumber_InPlaceOr(x,y)))         \
-								     FAIL(err)
+								     CFAIL(err)
 #define OP_INPLACE_XOR(x,y,r,err)    if (!(r=PyNumber_InPlaceXor(x,y)))        \
-								     FAIL(err)
+								     CFAIL(err)
 
-#define OP_GETITEM(x,y,r,err)     if (!(r=PyObject_GetItem1(x,y)))   FAIL(err)
-#define OP_SETITEM(x,y,z,r,err)   if ((PyObject_SetItem1(x,y,z))<0)  FAIL(err) \
+#define OP_GETITEM(x,y,r,err)     if (!(r=PyObject_GetItem1(x,y)))   CFAIL(err)
+#define OP_SETITEM(x,y,z,r,err)   if ((PyObject_SetItem1(x,y,z))<0)  CFAIL(err) \
 				  r=Py_None; Py_INCREF(r);
-#define OP_DELITEM(x,y,r,err)     if ((PyObject_DelItem(x,y))<0)     FAIL(err) \
+#define OP_DELITEM(x,y,r,err)     if ((PyObject_DelItem(x,y))<0)     CFAIL(err) \
 				  r=Py_None; Py_INCREF(r);
 #define OP_CONTAINS(x,y,r,err)    op_bool(r,err,(PySequence_Contains(x,y)))
 
-#define OP_GETATTR(x,y,r,err)     if (!(r=PyObject_GetAttr(x,y)))    FAIL(err)
-#define OP_SETATTR(x,y,z,r,err)   if ((PyObject_SetAttr(x,y,z))<0)   FAIL(err) \
+#define OP_GETATTR(x,y,r,err)     if (!(r=PyObject_GetAttr(x,y)))    CFAIL(err)
+#define OP_SETATTR(x,y,z,r,err)   if ((PyObject_SetAttr(x,y,z))<0)   CFAIL(err) \
 				  r=Py_None; Py_INCREF(r);
-#define OP_DELATTR(x,y,r,err)     if ((PyObject_SetAttr(x,y,NULL))<0)FAIL(err) \
+#define OP_DELATTR(x,y,r,err)     if ((PyObject_SetAttr(x,y,NULL))<0)CFAIL(err) \
 				  r=Py_None; Py_INCREF(r);
 
-#define OP_NEWSLICE(x,y,z,r,err)  if (!(r=PySlice_New(x,y,z)))       FAIL(err)
+#define OP_NEWSLICE(x,y,z,r,err)  if (!(r=PySlice_New(x,y,z)))       CFAIL(err)
 
 #define OP_GETSLICE(x,y,z,r,err)  {					\
 		PyObject *__yo = y, *__zo = z;				\
@@ -102,46 +102,46 @@
 		if (__zo == Py_None) __zo = NULL;			\
 		if (!_PyEval_SliceIndex(__yo, &__y) ||			\
 		    !_PyEval_SliceIndex(__zo, &__z) ||			\
-		    !(r=PySequence_GetSlice(x, __y, __z))) FAIL(err)	\
+		    !(r=PySequence_GetSlice(x, __y, __z))) CFAIL(err)	\
 	}
 
 #define OP_ALLOC_AND_SET(x,y,r,err) { \
 		/* XXX check for long/int overflow */ \
 		int __i, __x = PyInt_AsLong(x); \
-		if (PyErr_Occurred()) FAIL(err) \
-		if (!(r = PyList_New(__x))) FAIL(err) \
+		if (PyErr_Occurred()) CFAIL(err) \
+		if (!(r = PyList_New(__x))) CFAIL(err) \
 		for (__i=0; __i<__x; __i++) { \
 			Py_INCREF(y); \
 			PyList_SET_ITEM(r, __i, y); \
 		} \
 	}
 
-#define OP_ITER(x,r,err)          if (!(r=PyObject_GetIter(x)))      FAIL(err)
+#define OP_ITER(x,r,err)          if (!(r=PyObject_GetIter(x)))      CFAIL(err)
 #define OP_NEXT(x,r,err)          if (!(r=PyIter_Next(x))) {                   \
 		if (!PyErr_Occurred()) PyErr_SetNone(PyExc_StopIteration);     \
-		FAIL(err)                                                      \
+		CFAIL(err)                                                      \
 	}
 
-#define OP_STR(x,r,err)           if (!(r=PyObject_Str(x)))          FAIL(err)
-#define OP_REPR(x,r,err)          if (!(r=PyObject_Repr(x)))         FAIL(err)
+#define OP_STR(x,r,err)           if (!(r=PyObject_Str(x)))          CFAIL(err)
+#define OP_REPR(x,r,err)          if (!(r=PyObject_Repr(x)))         CFAIL(err)
 #define OP_ORD(s,r,err) { \
 	char *__c = PyString_AsString(s); \
 	int __len; \
-	if ( !__c) FAIL(err) \
+	if ( !__c) CFAIL(err) \
 	if ((__len = PyString_GET_SIZE(s)) != 1) { \
 	    PyErr_Format(PyExc_TypeError, \
 		  "ord() expected a character, but string of length %d found", \
 		  __len); \
-	    FAIL(err) \
+	    CFAIL(err) \
 	} \
 	if (!(r = PyInt_FromLong((unsigned char)(__c[0])))) \
-	    FAIL(err) \
+	    CFAIL(err) \
     }
-#define OP_ID(x,r,err)    if (!(r=PyLong_FromVoidPtr(x))) FAIL(err)
+#define OP_ID(x,r,err)    if (!(r=PyLong_FromVoidPtr(x))) CFAIL(err)
 #define OP_HASH(x,r,err)  { \
 	long __hash = PyObject_Hash(x); \
-	if (__hash == -1 && PyErr_Occurred()) FAIL(err) \
-	if (!(r = PyInt_FromLong(__hash))) FAIL(err) \
+	if (__hash == -1 && PyErr_Occurred()) CFAIL(err) \
+	if (!(r = PyInt_FromLong(__hash))) CFAIL(err) \
     }
 
 #define OP_HEX(x,r,err)   { \
@@ -150,9 +150,9 @@
 	    __nb->nb_hex == NULL) { \
 		PyErr_SetString(PyExc_TypeError, \
 			   "hex() argument can't be converted to hex"); \
-		FAIL(err) \
+		CFAIL(err) \
 	} \
-	if (!(r = (*__nb->nb_hex)(x))) FAIL(err) \
+	if (!(r = (*__nb->nb_hex)(x))) CFAIL(err) \
     }
 #define OP_OCT(x,r,err)   { \
 	PyNumberMethods *__nb; \
@@ -160,32 +160,32 @@
 	    __nb->nb_oct == NULL) { \
 		PyErr_SetString(PyExc_TypeError, \
 			   "oct() argument can't be converted to oct"); \
-		FAIL(err) \
+		CFAIL(err) \
 	} \
-	if (!(r = (*__nb->nb_oct)(x))) FAIL(err) \
+	if (!(r = (*__nb->nb_oct)(x))) CFAIL(err) \
     }
 
 #define OP_INT(x,r,err)   { \
 	long __val = PyInt_AsLong(x); \
-	if (__val == -1 && PyErr_Occurred()) FAIL(err) \
-	if (!(r = PyInt_FromLong(__val))) FAIL (err) \
+	if (__val == -1 && PyErr_Occurred()) CFAIL(err) \
+	if (!(r = PyInt_FromLong(__val))) CFAIL (err) \
     }
 #define OP_FLOAT(x,r,err)   { \
 	double __val = PyFloat_AsDouble(x); \
-	if (PyErr_Occurred()) FAIL(err) \
-	if (!(r = PyFloat_FromDouble(__val))) FAIL (err) \
+	if (PyErr_Occurred()) CFAIL(err) \
+	if (!(r = PyFloat_FromDouble(__val))) CFAIL (err) \
     }
 
 #define OP_CMP(x,y,r,err)   { \
 	int __val = PyObject_Compare(x, y); \
-	if (PyErr_Occurred()) FAIL(err) \
-	if (!(r = PyInt_FromLong(__val))) FAIL (err) \
+	if (PyErr_Occurred()) CFAIL(err) \
+	if (!(r = PyInt_FromLong(__val))) CFAIL (err) \
     }
 
 
 #define OP_SIMPLE_CALL(args,r,err) if (!(r=PyObject_CallFunctionObjArgs args)) \
-					FAIL(err)
-#define OP_CALL_ARGS(args,r,err)   if (!(r=CallWithShape args))    FAIL(err)
+					CFAIL(err)
+#define OP_CALL_ARGS(args,r,err)   if (!(r=CallWithShape args))    CFAIL(err)
 
 /* Needs to act like getattr(x, '__class__', type(x)) */
 #define OP_TYPE(x,r,err) { \
@@ -205,27 +205,17 @@
 
 /*** operations with a variable number of arguments ***/
 
-#define OP_NEWLIST0(r,err)         if (!(r=PyList_New(0))) FAIL(err)
-#define OP_NEWLIST(args,r,err)     if (!(r=PyList_Pack args)) FAIL(err)
-#define OP_NEWDICT0(r,err)         if (!(r=PyDict_New())) FAIL(err)
-#define OP_NEWDICT(args,r,err)     if (!(r=PyDict_Pack args)) FAIL(err)
-#define OP_NEWTUPLE(args,r,err)    if (!(r=PyTuple_Pack args)) FAIL(err)
+#define OP_NEWLIST0(r,err)         if (!(r=PyList_New(0))) CFAIL(err)
+#define OP_NEWLIST(args,r,err)     if (!(r=PyList_Pack args)) CFAIL(err)
+#define OP_NEWDICT0(r,err)         if (!(r=PyDict_New())) CFAIL(err)
+#define OP_NEWDICT(args,r,err)     if (!(r=PyDict_Pack args)) CFAIL(err)
+#define OP_NEWTUPLE(args,r,err)    if (!(r=PyTuple_Pack args)) CFAIL(err)
 
 /*** argument parsing ***/
 
 #define OP_DECODE_ARG(fname, pos, name, vargs, vkwds, r, err)	\
-	if (!(r=decode_arg(fname, pos, name, vargs, vkwds, NULL))) FAIL(err)
+	if (!(r=decode_arg(fname, pos, name, vargs, vkwds, NULL))) CFAIL(err)
 #define OP_DECODE_ARG_DEF(fname, pos, name, vargs, vkwds, def, r, err)	\
-	if (!(r=decode_arg(fname, pos, name, vargs, vkwds, def))) FAIL(err)
+	if (!(r=decode_arg(fname, pos, name, vargs, vkwds, def))) CFAIL(err)
 #define OP_CHECK_NO_MORE_ARG(fname, n, vargs, r, err)	\
-	if (check_no_more_arg(fname, n, vargs) < 0) FAIL(err)
-
-/*** conversions, reference counting ***/
-
-#define OP_INCREF_pyobj(o)          Py_INCREF(o);
-#define OP_DECREF_pyobj(o)          Py_DECREF(o);
-#define CONV_TO_OBJ_pyobj(o)        ((void)Py_INCREF(o), o)
-#define CONV_FROM_OBJ_pyobj(o)      ((void)Py_INCREF(o), o)
-
-#define OP_INCREF_borrowedpyobj(o)  /* nothing */
-#define OP_DECREF_borrowedpyobj(o)  /* nothing */
+	if (check_no_more_arg(fname, n, vargs) < 0) CFAIL(err)

Modified: pypy/branch/pycompiler/translator/c/support.py
==============================================================================
--- pypy/branch/pycompiler/translator/c/support.py	(original)
+++ pypy/branch/pycompiler/translator/c/support.py	Fri Jul  1 18:28:54 2005
@@ -59,3 +59,43 @@
            double    long      typedef
            else      register  union
            ''')
+
+
+def gen_assignments(assignments):
+    # Generate a sequence of assignments that is possibly reordered
+    # to avoid clashes -- i.e. do the equivalent of a tuple assignment,
+    # reading all sources first, writing all targets next, but optimized
+
+    allsources = []
+    src2dest = {}
+    types = {}
+    assignments = list(assignments)
+    for typename, dest, src in assignments:
+        if src != dest:   # ignore 'v=v;'
+            allsources.append(src)
+            src2dest.setdefault(src, []).append(dest)
+            types[dest] = typename
+
+    for starting in allsources:
+        # starting from some starting variable, follow a chain of assignments
+        #     'vn=vn-1; ...; v3=v2; v2=v1; v1=starting;'
+        v = starting
+        srcchain = []
+        while src2dest.get(v):
+            srcchain.append(v)
+            v = src2dest[v].pop(0)
+            if v == starting:
+                break    # loop
+        if not srcchain:
+            continue   # already done in a previous chain
+        srcchain.reverse()   # ['vn-1', ..., 'v2', 'v1', 'starting']
+        code = []
+        for pair in zip([v] + srcchain[:-1], srcchain):
+            code.append('%s = %s;' % pair)
+        if v == starting:
+            # assignment loop 'starting=vn-1; ...; v2=v1; v1=starting;'
+            typename = types[starting]
+            tmpdecl = cdecl(typename, 'tmp')
+            code.insert(0, '{ %s = %s;' % (tmpdecl, starting))
+            code[-1] = '%s = tmp; }' % (srcchain[-2],)
+        yield ' '.join(code)
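
gen_assignments turns a parallel (tuple-style) assignment into a sequence of ordinary C assignments: it follows chains so that every source is read before it is overwritten, and introduces a temporary only when the assignments form a cycle. A hypothetical usage, with the expected output worked out by hand; the exact temporary declaration assumes that cdecl('int @', 'tmp') renders as 'int tmp':

    from pypy.translator.c.support import gen_assignments

    # A chain: 'b = c' must be emitted before 'c' is overwritten, and it is.
    assert list(gen_assignments([('int @', 'b', 'c'),
                                 ('int @', 'c', 'd')])) == ['b = c;', 'c = d;']

    # A cycle (a swap): a temporary breaks it, all within one yielded line.
    assert list(gen_assignments([('int @', 'a', 'b'),
                                 ('int @', 'b', 'a')])) == [
        '{ int tmp = b; b = a; a = tmp; }']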

Modified: pypy/branch/pycompiler/translator/c/test/test_database.py
==============================================================================
--- pypy/branch/pycompiler/translator/c/test/test_database.py	(original)
+++ pypy/branch/pycompiler/translator/c/test/test_database.py	Fri Jul  1 18:28:54 2005
@@ -27,7 +27,7 @@
 
 def test_primitive():
     db = LowLevelDatabase()
-    assert db.get(5) == '5'
+    assert db.get(5) == '5L'
     assert db.get(True) == '1'
 
 def test_struct():
@@ -236,26 +236,26 @@
     db.complete()
     dump_on_stdout(db)
 
-def test_nested_gcstruct():
-    S1 = GcStruct('inlined', ('x', Signed), ('y', Ptr(PyObject)))
-    S = GcStruct('testing', ('head', S1),
-                            ('ptr2', Ptr(S1)),
-                            ('z', Signed))
-    def ll_f(x):
-        ptr2 = malloc(S1)
-        ptr2.x = x+1
-        s = malloc(S)
-        s.head.x = x
-        s.ptr2 = ptr2
-        return s.head.x * s.ptr2.x
-    t = Translator(ll_f)
-    t.annotate([int])
-    t.specialize()
+##def test_nested_gcstruct():
+##    S1 = GcStruct('inlined', ('x', Signed), ('y', Ptr(PyObject)))
+##    S = GcStruct('testing', ('head', S1),
+##                            ('ptr2', Ptr(S1)),
+##                            ('z', Signed))
+##    def ll_f(x):
+##        ptr2 = malloc(S1)
+##        ptr2.x = x+1
+##        s = malloc(S)
+##        s.head.x = x
+##        s.ptr2 = ptr2
+##        return s.head.x * s.ptr2.x
+##    t = Translator(ll_f)
+##    t.annotate([int])
+##    t.specialize()
     
-    db = LowLevelDatabase(t)
-    db.get(getfunctionptr(t, ll_f))
-    db.complete()
-    dump_on_stdout(db)
+##    db = LowLevelDatabase(t)
+##    db.get(getfunctionptr(t, ll_f))
+##    db.complete()
+##    dump_on_stdout(db)
 
 def test_array():
     A = GcArray(('obj', Ptr(PyObject)))

Modified: pypy/branch/pycompiler/translator/c/test/test_genc.py
==============================================================================
--- pypy/branch/pycompiler/translator/c/test/test_genc.py	(original)
+++ pypy/branch/pycompiler/translator/c/test/test_genc.py	Fri Jul  1 18:28:54 2005
@@ -58,12 +58,12 @@
     assert f1(5) == 10
     assert f1(x=5) == 10
     assert f1(-123) == -246
-    py.test.raises(TypeError, f1, "world")  # check that it's really typed
-    py.test.raises(TypeError, f1)
-    py.test.raises(TypeError, f1, 2, 3)
-    py.test.raises(TypeError, f1, 2, x=2)
-    #py.test.raises(TypeError, f1, 2, y=2)   XXX missing a check at the moment
     assert module.malloc_counters() == (0, 0)
+    py.test.raises(Exception, f1, "world")  # check that it's really typed
+    py.test.raises(Exception, f1)
+    py.test.raises(Exception, f1, 2, 3)
+    py.test.raises(Exception, f1, 2, x=2)
+    #py.test.raises(Exception, f1, 2, y=2)   XXX missing a check at the moment
 
 
 def test_rlist():
@@ -142,3 +142,88 @@
     assert f1(12, "hello") == "hello"
     mallocs, frees = module.malloc_counters()
     assert mallocs == frees
+
+
+def test_runtime_type_info():
+    S = GcStruct('s', ('is_actually_s1', Bool))
+    S1 = GcStruct('s1', ('sub', S))
+    attachRuntimeTypeInfo(S)
+    attachRuntimeTypeInfo(S1)
+    def rtti_S(p):
+        if p.is_actually_s1:
+            return getRuntimeTypeInfo(S1)
+        else:
+            return getRuntimeTypeInfo(S)
+    def rtti_S1(p):
+        return getRuntimeTypeInfo(S1)
+    def does_stuff():
+        p = malloc(S)
+        p.is_actually_s1 = False
+        p1 = malloc(S1)
+        p1.sub.is_actually_s1 = True
+        # and no crash when p and p1 are decref'ed
+        return sys
+    t = Translator(does_stuff)
+    t.annotate([])
+    from pypy.rpython.rtyper import RPythonTyper
+    rtyper = RPythonTyper(t.annotator)
+    rtyper.attachRuntimeTypeInfoFunc(S,  rtti_S)
+    rtyper.attachRuntimeTypeInfoFunc(S1, rtti_S1)
+    rtyper.specialize()
+    #t.view()
+
+    db = LowLevelDatabase(t)
+    entrypoint = db.get(pyobjectptr(does_stuff))
+    db.complete()
+
+    module = compile_db(db)
+
+    f1 = getattr(module, entrypoint)
+    f1()
+    mallocs, frees = module.malloc_counters()
+    assert mallocs == frees
+
+def test_time_clock():
+    import time
+    def does_stuff():
+        return time.clock()
+    t = Translator(does_stuff)
+    t.annotate([])
+    t.specialize()
+    #t.view()
+
+    db = LowLevelDatabase(t)
+    entrypoint = db.get(pyobjectptr(does_stuff))
+    db.complete()
+
+    module = compile_db(db)
+
+    f1 = getattr(module, entrypoint)
+    t0 = time.clock()
+    t1 = f1()
+    assert type(t1) is float
+    t2 = time.clock()
+    assert t0 <= t1 <= t2
+    mallocs, frees = module.malloc_counters()
+    assert mallocs == frees
+
+def test_str():
+    def call_str(o):
+        return str(o)
+    t = Translator(call_str)
+    t.annotate([object])
+    t.specialize()
+    #t.view()
+
+    db = LowLevelDatabase(t)
+    entrypoint = db.get(pyobjectptr(call_str))
+    db.complete()
+
+    module = compile_db(db)
+
+    f1 = getattr(module, entrypoint)
+    lst = (1, [5], "'hello'", lambda x: x+1)
+    res = f1(lst)
+    assert res == str(lst)
+    mallocs, frees = module.malloc_counters()
+    assert mallocs == frees

Modified: pypy/branch/pycompiler/translator/c/wrapper.py
==============================================================================
--- pypy/branch/pycompiler/translator/c/wrapper.py	(original)
+++ pypy/branch/pycompiler/translator/c/wrapper.py	Fri Jul  1 18:28:54 2005
@@ -118,4 +118,5 @@
                                  PyObjPtr],
                                 PyObjPtr),
                        wgraph.name,
-                       graph = wgraph)
+                       graph = wgraph,
+                       exception_policy = "CPython")

Modified: pypy/branch/pycompiler/translator/geninterplevel.py
==============================================================================
--- pypy/branch/pycompiler/translator/geninterplevel.py	(original)
+++ pypy/branch/pycompiler/translator/geninterplevel.py	Fri Jul  1 18:28:54 2005
@@ -57,6 +57,7 @@
 from pypy.interpreter.error import OperationError
 from pypy.interpreter.argument import Arguments
 from pypy.rpython.rarithmetic import r_int, r_uint
+from pypy.translator.backendoptimization import SSI_to_SSA
 
 from pypy.translator.translator import Translator
 from pypy.objspace.flow import FlowObjSpace
@@ -77,7 +78,7 @@
 import pypy # __path__
 import py.path
 
-GI_VERSION = '1.1.1'  # bump this for substantial changes
+GI_VERSION = '1.1.2'  # bump this for substantial changes
 # ____________________________________________________________
 
 def eval_helper(self, typename, expr):
@@ -302,8 +303,10 @@
                 src = linklocalvars[a1]
             else:
                 src = self.expr(a1, localscope)
-            left.append(self.expr(a2, localscope))
-            right.append(src)
+            dest = self.expr(a2, localscope)
+            if src != dest:
+                left.append(dest)
+                right.append(src)
         if left: # anything at all?
             txt = "%s = %s" % (", ".join(left), ", ".join(right))
             if len(txt) <= 65: # arbitrary
@@ -1000,6 +1003,14 @@
         return name # no success
 
     def gen_rpyfunction(self, func):
+        try:
+            graph = self.translator.getflowgraph(func)
+        except Exception, e:
+            print 20*"*", e
+            print func
+            raise
+        SSI_to_SSA(graph)
+        checkgraph(graph)
 
         f = self.f
         print >> f, "##SECTION##" # simple to split, afterwards
@@ -1011,7 +1022,7 @@
             func.func_code.co_firstlineno)
         print >> f, "##SECTION##"
         localscope = self.namespace.localScope()
-        body = list(self.rpyfunction_body(func, localscope))
+        body = list(self.rpyfunction_body(graph, localscope))
         name_of_defaults = [self.nameof(x, debug=('Default argument of', func))
                             for x in (func.func_defaults or ())]
         self.gen_global_declarations()
@@ -1022,14 +1033,14 @@
         assert cname.startswith('gfunc_')
         f_name = 'f_' + cname[6:]
 
-        # collect all the local variables
-        graph = self.translator.getflowgraph(func)
-        localslst = []
-        def visit(node):
-            if isinstance(node, Block):
-                localslst.extend(node.getvariables())
-        traverse(visit, graph)
-        localnames = [self.expr(a, localscope) for a in uniqueitems(localslst)]
+##        # collect all the local variables
+##        graph = self.translator.getflowgraph(func)
+##        localslst = []
+##        def visit(node):
+##            if isinstance(node, Block):
+##                localslst.extend(node.getvariables())
+##        traverse(visit, graph)
+##        localnames = [self.expr(a, localscope) for a in uniqueitems(localslst)]
 
         # collect all the arguments
         vararg = varkw = None
@@ -1110,7 +1121,7 @@
         if docstr is not None:
             print >> f, docstr
 
-        fast_locals = [arg for arg in localnames if arg not in fast_set]
+##        fast_locals = [arg for arg in localnames if arg not in fast_set]
 ##        # if goto is specialized, the false detection of
 ##        # uninitialized variables goes away.
 ##        if fast_locals and not self.specialize_goto:
@@ -1134,25 +1145,7 @@
             pass # del self.translator.flowgraphs[func]
         # got duplicate flowgraphs when doing this!
 
-    def rpyfunction_body(self, func, localscope):
-        try:
-            graph = self.translator.getflowgraph(func)
-        except Exception, e:
-            print 20*"*", e
-            print func
-            raise
-        # not needed, we use tuple assignment!
-        # remove_direct_loops(graph)
-        checkgraph(graph)
-
-        allblocks = []
-        
-        f = self.f
-        t = self.translator
-        #t.simplify(func)
-        graph = t.getflowgraph(func)
-
-
+    def rpyfunction_body(self, graph, localscope):
         start = graph.startblock
         allblocks = ordered_blocks(graph)
         nblocks = len(allblocks)

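Besides running SSI_to_SSA on each graph before generating code, the link-handling code above now drops the components of the tuple assignment whose source and target coincide; after SSA conversion many of them do, so the emitted "a, b = x, y" lines shrink or disappear entirely. A minimal sketch of that filter (illustrative variable names, not the real Link objects):

    def render_link(pairs):         # pairs of (source, target) variable names
        left, right = [], []
        for src, dest in pairs:
            if src != dest:         # 'v = v' carries no information, skip it
                left.append(dest)
                right.append(src)
        if not left:
            return ''               # nothing at all to copy along this link
        return "%s = %s" % (", ".join(left), ", ".join(right))

    assert render_link([('v1', 'v1'), ('w0', 'v2'), ('v3', 'v3')]) == "v2 = w0"
    assert render_link([('v1', 'v1')]) == ''
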
Modified: pypy/branch/pycompiler/translator/goal/query.py
==============================================================================
--- pypy/branch/pycompiler/translator/goal/query.py	(original)
+++ pypy/branch/pycompiler/translator/goal/query.py	Fri Jul  1 18:28:54 2005
@@ -1,5 +1,6 @@
 # functions to query information out of the translator and annotator from the debug prompt of translate_pypy
 import types
+import re
 
 import pypy.annotation.model as annmodel
 import pypy.objspace.flow.model as flowmodel
@@ -251,6 +252,16 @@
         if len(callb[x]) >= 2 and x not in b_nb:
             print ' '.join([prettycallable((classdef and classdef.cls, func)) for (classdef,func) in callb[x].keys()])
 
+def pretty_els(objs):
+    accum = []
+    for classdef, obj in objs:
+        cls = classdef and classdef.cls
+        accum.append(prettycallable((cls, obj)))
+    els = ' '.join(accum)
+    if len(accum) == 1:
+        return els
+    else:
+        return "{%s}" % els
 
 def pbccall(translator):
     fams = translator.annotator.getpbccallfamilies().root_info.itervalues()
@@ -282,17 +293,6 @@
         else:
             return "in total %d %s" % (nels, prettycallable(kinds))
 
-    def pretty_els(objs):
-        accum = []
-        for classdef, obj in objs:
-            cls = classdef and classdef.cls
-            accum.append(prettycallable((cls, obj)))
-        els = ' '.join(accum)
-        if len(accum) == 1:
-            return els
-        else:
-            return "{%s}" % els
-
     items = one_pattern_fams.items()
 
     items.sort(lambda a,b: cmp((a[0][1],a[1][1]), (b[0][1],b[1][1]))) # sort by pattern and then by els
@@ -311,6 +311,129 @@
             print " - many callables, many patterns -"
         print "family of", pretty_els(objs), "with call-patterns:", prettypatt(patts)
 
+def pbcbmsanity(translator):
+    callb = translator.annotator.getpbccallables()
+    bk = translator.annotator.bookkeeper
+    bmeths = [x for x in callb if isinstance(x, types.MethodType) and x.im_self is not None]
+    print "%d bound-methods" % len(bmeths)
+    fams = translator.annotator.getpbccallfamilies()
+    plural_bm_families = {}
+    one_el = 0
+    for bm in bmeths:
+        notpbc = bm.im_self not in bk.pbccache
+        freestanding = bm.im_func in callb
+        if notpbc or freestanding:
+            print "! %s," % bm,
+        if notpbc:
+            print "of non-PBC %s,",
+        if freestanding:
+            print "found freestanding too"
+        bm_fam = fams[(None, bm)]
+        if len(bm_fam.objects) == 1:
+            one_el += 1
+        else:
+            plural_bm_families[bm_fam] = True
+    print "%d families of one bound-method" % one_el
+    print "%d families with more than just one bound-method" % len(plural_bm_families)
+    for bm_fam in plural_bm_families:
+        print pretty_els(bm_fam.objects)
+    return plural_bm_families
+
+class Counters(dict):
+
+    def __getitem__(self, outcome):
+        if (isinstance(outcome, annmodel.SomeObject) or 
+            isinstance(outcome, tuple) and outcome and 
+            isinstance(outcome[0], annmodel.SomeObject)):
+            for k in self.iterkeys():
+                if k == outcome:
+                    outcome = k
+                    break
+            else:
+                raise KeyError
+        return dict.__getitem__(self, outcome)
+
+    def get(self, outcome, defl):
+        try:
+            return self[outcome]
+        except KeyError:
+            return defl
+
+    def __setitem__(self, outcome, c):
+        if (isinstance(outcome, annmodel.SomeObject) or 
+            isinstance(outcome, tuple) and outcome and 
+            isinstance(outcome[0], annmodel.SomeObject)):
+            for k in self.iterkeys():
+                if k == outcome:
+                    outcome = k
+                    break
+        return dict.__setitem__(self, outcome, c)
+
+
+def keyrepr(k):
+    if isinstance(k, tuple):
+        return "(%s)" % ', '.join([keyrepr(x) for x in k])
+    else:
+        return str(k)
+
+def statsfor(t, category):
+    stats = t.annotator.bookkeeper.stats
+    for_category = stats.classify[category]
+    print "%s total = %d" % (category, len(for_category))
+    counters = Counters()
+    for pos, outcome in for_category.iteritems():
+        counters[outcome] = counters.get(outcome, 0) + 1
+        
+    w = max([len(keyrepr(o)) for o in counters.keys()])+1
+    if w < 60:
+        for outcome, n in counters.iteritems():
+            print "%*s | %d" % (w, keyrepr(outcome), n)
+    else:
+        for outcome, n in counters.iteritems():
+            print "%s | %d" % (keyrepr(outcome), n)
+
+def statsforstrformat(t):
+    stats = t.annotator.bookkeeper.stats
+    stats = stats.classify['strformat']
+    result = {}
+    for fmt, args in stats.itervalues():
+        fmts = re.findall("%l?.", fmt)
+        if not isinstance(args, tuple):
+            args = (args,)
+        for f, a in zip(fmts, args):
+            result[(f,a)] = result.get((f,a), 0) + 1
+    for (f,a), c in result.iteritems():
+        print "%s %s %d" % (f, keyrepr(a), c)
+
+def statbuiltins(t):
+    stats = t.annotator.bookkeeper.stats.classify
+    for k in stats:
+        if k.startswith('__builtin__'):
+            statsfor(t, k)
+
+def dicts(t):
+    ann = t.annotator
+    r = []
+
+    def sdicts():
+        for so in ann.bindings.itervalues():
+            if isinstance(so, annmodel.SomeDict):
+                yield so
+        for so in ann.bookkeeper.immutable_cache.itervalues():
+            if isinstance(so, annmodel.SomeDict):
+                yield so
+    
+    for so in sdicts():
+            sk, sv = so.dictdef.dictkey.s_value, so.dictdef.dictvalue.s_value
+            for x in r:
+                if x == (sk, sv):
+                    break
+            else:
+                r.append((sk, sv))
+
+    for x in r:
+        print x
+
 # debug helper
 def tryout(f, *args):
     try:

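The new Counters class in query.py counts outcomes whose keys are annotation SomeObject instances (or tuples containing them); those compare equal structurally but are not reliably usable as plain dict keys, so Counters canonicalizes each key by scanning the existing keys with ==. A minimal stand-alone sketch of the same idea (Point is only a stand-in for a SomeObject-like class):

    class EqKeyCounter(dict):
        def _canonical(self, key):
            for k in self:              # linear scan; fine for small tables
                if k == key:
                    return k
            return key

        def add(self, key):
            key = self._canonical(key)
            dict.__setitem__(self, key, dict.get(self, key, 0) + 1)

    class Point(object):                # structurally equal, hashed by identity
        __hash__ = object.__hash__
        def __init__(self, x):
            self.x = x
        def __eq__(self, other):
            return isinstance(other, Point) and self.x == other.x

    c = EqKeyCounter()
    c.add(Point(1)); c.add(Point(1)); c.add(Point(2))
    assert sorted(c.values()) == [1, 2]      # Point(1) counted twice
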
Deleted: /pypy/branch/pycompiler/translator/goal/targetpypy0.py
==============================================================================
--- /pypy/branch/pycompiler/translator/goal/targetpypy0.py	Fri Jul  1 18:28:54 2005
+++ (empty file)
@@ -1,33 +0,0 @@
-from pypy.objspace import dummy
-from pypy.interpreter.pycode import PyCode
-
-# __________  Entry point  __________
-
-def entry_point(code, w_loc):
-    code2 = PyCode(space)
-    code2 = code2._from_code(code)
-    code2.exec_code(space, space.wrap({}), w_loc)
-
-# _____ Define and setup target _____
-
-def target():
-    global space
-    space = dummy.DummyObjSpace()
-
-    from pypy.interpreter import pycode
-
-    pycode.setup_frame_classes()
-
-    from pypy.interpreter import pyopcode
-
-    # cheat
-    space._gatewaycache.content[pyopcode.app] =  space.newdict([])
-
-    return entry_point,[object, dummy.W_Obj]
-
-# _____ Run translated _____
-
-def run(c_entry_point):
-    w_result = c_entry_point(compile("a+b","<stuff>","eval"),dummy.W_Obj())
-    print w_result
-

Deleted: /pypy/branch/pycompiler/translator/goal/targetpypy1.py
==============================================================================
--- /pypy/branch/pycompiler/translator/goal/targetpypy1.py	Fri Jul  1 18:28:54 2005
+++ (empty file)
@@ -1,75 +0,0 @@
-from pypy.objspace.std.objspace import StdObjSpace, W_Object
-from pypy.objspace.std.intobject import W_IntObject
-from pypy.objspace.std import stdtypedef
-
-# __________  Entry point  __________
-
-operations = "mul add sub div mod lshift rshift floordiv truediv ".split()
-
-def entry_point():
-    w_a = W_IntObject(space, -7)
-    w_b = W_IntObject(space, 6)
-    results_w = [mmentrypoints[op](space, w_a, w_b) for op in operations]
-    return [space.unwrap(each) for each in resuls_w]
-
-# flatten the above code, to get a nicer look
-def make_flat_code():
-    g = globals()
-    # make globals constants from the operations
-    code = """def entry_point():
-    import sys
-    w_a = W_IntObject(space, -7)
-    # -sys.maxint-1 crashes: genc problem with OP_SUB and int constant
-    # when implementing lshift_Long_Long and rshift__Long_Long
-    w_b = W_IntObject(space,  6)
-    results_w = []
-    append = results_w.append
-"""
-    for op in operations:
-        g["op_%s" % op] = mmentrypoints[op]
-        line = "    append(op_%s(space, w_a, w_b))" % op
-        code += line + '\n'
-    code += "    return [space.unwrap(each) for each in results_w]\n"
-    print code
-    exec code in g
-    
-# _____ Define and setup target _____
-def target():
-    global space, mmentrypoints
-    # disable translation of the whole of classobjinterp.py
-    StdObjSpace.setup_old_style_classes = lambda self: None
-    space = StdObjSpace()
-    # call cache filling code *not* needed here
-
-    # ------------------------------------------------------------
-    mmentrypoints = {}
-    for name in operations:
-        mm = getattr(space.MM, name)
-        exprargs, expr, miniglobals, fallback = (
-            mm.install_not_sliced(space.model.typeorder, baked_perform_call=False))
-        func = stdtypedef.make_perform_trampoline('__mm_'+name,
-                                                  exprargs, expr, miniglobals,
-                                                  mm)
-        mmentrypoints[name] = func
-    # ------------------------------------------------------------
-
-    # further call the entry_point once to trigger building remaining
-    # caches (as far as analyzing the entry_point is concerned)
-    make_flat_code()
-    entry_point()
-
-    return entry_point, []
-
-# _____ Run translated _____
-
-def run(c_entry_point):
-    res = c_entry_point()
-    print res
-    import operator
-    assert res == [getattr(operator, name)(-7, 6) for name in operations]
-
-if __name__ == "__main__":
-    # just run it without translation
-    target()
-    run(entry_point)
-    
\ No newline at end of file

Modified: pypy/branch/pycompiler/translator/goal/targetpypymain.py
==============================================================================
--- pypy/branch/pycompiler/translator/goal/targetpypymain.py	(original)
+++ pypy/branch/pycompiler/translator/goal/targetpypymain.py	Fri Jul  1 18:28:54 2005
@@ -1,6 +1,9 @@
 import os, sys
 from pypy.objspace.std.objspace import StdObjSpace
-from pypy.annotation.model import *
+# XXX from pypy.annotation.model import *
+# since we are execfile()'ed this would pull some
+# weird objects into the globals, which we would try to pickle.
+from pypy.annotation.model import SomeList, SomeString
 from pypy.annotation.listdef import ListDef
 from pypy.interpreter import gateway
 

Modified: pypy/branch/pycompiler/translator/goal/targetrpystone.py
==============================================================================
--- pypy/branch/pycompiler/translator/goal/targetrpystone.py	(original)
+++ pypy/branch/pycompiler/translator/goal/targetrpystone.py	Fri Jul  1 18:28:54 2005
@@ -1,36 +1,12 @@
-import buildcache2
-from pypy.objspace.std.objspace import StdObjSpace
-from pypy.translator.test import rpystone
+from pypy.translator.goal import targetrpystonex
 
-# __________  Entry point  __________
-
-LOOPS = 150000
+LOOPS = 2000000
 
-# rpystone.setslow(False)
 
-def entry_point():
-    rpystone.entrypoint(LOOPS)
-    
+# __________  Entry point  __________
 # _____ Define and setup target _____
-def target():
-    global space, mmentrypoints
-    space = StdObjSpace()
-
-    # ------------------------------------------------------------
-
-    return entry_point, []
-
 # _____ Run translated _____
 
-def run(c_entry_point):
-    res_w = c_entry_point()
-    print res_w
-    print "CPython:"
-    rpystone.entrypoint(50000)
-
-if __name__ == "__main__":
-    # just run it without translation
-    LOOPS = 50000
-    target()
-    run(entry_point)
-    
\ No newline at end of file
+(entry_point,
+ target,
+ run) = targetrpystonex.make_target_definition(LOOPS)
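
targetrpystone.py and its variants now delegate to a shared maker in targetrpystonex instead of duplicating near-identical modules; the maker returns the (entry_point, target, run) triple that translate_pypy expects, closed over the loop count. A sketch of that shape (an assumption about targetrpystonex, which is not shown in this diff; the real version runs rpystone rather than the dummy benchmark used here):

    def make_target_definition(loops, benchmark=lambda n: n):
        def entry_point():
            return benchmark(loops)

        def target():
            return entry_point, []      # entry point plus annotation input types

        def run(c_entry_point):
            return c_entry_point()

        return entry_point, target, run

    entry_point, target, run = make_target_definition(2000000)
    assert target() == (entry_point, [])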

Deleted: /pypy/branch/pycompiler/translator/goal/targetrpystone2.py
==============================================================================
--- /pypy/branch/pycompiler/translator/goal/targetrpystone2.py	Fri Jul  1 18:28:54 2005
+++ (empty file)
@@ -1,36 +0,0 @@
-import buildcache2
-from pypy.objspace.std.objspace import StdObjSpace
-from pypy.translator.test import rpystone
-
-# __________  Entry point  __________
-
-LOOPS = 1000000
-
-rpystone.setslow(False)
-
-def entry_point():
-    rpystone.entrypoint(LOOPS)
-    
-# _____ Define and setup target _____
-def target():
-    global space, mmentrypoints
-    space = StdObjSpace()
-
-    # ------------------------------------------------------------
-
-    return entry_point, []
-
-# _____ Run translated _____
-
-def run(c_entry_point):
-    res_w = c_entry_point()
-    print res_w
-    print "CPython:"
-    rpystone.entrypoint(50000)
-
-if __name__ == "__main__":
-    # just run it without translation
-    LOOPS = 50000
-    target()
-    run(entry_point)
-    
\ No newline at end of file

Modified: pypy/branch/pycompiler/translator/goal/translate_pypy.py
==============================================================================
--- pypy/branch/pycompiler/translator/goal/translate_pypy.py	(original)
+++ pypy/branch/pycompiler/translator/goal/translate_pypy.py	Fri Jul  1 18:28:54 2005
@@ -15,45 +15,54 @@
    -no-a      Don't infer annotations, just translate everything
    -no-s      Don't simplify the graph after annotation
    -no-t      Don't type-specialize the graph operations with the C typer
+   -no-o      Don't do backend-oriented optimizations
    -no-c      Don't generate the C code
+   -fork      (UNIX) Create a restartable checkpoint after annotation
    -c         Generate the C code, but don't compile it
    -o         Generate and compile the C code, but don't run it
-   -no-mark-some-objects
-              Do not mark functions that have SomeObject in their signature.
    -tcc       Equivalent to the envvar PYPY_CC='tcc -shared -o "%s.so" "%s.c"'
                   -- http://fabrice.bellard.free.fr/tcc/
    -no-d      Disable recording of debugging information
    -huge=%    Threshold in the number of functions after which only a local call
               graph and not a full one is displayed
+   -no-snapshot
+              Don't redirect imports to the translation snapshot
+   -save filename
+              saves the translator to a file. The file type can either
+              be .py or .zip (recommended).
+   -load filename
+              restores the translator from a file. The file type must
+              be either .py or .zip .
 """
 import autopath, sys, os
 
-# xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx
-basedir = autopath.this_dir
+if '-no-snapshot' not in sys.argv:
+    # xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx
+    basedir = autopath.this_dir
 
-pypy_translation_snapshot_dir = os.path.join(basedir, 'pypy-translation-snapshot')
+    pypy_translation_snapshot_dir = os.path.join(basedir, 'pypy-translation-snapshot')
 
-if not os.path.isdir(pypy_translation_snapshot_dir):
-    print """
-Translation is performed on a specific revision of PyPy which lives on
-a branch. This needs to be checked out into translator/goal with:
+    if not os.path.isdir(pypy_translation_snapshot_dir):
+        print """
+    Translation is performed on a specific revision of PyPy which lives on
+    a branch. This needs to be checked out into translator/goal with:
 
-svn co http://codespeak.net/svn/pypy/branch/pypy-translation-snapshot
-"""[1:]
-    sys.exit(2)
+    svn co http://codespeak.net/svn/pypy/branch/pypy-translation-snapshot
+    """[1:]
+        sys.exit(2)
 
-# override imports from pypy head with imports from pypy-translation-snapshot
-import pypy
-pypy.__path__.insert(0, pypy_translation_snapshot_dir)
+    # override imports from pypy head with imports from pypy-translation-snapshot
+    import pypy
+    pypy.__path__.insert(0, pypy_translation_snapshot_dir)
 
-# complement imports from pypy.objspace (from pypy-translation-snapshot)
-# with pypy head objspace/
-import pypy.objspace
-pypy.objspace.__path__.append(os.path.join(autopath.pypydir, 'objspace'))
+    # complement imports from pypy.objspace (from pypy-translation-snapshot)
+    # with pypy head objspace/
+    import pypy.objspace
+    pypy.objspace.__path__.append(os.path.join(autopath.pypydir, 'objspace'))
 
-print "imports redirected to pypy-translation-snapshot."
+    print "imports redirected to pypy-translation-snapshot."
 
-# xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx
+    # xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx
 
 
 import threading, pdb
@@ -65,7 +74,7 @@
 from pypy.annotation.model import SomeObject
 from pypy.tool.udir import udir 
 from pypy.tool.ansi_print import ansi_print
-
+from pypy.translator.pickle.main import load, save
 
 # XXX this tries to make compiling faster
 from pypy.translator.tool import buildpyxmodule
@@ -77,25 +86,38 @@
 # __________  Main  __________
 
 def analyse(target):
-    global t, entry_point
-
-    entry_point, inputtypes = target()
+    global t, entry_point, inputtypes
 
-    t = Translator(entry_point, verbose=True, simplifying=True)
+    if target:
+        entry_point, inputtypes = target()
+        t = Translator(entry_point, verbose=True, simplifying=True)
+        a = None
+    else:
+        # otherwise we have been loaded
+        a = t.annotator
+        t.frozen = False
     if listen_port:
         run_async_server()
     if not options['-no-a']:
+        print 'Annotating...'
         a = t.annotate(inputtypes, policy=PyPyAnnotatorPolicy())
         sanity_check_exceptblocks(t)
-        worstblocks_topten(a)
-        if not options['-no-s']:
-            a.simplify()
-        if not options['-no-t']:
-            a.specialize()
+        worstblocks_topten(a, 3)
+        find_someobjects(t)
+    if a and not options['-no-s']:
+        print 'Simplifying...'
+        a.simplify()
+    if a and options['-fork']:
+        from pypy.translator.goal import unixcheckpoint
+        unixcheckpoint.restartable_point()
+    if a and not options['-no-t']:
+        print 'Specializing...'
+        t.specialize()
+    if not options['-no-o']:
+        print 'Back-end optimizations...'
+        t.backend_optimizations()
+    if a:
         t.frozen = True   # cannot freeze if we don't have annotations
-        if not options['-no-mark-some-objects']:
-            options['-no-mark-some-objects'] = True # Do not do this again
-            find_someobjects(t)
 
 def sanity_check_exceptblocks(translator):
     annotator = translator.annotator
@@ -225,6 +247,8 @@
 
     targetspec = 'targetpypymain'
     huge = 100
+    load_file = None
+    save_file = None
 
     options = {'-text': False,
                '-no-c': False,
@@ -234,11 +258,17 @@
                '-no-a': False,
                '-no-s': False,
                '-no-t': False,
+               '-no-o': False,
                '-tcc':  False,
                '-no-d': False,
+               '-no-snapshot' : False,
+               '-load': False,
+               '-save': False,
+               '-fork': False,
                }
     listen_port = None
-    for arg in sys.argv[1:]:
+    argiter = iter(sys.argv[1:])
+    for arg in argiter:
         if arg in ('-h', '--help'):
             print __doc__.strip()
             sys.exit()
@@ -256,6 +286,11 @@
             else:                
                 assert arg in options, "unknown option %r" % (arg,)
                 options[arg] = True
+                if arg == '-load':
+                    load_file = argiter.next()
+                    loaded_dic = load(load_file)
+                if arg == '-save':
+                    save_file = argiter.next()
     if options['-tcc']:
         os.environ['PYPY_CC'] = 'tcc -shared -o "%s.so" "%s.c"'
     if options['-no-d']:
@@ -455,12 +490,41 @@
             cleanup()
 
     try:
-        targetspec_dic = {}
-        sys.path.insert(0, os.path.dirname(targetspec))
-        execfile(targetspec+'.py',targetspec_dic)
-        print "Analysing target as defined by %s" % targetspec
-        analyse(targetspec_dic['target'])
+        if load_file:
+            t = loaded_dic['trans']
+            entry_point = t.entrypoint
+            inputtypes = loaded_dic['inputtypes']
+            targetspec_dic = loaded_dic['targetspec_dic']
+            targetspec = loaded_dic['targetspec']
+            old_options = loaded_dic['options']
+            for name in '-no-a -no-s -no-t -no-o'.split():
+                # if one of these options was not set before, the corresponding
+                # action has already been done and must not be repeated now.
+                if not old_options[name]:
+                    if options[name]:
+                        print 'option %s is implied by the load' % name
+                    options[name] = True
+            print "continuing Analysis as defined by %s, loaded from %s" %(
+                targetspec, load_file)
+            print 'options in effect:', options
+            analyse(None)
+        else:
+            targetspec_dic = {}
+            sys.path.insert(0, os.path.dirname(targetspec))
+            execfile(targetspec+'.py', targetspec_dic)
+            print "Analysing target as defined by %s" % targetspec
+            print 'options in effect:', options
+            analyse(targetspec_dic['target'])
         print '-'*60
+        if save_file:
+            print 'saving state to %s' % save_file
+            save(t, save_file,
+                 trans=t,
+                 inputtypes=inputtypes,
+                 targetspec=targetspec,
+                 targetspec_dic=targetspec_dic,
+                 options=options,
+                 )
         if options['-no-c']:
             print 'Not generating C code.'
         elif options['-c']:
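
A minimal sketch, not taken from this patch, of the pattern the new -load/-save
handling relies on: because the argument loop now runs over an explicit
iterator, an option can consume its own value with argiter.next() and the
enclosing for loop simply resumes after it (names below are made up):

def parse_args(argv):
    settings = {'-load': None, '-save': None, '-fork': False}
    argiter = iter(argv)
    for arg in argiter:
        if arg in ('-load', '-save'):
            # the next command-line item is this option's file name
            settings[arg] = argiter.next()
        elif arg in settings:
            settings[arg] = True
    return settings

print parse_args(['-save', 'snapshot.zip', '-fork'])
# e.g. '-save' maps to 'snapshot.zip' and '-fork' is set to True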

Modified: pypy/branch/pycompiler/translator/llvm/build_llvm_module.py
==============================================================================
--- pypy/branch/pycompiler/translator/llvm/build_llvm_module.py	(original)
+++ pypy/branch/pycompiler/translator/llvm/build_llvm_module.py	Fri Jul  1 18:28:54 2005
@@ -14,7 +14,7 @@
 from pypy.translator.tool.buildpyxmodule import make_c_from_pyxfile
 from pypy.translator.tool import stdoutcapture
 
-debug = False
+debug = True
 
 class CompileError(exceptions.Exception):
     pass

Modified: pypy/branch/pycompiler/translator/llvm/llvmbc.py
==============================================================================
--- pypy/branch/pycompiler/translator/llvm/llvmbc.py	(original)
+++ pypy/branch/pycompiler/translator/llvm/llvmbc.py	Fri Jul  1 18:28:54 2005
@@ -97,8 +97,7 @@
     #Shift instructions
     def shift_instruction(self, instr, l_target, l_a, l_b):
         self.phi_done = True
-        assert l_target.llvmtype() == l_a.llvmtype()
-        #assert l_b.llvmtype() == 'ubyte'   #or cast to ubyte or assuma nothing goes wrong
+        # XXX hack: just assume that l_b is of the appropriate type
         s = "%s = %s %s, ubyte %s" % (l_target.llvmname(), instr,
                                       l_a.typed_name(), l_b.llvmname())
         self.instructions.append(s)
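
For readers unfamiliar with the pre-2.0 LLVM assembler targeted here, this is a
hypothetical standalone rendering of the format string shift_instruction emits;
the register names are invented, only the shape of the line matters:

instr, target, a_typed, b_name = 'shl', '%result', 'int %value', '%count'
print "%s = %s %s, ubyte %s" % (target, instr, a_typed, b_name)
# prints: %result = shl int %value, ubyte %count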

Modified: pypy/branch/pycompiler/translator/llvm/test/llvmsnippet.py
==============================================================================
--- pypy/branch/pycompiler/translator/llvm/test/llvmsnippet.py	(original)
+++ pypy/branch/pycompiler/translator/llvm/test/llvmsnippet.py	Fri Jul  1 18:28:54 2005
@@ -62,6 +62,13 @@
 def return_none():
     pass
 
+def shiftleft(i, j):
+    return i << j
+
+def shiftright(i, j):
+    return i >> j
+
+
 #float snippets
 
 def float_f1(x):

Modified: pypy/branch/pycompiler/translator/llvm/test/test_genllvm.py
==============================================================================
--- pypy/branch/pycompiler/translator/llvm/test/test_genllvm.py	(original)
+++ pypy/branch/pycompiler/translator/llvm/test/test_genllvm.py	Fri Jul  1 18:28:54 2005
@@ -1,5 +1,7 @@
 from __future__ import division
 import autopath
+import sys
+
 import py
 
 from pypy.translator.translator import Translator
@@ -97,6 +99,14 @@
         f = compile_function(llvmsnippet.return_none, [])
         assert f() is None
 
+    def test_shift(self):
+        shl = compile_function(llvmsnippet.shiftleft, [int, int])
+        shr = compile_function(llvmsnippet.shiftright, [int, int])
+        for i in [1, 2, 3, 100000, 2000000, sys.maxint - 1]:
+            for j in [1, 2, 3, 100000, 2000000, sys.maxint - 1]:
+                assert shl(i, j) == i << j
+                assert shr(i, j) == i >> j
+
 class TestFloat(object):
     def setup_method(self, method):
         if not llvm_found:

Modified: pypy/branch/pycompiler/translator/llvm/typerepr.py
==============================================================================
--- pypy/branch/pycompiler/translator/llvm/typerepr.py	(original)
+++ pypy/branch/pycompiler/translator/llvm/typerepr.py	Fri Jul  1 18:28:54 2005
@@ -85,8 +85,6 @@
         "int_mod": "rem",
         "int_xor": "xor",
         "int_and": "and",
-        "int_lshift": "shl",
-        "int_rshift": "shr",
         "int_or": "or",
         "int_eq": "seteq",
         "int_ne": "setne",
@@ -105,15 +103,51 @@
             assert len(args) == 2
             l_args = [self.gen.get_repr(arg) for arg in args]
             l_func.dependencies.update(l_args)
-            l_op = SignedTypeRepr.directly_supported_binary_ops[opname]
-            if l_op in ('shl', 'shr'):  #feel free to refactor this
-                lblock.shift_instruction(
-                    l_op, l_target,
-                    l_args[0], l_args[1])
-            else:
-                lblock.binary_instruction(
-                    l_op, l_target,
-                    l_args[0], l_args[1])
+            lblock.binary_instruction(
+                SignedTypeRepr.directly_supported_binary_ops[opname], l_target,
+                l_args[0], l_args[1])
+
+    def t_op_int_lshift(self, l_target, args, lblock, l_func):
+        # XXX hack: llvm only supports shifts by ubyte args,
+        # so we have to jump through some hoops;
+        # someone on the LLVM list said this would change in the future
+        assert len(args) == 2
+        l_tmp1 = self.gen.get_local_tmp(None, l_func) #using only the name
+        l_tmp2 = self.gen.get_local_tmp(None, l_func)
+        l_tmp3 = self.gen.get_local_tmp(None, l_func)
+        l_args = [self.gen.get_repr(arg) for arg in args]
+        l_func.dependencies.update(l_args)
+        lblock.instruction("%s = setge %s, %s" % (l_tmp1.llvmname(),
+                                                  l_args[1].typed_name(),
+                                                  8 * BYTES_IN_INT))
+        lblock.instruction("%s = cast int %s to ubyte" %
+                           (l_tmp2.llvmname(), l_args[1].llvmname()))
+        lblock.shift_instruction("shl", l_tmp3, l_args[0], l_tmp2)
+        lblock.instruction("%s = select bool %s, int 0, int %s" %
+                           (l_target.llvmname(), l_tmp1.llvmname(),
+                            l_tmp3.llvmname()))
+        lblock.phi_done = True
+
+    def t_op_int_rshift(self, l_target, args, lblock, l_func):
+        # XXX hack: llvm only supports shifts by ubyte args,
+        # so we have to jump through some hoops;
+        # someone on the LLVM list said this would change in the future
+        assert len(args) == 2
+        l_tmp1 = self.gen.get_local_tmp(None, l_func) #using only the name
+        l_tmp2 = self.gen.get_local_tmp(None, l_func)
+        l_tmp3 = self.gen.get_local_tmp(None, l_func)
+        l_args = [self.gen.get_repr(arg) for arg in args]
+        l_func.dependencies.update(l_args)
+        lblock.instruction("%s = setge %s, %s" % (l_tmp1.llvmname(),
+                                                  l_args[1].typed_name(),
+                                                  8 * BYTES_IN_INT))
+        lblock.instruction("%s = cast int %s to ubyte" % (l_tmp2.llvmname(),
+                                                      l_args[1].llvmname()))
+        lblock.shift_instruction("shr", l_tmp3, l_args[0], l_tmp2)
+        lblock.instruction("%s = select bool %s, int 0, int %s" %
+                           (l_target.llvmname(), l_tmp1.llvmname(),
+                            l_tmp3.llvmname()))
+        lblock.phi_done = True
 
     def t_op_int_pos(self, l_target, args, lblock, l_func):
         pass
@@ -133,8 +167,9 @@
     def t_op_int_abs(self, l_target, args, lblock, l_func):
         l_arg = self.gen.get_repr(args[0])
         l_func.dependencies.add(l_arg)
-        lblock.instruction("%s = and int 2147483647, %s" % (l_target.llvmname(),
-                                                            l_arg.llvmname()))
+        lblock.instruction("%s = and int %s, %s" % (l_target.llvmname(),
+                                                    sys.maxint - 1,
+                                                    l_arg.llvmname()))
 
     def typename(self):
         return "int"

Modified: pypy/branch/pycompiler/translator/pickle/genpickle.py
==============================================================================
--- pypy/branch/pycompiler/translator/pickle/genpickle.py	(original)
+++ pypy/branch/pycompiler/translator/pickle/genpickle.py	Fri Jul  1 18:28:54 2005
@@ -17,7 +17,7 @@
 from pypy.translator.gensupp import builtin_base
 from pypy.rpython.rarithmetic import r_int, r_uint
 from pypy.objspace.flow.model import Variable, Constant, SpaceOperation
-from pypy.objspace.flow.model import FunctionGraph, Block, Link
+from pypy.objspace.flow.model import FunctionGraph, Block, Link, Atom
 from pypy.objspace.flow.flowcontext import SpamBlock, EggBlock
 from pypy.annotation.model import SomeInteger, SomeObject, SomeChar, SomeBool
 from pypy.annotation.model import SomeList, SomeString, SomeTuple
@@ -33,195 +33,258 @@
 from types import *
 import types
 
+class AlreadyCreated(Exception): pass
+
 # ____________________________________________________________
 
 
+#XXX Hack: This float is supposed to overflow to inf
+OVERFLOWED_FLOAT = float("1e10000000000000000000000000000000")
+#XXX Hack: and this one to overflow to -inf
+OVERFLOWED_FLOATM = float("-1e10000000000000000000000000000000")
+
 class GenPickle:
 
     def __init__(self, translator, writer = None):
         self.translator = translator
-        self.initcode = [
+        self.writer = writer
+        self.initcode = []
+        self.produce = self._produce()
+        self.produce(
             'from __future__ import %s\n' % ', '.join(all_feature_names) +
             'import new, types, sys',
-            ]
-
-        self.latercode = []    # list of generators generating extra lines
-        self.debugstack = ()   # linked list of nested nameof()
-
-        self.picklenames = {Constant(None):  'None',
-                            Constant(False): 'False',
-                            Constant(True):  'True',
-                            # hack: overflowed float
-                            Constant(float("1e10000000000000000000000000000000")):
-                                'float("1e10000000000000000000000000000000")',
-                            }
+            )
+        self.picklenames = {}  # memoize objects
         for name in all_feature_names + "new types sys".split():
-            self.picklenames[Constant(globals()[name])] = name
+            self.memoize(globals()[name], name)
+        self.memoize((), '()')
         self.namespace = NameManager()
+        self.uniquename = self.namespace.uniquename
         self.namespace.make_reserved_names('None False True')
         self.namespace.make_reserved_names('new types sys')
         self.namespace.make_reserved_names(' '.join(all_feature_names))
+        self.namespace.make_reserved_names('result') # result dict
+        self.result = {}
+        self.simple_const_types = {
+            int: repr,
+            long: repr,
+            float: self.save_float,
+            str: repr,
+            unicode: repr,
+            type(None): repr,
+            bool: repr,
+            }
+        self.typecache = {} # hold types vs. nameof methods
         # we distinguish between the "user program" and other stuff.
         # "user program" will never use save_global.
         self.domains = (
             'pypy.objspace.std.',
+            'pypy.objspace.descroperation',
             'pypy._cache.',
             'pypy.interpreter.',
             'pypy.module.',
+            'pypy.translator.test.',
             '__main__',
             )
         self.shortnames = {
-            SpaceOperation: 'SOP',
             Variable:       'V',
             Constant:       'C',
             Block:          'B',
             SpamBlock:      'SB',
             EggBlock:       'EB',
             Link:           'L',
-            FunctionGraph:  'F',
-            SomeInteger:    'SI',
-            SomeObject:     'SO',
-            SomeChar:       'SC',
-            SomeBool:       'SB',
-            SomeList:       'SL',
-            SomeString:     'SS',
-            SomeTuple:      'ST',
-            SomeInstance:   'SIN',
+            FunctionGraph:  'FG',
+            SomeInteger:    'sI',
+            SomeObject:     'sO',
+            SomeChar:       'sC',
+            SomeBool:       'sB',
+            SomeList:       'sL',
+            SomeString:     'sS',
+            SomeTuple:      'sT',
+            SomeInstance:   'sIn',
+            }
+        self.inline_instances = {
+            SpaceOperation: True,
             }
-        self.writer = writer
 
-    def nameof(self, obj, debug=None, namehint=None):
-        key = Constant(obj)
+    def save_float(self, fl):
+        if fl == OVERFLOWED_FLOAT:
+            return 'float("1e10000000000000000000000000000000")'
+        elif fl == OVERFLOWED_FLOATM:
+            return 'float("-1e10000000000000000000000000000000")'
+        return repr(fl)
+
+    def pickle(self, *args, **kwds):
+        for obj in args:
+            self.nameof(obj)
+        for obj in kwds.values():
+            self.nameof(obj)
+        self.result.update(kwds)
+
+    def finish(self):
+        self.pickle(self.result)
+        self.produce('result = %s' % self.nameof(self.result))
+        if self.writer:
+            self.writer.close()
+
+    def memoize(self, obj, name):
+        self.picklenames[id(obj)] = name
+        return name
+
+    def memoize_unique(self, obj, basename):
+        if id(obj) in self.picklenames:
+            raise AlreadyCreated
+        return self.memoize(obj, self.uniquename(basename))
+
+    def _produce(self):
+        writer = self.writer
+        down = 1234
+        cnt = [0, 0]  # text, calls
+        self.last_progress = ''
+        if writer:
+            write = writer.write
+        else:
+            write = self.initcode.append
+        def produce(text):
+            write(text+'\n')
+            cnt[0] += len(text) + 1
+            cnt[1] += 1
+            if cnt[1] == down:
+                cnt[1] = 0
+                self.progress("%d" % cnt[0])
+        return produce
+
+    def progress(self, txt):
+        back = '\x08' * len(self.last_progress)
+        self.last_progress = txt+' ' # soft space
+        print back+txt,
+
+    def nameof(self, obj):
         try:
-            return self.picklenames[key]
+            try:
+                return self.picklenames[id(obj)]
+            except KeyError:
+                typ = type(obj)
+                return self.simple_const_types[typ](obj)
         except KeyError:
-            if debug:
-                stackentry = debug, obj
-            else:
-                stackentry = obj
-            self.debugstack = (self.debugstack, stackentry)
-            obj_builtin_base = builtin_base(obj)
-            if obj_builtin_base in (object, int, long) and type(obj) is not obj_builtin_base:
-                # assume it's a user defined thingy
-                name = self.nameof_instance(obj)
-            else:
-                for cls in type(obj).__mro__:
-                    meth = getattr(self,
-                                   'nameof_' + ''.join( [
-                                       c for c in cls.__name__
-                                       if c.isalpha() or c == '_'] ),
-                                   None)
-                    if meth:
-                        break
-                else:
-                    raise Exception, "nameof(%r)" % (obj,)
-
-                code = meth.im_func.func_code
-                if namehint and 'namehint' in code.co_varnames[:code.co_argcount]:
-                    name = meth(obj, namehint=namehint)
-                else:
-                    name = meth(obj)
-            self.debugstack, x = self.debugstack
-            assert x is stackentry
-            if name[0].isalpha():
-                # avoid to store things which are used just once
-                self.picklenames[key] = name
+            try:
+                try:
+                    meth = self.typecache[typ]
+                except KeyError:
+                    obj_builtin_base = builtin_base(obj)
+                    if (obj_builtin_base in (object,) + tuple(
+                        self.simple_const_types.keys()) and
+                        typ is not obj_builtin_base):
+                        # assume it's a user defined thingy
+                        meth = self.nameof_instance
+                    else:
+                        for cls in typ.__mro__:
+                            meth = getattr(self, 'nameof_' + ''.join(
+                                [ c for c in cls.__name__
+                                  if c.isalpha() or c == '_'] ), None)
+                            if meth:
+                                break
+                        else:
+                            raise Exception, "nameof(%r)" % (obj,)
+                    self.typecache[typ] = meth
+                name = meth(obj)
+            except AlreadyCreated:
+                name = self.picklenames[id(obj)]
             return name
 
     def nameofargs(self, tup, plain_tuple = False):
         """ a string with the nameofs, concatenated """
         # see if we can build a compact representation
-        for each in tup:
-            if type(each) is tuple and len(each) > 2:
-                break
-        else:
-            ret = ', '.join([self.nameof(arg) for arg in tup])
-            if plain_tuple and len(tup) == 1:
-                ret += ','
-            if len(ret) <= 90:
-                return ret
-        ret = '\n ' + (',\n ').join(
+        ret = ', '.join([self.nameof(arg) for arg in tup])
+        if plain_tuple and len(tup) == 1:
+            ret += ','
+        if len(ret) <= 90:
+            return ret
+        ret = '\n ' + ',\n '.join(
             [self.nameof(arg) for arg in tup]) + ',\n '
         return ret
 
-    def uniquename(self, basename):
-        return self.namespace.uniquename(basename)
-
-    def initcode_python(self, name, pyexpr):
-        # generate init code that will evaluate the given Python expression
-        #self.initcode.append("print 'setting up', %r" % name)
-        self.initcode.append("%s = %s" % (name, pyexpr))
-
     def nameof_object(self, value):
         if type(value) is not object:
-            raise Exception, "nameof(%r)" % (value,)
-        name = self.uniquename('g_object')
-        self.initcode_python(name, "object()")
+            raise Exception, "nameof(%r): type %s not object" % (
+                value, type(value).__name__)
+        name = self.memoize_unique(value, 'g_object')
+        self.produce('%s = object()' % name)
         return name
 
     def nameof_module(self, value):
         # all allowed here, we reproduce ourselves
         if self.is_app_domain(value.__name__):
-            name = self.uniquename('gmod_%s' % value.__name__)
-            self.initcode.append('%s = new.module(%r)\n'
-                                 'sys.modules[%r] = %s'% (
+            name = self.memoize_unique(value, 'gmod_%s' % value.__name__)
+            self.produce('%s = new.module(%r)\n'
+                         'sys.modules[%r] = %s'% (
                 name, value.__name__, value.__name__, name) )
             def initmodule():
-                for k, v in value.__dict__.items():
+                names = value.__dict__.keys()
+                names.sort()
+                for k in names:
                     try:
+                        v = value.__dict__[k]
                         nv = self.nameof(v)
                         yield '%s.%s = %s' % (name, k, nv)
                     except PicklingError:
                         pass
-            self.later(initmodule())
+            for line in initmodule():
+                self.produce(line)
         else:
-            name = self.uniquename(value.__name__)
-            self.initcode_python(name, "__import__(%r)" % (value.__name__,))
+            name = self.memoize_unique(value, value.__name__)
+            self.produce('%s = __import__(%r)' % (name, value.__name__,))
         return name
 
-    def nameof_int(self, value):
-        return repr(value)
-
-    # we don't need to name the following const types.
-    # the compiler folds the consts the same way as we do.
-    # note that true pickling is more exact, here.
-    nameof_long = nameof_float = nameof_bool = nameof_NoneType = nameof_int
-    nameof_str = nameof_unicode = nameof_int
-
-    def skipped_function(self, func):
+    def skipped_function(self, func, reason=None, _dummydict={}):
         # Generates a placeholder for missing functions
         # that raises an exception when called.
         # The original code object is retained in an
         # attribute '_skipped_code'
-        name = self.uniquename('gskippedfunc_' + func.__name__)
-        codename = self.nameof(func.func_code)
-        self.initcode.append('def %s(*a,**k):\n' 
-                             '  raise NotImplementedError' % name)
-        self.initcode.append('%s._skipped_code = %s' % (name, codename) )
-        return name
+        skipname = 'gskippedfunc_' + func.__name__
+        funcname = func.__name__
+        # need to handle this specially
+        if id(func) in self.picklenames:
+            raise AlreadyCreated
+        # generate code object before the skipped func (reads better)
+        func_code = getattr(func, 'func_code', None) # maybe builtin
+        self.nameof(func_code)
+        if reason:
+            text = 'skipped: %r, see _skipped_code attr: %s' % (
+                reason, funcname)
+        else:
+            text = 'skipped, see _skipped_code attr: %s' % funcname
+        def dummy(*args, **kwds):
+            raise NotImplementedError, text
+        _dummydict['__builtins__'] = __builtins__
+        skippedfunc = new.function(dummy.func_code, _dummydict, skipname, (),
+                                   dummy.func_closure)
+        skippedfunc._skipped_code = func_code
+        name = self.nameof(skippedfunc)
+        return self.memoize(func, name)
 
     def nameof_staticmethod(self, sm):
         # XXX XXX XXXX
         func = sm.__get__(42.5)
-        name = self.uniquename('gsm_' + func.__name__)
         functionname = self.nameof(func)
-        self.initcode_python(name, 'staticmethod(%s)' % functionname)
+        name = self.memoize_unique(sm, 'gsm_' + func.__name__)
+        self.produce('%s = staticmethod(%s)' % (name, functionname))
         return name
 
     def nameof_instancemethod(self, meth):
+        func = self.nameof(meth.im_func)
+        typ = self.nameof(meth.im_class)
         if meth.im_self is None:
             # no error checking here
-            return self.nameof(meth.im_func)
+            name = self.memoize_unique(meth, 'gmeth_' + func)
+            self.produce('%s = %s.%s' % (name, typ, meth.__name__))
         else:
             ob = self.nameof(meth.im_self)
-            func = self.nameof(meth.im_func)
-            typ = self.nameof(meth.im_class)
-            name = self.uniquename('gmeth_'+meth.im_func.__name__)
-            self.initcode_python(name, 'new.instancemethod(%s, %s, %s)' % (
-                func, ob, typ))
-            return name
+            name = self.memoize_unique(meth, 'gumeth_'+ func)
+            self.produce('%s = new.instancemethod(%s, %s, %s)' % (
+                name, func, ob, typ))
+        return name
 
-    # new version: save if we don't know
     def should_translate_attr(self, pbc, attr):
         ann = self.translator.annotator
         if ann:
@@ -241,22 +304,31 @@
             # builtin function
             # where does it come from? Python2.2 doesn't have func.__module__
             for modname, module in sys.modules.items():
-                # here we don't ignore extension modules
+                # we don't ignore extension modules here, but pure-Python
+                # modules (by their __file__ suffix) are skipped
+                if not module: continue
+                if hasattr(module, '__file__'):
+                    fname = module.__file__.lower()
+                    pyendings = '.py', '.pyc', '.pyo'
+                    if [ending for ending in pyendings if fname.endswith(ending)]:
+                        continue
                 if func is getattr(module, func.__name__, None):
                     break
             else:
-                raise Exception, '%r not found in any built-in module' % (func,)
-            name = self.uniquename('gbltin_' + func.__name__)
+                #raise Exception, '%r not found in any built-in module' % (func,)
+                return self.skipped_function(
+                    func, 'not found in any built-in module')
+            name = self.memoize_unique(func, 'gbltin_' + func.__name__)
             if modname == '__builtin__':
-                self.initcode_python(name, func.__name__)
+                self.produce('%s = %s' % (name, func.__name__))
             else:
                 modname = self.nameof(module)
-                self.initcode_python(name, '%s.%s' % (modname, func.__name__))
+                self.produce('%s = %s.%s' % (name, modname, func.__name__))
         else:
             # builtin (bound) method
-            name = self.uniquename('gbltinmethod_' + func.__name__)
             selfname = self.nameof(func.__self__)
-            self.initcode_python(name, '%s.%s' % (selfname, func.__name__))
+            name = self.memoize_unique(func, 'gbltinmethod_' + func.__name__)
+            self.produce('%s = %s.%s' % (name, selfname, func.__name__))
         return name
 
     def nameof_classobj(self, cls):
@@ -265,27 +337,21 @@
 
         try:
             return self.save_global(cls)
-        except PicklingError:
+        except PicklingError, e:
             pass
         
         metaclass = "type"
         if issubclass(cls, Exception):
             # if cls.__module__ == 'exceptions':
             # don't rely on this, py.magic redefines AssertionError
-            if getattr(__builtin__,cls.__name__,None) is cls:
-                name = self.uniquename('gexc_' + cls.__name__)
-                self.initcode_python(name, cls.__name__)
+            if getattr(__builtin__, cls.__name__, None) is cls:
+                name = self.memoize_unique(cls, 'gexc_' + cls.__name__)
+                self.produce('%s = %s' % (name, cls.__name__))
                 return name
-            #else:
-            #    # exceptions must be old-style classes (grr!)
-            #    metaclass = "&PyClass_Type"
-        # For the moment, use old-style classes exactly when the
-        # pypy source uses old-style classes, to avoid strange problems.
         if not isinstance(cls, type):
             assert type(cls) is ClassType
             metaclass = "types.ClassType"
 
-        name = self.uniquename('gcls_' + cls.__name__)
         basenames = [self.nameof(base) for base in cls.__bases__]
         def initclassobj():
             content = cls.__dict__.items()
@@ -302,7 +368,6 @@
                 if isapp:
                     if (isinstance(value, staticmethod) and value.__get__(1) not in
                         self.translator.flowgraphs and self.translator.frozen):
-                        print value
                         continue
                     if isinstance(value, classmethod):
                         doc = value.__get__(cls).__doc__
@@ -310,7 +375,6 @@
                             continue
                     if (isinstance(value, FunctionType) and value not in
                         self.translator.flowgraphs and self.translator.frozen):
-                        print value
                         continue
                 if key in ignore:
                     continue
@@ -322,15 +386,15 @@
         baseargs = ", ".join(basenames)
         if baseargs:
             baseargs = '(%s)' % baseargs
+        name = self.memoize_unique(cls, 'gcls_' + cls.__name__)
         ini = 'class %s%s:\n  __metaclass__ = %s' % (name, baseargs, metaclass)
         if '__slots__' in cls.__dict__:
             ini += '\n  __slots__ = %r' % cls.__slots__
-        self.initcode.append(ini)
-        self.initcode.append('%s.name = %r' % (name, cls.__name__))
-        # squeeze it out, now# self.later(initclassobj())
-        self.picklenames[Constant(cls)] = name
+        self.produce(ini)
+        self.produce('%s.__name__ = %r' % (name, cls.__name__))
+        self.produce('%s.__module__ = %r' % (name, cls.__module__))
         for line in initclassobj():
-            self.initcode.append(line)
+            self.produce(line)
         return name
 
     nameof_class = nameof_classobj   # for Python 2.2
@@ -357,6 +421,8 @@
         type(type.__dict__['__basicsize__']): "type(type.__dict__['__basicsize__'])",
         # type 'instancemethod':
         type(Exception().__init__): 'type(Exception().__init__)',
+        # type 'listiterator':
+        type(iter([])): 'type(iter([]))',
         }
     descriptor_filter = {}
     for _key in typename_mapping.keys():
@@ -367,46 +433,54 @@
     def nameof_type(self, cls):
         if cls.__module__ != '__builtin__':
             return self.nameof_classobj(cls)   # user-defined type
-        name = self.uniquename('gtype_%s' % cls.__name__)
+        name = self.memoize_unique(cls, 'gtype_%s' % cls.__name__)
         if getattr(__builtin__, cls.__name__, None) is cls:
             expr = cls.__name__    # type available from __builtin__
         elif cls in types.__dict__.values():
             for key, value in types.__dict__.items():
                 if value is cls:
                     break
-            self.initcode.append('from types import %s as %s' % (
+            self.produce('from types import %s as %s' % (
                 key, name))
             return name
         else:
             expr = self.typename_mapping[cls]
-        self.initcode_python(name, expr)
+        self.produce('%s = %s' % (name, expr))
         return name
 
     def nameof_tuple(self, tup):
         chunk = 20
-        name = self.uniquename('T%d' % len(tup))
+        # first create all arguments
+        for i in range(0, len(tup), chunk):
+            self.nameofargs(tup[i:i+chunk], True)
+        # see if someone else created us meanwhile
+        name = self.memoize_unique(tup, 'T%d' % len(tup))
         argstr = self.nameofargs(tup[:chunk], True)
-        self.initcode_python(name, '(%s)' % argstr)
+        self.produce('%s = (%s)' % (name, argstr))
         for i in range(chunk, len(tup), chunk):
             argstr = self.nameofargs(tup[i:i+chunk], True)
-            self.initcode.append('%s += (%s)' % (name, argstr) )
+            self.produce('%s += (%s)' % (name, argstr) )
         return name
 
     def nameof_list(self, lis):
         chunk = 20
-        name = self.uniquename('L%d' % len(lis))
         def initlist():
-            chunk = 20
             for i in range(0, len(lis), chunk):
                 argstr = self.nameofargs(lis[i:i+chunk])
                 yield '%s += [%s]' % (name, argstr)
-        self.initcode_python(name, '[]')
-        self.later(initlist())
+        name = self.memoize_unique(lis, 'L%d' % len(lis))
+        self.produce('%s = []' % name)
+        for line in initlist():
+            self.produce(line)
         return name
 
-    def is_app_domain(self, modname):
+    def is_app_domain(self, modname, exclude=()):
         for domain in self.domains:
-            if modname.startswith(domain):
+            if domain.endswith('.') and modname.startswith(domain):
+                # handle subpaths
+                return True
+            if modname == domain and modname not in exclude:
+                # handle exact module names
                 return True
         return False
 
@@ -414,6 +488,8 @@
         if '__name__' in dic:
             module = dic['__name__']
             try:
+                if type(module) is str and self.is_app_domain(module):
+                    raise ImportError
                 __import__(module)
                 mod = sys.modules[module]
             except (ImportError, KeyError, TypeError):
@@ -421,51 +497,56 @@
             else:
                 if dic is mod.__dict__ and not self.is_app_domain(module):
                     dictname = module.split('.')[-1] + '__dict__'
-                    dictname = self.uniquename(dictname)
-                    self.initcode.append('from %s import __dict__ as %s' % (
-                            module, dictname) )
-                    self.picklenames[Constant(dic)] = dictname
+                    dictname = self.memoize_unique(dic, dictname)
+                    self.produce('from %s import __dict__ as %s' % (
+                                 module, dictname) )
                     return dictname
-        name = self.uniquename('D%d' % len(dic))
         def initdict():
-            for k in dic:
+            keys = dic.keys()
+            keys.sort()
+            told = False
+            for k in keys:
                 try:
-                    if type(k) is str:
-                        yield '%s[%r] = %s' % (name, k, self.nameof(dic[k]))
-                    else:
-                        yield '%s[%s] = %s' % (name, self.nameof(k),
-                                               self.nameof(dic[k]))
+                    nk, nv = self.nameof(k), self.nameof(dic[k])
+                    yield '%s[%s] = %s' % (name, nk, nv)
                 except PicklingError:
                     pass
-        self.initcode_python(name, '{}')
-        self.later(initdict())
+                else:
+                    # some sanity check
+                    if type(k) is int:
+                        if k in self.picklenames:
+                            print ('WARNING: this dict most likely contains '
+                                   'the id of some object!!')
+                            print 'name of object: %s' % self.picklenames[k]
+                        elif k == id(dic[k]):
+                            print ('WARNING: this dict most likely contains '
+                                   'the id of one of its objects!!')
+                            if not told:
+                                print dic
+                                told = True
+        name = self.memoize_unique(dic, 'D%d' % len(dic))
+        self.produce('%s = {}' % name)
+        for line in initdict():
+            self.produce(line)
         return name
 
     # strange prebuilt instances below, don't look too closely
     # XXX oh well.
     def nameof_member_descriptor(self, md):
-        name = self.uniquename('gdescriptor_%s_%s' % (
-            md.__objclass__.__name__, md.__name__))
         cls = self.nameof(md.__objclass__)
-        self.initcode_python(name, '%s.__dict__[%r]' % (cls, md.__name__))
+        name = self.memoize_unique(md, 'gdescriptor_%s_%s' % (
+            md.__objclass__.__name__, md.__name__))
+        self.produce('%s = %s.__dict__[%r]' % (name, cls, md.__name__))
         return name
     nameof_getset_descriptor  = nameof_member_descriptor
     nameof_method_descriptor  = nameof_member_descriptor
     nameof_wrapper_descriptor = nameof_member_descriptor
 
     def nameof_instance(self, instance):
-        klass = instance.__class__
-        if klass in self.shortnames:
-            name = self.uniquename(self.shortnames[klass])
-        else:
-            name = self.uniquename('ginst_' + klass.__name__)
-        cls = self.nameof(klass)
-        if hasattr(klass, '__base__'):
-            base_class = builtin_base(instance)
-            base = self.nameof(base_class)
-        else:
-            base_class = None
-            base = cls
+        if isinstance(instance, Atom):
+            # cannot reconstruct this, it *must* be
+            # the one from model
+            return self.save_global(instance)
         def initinstance():
             if hasattr(instance, '__setstate__'):
                 # the instance knows what to do
@@ -481,14 +562,33 @@
                 "%s has no dict and no __setstate__" % name)
             content = restorestate.items()
             content.sort()
+            attrs = []
             for key, value in content:
                 if self.should_translate_attr(instance, key):
                     if hasattr(value, '__doc__'):
                         doc = value.__doc__
                         if type(doc) is str and doc.lstrip().startswith('NOT_RPYTHON'):
                             continue
-                    line = '%s.%s = %s' % (name, key, self.nameof(value))
-                    yield line
+                    attrs.append( (key, self.nameof(value)) )
+            for k, v in attrs:
+                yield '%s.%s = %s' % (name, k, v)
+
+        klass = instance.__class__
+        cls = self.nameof(klass)
+        if hasattr(klass, '__base__'):
+            base_class = builtin_base(instance)
+            base = self.nameof(base_class)
+        else:
+            base_class = None
+            base = cls
+        if klass in self.inline_instances:
+            immediate = True
+        else:
+            if klass in self.shortnames:
+                name = self.memoize_unique(instance, self.shortnames[klass])
+            else:
+                name = self.memoize_unique(instance, 'ginst_' + klass.__name__)
+            immediate = False
         if hasattr(instance, '__reduce_ex__'):
             try:
                 reduced = instance.__reduce_ex__()
@@ -501,7 +601,7 @@
                 assert not hasattr(instance, '__dict__'), ('wrong assumptions'
                     ' about __slots__ in %s instance without __setstate__,'
                     ' please update %s' % (cls.__name__, __name__) )
-                restorestate = slotted.__getstate__(instance)
+                restorestate = _get(instance)
                 restorer = _rec
                 restoreargs = klass,
             else:
@@ -526,50 +626,50 @@
             else:
                 restorestate = instance.__dict__
         restoreargstr = self.nameofargs(restoreargs)
+        if immediate:
+            assert restorestate is None
+            return '%s(%s)' % (restorename, restoreargstr)
         if isinstance(klass, type):
-            self.initcode.append('%s = %s(%s)' % (name, restorename,
-                                                   restoreargstr))
+            self.produce('%s = %s(%s)' % (name, restorename, restoreargstr))
         else:
-            self.initcode.append('%s = new.instance(%s)' % (name, cls))
+            self.produce('%s = new.instance(%s)' % (name, cls))
         if restorestate is not None:
-            self.later(initinstance())
+            for line in initinstance():
+                self.produce(line)
         return name
 
     def save_global(self, obj):
         # this is almost similar to pickle.py
         name = obj.__name__
-        key = Constant(obj)
-        if key not in self.picklenames:
-            module = getattr(obj, "__module__", None)
-            if module is None:
-                module = whichmodule(obj, name)
-            if self.is_app_domain(module):
-                # not allowed to import this
-                raise PicklingError('%s belongs to the user program' %
-                                    name)
-            try:
-                __import__(module)
-                mod = sys.modules[module]
-                klass = getattr(mod, name)
-            except (ImportError, KeyError, AttributeError):
+        module = getattr(obj, "__module__", None)
+        if module is None:
+            module = whichmodule(obj, name)
+        if self.is_app_domain(module):
+            # not allowed to import this
+            raise PicklingError('%s belongs to the user program' %
+                                name)
+        try:
+            __import__(module)
+            mod = sys.modules[module]
+            klass = getattr(mod, name)
+        except (ImportError, KeyError, AttributeError):
+            raise PicklingError(
+                "Can't pickle %r: it's not found as %s.%s" %
+                (obj, module, name))
+        else:
+            if klass is not obj:
                 raise PicklingError(
-                    "Can't pickle %r: it's not found as %s.%s" %
+                    "Can't pickle %r: it's not the same object as %s.%s" %
                     (obj, module, name))
-            else:
-                if klass is not obj:
-                    raise PicklingError(
-                        "Can't pickle %r: it's not the same object as %s.%s" %
-                        (obj, module, name))
-            # from here we do our own stuff
-            restorename = self.uniquename(obj.__name__)
-            if restorename != obj.__name__:
-                self.initcode.append('from %s import %s as %s' % (
-                    module, obj.__name__, restorename) )
-            else:
-                self.initcode.append('from %s import %s' % (
-                    module, obj.__name__) )
-            self.picklenames[key] = restorename
-        return self.picklenames[key]
+        # from here we do our own stuff
+        restorename = self.memoize_unique(obj, obj.__name__)
+        if restorename != obj.__name__:
+            self.produce('from %s import %s as %s' % (
+                         module, obj.__name__, restorename) )
+        else:
+            self.produce('from %s import %s' % (
+                         module, obj.__name__) )
+        return restorename
 
     def nameof_function(self, func):
         # look for skipped functions
@@ -577,42 +677,51 @@
             if func not in self.translator.flowgraphs:
                 # see if this is in translator's domain
                 module = whichmodule(func, func.__name__)
-                if self.is_app_domain(module):
-                    return self.skipped_function(func)
+                if self.is_app_domain(module, exclude=['__main__']):
+                    # see if this buddy has already been skipped in an earlier save
+                    if not hasattr(func, '_skipped_code'):
+                        return self.skipped_function(func,
+                            'not found in translator\'s flowgraphs')
         else:
             if (func.func_doc and
                 func.func_doc.lstrip().startswith('NOT_RPYTHON')):
-                return self.skipped_function(func)
+                return self.skipped_function(func, 'tagged as NOT_RPYTHON')
         try:
             return self.save_global(func)
         except PicklingError:
             pass
         args = (func.func_code, func.func_globals, func.func_name,
                 func.func_defaults, func.func_closure)
-        pyfuncobj = self.uniquename('gfunc_' + func.__name__)
-        self.initcode.append('%s = new.function(%s)' % (pyfuncobj,
-                             self.nameofargs(args)) )
+        argstr = self.nameofargs(args)
+        if hasattr(func, '_skipped_code'):
+            name = self.memoize_unique(func, func.__name__)
+        else:
+            name = self.memoize_unique(func, 'gfunc_' + func.__name__)
+        self.produce('%s = new.function(%s)' % (name, argstr) )
         if func.__dict__:
-            for k, v in func.__dict__.items():
-                try:
-                    self.initcode.append('%s.%s = %s' % (
-                        pyfuncobj, k, self.nameof(v)) )
-                except PicklingError:
-                    pass
-        return pyfuncobj
+            def initfunction():
+                items = func.__dict__.items()
+                items.sort()
+                for k, v in items:
+                    try:
+                        yield '%s.%s = %s' % (name, k, self.nameof(v))
+                    except PicklingError:
+                        pass
+            for line in initfunction():
+                self.produce(line)
+        return name
 
     def nameof_cell(self, cel):
+        # no need to name cells. Their content is what is shared.
         obj = break_cell(cel)
-        pycell = self.uniquename('gcell_' + self.nameof(obj))
-        self.initcode.append('%s = %s(%s)' % (pycell, self.nameof(make_cell),
-                                              self.nameof(obj)) )
-        return pycell
+        return '%s(%s)' % (self.nameof(make_cell), self.nameof(obj))
 
     def nameof_property(self, prop):
-        pyprop = self.uniquename('gprop_')
-        self.initcode.append('%s = property(%s)' % (pyprop, self.nameofargs(
-            (prop.fget, prop.fset, prop.fdel, prop.__doc__))) )
-        return pyprop
+        argstr = self.nameofargs((prop.fget, prop.fset, prop.fdel,
+                                  prop.__doc__))
+        name = self.memoize_unique(prop, 'gprop_')
+        self.produce('%s = property(%s)' % (name, argstr) )
+        return name
 
     def nameof_code(self, code):
         args = (code.co_argcount, code.co_nlocals, code.co_stacksize,
@@ -621,9 +730,9 @@
                 code.co_firstlineno, code.co_lnotab, code.co_freevars,
                 code.co_cellvars)
         argstr = self.nameofargs(args)
-        codeobj = self.uniquename('gcode_' + code.co_name)
-        self.initcode.append('%s = new.code(%s)' % (codeobj, argstr))
-        return codeobj
+        name = self.memoize_unique(code, 'gcode_' + code.co_name)
+        self.produce('%s = new.code(%s)' % (name, argstr))
+        return name
 
     def nameof_file(self, fil):
         if fil is sys.stdin:  return "sys.stdin"
@@ -633,46 +742,10 @@
 
     def nameof_methodwrapper(self, wp):
         # this object should be enhanced in CPython!
-        reprwp = repr(wp)
-        name = wp.__name__
-        def dummy_methodwrapper():
-            return reprwp + (': method %s of unknown object '
-                'cannot be reconstructed, sorry!' % name )
-        return self.nameof(dummy_methodwrapper)
-
-    def later(self, gen):
-        self.latercode.append((gen, self.debugstack))
+        msg = '%r: method %s of unknown object cannot be reconstructed' % (
+            wp, wp.__name__)
+        return self.skipped_function(wp, msg)
 
-    def collect_initcode(self):
-        writer = self.writer
-        while self.latercode:
-            gen, self.debugstack = self.latercode.pop()
-            #self.initcode.extend(gen) -- eats TypeError! bad CPython!
-            for line in gen:
-                self.initcode.append(line)
-            self.debugstack = ()
-            if writer:
-                for line in self.initcode:
-                    writer.write(line)
-                del self.initcode[:]
-        if writer:
-            writer.close()
-
-    def getfrozenbytecode(self):
-        self.initcode.append('')
-        source = '\n'.join(self.initcode)
-        del self.initcode[:]
-        co = compile(source, '<initcode>', 'exec')
-        originalsource = source
-        small = zlib.compress(marshal.dumps(co))
-        source = """if 1:
-            import zlib, marshal
-            exec marshal.loads(zlib.decompress(%r))""" % small
-        # Python 2.2 SyntaxError without newline: Bug #501622
-        source += '\n'
-        co = compile(source, '<initcode>', 'exec')
-        del source
-        return marshal.dumps(co), originalsource
 
 def make_cell(obj):
     def func():
@@ -693,5 +766,10 @@
 def _rec(klass, base=object, state=None):
     return _reconstructor(klass, base, state)
 
+def _get(obj):
+    return slotted.__getstate__(obj)
+
 def _set(obj, *args):
     slotted.__setstate__(obj, args)
+
+__all__ = ['GenPickle']
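
A condensed, hypothetical sketch, not repository code, of the memoization
scheme GenPickle now uses: objects are cached under id(obj) instead of under
Constant(obj), and a producer bails out with AlreadyCreated when a recursive
nameof() call has already emitted the object in the meantime (produce_code_for
below stands in for the real nameof_* dispatch):

class AlreadyCreated(Exception):
    pass

class TinyPickler:
    def __init__(self):
        self.picklenames = {}        # id(obj) -> generated name
        self.count = 0

    def memoize_unique(self, obj, basename):
        # called by a producer once it is about to emit code for obj
        if id(obj) in self.picklenames:
            raise AlreadyCreated
        self.count += 1
        name = '%s_%d' % (basename, self.count)
        self.picklenames[id(obj)] = name
        return name

    def nameof(self, obj):
        try:
            return self.picklenames[id(obj)]          # cheap memo hit
        except KeyError:
            try:
                return self.produce_code_for(obj)
            except AlreadyCreated:
                return self.picklenames[id(obj)]

    def produce_code_for(self, obj):
        # stand-in for the real nameof_* methods, which may recurse via nameof()
        return self.memoize_unique(obj, 'g_' + type(obj).__name__)

p = TinyPickler()
print p.nameof([1, 2]), p.nameof('x'), p.nameof('x')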

Modified: pypy/branch/pycompiler/translator/pickle/loader.py
==============================================================================
--- pypy/branch/pycompiler/translator/pickle/loader.py	(original)
+++ pypy/branch/pycompiler/translator/pickle/loader.py	Fri Jul  1 18:28:54 2005
@@ -2,6 +2,7 @@
 
 class Loader:
     def __init__(self, fname):
+        self.opened = False
         self.f = self.open_file(fname)
 
     def open_file(self, fname):
@@ -10,23 +11,29 @@
     def next_block(self):
         raise SyntaxError, "implement next_block"
 
-    def load(self):
+    def load(self, progress=None):
         dic = {}
         for blk in self.next_block():
             exec blk in dic
+            if progress:
+                progress()
         try:
-            return dic['ginst_Translator']
+            return dic['result']
         finally:
             self.close()
 
     def close(self):
-        self.f.close()
+        if self.opened:
+            self.f.close()
 
 
 class TextLoader(Loader):
 
     def open_file(self, fname):
-        return file(fname)
+        if type(fname) is str:
+            self.opened = True
+            return file(fname)
+        return fname # should be a file-like object
 
     def next_block(self):
         data = self.f.read().split('## SECTION ##\n')
@@ -37,7 +44,10 @@
     """ load compiled code from a ZIP file """
         
     def open_file(self, fname):
-        return zipfile.ZipFile(fname, "r")
+        if type(fname) is str:
+            self.opened = True
+            return zipfile.ZipFile(fname, "r")
+        return fname
 
     def next_block(self):
         root = self.f.read('root')
@@ -48,3 +58,5 @@
             dump = self.f.read(name)
             assert md5.new(dump).hexdigest() == name, "broken checksum"
             yield marshal.loads(dump)
+
+__all__ = ['Loader', 'TextLoader', 'ZipLoader']
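
A minimal, self-contained sketch, not repository code, of the protocol
TextLoader implements: the source is split on the '## SECTION ##' marker, each
block is exec'd into one shared namespace, and the 'result' entry of that
namespace is what load() returns:

SOURCE = "x = 6 * 7\n## SECTION ##\nresult = {'answer': x}\n"

def load_text(source):
    dic = {}
    for blk in source.split('## SECTION ##\n'):
        exec blk in dic
    return dic['result']

print load_text(SOURCE)    # {'answer': 42}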

Modified: pypy/branch/pycompiler/translator/pickle/main.py
==============================================================================
--- pypy/branch/pycompiler/translator/pickle/main.py	(original)
+++ pypy/branch/pycompiler/translator/pickle/main.py	Fri Jul  1 18:28:54 2005
@@ -1,5 +1,42 @@
+import sys
+from pypy.translator.pickle.genpickle import GenPickle
+from pypy.translator.pickle.writer import Writer, TextWriter, ZipWriter
+from pypy.translator.pickle.loader import Loader, TextLoader, ZipLoader
+
 def load(fname):
-    pass
+    loader = _select(fname)[0]
+    assert loader, 'only .py and .zip files supported'
+    print "Loading:",
+    def progress():
+        sys.stdout.write('.')
+    ret = loader(fname).load(progress)
+    print
+    return ret
+
+def save(translator, fname, **objects):
+    writer = _select(fname)[1]
+    assert writer, 'only .py and .zip files supported'
+    assert objects, 'please provide objects to be saved as keywords'
+    pickler = GenPickle(translator, writer(fname))
+    hold = sys.getrecursionlimit()
+    if hold < 5000:
+        sys.setrecursionlimit(5000)
+    try:
+        pickler.pickle(**objects)
+    finally:
+        sys.setrecursionlimit(hold)
+    pickler.finish()
+    return pickler  # for debugging purposes
+
+# and that's all, folks!
+# _________________________________________________________________
+
+def _select(fname):
+    name = fname.lower()
+    if name.endswith('.py'):
+        return TextLoader, TextWriter
+    elif name.endswith('.zip'):
+        return ZipLoader, ZipWriter
+    else:
+        return None, None
 
-def save(translator, fname):
-    pass
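
Usage, as exercised by the translate_pypy.py changes earlier in this diff (file
names are illustrative, and a PyPy checkout is assumed to be importable):
whatever keyword arguments are handed to save() come back as the keys of the
dictionary returned by load().

from pypy.translator.pickle.main import save, load

# after a (possibly partial) analysis:
#   save(t, 'snapshot.zip', trans=t, inputtypes=inputtypes,
#        targetspec=targetspec, targetspec_dic=targetspec_dic, options=options)
# in a later run, started with "-load snapshot.zip":
#   loaded_dic = load('snapshot.zip')
#   t = loaded_dic['trans']
#   inputtypes = loaded_dic['inputtypes']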

Modified: pypy/branch/pycompiler/translator/pickle/writer.py
==============================================================================
--- pypy/branch/pycompiler/translator/pickle/writer.py	(original)
+++ pypy/branch/pycompiler/translator/pickle/writer.py	Fri Jul  1 18:28:54 2005
@@ -6,6 +6,7 @@
         self.chunksize = 100000
         self.count = 0
         self.blocknum = 0
+        self.opened = False
         self.f = self.open_file(fname)
 
     def open_file(self, fname):
@@ -15,17 +16,18 @@
         self.pieces.append(text)
         self.count += len(text) + 1
         if self.count >= self.chunksize:
-            src = '\n'.join(self.pieces)
+            src = ''.join(self.pieces)
             del self.pieces[:]
             self.count -= self.chunksize
             self.putblock(src)
             self.blocknum += 1
 
     def close(self):
-        src = '\n'.join(self.pieces)
+        src = ''.join(self.pieces)
         self.putblock(src)
         self.finalize()
-        self.f.close()
+        if self.opened:
+            self.f.close()
 
     def finalize(self):
         pass
@@ -34,10 +36,13 @@
 class TextWriter(Writer):
 
     def open_file(self, fname):
-        return file(fname, 'w')
+        if type(fname) is str:
+            self.opened = True
+            return file(fname, 'w')
+        return fname # should be a file-like object
 
     def putblock(self, src):
-        print >> self.f, src
+        self.f.write(src)
         print >> self.f, '## SECTION ##'
     
 class ZipWriter(Writer):
@@ -48,7 +53,10 @@
         self.blocknames = []
         
     def open_file(self, fname):
-        return zipfile.ZipFile(fname, "w", zipfile.ZIP_DEFLATED)
+        if type(fname) is str:
+            self.opened = True
+            return zipfile.ZipFile(fname, "w", zipfile.ZIP_DEFLATED)
+        return fname
 
     def putblock(self, src):
         cod = compile(src, 'block_%d' % self.blocknum, 'exec')
@@ -62,3 +70,5 @@
         digest = md5.new(dump).hexdigest()
         self.f.writestr(digest, dump)
         self.f.writestr('root', digest)
+
+__all__ = ['Writer', 'TextWriter', 'ZipWriter']
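
A small demonstration, not repository code, of why the join separator above
changed from '\n' to '': the pickler's produce() already appends '\n' to every
line it hands to Writer.write(), so joining with '\n' again would double-space
the generated source:

pieces = ['a = 1\n', 'b = 2\n']
print repr('\n'.join(pieces))   # 'a = 1\n\nb = 2\n'  (spurious blank line)
print repr(''.join(pieces))     # 'a = 1\nb = 2\n'    (what putblock() expects)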

Modified: pypy/branch/pycompiler/translator/simplify.py
==============================================================================
--- pypy/branch/pycompiler/translator/simplify.py	(original)
+++ pypy/branch/pycompiler/translator/simplify.py	Fri Jul  1 18:28:54 2005
@@ -52,7 +52,7 @@
     Example:
 
     try:
-        ovfcheck(array1[idx-1] += array2[idx+1])
+        array1[idx-1] = ovfcheck(array1[idx-1] + array2[idx+1])
     except OverflowError:
         ...
 
@@ -341,6 +341,10 @@
         iter get '''.split():
     CanRemove[_op] = True
 del _op
+CanRemoveBuiltins = {
+    isinstance: True,
+    hasattr: True,
+    }
 
 def transform_dead_op_vars_in_blocks(blocks):
     """Remove dead operations and variables that are passed over a link
@@ -415,8 +419,11 @@
                     # ... 
                     if op.args and isinstance(op.args[0], Constant):
                         func = op.args[0].value
-                        if func is isinstance:
-                            del block.operations[i]
+                        try:
+                            if func in CanRemoveBuiltins:
+                                del block.operations[i]
+                        except TypeError:   # func is not hashable
+                            pass
 
         # look for output variables never used
         # warning: this must be completely done *before* we attempt to
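
A hypothetical, self-contained illustration, not repository code, of why calls
to the builtins listed in CanRemoveBuiltins may be deleted: isinstance() and
hasattr() are treated as having no observable side effects, so a simple_call
operation whose result variable is never read can be dropped outright:

def drop_unused_pure_calls(operations, used_vars, removable=(isinstance, hasattr)):
    kept = []
    for target, func, args in operations:    # (result var, callee, args)
        if func in removable and target not in used_vars:
            continue                          # pure call, result unused: drop it
        kept.append((target, func, args))
    return kept

ops = [('v1', isinstance, ('x', int)),        # dropped: v1 is never used
       ('v2', len, ('lst',))]                 # kept: len is not in the removable set
print drop_unused_pure_calls(ops, {'v2': True})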

Modified: pypy/branch/pycompiler/translator/test/rpystone.py
==============================================================================
--- pypy/branch/pycompiler/translator/test/rpystone.py	(original)
+++ pypy/branch/pycompiler/translator/test/rpystone.py	Fri Jul  1 18:28:54 2005
@@ -1,21 +1,5 @@
 #! /usr/bin/env python
 
-def setslow(X):
-    global XF1, XF2, XF3, XP1, XP2, XP3, XP4, XP5, XP6, XP7, XP8
-    XF1 = True
-    XF2 = X
-    XF3 = True
-    XP1 = X
-    XP2 = X
-    XP3 = True
-    XP4 = X
-    XP5 = True
-    XP6 = True
-    XP7 = True
-    XP8 = X
-
-setslow(True)
-
 """
 "PYSTONE" Benchmark Program
 
@@ -65,7 +49,7 @@
 class Record:
 
     def __init__(self, PtrComp = None, Discr = 0, EnumComp = 0,
-                       IntComp = 0, StringComp = 0):
+                       IntComp = 0, StringComp = ""):
         self.PtrComp = PtrComp
         self.Discr = Discr
         self.EnumComp = EnumComp
@@ -110,8 +94,13 @@
     #global PtrGlbNext
 
     starttime = clock()
-    for i in range(loops):
-        pass
+    #for i in range(loops):
+    # this is bad with very large values of loops
+    # XXX xrange support?
+    i = 0
+    while i < loops:
+        i += 1
+    # the above is most likely to vanish in C :-(
     nulltime = clock() - starttime
 
     g.PtrGlbNext = Record()
@@ -127,29 +116,34 @@
     EnumLoc = None # addition for flow space
     starttime = clock()
 
-    for i in range(loops):
-        if XP5:Proc5()
-        if XP4:Proc4()
+    #for i in range(loops):
+    # this is bad with very large values of loops
+    # XXX xrange support?
+    i = 0
+    while i < loops:
+        Proc5()
+        Proc4()
         IntLoc1 = 2
         IntLoc2 = 3
         String2Loc = "DHRYSTONE PROGRAM, 2'ND STRING"
         EnumLoc = Ident2
-        if XF2:g.BoolGlob = not Func2(String1Loc, String2Loc)
+        g.BoolGlob = not Func2(String1Loc, String2Loc)
         while IntLoc1 < IntLoc2:
             IntLoc3 = 5 * IntLoc1 - IntLoc2
             IntLoc3 = Proc7(IntLoc1, IntLoc2)
             IntLoc1 = IntLoc1 + 1
-        if XP8:Proc8(g.Array1Glob, g.Array2Glob, IntLoc1, IntLoc3)
-        if XP1:g.PtrGlb = Proc1(g.PtrGlb)
+        Proc8(g.Array1Glob, g.Array2Glob, IntLoc1, IntLoc3)
+        g.PtrGlb = Proc1(g.PtrGlb)
         CharIndex = 'A'
         while CharIndex <= g.Char2Glob:
-            if XF1 and EnumLoc == Func1(CharIndex, 'C'):
-                if XP6:EnumLoc = Proc6(Ident1)
+            if EnumLoc == Func1(CharIndex, 'C'):
+                EnumLoc = Proc6(Ident1)
             CharIndex = chr(ord(CharIndex)+1)
         IntLoc3 = IntLoc2 * IntLoc1
         IntLoc2 = IntLoc3 / IntLoc1
         IntLoc2 = 7 * (IntLoc3 - IntLoc2) - IntLoc1
-        if XP2:IntLoc1 = Proc2(IntLoc1)
+        IntLoc1 = Proc2(IntLoc1)
+        i += 1
 
     benchtime = clock() - starttime - nulltime
     return benchtime, (loops / benchtime)
@@ -159,12 +153,12 @@
     PtrParIn.IntComp = 5
     NextRecord.IntComp = PtrParIn.IntComp
     NextRecord.PtrComp = PtrParIn.PtrComp
-    if XP3:NextRecord.PtrComp = Proc3(NextRecord.PtrComp)
+    NextRecord.PtrComp = Proc3(NextRecord.PtrComp)
     if NextRecord.Discr == Ident1:
         NextRecord.IntComp = 6
-        if XP6:NextRecord.EnumComp = Proc6(PtrParIn.EnumComp)
+        NextRecord.EnumComp = Proc6(PtrParIn.EnumComp)
         NextRecord.PtrComp = g.PtrGlb.PtrComp
-        if XP7:NextRecord.IntComp = Proc7(NextRecord.IntComp, 10)
+        NextRecord.IntComp = Proc7(NextRecord.IntComp, 10)
     else:
         PtrParIn = NextRecord.copy()
     NextRecord.PtrComp = None
@@ -189,7 +183,7 @@
         PtrParOut = g.PtrGlb.PtrComp
     else:
         g.IntGlob = 100
-    if XP7:g.PtrGlb.IntComp = Proc7(10, g.IntGlob)
+    g.PtrGlb.IntComp = Proc7(10, g.IntGlob)
     return PtrParOut
 
 def Proc4():
@@ -208,7 +202,7 @@
 
 def Proc6(EnumParIn):
     EnumParOut = EnumParIn
-    if XF3 and not Func3(EnumParIn):
+    if not Func3(EnumParIn):
         EnumParOut = Ident4
     if EnumParIn == Ident1:
         EnumParOut = Ident1
@@ -254,7 +248,7 @@
 def Func2(StrParI1, StrParI2):
     IntLoc = 1
     while IntLoc <= 1:
-        if XF1 and Func1(StrParI1[IntLoc], StrParI2[IntLoc+1]) == Ident1:
+        if Func1(StrParI1[IntLoc], StrParI2[IntLoc+1]) == Ident1:
             CharLoc = 'A'
             IntLoc = IntLoc + 1
     if CharLoc >= 'W' and CharLoc <= 'Z':

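The while loops introduced in Proc0 above replace range(loops) because, with Python 2 semantics, range() materialises a full list of loops integers before the first iteration, which is exactly what you do not want for very large loop counts. Schematically (illustration only, not code from rpystone):

    def null_loop_with_range(loops):
        # allocates a list of 'loops' integers up front (Python 2 range semantics)
        for i in range(loops):
            pass

    def null_loop_with_while(loops):
        # constant memory, works for arbitrarily large 'loops'
        i = 0
        while i < loops:
            i += 1
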
Modified: pypy/branch/pycompiler/translator/test/snippet.py
==============================================================================
--- pypy/branch/pycompiler/translator/test/snippet.py	(original)
+++ pypy/branch/pycompiler/translator/test/snippet.py	Fri Jul  1 18:28:54 2005
@@ -363,7 +363,7 @@
         raise Exception
     return 0
 
-def try_raise_choose(n):
+def try_raise_choose(n=int):
     try:
         raise_choose(n)
     except E1:

Modified: pypy/branch/pycompiler/translator/test/test_annrpython.py
==============================================================================
--- pypy/branch/pycompiler/translator/test/test_annrpython.py	(original)
+++ pypy/branch/pycompiler/translator/test/test_annrpython.py	Fri Jul  1 18:28:54 2005
@@ -582,13 +582,13 @@
 
     def test_flow_rev_numbers(self):
         a = self.RPythonAnnotator()
-        s = a.build_types(snippet.flow_rev_numbers, [])
+        s = a.build_types(snippet.flow_rev_numbers, [int])
         assert s.knowntype == int
         assert not s.is_constant() # !
 
     def test_methodcall_is_precise(self):
         a = self.RPythonAnnotator()
-        s = a.build_types(snippet.methodcall_is_precise, [])
+        s = a.build_types(snippet.methodcall_is_precise, [bool])
         classes = a.bookkeeper.userclasses
         assert 'x' not in classes[snippet.CBase].attrs
         assert (classes[snippet.CSub1].attrs['x'].s_value ==
@@ -637,7 +637,7 @@
         def assert_(x):
             assert x,"XXX"
         a = self.RPythonAnnotator()
-        s = a.build_types(assert_, [])
+        s = a.build_types(assert_, [int])
         assert s.const is None
 
     def test_string_and_none(self):
@@ -810,7 +810,7 @@
         assert acc1 is acc2 is acc3
 
         assert len(acc1.objects) == 3
-        assert acc1.attrs == {'v1': True, 'v2': True}
+        assert dict.fromkeys(acc1.attrs) == {'v1': None, 'v2': None}
 
         assert access_sets[c1] is acc1
         py.test.raises(KeyError, "access_sets[object()]")
@@ -1264,6 +1264,122 @@
         s = a.build_types(f, [int]*8)
         assert s == annmodel.SomeTuple([annmodel.SomeInteger(nonneg=True)] * 8)
 
+    def test_attr_moving_into_parent(self):
+        class A: pass
+        class B(A): pass
+        a1 = A()
+        b1 = B()
+        b1.stuff = a1
+        a1.stuff = None
+        def f():
+            return b1.stuff
+        a = self.RPythonAnnotator()
+        s = a.build_types(f, [])
+        assert isinstance(s, annmodel.SomeInstance)
+        assert not s.can_be_None
+        assert s.classdef.cls is A
+
+    def test_class_attribute(self):
+        class A:
+            stuff = 42
+        class B(A):
+            pass
+        def f():
+            b = B()
+            return b.stuff
+        a = self.RPythonAnnotator()
+        s = a.build_types(f, [])
+        assert s == a.bookkeeper.immutablevalue(42)
+
+    def test_attr_recursive_getvalue(self):
+        class A: pass
+        a2 = A()
+        a2.stuff = None
+        a1 = A()
+        a1.stuff = a2
+        def f():
+            return a1.stuff
+        a = self.RPythonAnnotator()
+        s = a.build_types(f, [])
+        assert isinstance(s, annmodel.SomeInstance)
+        assert s.can_be_None
+        assert s.classdef.cls is A
+
+    def test_long_list_recursive_getvalue(self):
+        class A: pass
+        lst = []
+        for i in range(500):
+            a1 = A()
+            a1.stuff = lst
+            lst.append(a1)
+        def f():
+            A().stuff = None
+            return (A().stuff, lst)[1]
+        a = self.RPythonAnnotator()
+        s = a.build_types(f, [])
+        assert isinstance(s, annmodel.SomeList)
+        s_item = s.listdef.listitem.s_value
+        assert isinstance(s_item, annmodel.SomeInstance)
+
+    def test_immutable_dict(self):
+        d = {4: "hello",
+             5: "world"}
+        def f(n):
+            return d[n]
+        a = self.RPythonAnnotator()
+        s = a.build_types(f, [int])
+        assert isinstance(s, annmodel.SomeString)
+
+    def test_immutable_recursive_list(self):
+        l = []
+        l.append(l)
+        def f():
+            return l
+        a = self.RPythonAnnotator()
+        s = a.build_types(f, [])
+        assert isinstance(s, annmodel.SomeList)
+        s_item = s.listdef.listitem.s_value
+        assert isinstance(s_item, annmodel.SomeList)
+        assert s_item.listdef.same_as(s.listdef)
+
+    def test_defaults_with_list_or_dict(self):
+        def fn1(a=[]):
+            return a
+        def fn2(a={}):
+            return a
+        def f():
+            fn1()
+            fn2()
+            return fn1([6, 7]), fn2({2: 3, 4: 5})
+        a = self.RPythonAnnotator()
+        s = a.build_types(f, [])
+        assert isinstance(s, annmodel.SomeTuple)
+        s1, s2 = s.items
+        assert not s1.is_constant()
+        assert not s2.is_constant()
+        assert isinstance(s1.listdef.listitem. s_value, annmodel.SomeInteger)
+        assert isinstance(s2.dictdef.dictkey.  s_value, annmodel.SomeInteger)
+        assert isinstance(s2.dictdef.dictvalue.s_value, annmodel.SomeInteger)
+
+    def test_pbc_union(self):
+        class A:
+            def meth(self):
+                return 12
+        class B(A):
+            pass
+        class C(B):
+            pass
+        def f(i):
+            if i:
+                f(0)
+                x = B()
+            else:
+                x = C()
+            return x.meth()
+        a = self.RPythonAnnotator()
+        s = a.build_types(f, [int])
+        assert s == a.bookkeeper.immutablevalue(12)
+
 
 def g(n):
     return [0,1,2,n]

Modified: pypy/branch/pycompiler/translator/test/test_cltrans.py
==============================================================================
--- pypy/branch/pycompiler/translator/test/test_cltrans.py	(original)
+++ pypy/branch/pycompiler/translator/test/test_cltrans.py	Fri Jul  1 18:28:54 2005
@@ -43,7 +43,7 @@
             py.test.skip("Common Lisp neither configured nor detected.")
 
     def test_if(self):
-        cl_if = make_cl_func(t.if_then_else)
+        cl_if = make_cl_func(t.if_then_else, [object, object, object])
         assert cl_if(True, 50, 100) == 50
         assert cl_if(False, 50, 100) == 100
         assert cl_if(0, 50, 100) == 100
@@ -61,16 +61,16 @@
         assert cl_perfect(28) == True
 
     def test_bool(self):
-        cl_bool = make_cl_func(t.my_bool)
+        cl_bool = make_cl_func(t.my_bool, [object])
         assert cl_bool(0) == False
         assert cl_bool(42) == True
         assert cl_bool(True) == True
 
     def test_contains(self):
-        my_contains = make_cl_func(t.my_contains)
+        my_contains = make_cl_func(t.my_contains, [list, int])
         assert my_contains([1, 2, 3], 1)
         assert not my_contains([1, 2, 3], 0)
-        is_one_or_two = make_cl_func(t.is_one_or_two)
+        is_one_or_two = make_cl_func(t.is_one_or_two, [int])
         assert is_one_or_two(2)
         assert not is_one_or_two(3)
 
@@ -88,9 +88,9 @@
         assert f1(1) == 2
         f2 = make_cl_func(t.while_func, [int])
         assert f2(10) == 55
-        f3 = make_cl_func(t.simple_id)
+        f3 = make_cl_func(t.simple_id, [int])
         assert f3(9) == 9
-        f4 = make_cl_func(t.branch_id)
+        f4 = make_cl_func(t.branch_id, [int, int, int])
         assert f4(1, 2, 3) == 2
         assert f4(0, 2, 3) == 3
         f5 = make_cl_func(t.int_id, [int])
@@ -124,7 +124,7 @@
         assert result.val == (
                           '#(#() #(0) #(1) #(0 1) #(2) #(0 2) #(1 2) #(0 1 2))')
     def test_yast(self):
-        cl_sum = make_cl_func(t.yast) # yet another sum test
+        cl_sum = make_cl_func(t.yast, [list]) # yet another sum test
         assert cl_sum(range(12)) == 66
 
 

Deleted: /pypy/branch/pycompiler/translator/tool/flowtrace.py
==============================================================================
--- /pypy/branch/pycompiler/translator/tool/flowtrace.py	Fri Jul  1 18:28:54 2005
+++ (empty file)
@@ -1,394 +0,0 @@
-"""
-
-This is an experimental hack - use it at your peril!  I (rxe) needed some sanity
-check when (attempting) to write gencpp.py and this ended up being a useful
-exercise.  I doubt it is of much use to anyone else or to the rest of the project, but it
-might have some interesting possibilities later on once we have full translation.
-:-)
-
-The idea is simple - take the basic blocks output via the flow object space and
-evaluate them against the standard object space.  It is effectively the same as
-creating a C extension to the CPython interpreter to use the CPython API instead
-of interpreting bytecodes - except in this case we don't need to compile
-anything and all space operations are emitted on the fly.  One might call this a
-flow interpreter - I wouldn't go that far!
-
-"""
-import autopath
-from pypy.objspace.flow import FlowObjSpace
-from pypy.objspace.flow.model import traverse, Constant, Variable, Block, Link
-from pypy.translator.simplify import simplify_graph
-from pypy.interpreter.baseobjspace import OperationError
-from pypy.interpreter.argument import Arguments
-
-
-class FlowTracer(object):
-
-    def __init__(self, flow_space, space, debug = False, trace = True):
-        self.space = space
-        self.flow_space = flow_space
-        self.trace = trace
-        self.debug = debug
-
-    def pprint(self, v):        
-        s = self.space.unwrap(self.space.repr(v.e_value))
-        if len(s) > 30:
-            s = s[:27] + "..."
-
-            if isinstance(v, Constant):
-                s = "Const(%s)" % s
-
-            elif isinstance(v, Variable):
-                s = "%s(%s)" % (v, s)
-
-            else:
-                assert False, "really?"
-        return s
-
-
-    def get_blocknames(self, graph):
-        blocknames = {}
-        def visit(n):        
-            if isinstance(n, Block):
-                blocknames[n] = 'block%d' % len(blocknames)
-
-        traverse(visit, graph)
-        return blocknames
-
-    
-    def wrap_constants(self, graph):
-        all = []
-        
-        def visit(n):
-            # Note - be careful with uniqueitems and constants, as there can
-            # be multiple constants of the same value (XXX why?)
-            if isinstance(n, Block):
-                values = []
-                values += n.inputargs
-                for op in n.operations:
-                    values += op.args
-
-                for ii in values:
-                    all.append(ii)
-
-                all.append(n.exitswitch)
-                
-            if isinstance(n, Link):
-                values = n.args
-                for ii in values:
-                    all.append(ii)
-                
-        traverse(visit, graph)
-        for ii in all:
-            
-            if isinstance(ii, Constant) :
-                ii.e_value = self.space.wrap(ii.value)
-
-            else:
-                assert (isinstance(ii, Variable) or ii is None)
-
-    def wrap_linkexits(self, graph):
-        all = []
-        
-        def visit(n):                        
-            if isinstance(n, Link):
-                all.append(n)
-
-        traverse(visit, graph)
-
-        for l in all:
-            l.exitcase = self.space.wrap(l.exitcase)
-
-        
-    def execute_function(self, graph, *args_w):
-
-        curblock = graph.startblock
-        assert len(curblock.inputargs) == len(args_w)        
-
-        # We add value as evaluated values during interpretation
-        # to variables, constants and exit switches.
-        # Variables are done on assignment (ie lazily)
-        for input, arg in zip(curblock.inputargs, args_w):
-            input.e_value = arg
-
-        # Here we add value attribute with wrap
-        self.wrap_constants(graph)
-        self.wrap_linkexits(graph)
-        
-        blocknames = self.get_blocknames(graph)
-        last_exception = None
-        while True:
-            
-            if self.trace:
-                print 'Entering %s:' % blocknames[curblock]
-                print '  Input args :- %s' % (", ".join(map(self.pprint, curblock.inputargs)))
-                print '  Operations :-'
-
-            for op in curblock.operations:
-                
-                # Why does op.args have a list in it?
-                opargs = [a.e_value for a in op.args]
-                if self.trace:
-                    print '    %s  = space.%s(%s)' % (op.result, 
-                                                   op.opname,
-                                                   ", ".join(map(self.pprint, op.args)))
-
-                if op.opname == "exception":
-                    assert (len(opargs) == 1)
-                    # XXX What are we supposed to do with the argument???
-                    if last_exception is not None:
-                        res = last_exception
-                        last_exception = None
-                    else:
-                        res = self.space.w_None
-
-                elif op.opname == "call_args":
-                    assert (len(opargs) >= 2)
-                    shape = self.space.unwrap(opargs[1])
-                    args = Arguments.fromshape(shape, *opargs[2:])
-                    res = self.call_args(opargs[0], args)
-
-                else:
-                    # More special cases
-                    spaceop = getattr(self.space, op.opname)
-                    
-                    if op.opname in ("newlist", "newdict", "newtuple"):
-                        # These expect a list, not a *args
-                        res = spaceop(opargs)
-
-                    else:
-                        try:
-                            res = spaceop(*opargs)
-
-                            # More special case
-                            if op.opname == "is_true":
-                                # Rewrap it!
-                                res = self.space.wrap(res)
-                                
-                        except OperationError, exc:
-                            last_exception = exc.w_type
-                            res = self.space.w_None
-                        
-                op.result.e_value = res
-                
-                if self.trace:
-                    # Exit cases will likely not be space objects
-                    if curblock.exits and curblock.exitswitch == op.result:
-                        print '    %s := exit(%s)' % (op.result, op.result.e_value)
-                    else:
-                        print '    %s := %s' % (op.result, self.pprint(op.result))
-
-            # Switch to next block
-            if curblock.exits:
-
-                # exits (safe code)                
-                exit_link = None
-                if len(curblock.exits) == 1:
-                    exit_link = curblock.exits[0]
-
-                else:
-                    exit_res = curblock.exitswitch.e_value
-                    for link in curblock.exits:
-                        if self.space.is_true(self.space.eq(exit_res, link.exitcase)):
-                            exit_link = link
-                            break
-
-                assert exit_link is not None
-                
-                if self.trace:
-                    print '  Exit to %s :- ' % blocknames[exit_link.target]
-
-                sourceargs = exit_link.args
-                targetargs = exit_link.target.inputargs
-                assert len(sourceargs) == len(targetargs)
-
-                for s, t in zip(sourceargs, targetargs):                    
-                    if self.trace:
-                        print "    %s = %s" % (t, s)
-
-                    t.e_value = s.e_value
-
-                curblock = exit_link.target
-
-                if self.trace:
-                    print
-                
-            elif len(curblock.inputargs) == 2:   # exc_cls, exc_value
-                exc_cls, exc_value = curblock.inputargs
-                if self.trace:
-                    print "Raising -",
-                    print self.pprint(exc_cls), self.pprint(exc_value)
-                raise exc_cls.e_value, exc_value.e_value
-
-            else:
-                result = curblock.inputargs[0]
-                if self.trace:
-                    print "Returning -", self.pprint(result)
-
-                return result.e_value
-
-
-    def call_args(self, w_func, args):
-        
-        func = self.space.unwrap(w_func)
-        if hasattr(func, "func_code"):        
-            graph = self.flow_space.build_flow(func)
-            simplify_graph(graph)
-            if self.debug:
-                debug(func)
-            scope_w = args.parse(func.name, func.code.signature(), func.defs_w)
-            return self.execute_function(graph, *scope_w)
-
-        else:
-            # XXX We could try creating the flow graph by running another
-            # flow objspace under self.space.  Hmmm - if only I had
-            # a bigger computer.
-
-            # Instead we cheat (this is great fun when it is a fake type :-))
-            if self.trace:
-                print "WOA! Cheating!", w_func
-
-            return self.space.call_args(w_func, args)
-            
-
-    def call(self, f, *args):
-        w = self.space.wrap
-        args_w = [w(ii) for ii in args]
-        w_func = w(f)
-
-        res = self.call_args(w_func, Arguments(self.space, args_w))
-        return self.space.unwrap(res)
-            
-
-def debug(func):
-    """Shows the control flow graph with annotations if computed.
-    Requires 'dot' and pygame."""
-    from pypy.translator.tool.pygame.graphdisplay import GraphDisplay
-    from pypy.translator.tool.graphpage import FlowGraphPage
-    from pypy.translator.translator import Translator
-    t = Translator(func)
-    t.simplify()
-    #t.annotate([int])
-    FlowGraphPage(t).display()
-
-def timeit(num, func, *args):
-    from time import time as now
-    start = now()
-    for i in xrange(num):
-        print func(*args)
-    return now() - start
-
-if __name__ == '__main__':
-    from pypy.objspace.std import Space
-    space = Space()
-
-    def create_std_func(app_func):
-
-        import new 
-        from pypy.interpreter.gateway import app2interp
-
-        # Horrible hack (name needs to start with "app_")
-        app_func = new.function(app_func.func_code,
-                                app_func.func_globals,
-                                "app_" + app_func.__name__)
-        
-        # Create our function
-        func_gw = app2interp(app_func)
-        func = func_gw.get_function(space)
-        w_func = space.wrap(func)
-          
-        def f(*args):
-            args_w = [space.wrap(ii) for ii in args]
-            args_ = Arguments(space, args_w)
-            w_result = space.call_args(w_func, args_)
-            return space.unwrap(w_result) 
-        return f
-    
-    def create_flow_func(f):
-        flow_space = FlowObjSpace()
-        interpreter = FlowTracer(flow_space, space)
-        def func(*args):
-            return interpreter.call(f, *args)
-        return func
-    
-    def do(f, *args):
-        print "doing %s(%s)" % (f.__name__, ", ".join(map(str, args)))
-        f_flow = create_flow_func(f)
-        res = f_flow(*args)
-        f_norm = create_std_func(f)
-        res_norm = f_norm(*args)
-        assert res == res_norm 
-        return res
-
-    def do_flow_only(f, *args):
-        print "doing %s(%s)" % (f.__name__, ", ".join(map(str, args)))
-        f_flow = create_flow_func(f)
-        res = f_flow(*args)
-        return res
-
-    #/////////////////////////////////////////////////////////////////////////////
-
-    def tests():
-        from pypy.translator.test import snippet
-        
-        tests = [
-            (snippet.if_then_else, 1, 2, 3),
-            (snippet.if_then_else, 0, 2, 3),
-            (snippet.my_gcd, 256, 192),
-            (snippet.is_perfect_number, 81),
-            (snippet.my_bool, 1),
-            (snippet.my_bool, 0),
-            (snippet.two_plus_two,),
-            #(snippet.sieve_of_eratosthenes,),
-            (snippet.simple_func, 10),
-            (snippet.nested_whiles, 1, 10),
-            (snippet.simple_func, 10),
-            (snippet.builtinusage,),
-            (snippet.poor_man_range, 10),
-            (snippet.poor_man_rev_range, 10),
-            (snippet.simple_id, 2) ,
-            (snippet.branch_id, 1, "k", 1.0) ,
-            (snippet.branch_id, False, "k", 1.0) ,
-            (snippet.builtinusage,),
-            (snippet.yast, [1,2,3,4,5]),
-            (snippet.time_waster, 5),
-            (snippet.half_of_n, 20),
-            (snippet.int_id, 20),
-            (snippet.greet, "world"),
-            (snippet.choose_last,),
-            #(snippet.choose_last,), XXX Why does repeating this break?
-            (snippet.poly_branch, 1),
-            (snippet.s_and, 1, 1),
-            (snippet.s_and, 0, 1),
-            (snippet.s_and, 1, 0),
-            (snippet.s_and, 0, 0),
-            (snippet.break_continue, 15),
-            (snippet.reverse_3, ("k", 1, 1.0)),
-            (snippet.finallys, ("k", 1, 1.0)),
-            (snippet.finallys, ("k",)),
-            (snippet.finallys, []),
-            (snippet._append_five, []),
-            (snippet._append_five, [1,2,3]),
-            ]
-        for ii in tests:
-            print do(*ii) 
-
-        tests = [
-            (snippet.factorial, 4),
-            (snippet.factorial2, 4),
-            (snippet.call_five,),
-            (snippet.build_instance,),
-            (snippet.set_attr,),
-            (snippet.merge_setattr, 0),        
-            (snippet.merge_setattr, 1),        
-            # XXX These don't work from test.snippet (haven't tried them again lately)
-            #(snippet.inheritance1,),        
-            #(snippet.inheritance2,),        
-
-            ]
-
-        for ii in tests:
-            print do_flow_only(*ii) 
-
-    tests()
-

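The deleted flowtrace.py revolved around one idea: walk the flow graph's basic blocks and evaluate each space operation against a real object space, following the exit link whose case matches the exitswitch value. That control loop can be sketched in miniature as below, with plain dicts standing in for the real Block/Link model and operator functions standing in for space operations (none of these names come from the deleted module):

    import operator

    def run_graph(graph, args, apply_op):
        # bind the input args of a block, run its operations through apply_op,
        # then follow the exit link whose case matches the exitswitch value;
        # a block with no exits returns its (single) input value
        block = graph['start']
        values = list(args)
        while True:
            env = dict(zip(block['inputargs'], values))
            for opname, opargs, result in block['operations']:
                # names are looked up in env; anything unknown is a constant
                env[result] = apply_op(opname, [env.get(a, a) for a in opargs])
            if not block['exits']:
                return env[block['inputargs'][0]]
            if block['exitswitch'] is None:
                case, target, outargs = block['exits'][0]
            else:
                switch = env[block['exitswitch']]
                for case, target, outargs in block['exits']:
                    if case == switch:
                        break
            values = [env.get(a, a) for a in outargs]
            block = graph[target]

    # abs(x), hand-written as a three-block graph
    graph = {
        'start': {'inputargs': ['x'],
                  'operations': [('lt', ['x', 0], 'c')],
                  'exitswitch': 'c',
                  'exits': [(True, 'neg', ['x']), (False, 'ret', ['x'])]},
        'neg':   {'inputargs': ['x'],
                  'operations': [('neg', ['x'], 'r')],
                  'exitswitch': None,
                  'exits': [(None, 'ret', ['r'])]},
        'ret':   {'inputargs': ['v'], 'operations': [],
                  'exitswitch': None, 'exits': []},
    }
    table = {'lt': operator.lt, 'neg': operator.neg}

    def apply_op(name, args):
        return table[name](*args)

    assert run_graph(graph, [-7], apply_op) == 7
    assert run_graph(graph, [3], apply_op) == 3
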
Modified: pypy/branch/pycompiler/translator/tool/graphpage.py
==============================================================================
--- pypy/branch/pycompiler/translator/tool/graphpage.py	(original)
+++ pypy/branch/pycompiler/translator/tool/graphpage.py	Fri Jul  1 18:28:54 2005
@@ -133,8 +133,9 @@
                 return
             for var in vars:
                 if hasattr(var, 'concretetype'):
-                    info = self.links.get(var.name, var.name)
-                    info = '(%s) %s' % (var.concretetype, info)
+                    #info = self.links.get(var.name, var.name)
+                    #info = '(%s) %s' % (var.concretetype, info)
+                    info = str(var.concretetype)
                     self.links[var.name] = info
         for graph in graphs:
             traverse(visit, graph)

Modified: pypy/branch/pycompiler/translator/tool/pygame/drawgraph.py
==============================================================================
--- pypy/branch/pycompiler/translator/tool/pygame/drawgraph.py	(original)
+++ pypy/branch/pycompiler/translator/tool/pygame/drawgraph.py	Fri Jul  1 18:28:54 2005
@@ -230,7 +230,7 @@
     def getfont(self, size):
         if size in self.FONTCACHE:
             return self.FONTCACHE[size]
-        elif size < 4:
+        elif size < 5:
             self.FONTCACHE[size] = None
             return None
         else:

Modified: pypy/branch/pycompiler/translator/tool/pygame/graphclient.py
==============================================================================
--- pypy/branch/pycompiler/translator/tool/pygame/graphclient.py	(original)
+++ pypy/branch/pycompiler/translator/tool/pygame/graphclient.py	Fri Jul  1 18:28:54 2005
@@ -13,13 +13,48 @@
 DOT_FILE   = udir.join('graph.dot')
 PLAIN_FILE = udir.join('graph.plain')
 
+import py
+def dot2plain(dotfile, plainfile): 
+    if 0: 
+        cmdexec('dot -Tplain %s>%s' % (dotfile, plainfile))
+    elif 0: 
+        gw = py.execnet.SshGateway('codespeak.net')
+        channel = gw.remote_exec("""
+            import py
+            content = channel.receive()
+            fn = py.path.local.make_numbered_dir('makegraph').join('graph.dot')
+            fn.write(content)
+            tfn = fn.new(ext='.plain')
+            py.process.cmdexec("dot -Tplain %s >%s" %(fn, tfn))
+            channel.send(tfn.read())
+        """) 
+        channel.send(py.path.local(dotfile).read())
+        plainfile = py.path.local(plainfile) 
+        plainfile.write(channel.receive())
+    else:
+        import urllib
+        content = py.path.local(dotfile).read()
+        request = urllib.urlencode({'dot': content})
+        try:
+            urllib.urlretrieve('http://codespeak.net/pypy/convertdot.cgi',
+                               str(plainfile),
+                               data=request)
+        except IOError:
+            success = False
+        else:
+            plainfile = py.path.local(plainfile)
+            success = (plainfile.check(file=1) and
+                       plainfile.read().startswith('graph '))
+        if not success:
+            print "NOTE: failed to use codespeak's convertdot.cgi, trying local 'dot'"
+            cmdexec('dot -Tplain %s>%s' % (dotfile, plainfile))
 
 class ClientGraphLayout(GraphLayout):
 
     def __init__(self, connexion, key, dot, links, **ignored):
         # generate a temporary .dot file and call dot on it
         DOT_FILE.write(dot)
-        cmdexec('dot -Tplain %s>%s' % (str(DOT_FILE),str(PLAIN_FILE)))
+        dot2plain(DOT_FILE, PLAIN_FILE) 
         GraphLayout.__init__(self, PLAIN_FILE)
         self.connexion = connexion
         self.key = key

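The new dot2plain() above tries codespeak's convertdot.cgi first and only falls back to running 'dot' locally when the download fails or the result does not look like Graphviz 'plain' output (which always starts with the word 'graph'). Ignoring the disabled branches, the control flow is roughly the following sketch, where fetch_remote and run_local_dot are stand-ins for the urllib and cmdexec calls in the diff:

    def dot2plain_sketch(dotfile, plainfile, fetch_remote, run_local_dot):
        # try the remote converter, then validate what came back
        try:
            fetch_remote(dotfile, plainfile)
            content = open(plainfile).read()
            success = content.startswith('graph ')
        except IOError:
            success = False
        if not success:
            # fall back to a local run, e.g. cmdexec('dot -Tplain %s>%s' % ...)
            run_local_dot(dotfile, plainfile)
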
Modified: pypy/branch/pycompiler/translator/transform.py
==============================================================================
--- pypy/branch/pycompiler/translator/transform.py	(original)
+++ pypy/branch/pycompiler/translator/transform.py	Fri Jul  1 18:28:54 2005
@@ -10,9 +10,17 @@
 import types
 from pypy.objspace.flow.model import SpaceOperation
 from pypy.objspace.flow.model import Variable, Constant, Block, Link
-from pypy.objspace.flow.model import last_exception
+from pypy.objspace.flow.model import last_exception, checkgraph
 from pypy.translator.annrpython import CannotSimplify
 from pypy.annotation import model as annmodel
+from pypy.annotation.specialize import MemoTable
+
+
+def checkgraphs(self, blocks):
+    for block in blocks:
+        fn = self.annotated[block]
+        graph = self.translator.flowgraphs[fn]
+        checkgraph(graph)
 
 def fully_annotated_blocks(self):
     """Ignore blocked blocks."""
@@ -32,20 +40,19 @@
 def transform_allocate(self, block_subset):
     """Transforms [a] * b to alloc_and_set(b, a) where b is int."""
     for block in block_subset:
-        operations = block.operations[:]
-        n_op = len(operations)
-        for i in range(0, n_op-1):
-            op1 = operations[i]
-            op2 = operations[i+1]
-            if (op1.opname == 'newlist' and
-                len(op1.args) == 1 and
-                op2.opname == 'mul' and
-                op1.result is op2.args[0] and
-                self.gettype(op2.args[1]) is int):
+        length1_lists = {}   # maps 'c' to 'a', in the above notation
+        for i in range(len(block.operations)):
+            op = block.operations[i]
+            if (op.opname == 'newlist' and
+                len(op.args) == 1):
+                length1_lists[op.result] = op.args[0]
+            elif (op.opname == 'mul' and
+                  op.args[0] in length1_lists and
+                  self.gettype(op.args[1]) is int):
                 new_op = SpaceOperation('alloc_and_set',
-                                        (op2.args[1], op1.args[0]),
-                                        op2.result)
-                block.operations[i+1:i+2] = [new_op]
+                                        (op.args[1], length1_lists[op.args[0]]),
+                                        op.result)
+                block.operations[i] = new_op
 
 # a[b:c]
 # -->
@@ -153,10 +160,20 @@
                             if not specialcase:
                                 op.args[0] = Constant(specialized_callb.prebuiltinstances.keys()[0])
                             else:
-                                if op.opname == 'simple_call':
-                                    op.opname = intern('simple_specialcase')
+                                if op.opname != 'simple_call':
+                                    assert 0, "not supported: call_args to a specialized function"
+                                callable = callb.prebuiltinstances.keys()[0]
+                                tag = getattr(callable, '_annspecialcase_', None)
+                                if tag == 'specialize:memo':
+                                    arglist_s = [self.binding(v) for v in op.args[1:]]
+                                    memo_table = MemoTable(self.bookkeeper, 
+                                                           callable, 
+                                                           self.binding(op.result), 
+                                                           arglist_s)
+                                    op.opname = intern('call_memo')
+                                    op.args[0] = Constant(memo_table)
                                 else:
-                                    op.opname = intern('specialcase_args')
+                                    op.opname = intern('call_specialcase')
 
 default_extra_passes = [
     transform_specialization,
@@ -167,10 +184,14 @@
     """Apply set of transformations available."""
     # WARNING: this produces incorrect results if the graph has been
     #          modified by t.simplify() after it had been annotated.
-    if ann.translator:
-        ann.translator.checkgraphs()
     if block_subset is None:
-        block_subset = list(fully_annotated_blocks(ann))
+        block_subset = fully_annotated_blocks(ann)
+    d = {}
+    for block in block_subset:
+        d[block] = True
+    block_subset = d
+    if ann.translator:
+        checkgraphs(ann, block_subset)
     transform_dead_code(ann, block_subset)
     for pass_ in extra_passes:
         pass_(ann, block_subset)
@@ -178,4 +199,5 @@
     # chance to remove dependency on certain variables
     transform_dead_op_vars(ann, block_subset)
     if ann.translator:
-        ann.translator.checkgraphs()
+        checkgraphs(ann, block_subset)
+ 

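The reworked transform_allocate above no longer requires the newlist and the mul to sit next to each other: it first records every single-element newlist result seen in the block, then rewrites any later mul of such a result by an int into alloc_and_set. A toy version over (opname, args, result) tuples, rather than the real SpaceOperation/Block model, behaves like this:

    def transform_allocate_sketch(operations, is_int):
        # operations: list of (opname, args, result) tuples, rewritten in place
        length1_lists = {}                  # maps newlist result -> its single element
        for i, (opname, args, result) in enumerate(operations):
            if opname == 'newlist' and len(args) == 1:
                length1_lists[result] = args[0]
            elif (opname == 'mul' and args[0] in length1_lists
                  and is_int(args[1])):
                operations[i] = ('alloc_and_set',
                                 (args[1], length1_lists[args[0]]),
                                 result)

    ops = [('newlist', ('a',), 'c'),
           ('add', ('x', 'y'), 'z'),        # an unrelated operation in between
           ('mul', ('c', 'b'), 'r')]
    transform_allocate_sketch(ops, is_int=lambda v: True)
    assert ops[2] == ('alloc_and_set', ('b', 'a'), 'r')
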
Modified: pypy/branch/pycompiler/translator/translator.py
==============================================================================
--- pypy/branch/pycompiler/translator/translator.py	(original)
+++ pypy/branch/pycompiler/translator/translator.py	Fri Jul  1 18:28:54 2005
@@ -4,7 +4,7 @@
 translation-related code.  It can be used for interactive testing of the
 translator; see pypy/bin/translator.py.
 """
-import autopath, os, sys
+import autopath, os, sys, types
 
 from pypy.objspace.flow.model import *
 from pypy.translator.simplify import simplify_graph
@@ -39,9 +39,20 @@
         if self.entrypoint:
             self.getflowgraph()
 
+    def __getstate__(self):
+        # try to produce things a bit more ordered
+        return self.entrypoint, self.functions, self.__dict__
+
+    def __setstate__(self, args):
+        assert len(args) == 3
+        self.__dict__.update(args[2])
+        assert args[0] is self.entrypoint and args[1] is self.functions
+
     def getflowgraph(self, func=None, called_by=None, call_tag=None):
         """Get the flow graph for a function (default: the entry point)."""
         func = func or self.entrypoint
+        if not isinstance(func, types.FunctionType):
+            raise Exception, "getflowgraph() expects a function, got %s" % func
         try:
             graph = self.flowgraphs[func]
         except KeyError:
@@ -136,10 +147,15 @@
         self.rtyper = RPythonTyper(self.annotator)
         self.rtyper.specialize()
 
+    def backend_optimizations(self):
+        from pypy.translator.backendoptimization import backend_optimizations
+        for graph in self.flowgraphs.values():
+            backend_optimizations(graph)
+
     def source(self, func=None):
         """Returns original Python source.
         
-        Returns <interactive> for functions written while the
+        Returns <interactive> for functions written during the
         interactive session.
         """
         func = func or self.entrypoint

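The __getstate__/__setstate__ pair added to Translator simply pickles entrypoint and functions ahead of the instance dict, so the pickle comes out in a more predictable order, and then checks on unpickling that the redundant copies agree with what ended up in __dict__. The pattern in isolation, on a plain object rather than the real Translator, looks like:

    import pickle

    class OrderedPickleSketch(object):
        def __init__(self, entrypoint, functions):
            self.entrypoint = entrypoint
            self.functions = functions

        def __getstate__(self):
            # redundant but ordered: the two "important" attributes come first
            return self.entrypoint, self.functions, self.__dict__

        def __setstate__(self, args):
            assert len(args) == 3
            self.__dict__.update(args[2])
            # pickle preserves sharing, so the explicit copies must be the
            # very same objects as the ones stored in __dict__
            assert args[0] is self.entrypoint and args[1] is self.functions

    obj = pickle.loads(pickle.dumps(OrderedPickleSketch('main', ['main', 'helper'])))
    assert obj.entrypoint == 'main' and obj.functions == ['main', 'helper']
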
Deleted: /pypy/branch/pycompiler/translator/typer.py
==============================================================================
--- /pypy/branch/pycompiler/translator/typer.py	Fri Jul  1 18:28:54 2005
+++ (empty file)
@@ -1,209 +0,0 @@
-from __future__ import generators
-import autopath
-from pypy.objspace.flow.model import SpaceOperation, Variable, Constant
-from pypy.objspace.flow.model import Block, Link, uniqueitems
-from pypy.translator.unsimplify import insert_empty_block
-
-
-class TyperError(Exception):
-    def __str__(self):
-        result = Exception.__str__(self)
-        if hasattr(self, 'where'):
-            result += '\n.. %r\n.. %r' % self.where
-        return result
-
-
-class Specializer:
-
-    def __init__(self, annotator, defaultconcretetype, typematches,
-                 specializationtable):
-        self.annotator = annotator
-        self.defaultconcretetype = defaultconcretetype
-        self.typematches = typematches
-        # turn the table into a dict for faster look-ups
-        d = {}
-        for e in specializationtable:
-            opname1    = e[0]
-            opname2    = e[1]
-            spectypes  = e[2:-1]
-            restype    = e[-1]
-            info = opname2, spectypes, restype
-            d.setdefault(opname1, []).append(info)
-            d.setdefault(opname2, []).append(info)
-        self.specializationdict = d
-
-    def specialize(self):
-        """Main entry point: specialize all annotated blocks of the program."""
-        # new blocks can be created as a result of specialize_block(), so
-        # we need to be careful about the loop here.
-        already_seen = {}
-        pending = self.annotator.annotated.keys()
-        while pending:
-            for block in pending:
-                if block.operations != ():
-                    self.specialize_block(block)
-                already_seen[block] = True
-            pending = [block for block in self.annotator.annotated
-                             if block not in already_seen]
-
-    def settype(self, a, concretetype):
-        """Set the concretetype of a Variable."""
-        assert isinstance(a, Variable)
-        if hasattr(a, 'concretetype') and a.concretetype != concretetype:
-            raise TyperError, "inconsistent type for %r: %r != %r" % (
-                a, a.concretetype, concretetype)
-        a.concretetype = concretetype
-
-    def setbesttype(self, a):
-        """Set the best concretetype for a Variable according to
-        the annotations."""
-        try:
-            return a.concretetype
-        except AttributeError:
-            s_value = self.annotator.binding(a, True)
-            if s_value is not None:
-                besttype = self.annotation2concretetype(s_value)
-            else:
-                besttype = self.defaultconcretetype
-            self.settype(a, besttype)
-            return besttype
-
-    def annotation2concretetype(self, s_value):
-        for concretetype in self.typematches:
-            if concretetype.s_annotation.contains(s_value):
-                return concretetype
-        return self.defaultconcretetype
-
-    def convertvar(self, v, concretetype):
-        """Get the operation(s) needed to convert 'v' to the given type."""
-        ops = []
-        if isinstance(v, Constant):
-            # we should never modify a Constant in-place
-            v = Constant(v.value)
-            v.concretetype = concretetype
-
-        elif hasattr(v, 'concretetype') and v.concretetype != concretetype:
-            # XXX do we need better conversion paths?
-
-            # 1) convert to the generic type
-            if v.concretetype != self.defaultconcretetype:
-                v2 = Variable()
-                v2.concretetype = self.defaultconcretetype
-                newops = list(v.concretetype.convert_to_obj(self, v, v2))
-                v = v2
-                ops += newops
-
-            # 2) convert back from the generic type
-            if concretetype != self.defaultconcretetype:
-                v2 = Variable()
-                v2.concretetype = concretetype
-                newops = list(concretetype.convert_from_obj(self, v, v2))
-                v = v2
-                ops += newops
-
-        return v, ops
-
-    def specialize_block(self, block):
-        # give the best possible types to the input args
-        for a in block.inputargs:
-            self.setbesttype(a)
-
-        # specialize all the operations, as far as possible
-        newops = []
-        for op in block.operations:
-
-            args = list(op.args)
-            bindings = [self.annotator.binding(a, True) for a in args]
-
-            # replace constant annotations with real Constants
-            for i in range(len(op.args)):
-                if isinstance(args[i], Variable) and bindings[i] is not None:
-                    if bindings[i].is_constant():
-                        args[i] = Constant(bindings[i].const)
-                        op = SpaceOperation(op.opname, args, op.result)
-
-            # make a specialized version of the current operation
-            # (which may become several operations)
-            try:
-                flatten_ops(self.specialized_op(op, bindings), newops)
-            except TyperError, e:
-                e.where = (block, op)
-                raise
-
-        block.operations[:] = newops
-        self.insert_link_conversions(block)
-
-
-    def typed_op(self, op, argtypes, restype, newopname=None):
-        """Make a typed copy of the given SpaceOperation."""
-        result = []
-        args = list(op.args)
-        assert len(argtypes) == len(args)
-
-        # type-convert the input arguments
-        for i in range(len(args)):
-            args[i], convops = self.convertvar(args[i], argtypes[i])
-            result += convops
-
-        # store the result variable's type
-        self.settype(op.result, restype)
-
-        # store the possibly modified SpaceOperation
-        op = SpaceOperation(newopname or op.opname, args, op.result)
-        result.append(op)
-        return result
-
-
-    def insert_link_conversions(self, block):
-        # insert the needed conversions on the links
-        can_insert_here = block.exitswitch is None and len(block.exits) == 1
-        for link in block.exits:
-            try:
-                for i in range(len(link.args)):
-                    a1 = link.args[i]
-                    if a1 in (link.last_exception, link.last_exc_value):# treated specially in gen_link
-                        continue
-                    a2 = link.target.inputargs[i]
-                    a2type = self.setbesttype(a2)
-                    a1, convops = self.convertvar(a1, a2type)
-                    if convops and not can_insert_here:
-                        # cannot insert conversion operations around a single
-                        # link, unless it is the only exit of this block.
-                        # create a new block along the link...
-                        newblock = insert_empty_block(self.annotator.translator,
-                                                      link)
-                        # ...and do the conversions there.
-                        self.insert_link_conversions(newblock)
-                        break   # done with this link
-                    flatten_ops(convops, block.operations)
-                    link.args[i] = a1
-            except TyperError, e:
-                e.where = (block, link)
-                raise
-
-
-    def specialized_op(self, op, bindings):
-        specializations = self.specializationdict.get(op.opname, ())
-        for opname2, spectypes, restype in specializations:
-            assert len(spectypes) == len(op.args) == len(bindings)
-            for i in range(len(spectypes)):
-                if bindings[i] is None:
-                    break
-                if not spectypes[i].s_annotation.contains(bindings[i]):
-                    break
-            else:
-                # specialization found
-                yield self.typed_op(op, spectypes, restype, newopname=opname2)
-                return
-        # specialization not found
-        argtypes = [self.defaultconcretetype] * len(op.args)
-        yield self.typed_op(op, argtypes, self.defaultconcretetype)
-
-
-def flatten_ops(op, newops):
-    # Flatten lists and generators and record all SpaceOperations found
-    if isinstance(op, SpaceOperation):
-        newops.append(op)
-    else:
-        for op1 in op:
-            flatten_ops(op1, newops)


