[pypy-commit] pypy reflex-support: merge default into branch

wlav noreply at buildbot.pypy.org
Thu Jul 19 02:35:31 CEST 2012


Author: Wim Lavrijsen <WLavrijsen at lbl.gov>
Branch: reflex-support
Changeset: r56209:b3673ceaeb05
Date: 2012-07-18 17:35 -0700
http://bitbucket.org/pypy/pypy/changeset/b3673ceaeb05/

Log:	merge default into branch

diff --git a/lib_pypy/PyQt4.py b/lib_pypy/PyQt4.py
deleted file mode 100644
--- a/lib_pypy/PyQt4.py
+++ /dev/null
@@ -1,9 +0,0 @@
-from _rpyc_support import proxy_sub_module, remote_eval
-
-
-for name in ("QtCore", "QtGui", "QtWebKit"):
-    proxy_sub_module(globals(), name)
-
-s = "__import__('PyQt4').QtGui.QDialogButtonBox."
-QtGui.QDialogButtonBox.Cancel = remote_eval("%sCancel | %sCancel" % (s, s))
-QtGui.QDialogButtonBox.Ok = remote_eval("%sOk | %sOk" % (s, s))
diff --git a/lib_pypy/_rpyc_support.py b/lib_pypy/_rpyc_support.py
deleted file mode 100644
--- a/lib_pypy/_rpyc_support.py
+++ /dev/null
@@ -1,24 +0,0 @@
-import sys
-import socket
-
-from rpyc import connect, SlaveService
-from rpyc.utils.classic import DEFAULT_SERVER_PORT
-
-try:
-    conn = connect("localhost", DEFAULT_SERVER_PORT, SlaveService,
-           config=dict(call_by_value_for_builtin_mutable_types=True))
-except socket.error, e:
-    raise ImportError("Error while connecting: " + str(e))
-
-
-remote_eval = conn.eval
-
-
-def proxy_module(globals):
-    module = getattr(conn.modules, globals["__name__"])
-    for name in module.__dict__.keys():
-        globals[name] = getattr(module, name)
-
-def proxy_sub_module(globals, name):
-    fullname = globals["__name__"] + "." + name
-    sys.modules[fullname] = globals[name] = conn.modules[fullname]
diff --git a/lib_pypy/distributed/__init__.py b/lib_pypy/distributed/__init__.py
deleted file mode 100644
--- a/lib_pypy/distributed/__init__.py
+++ /dev/null
@@ -1,7 +0,0 @@
-
-try:
-    from protocol import RemoteProtocol, test_env, remote_loop, ObjectNotFound
-except ImportError:
-    # XXX fix it
-    # UGH. This is needed for tests
-    pass
diff --git a/lib_pypy/distributed/demo/sockdemo.py b/lib_pypy/distributed/demo/sockdemo.py
deleted file mode 100644
--- a/lib_pypy/distributed/demo/sockdemo.py
+++ /dev/null
@@ -1,42 +0,0 @@
-
-from distributed import RemoteProtocol, remote_loop
-from distributed.socklayer import Finished, socket_listener, socket_connecter
-
-PORT = 12122
-
-class X:
-    def __init__(self, z):
-        self.z = z
-        
-    def meth(self, x):
-        return self.z + x()
-
-    def raising(self):
-        1/0
-
-x = X(3)
-
-def remote():
-    send, receive = socket_listener(address=('', PORT))
-    remote_loop(RemoteProtocol(send, receive, globals()))
-
-def local():
-    send, receive = socket_connecter(('localhost', PORT))
-    return RemoteProtocol(send, receive)
-
-import sys
-if __name__ == '__main__':
-    if len(sys.argv) > 1 and sys.argv[1] == '-r':
-        try:
-            remote()
-        except Finished:
-            print "Finished"
-    else:
-        rp = local()
-        x = rp.get_remote("x")
-        try:
-            x.raising()
-        except:
-            import sys
-            import pdb
-            pdb.post_mortem(sys.exc_info()[2])
diff --git a/lib_pypy/distributed/faker.py b/lib_pypy/distributed/faker.py
deleted file mode 100644
--- a/lib_pypy/distributed/faker.py
+++ /dev/null
@@ -1,89 +0,0 @@
-
-""" This file is responsible for faking types
-"""
-
-class GetSetDescriptor(object):
-    def __init__(self, protocol, name):
-        self.protocol = protocol
-        self.name = name
-
-    def __get__(self, obj, type=None):
-        return self.protocol.get(self.name, obj, type)
-
-    def __set__(self, obj, value):
-        self.protocol.set(self.name, obj, value)
-
-class GetDescriptor(object):
-    def __init__(self, protocol, name):
-        self.protocol = protocol
-        self.name = name
-
-    def __get__(self, obj, type=None):
-        return self.protocol.get(self.name, obj, type)
-
-# these are one-go functions for wrapping/unwrapping types,
-# note that actual caching is defined in other files,
-# this is only the case when we *need* to wrap/unwrap
-# type
-
-from types import MethodType, FunctionType
-
-def not_ignore(name):
-    # we don't want to fake some default descriptors, because
-    # they'll alter the way we set attributes
-    l = ['__dict__', '__weakref__', '__class__', '__bases__',
-         '__getattribute__', '__getattr__', '__setattr__',
-         '__delattr__']
-    return not name in dict.fromkeys(l)
-
-def wrap_type(protocol, tp, tp_id):
-    """ Wrap type to transpotable entity, taking
-    care about descriptors
-    """
-    dict_w = {}
-    for item in tp.__dict__.keys():
-        value = getattr(tp, item)
-        if not_ignore(item):
-            # we've got shortcut for method
-            if hasattr(value, '__get__') and not type(value) is MethodType:
-                if hasattr(value, '__set__'):
-                    dict_w[item] = ('get', item)
-                else:
-                    dict_w[item] = ('set', item)
-            else:
-                dict_w[item] = protocol.wrap(value)
-    bases_w = [protocol.wrap(i) for i in tp.__bases__ if i is not object]
-    return tp_id, tp.__name__, dict_w, bases_w
-
-def unwrap_descriptor_gen(desc_class):
-    def unwrapper(protocol, data):
-        name = data
-        obj = desc_class(protocol, name)
-        obj.__name__ = name
-        return obj
-    return unwrapper
-
-unwrap_get_descriptor = unwrap_descriptor_gen(GetDescriptor)
-unwrap_getset_descriptor = unwrap_descriptor_gen(GetSetDescriptor)
-
-def unwrap_type(objkeeper, protocol, type_id, name_, dict_w, bases_w):
-    """ Unwrap remote type, based on it's description
-    """
-    if bases_w == []:
-        bases = (object,)
-    else:
-        bases = tuple([protocol.unwrap(i) for i in bases_w])
-    d = dict.fromkeys(dict_w)
-    # XXX we do it in two steps to avoid cyclic dependencies,
-    #     probably there is some smarter way of doing this
-    if '__doc__' in dict_w:
-        d['__doc__'] = protocol.unwrap(dict_w['__doc__'])
-    tp = type(name_, bases, d)
-    objkeeper.register_remote_type(tp, type_id)
-    for key, value in dict_w.items():
-        if key != '__doc__':
-            v = protocol.unwrap(value)
-            if isinstance(v, FunctionType):
-                setattr(tp, key, staticmethod(v))
-            else:
-                setattr(tp, key, v)
diff --git a/lib_pypy/distributed/objkeeper.py b/lib_pypy/distributed/objkeeper.py
deleted file mode 100644
--- a/lib_pypy/distributed/objkeeper.py
+++ /dev/null
@@ -1,63 +0,0 @@
-
-""" objkeeper - Storage for remoteprotocol
-"""
-
-from types import FunctionType
-from distributed import faker
-
-class ObjKeeper(object):
-    def __init__(self, exported_names = {}):
-        self.exported_objects = [] # list of object that we've exported outside
-        self.exported_names = exported_names # dictionary of visible objects
-        self.exported_types = {} # dict of exported types
-        self.remote_types = {}
-        self.reverse_remote_types = {}
-        self.remote_objects = {}
-        self.exported_types_id = 0 # unique id of exported types
-        self.exported_types_reverse = {} # reverse dict of exported types
-    
-    def register_object(self, obj):
-        # XXX: At some point it makes sense not to export them again and again...
-        self.exported_objects.append(obj)
-        return len(self.exported_objects) - 1
-    
-    def ignore(self, key, value):
-        # there are some attributes, which cannot be modified later, nor
-        # passed into default values, ignore them
-        if key in ('__dict__', '__weakref__', '__class__',
-                   '__dict__', '__bases__'):
-            return True
-        return False
-    
-    def register_type(self, protocol, tp):
-        try:
-            return self.exported_types[tp]
-        except KeyError:
-            self.exported_types[tp] = self.exported_types_id
-            self.exported_types_reverse[self.exported_types_id] = tp
-            tp_id = self.exported_types_id
-            self.exported_types_id += 1
-
-        protocol.send(('type_reg', faker.wrap_type(protocol, tp, tp_id)))
-        return tp_id
-    
-    def fake_remote_type(self, protocol, tp_data):
-        type_id, name_, dict_w, bases_w = tp_data
-        tp = faker.unwrap_type(self, protocol, type_id, name_, dict_w, bases_w)
-
-    def register_remote_type(self, tp, type_id):
-        self.remote_types[type_id] = tp
-        self.reverse_remote_types[tp] = type_id
-    
-    def get_type(self, id):
-        return self.remote_types[id]
-
-    def get_object(self, id):
-        return self.exported_objects[id]
-    
-    def register_remote_object(self, controller, id):
-        self.remote_objects[controller] = id
-
-    def get_remote_object(self, controller):
-        return self.remote_objects[controller]
-        
diff --git a/lib_pypy/distributed/protocol.py b/lib_pypy/distributed/protocol.py
deleted file mode 100644
--- a/lib_pypy/distributed/protocol.py
+++ /dev/null
@@ -1,447 +0,0 @@
-
-""" Distributed controller(s) for use with transparent proxy objects
-
-First idea:
-
-1. We use py.execnet to create a connection to wherever
-2. We run some code there (RSync in advance makes some sense)
-3. We access remote objects like normal ones, with a special protocol
-
-Local side:
-  - Request an object from remote side from global namespace as simple
-    --- request(name) --->
-  - Receive an object which is in protocol described below which is
-    constructed as shallow copy of the remote type.
-
-    Shallow copy is defined as follows:
-
-    - for interp-level object that we know we can provide transparent proxy
-      we just do that
-
-    - for others we fake or fail depending on object
-
-    - for user objects, we create a class which fakes all attributes of
-      a class as transparent proxies of remote objects, we create an instance
-      of that class and populate __dict__
-
-    - for immutable types, we just copy that
-
-Remote side:
-  - we run code, whatever we like
-  - additionally, we've got thread exporting stuff (or just exporting
-    globals, whatever)
-  - for every object, we just send an object, or provide a protocol for
-    sending it in a different way.
-
-"""
-
-try:
-    from __pypy__ import tproxy as proxy
-    from __pypy__ import get_tproxy_controller
-except ImportError:
-    raise ImportError("Cannot work without transparent proxy functionality")
-
-from distributed.objkeeper import ObjKeeper
-from distributed import faker
-import sys
-
-class ObjectNotFound(Exception):
-    pass
-
-# XXX We do not make any garbage collection. We'll need it at some point
-
-"""
-TODO list:
-
-1. Garbage collection - we would like probably to use weakrefs, but
-   since they're not perfectly working in pypy, let's leave it alone for now
-2. Some error handling - exceptions are working, there are still some
-   applications where it all explodes.
-3. Support inheritance and recursive types
-"""
-
-from __pypy__ import internal_repr
-
-import types
-from marshal import dumps
-import exceptions
-
-# just placeholders for letter_types value
-class RemoteBase(object):
-    pass
-
-class DataDescriptor(object):
-    pass
-
-class NonDataDescriptor(object):
-    pass
-# end of placeholders
-
-class AbstractProtocol(object):
-    immutable_primitives = (str, int, float, long, unicode, bool, types.NotImplementedType)
-    mutable_primitives = (list, dict, types.FunctionType, types.FrameType, types.TracebackType,
-        types.CodeType)
-    exc_dir = dict((val, name) for name, val in exceptions.__dict__.iteritems())
-    
-    letter_types = {
-        'l' : list,
-        'd' : dict,
-        'c' : types.CodeType,
-        't' : tuple,
-        'e' : Exception,
-        'ex': exceptions, # for instances
-        'i' : int,
-        'b' : bool,
-        'f' : float,
-        'u' : unicode,
-        'l' : long,
-        's' : str,
-        'ni' : types.NotImplementedType,
-        'n' : types.NoneType,
-        'lst' : list,
-        'fun' : types.FunctionType,
-        'cus' : object,
-        'meth' : types.MethodType,
-        'type' : type,
-        'tp' : None,
-        'fr' : types.FrameType,
-        'tb' : types.TracebackType,
-        'reg' : RemoteBase,
-        'get' : NonDataDescriptor,
-        'set' : DataDescriptor,
-    }
-    type_letters = dict([(value, key) for key, value in letter_types.items()])
-    assert len(type_letters) == len(letter_types)
-    
-    def __init__(self, exported_names={}):
-        self.keeper = ObjKeeper(exported_names)
-        #self.remote_objects = {} # a dictionary controller --> id
-        #self.objs = [] # we just store everything, maybe later
-        #   # we'll need some kind of garbage collection
-
-    def wrap(self, obj):
-        """ Wrap an object as sth prepared for sending
-        """
-        def is_element(x, iterable):
-            try:
-                return x in iterable
-            except (TypeError, ValueError):
-                return False
-        
-        tp = type(obj)
-        ctrl = get_tproxy_controller(obj)
-        if ctrl:
-            return "tp", self.keeper.get_remote_object(ctrl)
-        elif obj is None:
-            return self.type_letters[tp]
-        elif tp in self.immutable_primitives:
-            # simple, immutable object, just copy
-            return (self.type_letters[tp], obj)
-        elif hasattr(obj, '__class__') and obj.__class__ in self.exc_dir:
-            return (self.type_letters[Exception], (self.exc_dir[obj.__class__], \
-                self.wrap(obj.args)))
-        elif is_element(obj, self.exc_dir): # weird hashing problems
-            return (self.type_letters[exceptions], self.exc_dir[obj])
-        elif tp is tuple:
-            # we just pack all of the items
-            return ('t', tuple([self.wrap(elem) for elem in obj]))
-        elif tp in self.mutable_primitives:
-            id = self.keeper.register_object(obj)
-            return (self.type_letters[tp], id)
-        elif tp is type:
-            try:
-                return "reg", self.keeper.reverse_remote_types[obj]
-            except KeyError:
-                pass
-            try:
-                return self.type_letters[tp], self.type_letters[obj]
-            except KeyError:
-                id = self.register_type(obj)
-                return (self.type_letters[tp], id)
-        elif tp is types.MethodType:
-            w_class = self.wrap(obj.im_class)
-            w_func = self.wrap(obj.im_func)
-            w_self = self.wrap(obj.im_self)
-            return (self.type_letters[tp], (w_class, \
-                self.wrap(obj.im_func.func_name), w_func, w_self))
-        else:
-            id = self.keeper.register_object(obj)
-            w_tp = self.wrap(tp)
-            return ("cus", (w_tp, id))
-    
-    def unwrap(self, data):
-        """ Unwrap an object
-        """
-        if data == 'n':
-            return None
-        tp_letter, obj_data = data
-        tp = self.letter_types[tp_letter]
-        if tp is None:
-            return self.keeper.get_object(obj_data)
-        elif tp is RemoteBase:
-            return self.keeper.exported_types_reverse[obj_data]
-        elif tp in self.immutable_primitives:
-            return obj_data # this is the object
-        elif tp is tuple:
-            return tuple([self.unwrap(i) for i in obj_data])
-        elif tp in self.mutable_primitives:
-            id = obj_data
-            ro = RemoteBuiltinObject(self, id)
-            self.keeper.register_remote_object(ro.perform, id)
-            p = proxy(tp, ro.perform)
-            ro.obj = p
-            return p
-        elif tp is Exception:
-            cls_name, w_args = obj_data
-            return getattr(exceptions, cls_name)(self.unwrap(w_args))
-        elif tp is exceptions:
-            cls_name = obj_data
-            return getattr(exceptions, cls_name)
-        elif tp is types.MethodType:
-            w_class, w_name, w_func, w_self = obj_data
-            tp = self.unwrap(w_class)
-            name = self.unwrap(w_name)
-            self_ = self.unwrap(w_self)
-            if self_ is not None:
-                if tp is None:
-                    setattr(self_, name, classmethod(self.unwrap(w_func)))
-                    return getattr(self_, name)
-                return getattr(tp, name).__get__(self_, tp)
-            func = self.unwrap(w_func)
-            setattr(tp, name, func)
-            return getattr(tp, name)
-        elif tp is type:
-            if isinstance(obj_data, str):
-                return self.letter_types[obj_data]
-            id = obj_data
-            return self.get_type(obj_data)
-        elif tp is DataDescriptor:            
-            return faker.unwrap_getset_descriptor(self, obj_data)
-        elif tp is NonDataDescriptor:
-            return faker.unwrap_get_descriptor(self, obj_data)
-        elif tp is object:
-            # we need to create a proper type
-            w_tp, id = obj_data
-            real_tp = self.unwrap(w_tp)
-            ro = RemoteObject(self, id)
-            self.keeper.register_remote_object(ro.perform, id)
-            p = proxy(real_tp, ro.perform)
-            ro.obj = p
-            return p
-        else:
-            raise NotImplementedError("Cannot unwrap %s" % (data,))
-    
-    def perform(self, *args, **kwargs):
-        raise NotImplementedError("Abstract only protocol")
-    
-    # some simple wrappers
-    def pack_args(self, args, kwargs):
-        return self.pack_list(args), self.pack_dict(kwargs)
-    
-    def pack_list(self, lst):
-        return [self.wrap(i) for i in lst]
-    
-    def pack_dict(self, d):
-        return dict([(self.wrap(key), self.wrap(val)) for key, val in d.items()])
-    
-    def unpack_args(self, args, kwargs):
-        return self.unpack_list(args), self.unpack_dict(kwargs)
-    
-    def unpack_list(self, lst):
-        return [self.unwrap(i) for i in lst]
-    
-    def unpack_dict(self, d):
-        return dict([(self.unwrap(key), self.unwrap(val)) for key, val in d.items()])
-    
-    def register_type(self, tp):
-        return self.keeper.register_type(self, tp)
-    
-    def get_type(self, id):
-        return self.keeper.get_type(id)
-    
-class LocalProtocol(AbstractProtocol):
-    """ This is stupid protocol for testing purposes only
-    """
-    def __init__(self):
-        super(LocalProtocol, self).__init__()
-        self.types = []
-   
-    def perform(self, id, name, *args, **kwargs):
-        obj = self.keeper.get_object(id)
-        # we pack and than unpack, for tests
-        args, kwargs = self.pack_args(args, kwargs)
-        assert isinstance(name, str)
-        dumps((args, kwargs))
-        args, kwargs = self.unpack_args(args, kwargs)
-        return getattr(obj, name)(*args, **kwargs)
-    
-    def register_type(self, tp):
-        self.types.append(tp)
-        return len(self.types) - 1
-    
-    def get_type(self, id):
-        return self.types[id]
-
-def remote_loop(protocol):
-    # the simplest version possible, without any concurrency and such
-    wrap = protocol.wrap
-    unwrap = protocol.unwrap
-    send = protocol.send
-    receive = protocol.receive
-    # we need this for wrap/unwrap
-    while 1:
-        command, data = receive()
-        if command == 'get':
-            try:
-                item = protocol.keeper.exported_names[data]
-            except KeyError:
-                send(("finished_error",data))
-            else:
-                # XXX wrapping problems catching? do we have any?
-                send(("finished", wrap(item)))
-        elif command == 'call':
-            id, name, args, kwargs = data
-            args, kwargs = protocol.unpack_args(args, kwargs)
-            try:
-                retval = getattr(protocol.keeper.get_object(id), name)(*args, **kwargs)
-            except:
-                send(("raised", wrap(sys.exc_info())))
-            else:
-                send(("finished", wrap(retval)))
-        elif command == 'finished':
-            return unwrap(data)
-        elif command == 'finished_error':
-            raise ObjectNotFound("Cannot find name %s" % (data,))
-        elif command == 'raised':
-            exc, val, tb = unwrap(data)
-            raise exc, val, tb
-        elif command == 'type_reg':
-            protocol.keeper.fake_remote_type(protocol, data)
-        elif command == 'force':
-            obj = protocol.keeper.get_object(data)
-            w_obj = protocol.pack(obj)
-            send(("forced", w_obj))
-        elif command == 'forced':
-            obj = protocol.unpack(data)
-            return obj
-        elif command == 'desc_get':
-            name, w_obj, w_type = data
-            obj = protocol.unwrap(w_obj)
-            type_ = protocol.unwrap(w_type)
-            if obj:
-                type__ = type(obj)
-            else:
-                type__ = type_
-            send(('finished', protocol.wrap(getattr(type__, name).__get__(obj, type_))))
-
-        elif command == 'desc_set':
-            name, w_obj, w_value = data
-            obj = protocol.unwrap(w_obj)
-            value = protocol.unwrap(w_value)
-            getattr(type(obj), name).__set__(obj, value)
-            send(('finished', protocol.wrap(None)))
-        elif command == 'remote_keys':
-            keys = protocol.keeper.exported_names.keys()
-            send(('finished', protocol.wrap(keys)))
-        else:
-            raise NotImplementedError("command %s" % command)
-
-class RemoteProtocol(AbstractProtocol):
-    #def __init__(self, gateway, remote_code):
-    #    self.gateway = gateway
-    def __init__(self, send, receive, exported_names={}):
-        super(RemoteProtocol, self).__init__(exported_names)
-        #self.exported_names = exported_names
-        self.send = send
-        self.receive = receive
-        #self.type_cache = {}
-        #self.type_id = 0
-        #self.remote_types = {}
-    
-    def perform(self, id, name, *args, **kwargs):
-        args, kwargs = self.pack_args(args, kwargs)
-        self.send(('call', (id, name, args, kwargs)))
-        try:
-            retval = remote_loop(self)
-        except:
-            e, val, tb = sys.exc_info()
-            raise e, val, tb.tb_next.tb_next
-        return retval
-    
-    def get_remote(self, name):
-        self.send(("get", name))
-        retval = remote_loop(self)
-        return retval
-    
-    def force(self, id):
-        self.send(("force", id))
-        retval = remote_loop(self)
-        return retval
-    
-    def pack(self, obj):
-        if isinstance(obj, list):
-            return "l", self.pack_list(obj)
-        elif isinstance(obj, dict):
-            return "d", self.pack_dict(obj)
-        else:
-            raise NotImplementedError("Cannot pack %s" % obj)
-        
-    def unpack(self, data):
-        letter, w_obj = data
-        if letter == 'l':
-            return self.unpack_list(w_obj)
-        elif letter == 'd':
-            return self.unpack_dict(w_obj)
-        else:
-            raise NotImplementedError("Cannot unpack %s" % (data,))
-
-    def get(self, name, obj, type):
-        self.send(("desc_get", (name, self.wrap(obj), self.wrap(type))))
-        return remote_loop(self)
-
-    def set(self, obj, value):
-        self.send(("desc_set", (name, self.wrap(obj), self.wrap(value))))
-
-    def remote_keys(self):
-        self.send(("remote_keys",None))
-        return remote_loop(self)
-
-class RemoteObject(object):
-    def __init__(self, protocol, id):
-        self.id = id
-        self.protocol = protocol
-    
-    def perform(self, name, *args, **kwargs):
-        return self.protocol.perform(self.id, name, *args, **kwargs)
-
-class RemoteBuiltinObject(RemoteObject):
-    def __init__(self, protocol, id):
-        self.id = id
-        self.protocol = protocol
-        self.forced = False
-    
-    def perform(self, name, *args, **kwargs):
-        # XXX: Check who really goes here
-        if self.forced:
-            return getattr(self.obj, name)(*args, **kwargs)
-        if name in ('__eq__', '__ne__', '__lt__', '__gt__', '__ge__', '__le__',
-            '__cmp__'):
-            self.obj = self.protocol.force(self.id)
-            return getattr(self.obj, name)(*args, **kwargs)
-        return self.protocol.perform(self.id, name, *args, **kwargs)
-
-def test_env(exported_names):
-    from stackless import channel, tasklet, run
-    inp, out = channel(), channel()
-    remote_protocol = RemoteProtocol(inp.send, out.receive, exported_names)
-    t = tasklet(remote_loop)(remote_protocol)
-    
-    #def send_trace(data):
-    #    print "Sending %s" % (data,)
-    #    out.send(data)
-
-    #def receive_trace():
-    #    data = inp.receive()
-    #    print "Received %s" % (data,)
-    #    return data
-    return RemoteProtocol(out.send, inp.receive)
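
A minimal standalone sketch of the letter-tag wrap/unwrap scheme the protocol above relies on, with invented names (MiniProtocol, the 'imm'/'t'/'ref' tags) and only a few cases; it illustrates the idea, it is not the removed implementation:

    # Immutable values are tagged and copied, tuples are wrapped element by
    # element, and everything else is registered and sent as an id that the
    # other side turns into a proxy.  All names here are invented.
    class MiniProtocol(object):
        immutable = (int, float, str, bool, type(None))

        def __init__(self):
            self.objects = []              # exported objects, indexed by id

        def wrap(self, obj):
            if isinstance(obj, self.immutable):
                return ('imm', obj)        # copy by value
            if isinstance(obj, tuple):
                return ('t', tuple(self.wrap(x) for x in obj))
            self.objects.append(obj)       # keep a reference, send its id
            return ('ref', len(self.objects) - 1)

        def unwrap(self, data):
            tag, payload = data
            if tag == 'imm':
                return payload
            if tag == 't':
                return tuple(self.unwrap(x) for x in payload)
            return self.objects[payload]   # locally resolve; remotely, proxy

    p = MiniProtocol()
    assert p.unwrap(p.wrap((1, "a", 2.5))) == (1, "a", 2.5)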
diff --git a/lib_pypy/distributed/socklayer.py b/lib_pypy/distributed/socklayer.py
deleted file mode 100644
--- a/lib_pypy/distributed/socklayer.py
+++ /dev/null
@@ -1,83 +0,0 @@
-
-import py
-from socket import socket
-
-raise ImportError("XXX needs import adaptation as 'green' is removed from py lib for years")
-from py.impl.green.msgstruct import decodemessage, message
-from socket import socket, AF_INET, SOCK_STREAM
-import marshal
-import sys
-
-TRACE = False
-def trace(msg):
-    if TRACE:
-        print >>sys.stderr, msg
-
-class Finished(Exception):
-    pass
-
-class SocketWrapper(object):
-    def __init__(self, conn):
-        self.buffer = ""
-        self.conn = conn
-
-class ReceiverWrapper(SocketWrapper):
-    def receive(self):
-        msg, self.buffer = decodemessage(self.buffer)
-        while msg is None:
-            data = self.conn.recv(8192)
-            if not data:
-                raise Finished()
-            self.buffer += data
-            msg, self.buffer = decodemessage(self.buffer)
-        assert msg[0] == 'c'
-        trace("received %s" % msg[1])
-        return marshal.loads(msg[1])
-
-class SenderWrapper(SocketWrapper):
-    def send(self, data):
-        trace("sending %s" % (data,))
-        self.conn.sendall(message('c', marshal.dumps(data)))
-        trace("done")
-
-def socket_listener(address, socket=socket):
-    s = socket(AF_INET, SOCK_STREAM)
-    s.bind(address)
-    s.listen(1)
-    print "Waiting for connection on %s" % (address,)
-    conn, addr = s.accept()
-    print "Connected from %s" % (addr,)
-
-    return SenderWrapper(conn).send, ReceiverWrapper(conn).receive
-
-def socket_loop(address, to_export, socket=socket):
-    from distributed import RemoteProtocol, remote_loop
-    try:
-        send, receive = socket_listener(address, socket)
-        remote_loop(RemoteProtocol(send, receive, to_export))
-    except Finished:
-        pass
-
-def socket_connecter(address, socket=socket):
-    s = socket(AF_INET, SOCK_STREAM)
-    print "Connecting %s" % (address,)
-    s.connect(address)
-    
-    return SenderWrapper(s).send, ReceiverWrapper(s).receive
-
-def connect(address, socket=socket):
-    from distributed.support import RemoteView
-    from distributed import RemoteProtocol
-    return RemoteView(RemoteProtocol(*socket_connecter(address, socket)))
-
-def spawn_remote_side(code, gw):
-    """ A very simple wrapper around greenexecnet to allow
-    spawning a remote side of lib/distributed
-    """
-    from distributed import RemoteProtocol
-    extra = str(py.code.Source("""
-    from distributed import remote_loop, RemoteProtocol
-    remote_loop(RemoteProtocol(channel.send, channel.receive, globals()))
-    """))
-    channel = gw.remote_exec(code + "\n" + extra)
-    return RemoteProtocol(channel.send, channel.receive)
diff --git a/lib_pypy/distributed/support.py b/lib_pypy/distributed/support.py
deleted file mode 100644
--- a/lib_pypy/distributed/support.py
+++ /dev/null
@@ -1,17 +0,0 @@
-
-""" Some random support functions
-"""
-
-from distributed.protocol import ObjectNotFound
-
-class RemoteView(object):
-    def __init__(self, protocol):
-        self.__dict__['__protocol'] = protocol
-
-    def __getattr__(self, name):
-        if name == '__dict__':
-            return super(RemoteView, self).__getattr__(name)
-        try:
-            return self.__dict__['__protocol'].get_remote(name)
-        except ObjectNotFound:
-            raise AttributeError(name)
diff --git a/lib_pypy/distributed/test/__init__.py b/lib_pypy/distributed/test/__init__.py
deleted file mode 100644
diff --git a/lib_pypy/distributed/test/test_distributed.py b/lib_pypy/distributed/test/test_distributed.py
deleted file mode 100644
--- a/lib_pypy/distributed/test/test_distributed.py
+++ /dev/null
@@ -1,301 +0,0 @@
-
-""" Controllers tests
-"""
-
-from pypy.conftest import gettestobjspace
-import sys
-import pytest
-
-class AppTestDistributed(object):
-    def setup_class(cls):
-        cls.space = gettestobjspace(**{"objspace.std.withtproxy": True,
-            "usemodules":("_continuation",)})
-
-    def test_init(self):
-        import distributed
-
-    def test_protocol(self):
-        from distributed.protocol import AbstractProtocol
-        protocol = AbstractProtocol()
-        for item in ("aaa", 3, u"aa", 344444444444444444L, 1.2, (1, "aa")):
-            assert protocol.unwrap(protocol.wrap(item)) == item
-        assert type(protocol.unwrap(protocol.wrap([1,2,3]))) is list
-        assert type(protocol.unwrap(protocol.wrap({"a":3}))) is dict
-        
-        def f():
-            pass
-        
-        assert type(protocol.unwrap(protocol.wrap(f))) is type(f)
-
-    def test_method_of_false_obj(self):
-        from distributed.protocol import AbstractProtocol
-        protocol = AbstractProtocol()
-        lst = []
-        m = lst.append
-        assert type(protocol.unwrap(protocol.wrap(m))) is type(m)
-
-    def test_protocol_run(self):
-        l = [1,2,3]
-        from distributed.protocol import LocalProtocol
-        protocol = LocalProtocol()
-        wrap = protocol.wrap
-        unwrap = protocol.unwrap
-        item = unwrap(wrap(l))
-        assert len(item) == 3
-        assert item[2] == 3
-        item += [1,1,1]
-        assert len(item) == 6
-
-    def test_protocol_call(self):
-        def f(x, y):
-            return x + y
-        
-        from distributed.protocol import LocalProtocol
-        protocol = LocalProtocol()
-        wrap = protocol.wrap
-        unwrap = protocol.unwrap
-        item = unwrap(wrap(f))
-        assert item(3, 2) == 5
-
-    def test_simulation_call(self):
-        def f(x, y):
-            return x + y
-        
-        import types
-        from distributed import RemoteProtocol
-        import sys
-
-        data = []
-        result = []
-        protocol = RemoteProtocol(result.append, data.pop)
-        data += [("finished", protocol.wrap(5)), ("finished", protocol.wrap(f))]
-        fun = protocol.get_remote("f")
-        assert isinstance(fun, types.FunctionType)
-        assert fun(2, 3) == 5
-
-    def test_local_obj(self):
-        class A(object):
-            def __init__(self, x):
-                self.x = x
-            
-            def __len__(self):
-                return self.x + 8
-        
-        from distributed.protocol import LocalProtocol
-        protocol = LocalProtocol()
-        wrap = protocol.wrap
-        unwrap = protocol.unwrap
-        item = unwrap(wrap(A(3)))
-        assert item.x == 3
-        assert len(item) == 11
-
-class AppTestDistributedTasklets(object):
-    spaceconfig = {"objspace.std.withtproxy": True,
-                   "objspace.usemodules._continuation": True}
-    def setup_class(cls):
-        cls.w_test_env = cls.space.appexec([], """():
-        from distributed import test_env
-        return test_env
-        """)
-        cls.reclimit = sys.getrecursionlimit()
-        sys.setrecursionlimit(100000)
-
-    def teardown_class(cls):
-        sys.setrecursionlimit(cls.reclimit)
-    
-    def test_remote_protocol_call(self):
-        def f(x, y):
-            return x + y
-        
-        protocol = self.test_env({"f": f})
-        fun = protocol.get_remote("f")
-        assert fun(2, 3) == 5
-
-    def test_callback(self):
-        def g():
-            return 8
-        
-        def f(x):
-            return x + g()
-        
-        protocol = self.test_env({"f":f})
-        fun = protocol.get_remote("f")
-        assert fun(8) == 16
-    
-    def test_remote_dict(self):
-        #skip("Land of infinite recursion")
-        d = {'a':3}
-        protocol = self.test_env({'d':d})
-        xd = protocol.get_remote('d')
-        #assert d['a'] == xd['a']
-        assert d.keys() == xd.keys()
-        assert d.values() == xd.values()
-        assert d == xd
-        
-    def test_remote_obj(self):
-        class A(object):
-            def __init__(self, x):
-                self.x = x
-            
-            def __len__(self):
-                return self.x + 8
-        a = A(3)
-        
-        protocol = self.test_env({'a':a})
-        xa = protocol.get_remote("a")
-        assert xa.x == 3
-        assert len(xa) == 11
-    
-    def test_remote_doc_and_callback(self):
-        class A(object):
-            """xxx"""
-            def __init__(self):
-                pass
-
-            def meth(self, x):
-                return x() + 3
-        
-        def x():
-            return 1
-        
-        a = A()
-        
-        protocol = self.test_env({'a':a})
-        xa = protocol.get_remote('a')
-        assert xa.__class__.__doc__ == 'xxx'
-        assert xa.meth(x) == 4
-
-    def test_double_reference(self):
-        class A(object):
-            def meth(self, one):
-                self.one = one
-            
-            def perform(self):
-                return 1 + len(self.one())
-        
-        class B(object):
-            def __call__(self):
-                return [1,2,3]
-        
-        a = A()
-        protocol = self.test_env({'a': a})
-        xa = protocol.get_remote('a')
-        xa.meth(B())
-        assert xa.perform() == 4
-
-    def test_frame(self):
-        #skip("Land of infinite recursion")
-        import sys
-        f = sys._getframe()
-        protocol = self.test_env({'f':f})
-        xf = protocol.get_remote('f')
-        assert f.f_globals.keys() == xf.f_globals.keys()
-        assert f.f_locals.keys() == xf.f_locals.keys()
-
-    def test_remote_exception(self):
-        def raising():
-            1/0
-        
-        protocol = self.test_env({'raising':raising})
-        xr = protocol.get_remote('raising')
-        try:
-            xr()
-        except ZeroDivisionError:
-            import sys
-            exc_info, val, tb  = sys.exc_info()
-            #assert tb.tb_next is None
-        else:
-            raise AssertionError("Did not raise")
-
-    def test_remote_classmethod(self):
-        class A(object):
-            z = 8
-
-            @classmethod
-            def x(cls):
-                return cls.z
-
-        a = A()
-        protocol = self.test_env({'a':a})
-        xa = protocol.get_remote("a")
-        res = xa.x()
-        assert res == 8
-
-    def test_types_reverse_mapping(self):
-        class A(object):
-            def m(self, tp):
-                assert type(self) is tp
-
-        a = A()
-        protocol = self.test_env({'a':a, 'A':A})
-        xa = protocol.get_remote('a')
-        xA = protocol.get_remote('A')
-        xa.m(xA)
-
-    def test_instantiate_remote_type(self):
-        class C(object):
-            def __init__(self, y):
-                self.y = y
-            
-            def x(self):
-                return self.y
-
-        protocol = self.test_env({'C':C})
-        xC = protocol.get_remote('C')
-        xc = xC(3)
-        res = xc.x()
-        assert res == 3
-
-    def test_remote_sys(self):
-        import sys
-
-        protocol = self.test_env({'sys':sys})
-        s = protocol.get_remote('sys')
-        l = dir(s)
-        assert l
-
-    def test_remote_file_access(self):
-        skip("Descriptor logic seems broken")
-        protocol = self.test_env({'f':open})
-        xf = protocol.get_remote('f')
-        data = xf('/etc/passwd').read()
-        assert data
-
-    def test_real_descriptor(self):
-        class getdesc(object):
-            def __get__(self, obj, val=None):
-                if obj is not None:
-                    assert type(obj) is X
-                return 3
-
-        class X(object):
-            x = getdesc()
-
-        x = X()
-
-        protocol = self.test_env({'x':x})
-        xx = protocol.get_remote('x')
-        assert xx.x == 3
-    
-    def test_bases(self):
-        class X(object):
-            pass
-
-        class Y(X):
-            pass
-
-        y = Y()
-        protocol = self.test_env({'y':y, 'X':X})
-        xy = protocol.get_remote('y')
-        xX = protocol.get_remote('X')
-        assert isinstance(xy, xX)
-
-    def test_key_error(self):
-        from distributed import ObjectNotFound
-        protocol = self.test_env({})
-        raises(ObjectNotFound, "protocol.get_remote('x')")
-
-    def test_list_items(self):
-        protocol = self.test_env({'x':3, 'y':8})
-        assert sorted(protocol.remote_keys()) == ['x', 'y']
-
diff --git a/lib_pypy/distributed/test/test_greensock.py b/lib_pypy/distributed/test/test_greensock.py
deleted file mode 100644
--- a/lib_pypy/distributed/test/test_greensock.py
+++ /dev/null
@@ -1,62 +0,0 @@
-
-import py
-from pypy.conftest import gettestobjspace, option
-
-def setup_module(mod):
-    py.test.importorskip("pygreen")   # found e.g. in py/trunk/contrib 
-
-class AppTestDistributedGreensock(object):
-    def setup_class(cls):
-        if not option.runappdirect:
-            py.test.skip("Cannot run this on top of py.py because of PopenGateway")
-        cls.space = gettestobjspace(**{"objspace.std.withtproxy": True,
-                                       "usemodules":("_continuation",)})
-        cls.w_remote_side_code = cls.space.appexec([], """():
-        import sys
-        sys.path.insert(0, '%s')
-        remote_side_code = '''
-class A:
-   def __init__(self, x):
-       self.x = x
-            
-   def __len__(self):
-       return self.x + 8
-
-   def raising(self):
-       1/0
-
-   def method(self, x):
-       return x() + self.x
-
-a = A(3)
-
-def count():
-    x = 10
-    # naive counting :)
-    result = 1
-    for i in range(x):
-        result += 1
-    return result
-'''
-        return remote_side_code
-        """ % str(py.path.local(__file__).dirpath().dirpath().dirpath().dirpath()))
-
-    def test_remote_call(self):
-        from distributed import socklayer
-        import sys
-        from pygreen.greenexecnet import PopenGateway
-        gw = PopenGateway()
-        rp = socklayer.spawn_remote_side(self.remote_side_code, gw)
-        a = rp.get_remote("a")
-        assert a.method(lambda : 13) == 16
-    
-    def test_remote_counting(self):
-        from distributed import socklayer
-        from pygreen.greensock2 import allof
-        from pygreen.greenexecnet import PopenGateway
-        gws = [PopenGateway() for i in range(3)]
-        rps = [socklayer.spawn_remote_side(self.remote_side_code, gw)
-               for gw in gws]
-        counters = [rp.get_remote("count") for rp in rps]
-        assert allof(*counters) == (11, 11, 11)
-
diff --git a/lib_pypy/distributed/test/test_socklayer.py b/lib_pypy/distributed/test/test_socklayer.py
deleted file mode 100644
--- a/lib_pypy/distributed/test/test_socklayer.py
+++ /dev/null
@@ -1,36 +0,0 @@
-import py
-from pypy.conftest import gettestobjspace
-
-def setup_module(mod):
-    py.test.importorskip("pygreen")   # found e.g. in py/trunk/contrib 
-
-# XXX think how to close the socket
-
-class AppTestSocklayer:
-    def setup_class(cls):
-        cls.space = gettestobjspace(**{"objspace.std.withtproxy": True,
-                                       "usemodules":("_continuation",
-                                                     "_socket", "select")})
-    
-    def test_socklayer(self):
-        class X(object):
-            z = 3
-
-        x = X()
-
-        try:
-            import py
-        except ImportError:
-            skip("pylib not importable")
-        from pygreen.pipe.gsocke import GreenSocket
-        from distributed.socklayer import socket_loop, connect
-        from pygreen.greensock2 import oneof, allof
-
-        def one():
-            socket_loop(('127.0.0.1', 21211), {'x':x}, socket=GreenSocket)
-
-        def two():
-            rp = connect(('127.0.0.1', 21211), GreenSocket)
-            assert rp.x.z == 3
-
-        oneof(one, two)
diff --git a/lib_pypy/sip.py b/lib_pypy/sip.py
deleted file mode 100644
--- a/lib_pypy/sip.py
+++ /dev/null
@@ -1,4 +0,0 @@
-from _rpyc_support import proxy_module
-
-proxy_module(globals())
-del proxy_module
diff --git a/pypy/annotation/binaryop.py b/pypy/annotation/binaryop.py
--- a/pypy/annotation/binaryop.py
+++ b/pypy/annotation/binaryop.py
@@ -7,7 +7,7 @@
 from pypy.tool.pairtype import pair, pairtype
 from pypy.annotation.model import SomeObject, SomeInteger, SomeBool, s_Bool
 from pypy.annotation.model import SomeString, SomeChar, SomeList, SomeDict
-from pypy.annotation.model import SomeUnicodeCodePoint
+from pypy.annotation.model import SomeUnicodeCodePoint, SomeStringOrUnicode
 from pypy.annotation.model import SomeTuple, SomeImpossibleValue, s_ImpossibleValue
 from pypy.annotation.model import SomeInstance, SomeBuiltin, SomeIterator
 from pypy.annotation.model import SomePBC, SomeFloat, s_None
@@ -470,30 +470,37 @@
             "string formatting mixing strings and unicode not supported")
 
 
-class __extend__(pairtype(SomeString, SomeTuple)):
-    def mod((str, s_tuple)):
+class __extend__(pairtype(SomeString, SomeTuple),
+                 pairtype(SomeUnicodeString, SomeTuple)):
+    def mod((s_string, s_tuple)):
+        is_string = isinstance(s_string, SomeString)
+        is_unicode = isinstance(s_string, SomeUnicodeString)
+        assert is_string or is_unicode
         for s_item in s_tuple.items:
-            if isinstance(s_item, (SomeUnicodeCodePoint, SomeUnicodeString)):
+            if (is_unicode and isinstance(s_item, (SomeChar, SomeString)) or
+                is_string and isinstance(s_item, (SomeUnicodeCodePoint,
+                                                  SomeUnicodeString))):
                 raise NotImplementedError(
                     "string formatting mixing strings and unicode not supported")
-        getbookkeeper().count('strformat', str, s_tuple)
-        no_nul = str.no_nul
+        getbookkeeper().count('strformat', s_string, s_tuple)
+        no_nul = s_string.no_nul
         for s_item in s_tuple.items:
             if isinstance(s_item, SomeFloat):
                 pass   # or s_item is a subclass, like SomeInteger
-            elif isinstance(s_item, SomeString) and s_item.no_nul:
+            elif isinstance(s_item, SomeStringOrUnicode) and s_item.no_nul:
                 pass
             else:
                 no_nul = False
                 break
-        return SomeString(no_nul=no_nul)
+        return s_string.__class__(no_nul=no_nul)
 
 
-class __extend__(pairtype(SomeString, SomeObject)):
+class __extend__(pairtype(SomeString, SomeObject),
+                 pairtype(SomeUnicodeString, SomeObject)):
 
-    def mod((str, args)):
-        getbookkeeper().count('strformat', str, args)
-        return SomeString()
+    def mod((s_string, args)):
+        getbookkeeper().count('strformat', s_string, args)
+        return s_string.__class__()
 
 class __extend__(pairtype(SomeFloat, SomeFloat)):
     
diff --git a/pypy/annotation/bookkeeper.py b/pypy/annotation/bookkeeper.py
--- a/pypy/annotation/bookkeeper.py
+++ b/pypy/annotation/bookkeeper.py
@@ -201,6 +201,7 @@
                     for op in block.operations:
                         if op.opname in ('simple_call', 'call_args'):
                             yield op
+
                         # some blocks are partially annotated
                         if binding(op.result, None) is None:
                             break   # ignore the unannotated part
diff --git a/pypy/annotation/test/test_annrpython.py b/pypy/annotation/test/test_annrpython.py
--- a/pypy/annotation/test/test_annrpython.py
+++ b/pypy/annotation/test/test_annrpython.py
@@ -3389,6 +3389,22 @@
         s = a.build_types(f, [str])
         assert isinstance(s, annmodel.SomeString)
 
+    def test_unicodeformatting(self):
+        def f(x):
+            return u'%s' % x
+
+        a = self.RPythonAnnotator()
+        s = a.build_types(f, [unicode])
+        assert isinstance(s, annmodel.SomeUnicodeString)
+
+    def test_unicodeformatting_tuple(self):
+        def f(x):
+            return u'%s' % (x,)
+
+        a = self.RPythonAnnotator()
+        s = a.build_types(f, [unicode])
+        assert isinstance(s, annmodel.SomeUnicodeString)
+
 
     def test_negative_slice(self):
         def f(s, e):
@@ -3793,7 +3809,37 @@
         assert isinstance(s, annmodel.SomeString)
         assert s.no_nul
 
-
+    def test_base_iter(self):
+        class A(object):
+            def __iter__(self):
+                return self
+        
+        def fn():
+            return iter(A())
+
+        a = self.RPythonAnnotator()
+        s = a.build_types(fn, [])
+        assert isinstance(s, annmodel.SomeInstance)
+        assert s.classdef.name.endswith('.A')
+
+    def test_iter_next(self):
+        class A(object):
+            def __iter__(self):
+                return self
+
+            def next(self):
+                return 1
+        
+        def fn():
+            s = 0
+            for x in A():
+                s += x
+            return s
+
+        a = self.RPythonAnnotator()
+        s = a.build_types(fn, [])
+        assert len(a.translator.graphs) == 3 # fn, __iter__, next
+        assert isinstance(s, annmodel.SomeInteger)
 
 def g(n):
     return [0,1,2,n]
diff --git a/pypy/annotation/unaryop.py b/pypy/annotation/unaryop.py
--- a/pypy/annotation/unaryop.py
+++ b/pypy/annotation/unaryop.py
@@ -609,33 +609,36 @@
 
 class __extend__(SomeInstance):
 
+    def _true_getattr(ins, attr):
+        if attr == '__class__':
+            return ins.classdef.read_attr__class__()
+        attrdef = ins.classdef.find_attribute(attr)
+        position = getbookkeeper().position_key
+        attrdef.read_locations[position] = True
+        s_result = attrdef.getvalue()
+        # hack: if s_result is a set of methods, discard the ones
+        #       that can't possibly apply to an instance of ins.classdef.
+        # XXX do it more nicely
+        if isinstance(s_result, SomePBC):
+            s_result = ins.classdef.lookup_filter(s_result, attr,
+                                                  ins.flags)
+        elif isinstance(s_result, SomeImpossibleValue):
+            ins.classdef.check_missing_attribute_update(attr)
+            # blocking is harmless if the attribute is explicitly listed
+            # in the class or a parent class.
+            for basedef in ins.classdef.getmro():
+                if basedef.classdesc.all_enforced_attrs is not None:
+                    if attr in basedef.classdesc.all_enforced_attrs:
+                        raise HarmlesslyBlocked("get enforced attr")
+        elif isinstance(s_result, SomeList):
+            s_result = ins.classdef.classdesc.maybe_return_immutable_list(
+                attr, s_result)
+        return s_result
+
     def getattr(ins, s_attr):
         if s_attr.is_constant() and isinstance(s_attr.const, str):
             attr = s_attr.const
-            if attr == '__class__':
-                return ins.classdef.read_attr__class__()
-            attrdef = ins.classdef.find_attribute(attr)
-            position = getbookkeeper().position_key
-            attrdef.read_locations[position] = True
-            s_result = attrdef.getvalue()
-            # hack: if s_result is a set of methods, discard the ones
-            #       that can't possibly apply to an instance of ins.classdef.
-            # XXX do it more nicely
-            if isinstance(s_result, SomePBC):
-                s_result = ins.classdef.lookup_filter(s_result, attr,
-                                                      ins.flags)
-            elif isinstance(s_result, SomeImpossibleValue):
-                ins.classdef.check_missing_attribute_update(attr)
-                # blocking is harmless if the attribute is explicitly listed
-                # in the class or a parent class.
-                for basedef in ins.classdef.getmro():
-                    if basedef.classdesc.all_enforced_attrs is not None:
-                        if attr in basedef.classdesc.all_enforced_attrs:
-                            raise HarmlesslyBlocked("get enforced attr")
-            elif isinstance(s_result, SomeList):
-                s_result = ins.classdef.classdesc.maybe_return_immutable_list(
-                    attr, s_result)
-            return s_result
+            return ins._true_getattr(attr)
         return SomeObject()
     getattr.can_only_throw = []
 
@@ -657,6 +660,19 @@
         if not ins.can_be_None:
             s.const = True
 
+    def iter(ins):
+        s_iterable = ins._true_getattr('__iter__')
+        bk = getbookkeeper()
+        # record for calltables
+        bk.emulate_pbc_call(bk.position_key, s_iterable, [])
+        return s_iterable.call(bk.build_args("simple_call", []))
+
+    def next(ins):
+        s_next = ins._true_getattr('next')
+        bk = getbookkeeper()
+        # record for calltables
+        bk.emulate_pbc_call(bk.position_key, s_next, [])
+        return s_next.call(bk.build_args("simple_call", []))
 
 class __extend__(SomeBuiltin):
     def _can_only_throw(bltn, *args):
diff --git a/pypy/config/pypyoption.py b/pypy/config/pypyoption.py
--- a/pypy/config/pypyoption.py
+++ b/pypy/config/pypyoption.py
@@ -41,6 +41,7 @@
 translation_modules.update(dict.fromkeys(
     ["fcntl", "rctime", "select", "signal", "_rawffi", "zlib",
      "struct", "_md5", "cStringIO", "array", "_ffi",
+     "binascii",
      # the following are needed for pyrepl (and hence for the
      # interactive prompt/pdb)
      "termios", "_minimal_curses",
diff --git a/pypy/doc/coding-guide.rst b/pypy/doc/coding-guide.rst
--- a/pypy/doc/coding-guide.rst
+++ b/pypy/doc/coding-guide.rst
@@ -255,7 +255,12 @@
   code if the translator can prove that they are non-negative.  When
   slicing a string it is necessary to prove that the slice start and
   stop indexes are non-negative. There is no implicit str-to-unicode cast
-  anywhere.
+  anywhere. Simple string formatting using the ``%`` operator works, as long
+  as the format string is known at translation time; the only supported
+  formatting specifiers are ``%s``, ``%d``, ``%x``, ``%o``, ``%f``, plus
+  ``%r`` but only for user-defined instances. Modifiers such as conversion
+  flags, precision, length etc. are not supported. Moreover, it is forbidden
+  to mix unicode and strings when formatting.
 
 **tuples**
 
@@ -341,8 +346,8 @@
 
 **objects**
 
-  Normal rules apply. Special methods are not honoured, except ``__init__`` and
-  ``__del__``.
+  Normal rules apply. Special methods are not honoured, except ``__init__``,
+  ``__del__`` and ``__iter__``.
 
 This layout makes the number of types to take care about quite limited.
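
As a concrete illustration of the two rules added above, here is a small sketch in the Python 2 subset RPython accepts: a constant ``%`` format string with plain ``%s``/``%d`` specifiers, and a user class defining ``__iter__`` and ``next`` in the style of the new ``test_iter_next`` annotation test. The names (``describe``, ``Upto``, ``total``) are invented for the example; whether ``StopIteration`` termination is also handled is not shown by this diff.

    # Constant format string, only plain specifiers, no str/unicode mixing.
    def describe(name, count):
        return "%s: %d" % (name, count)

    # __iter__ (and next) on user classes is now annotated; this mirrors the
    # shape of the new test, written as ordinary Python 2.
    class Upto(object):
        def __init__(self, limit):
            self.i = 0
            self.limit = limit

        def __iter__(self):
            return self

        def next(self):
            if self.i >= self.limit:
                raise StopIteration
            self.i += 1
            return self.i

    def total(limit):
        s = 0
        for x in Upto(limit):
            s += x
        return s

    assert describe("loops", total(3)) == "loops: 6"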
 
diff --git a/pypy/doc/whatsnew-head.rst b/pypy/doc/whatsnew-head.rst
--- a/pypy/doc/whatsnew-head.rst
+++ b/pypy/doc/whatsnew-head.rst
@@ -14,5 +14,11 @@
 .. branch: nupypy-axis-arg-check
 Check that axis arg is valid in _numpypy
 
+.. branch: iterator-in-rpython
+.. branch: numpypy_count_nonzero
+.. branch: even-more-jit-hooks
+
+
 .. "uninteresting" branches that we should just ignore for the whatsnew:
 .. branch: slightly-shorter-c
+.. branch: better-enforceargs
diff --git a/pypy/jit/backend/llgraph/runner.py b/pypy/jit/backend/llgraph/runner.py
--- a/pypy/jit/backend/llgraph/runner.py
+++ b/pypy/jit/backend/llgraph/runner.py
@@ -4,6 +4,7 @@
 
 from pypy.rlib.unroll import unrolling_iterable
 from pypy.rlib.objectmodel import we_are_translated
+from pypy.rlib.jit_hooks import LOOP_RUN_CONTAINER
 from pypy.rpython.lltypesystem import lltype, llmemory, rclass
 from pypy.rpython.ootypesystem import ootype
 from pypy.rpython.llinterp import LLInterpreter
@@ -33,6 +34,10 @@
         self.arg_types = arg_types
         self.count_fields_if_immut = count_fields_if_immut
         self.ffi_flags = ffi_flags
+        self._debug = False
+
+    def set_debug(self, v):
+        self._debug = True
 
     def get_arg_types(self):
         return self.arg_types
@@ -583,6 +588,9 @@
             for x in args_f:
                 llimpl.do_call_pushfloat(x)
 
+    def get_all_loop_runs(self):
+        return lltype.malloc(LOOP_RUN_CONTAINER, 0)
+
     def force(self, force_token):
         token = llmemory.cast_int_to_adr(force_token)
         frame = llimpl.get_forced_token_frame(token)
diff --git a/pypy/jit/backend/model.py b/pypy/jit/backend/model.py
--- a/pypy/jit/backend/model.py
+++ b/pypy/jit/backend/model.py
@@ -55,6 +55,21 @@
         """Called once by the front-end when the program stops."""
         pass
 
+    def get_all_loop_runs(self):
+        """ Function that will return number of times all the loops were run.
+        Requires earlier setting of set_debug(True), otherwise you won't
+        get the information.
+
+        Returns an instance of LOOP_RUN_CONTAINER from rlib.jit_hooks
+        """
+        raise NotImplementedError
+
+    def set_debug(self, value):
+        """ Enable or disable debugging info. Does nothing by default. Returns
+        the previous setting.
+        """
+        return False
+
     def compile_loop(self, inputargs, operations, looptoken, log=True, name=''):
         """Assemble the given loop.
         Should create and attach a fresh CompiledLoopToken to
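
A hedged usage sketch of the two hooks documented above; ``cpu`` stands for a concrete backend instance (such as the x86 CPU below) and ``run_jitted_code`` is a placeholder for whatever exercises the compiled loops:

    def report_loop_runs(cpu, run_jitted_code):
        # Counters are only collected when debugging was enabled before the
        # loops run, hence set_debug(True) first; it returns the old setting.
        previous = cpu.set_debug(True)
        try:
            run_jitted_code()
            runs = cpu.get_all_loop_runs()   # LOOP_RUN_CONTAINER instance
            for i in range(len(runs)):
                entry = runs[i]              # fields as filled by the x86 backend
                print("loop type=%d number=%d ran %d times" % (
                    entry.type, entry.number, entry.counter))
        finally:
            cpu.set_debug(previous)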
diff --git a/pypy/jit/backend/x86/assembler.py b/pypy/jit/backend/x86/assembler.py
--- a/pypy/jit/backend/x86/assembler.py
+++ b/pypy/jit/backend/x86/assembler.py
@@ -101,7 +101,9 @@
                                       llmemory.cast_ptr_to_adr(ptrs))
 
     def set_debug(self, v):
+        r = self._debug
         self._debug = v
+        return r
 
     def setup_once(self):
         # the address of the function called by 'new'
@@ -750,7 +752,6 @@
     @specialize.argtype(1)
     def _inject_debugging_code(self, looptoken, operations, tp, number):
         if self._debug:
-            # before doing anything, let's increase a counter
             s = 0
             for op in operations:
                 s += op.getopnum()
diff --git a/pypy/jit/backend/x86/runner.py b/pypy/jit/backend/x86/runner.py
--- a/pypy/jit/backend/x86/runner.py
+++ b/pypy/jit/backend/x86/runner.py
@@ -3,6 +3,7 @@
 from pypy.rpython.lltypesystem.lloperation import llop
 from pypy.rpython.llinterp import LLInterpreter
 from pypy.rlib.objectmodel import we_are_translated
+from pypy.rlib.jit_hooks import LOOP_RUN_CONTAINER
 from pypy.jit.codewriter import longlong
 from pypy.jit.metainterp import history, compile
 from pypy.jit.backend.x86.assembler import Assembler386
@@ -44,6 +45,9 @@
 
         self.profile_agent = profile_agent
 
+    def set_debug(self, flag):
+        return self.assembler.set_debug(flag)
+
     def setup(self):
         if self.opts is not None:
             failargs_limit = self.opts.failargs_limit
@@ -181,6 +185,14 @@
         # positions invalidated
         looptoken.compiled_loop_token.invalidate_positions = []
 
+    def get_all_loop_runs(self):
+        l = lltype.malloc(LOOP_RUN_CONTAINER,
+                          len(self.assembler.loop_run_counters))
+        for i, ll_s in enumerate(self.assembler.loop_run_counters):
+            l[i].type = ll_s.type
+            l[i].number = ll_s.number
+            l[i].counter = ll_s.i
+        return l
 
 class CPU386(AbstractX86CPU):
     backend_name = 'x86'
diff --git a/pypy/jit/backend/x86/test/test_ztranslation.py b/pypy/jit/backend/x86/test/test_ztranslation.py
--- a/pypy/jit/backend/x86/test/test_ztranslation.py
+++ b/pypy/jit/backend/x86/test/test_ztranslation.py
@@ -3,6 +3,7 @@
 from pypy.rlib.jit import JitDriver, unroll_parameters, set_param
 from pypy.rlib.jit import PARAMETERS, dont_look_inside
 from pypy.rlib.jit import promote
+from pypy.rlib import jit_hooks
 from pypy.jit.metainterp.jitprof import Profiler
 from pypy.jit.backend.detect_cpu import getcpuclass
 from pypy.jit.backend.test.support import CCompiledMixin
@@ -170,6 +171,22 @@
         assert 1024 <= bound <= 131072
         assert bound & (bound-1) == 0       # a power of two
 
+    def test_jit_get_stats(self):
+        driver = JitDriver(greens = [], reds = ['i'])
+        
+        def f():
+            i = 0
+            while i < 100000:
+                driver.jit_merge_point(i=i)
+                i += 1
+
+        def main():
+            f()
+            ll_times = jit_hooks.stats_get_loop_run_times(None)
+            return len(ll_times)
+
+        res = self.meta_interp(main, [])
+        assert res == 1
 
 class TestTranslationRemoveTypePtrX86(CCompiledMixin):
     CPUClass = getcpuclass()
diff --git a/pypy/jit/metainterp/compile.py b/pypy/jit/metainterp/compile.py
--- a/pypy/jit/metainterp/compile.py
+++ b/pypy/jit/metainterp/compile.py
@@ -5,7 +5,7 @@
 from pypy.rlib.objectmodel import we_are_translated
 from pypy.rlib.debug import debug_start, debug_stop, debug_print
 from pypy.rlib import rstack
-from pypy.rlib.jit import JitDebugInfo
+from pypy.rlib.jit import JitDebugInfo, Counters
 from pypy.conftest import option
 from pypy.tool.sourcetools import func_with_new_name
 
@@ -22,8 +22,7 @@
 
 def giveup():
     from pypy.jit.metainterp.pyjitpl import SwitchToBlackhole
-    from pypy.jit.metainterp.jitprof import ABORT_BRIDGE
-    raise SwitchToBlackhole(ABORT_BRIDGE)
+    raise SwitchToBlackhole(Counters.ABORT_BRIDGE)
 
 def show_procedures(metainterp_sd, procedure=None, error=None):
     # debugging
@@ -226,6 +225,8 @@
     assert isinstance(target_token, TargetToken)
     assert loop_jitcell_token.target_tokens
     loop_jitcell_token.target_tokens.append(target_token)
+    if target_token.short_preamble:
+        metainterp_sd.logger_ops.log_short_preamble([], target_token.short_preamble)
 
     loop = partial_trace
     loop.operations = loop.operations[:-1] + part.operations
diff --git a/pypy/jit/metainterp/history.py b/pypy/jit/metainterp/history.py
--- a/pypy/jit/metainterp/history.py
+++ b/pypy/jit/metainterp/history.py
@@ -706,6 +706,7 @@
 
         self.virtual_state = None
         self.exported_state = None
+        self.short_preamble = None
 
     def repr_of_descr(self):
         return 'TargetToken(%d)' % compute_unique_id(self)
diff --git a/pypy/jit/metainterp/jitprof.py b/pypy/jit/metainterp/jitprof.py
--- a/pypy/jit/metainterp/jitprof.py
+++ b/pypy/jit/metainterp/jitprof.py
@@ -6,42 +6,11 @@
 from pypy.rlib.debug import debug_print, debug_start, debug_stop
 from pypy.rlib.debug import have_debug_prints
 from pypy.jit.metainterp.jitexc import JitException
+from pypy.rlib.jit import Counters
 
-counters="""
-TRACING
-BACKEND
-OPS
-RECORDED_OPS
-GUARDS
-OPT_OPS
-OPT_GUARDS
-OPT_FORCINGS
-ABORT_TOO_LONG
-ABORT_BRIDGE
-ABORT_BAD_LOOP
-ABORT_ESCAPE
-ABORT_FORCE_QUASIIMMUT
-NVIRTUALS
-NVHOLES
-NVREUSED
-TOTAL_COMPILED_LOOPS
-TOTAL_COMPILED_BRIDGES
-TOTAL_FREED_LOOPS
-TOTAL_FREED_BRIDGES
-"""
 
-counter_names = []
-
-def _setup():
-    names = counters.split()
-    for i, name in enumerate(names):
-        globals()[name] = i
-        counter_names.append(name)
-    global ncounters
-    ncounters = len(names)
-_setup()
-
-JITPROF_LINES = ncounters + 1 + 1 # one for TOTAL, 1 for calls, update if needed
+JITPROF_LINES = Counters.ncounters + 1 + 1
+# one for TOTAL, 1 for calls, update if needed
 _CPU_LINES = 4       # the last 4 lines are stored on the cpu
 
 class BaseProfiler(object):
@@ -71,9 +40,12 @@
     def count(self, kind, inc=1):
         pass
 
-    def count_ops(self, opnum, kind=OPS):
+    def count_ops(self, opnum, kind=Counters.OPS):
         pass
 
+    def get_counter(self, num):
+        return -1.0
+
 class Profiler(BaseProfiler):
     initialized = False
     timer = time.time
@@ -89,7 +61,7 @@
         self.starttime = self.timer()
         self.t1 = self.starttime
         self.times = [0, 0]
-        self.counters = [0] * (ncounters - _CPU_LINES)
+        self.counters = [0] * (Counters.ncounters - _CPU_LINES)
         self.calls = 0
         self.current = []
 
@@ -117,19 +89,30 @@
             return
         self.times[ev1] += self.t1 - t0
 
-    def start_tracing(self):   self._start(TRACING)
-    def end_tracing(self):     self._end  (TRACING)
+    def start_tracing(self):   self._start(Counters.TRACING)
+    def end_tracing(self):     self._end  (Counters.TRACING)
 
-    def start_backend(self):   self._start(BACKEND)
-    def end_backend(self):     self._end  (BACKEND)
+    def start_backend(self):   self._start(Counters.BACKEND)
+    def end_backend(self):     self._end  (Counters.BACKEND)
 
     def count(self, kind, inc=1):
         self.counters[kind] += inc        
-    
-    def count_ops(self, opnum, kind=OPS):
+
+    def get_counter(self, num):
+        if num == Counters.TOTAL_COMPILED_LOOPS:
+            return self.cpu.total_compiled_loops
+        elif num == Counters.TOTAL_COMPILED_BRIDGES:
+            return self.cpu.total_compiled_bridges
+        elif num == Counters.TOTAL_FREED_LOOPS:
+            return self.cpu.total_freed_loops
+        elif num == Counters.TOTAL_FREED_BRIDGES:
+            return self.cpu.total_freed_bridges
+        return self.counters[num]
+
+    def count_ops(self, opnum, kind=Counters.OPS):
         from pypy.jit.metainterp.resoperation import rop
         self.counters[kind] += 1
-        if opnum == rop.CALL and kind == RECORDED_OPS:# or opnum == rop.OOSEND:
+        if opnum == rop.CALL and kind == Counters.RECORDED_OPS:# or opnum == rop.OOSEND:
             self.calls += 1
 
     def print_stats(self):
@@ -142,26 +125,29 @@
         cnt = self.counters
         tim = self.times
         calls = self.calls
-        self._print_line_time("Tracing", cnt[TRACING],   tim[TRACING])
-        self._print_line_time("Backend", cnt[BACKEND],   tim[BACKEND])
+        self._print_line_time("Tracing", cnt[Counters.TRACING],
+                              tim[Counters.TRACING])
+        self._print_line_time("Backend", cnt[Counters.BACKEND],
+                              tim[Counters.BACKEND])
         line = "TOTAL:      \t\t%f" % (self.tk - self.starttime, )
         debug_print(line)
-        self._print_intline("ops", cnt[OPS])
-        self._print_intline("recorded ops", cnt[RECORDED_OPS])
+        self._print_intline("ops", cnt[Counters.OPS])
+        self._print_intline("recorded ops", cnt[Counters.RECORDED_OPS])
         self._print_intline("  calls", calls)
-        self._print_intline("guards", cnt[GUARDS])
-        self._print_intline("opt ops", cnt[OPT_OPS])
-        self._print_intline("opt guards", cnt[OPT_GUARDS])
-        self._print_intline("forcings", cnt[OPT_FORCINGS])
-        self._print_intline("abort: trace too long", cnt[ABORT_TOO_LONG])
-        self._print_intline("abort: compiling", cnt[ABORT_BRIDGE])
-        self._print_intline("abort: vable escape", cnt[ABORT_ESCAPE])
-        self._print_intline("abort: bad loop", cnt[ABORT_BAD_LOOP])
+        self._print_intline("guards", cnt[Counters.GUARDS])
+        self._print_intline("opt ops", cnt[Counters.OPT_OPS])
+        self._print_intline("opt guards", cnt[Counters.OPT_GUARDS])
+        self._print_intline("forcings", cnt[Counters.OPT_FORCINGS])
+        self._print_intline("abort: trace too long",
+                            cnt[Counters.ABORT_TOO_LONG])
+        self._print_intline("abort: compiling", cnt[Counters.ABORT_BRIDGE])
+        self._print_intline("abort: vable escape", cnt[Counters.ABORT_ESCAPE])
+        self._print_intline("abort: bad loop", cnt[Counters.ABORT_BAD_LOOP])
         self._print_intline("abort: force quasi-immut",
-                                               cnt[ABORT_FORCE_QUASIIMMUT])
-        self._print_intline("nvirtuals", cnt[NVIRTUALS])
-        self._print_intline("nvholes", cnt[NVHOLES])
-        self._print_intline("nvreused", cnt[NVREUSED])
+                            cnt[Counters.ABORT_FORCE_QUASIIMMUT])
+        self._print_intline("nvirtuals", cnt[Counters.NVIRTUALS])
+        self._print_intline("nvholes", cnt[Counters.NVHOLES])
+        self._print_intline("nvreused", cnt[Counters.NVREUSED])
         cpu = self.cpu
         if cpu is not None:   # for some tests
             self._print_intline("Total # of loops",
diff --git a/pypy/jit/metainterp/optimizeopt/optimizer.py b/pypy/jit/metainterp/optimizeopt/optimizer.py
--- a/pypy/jit/metainterp/optimizeopt/optimizer.py
+++ b/pypy/jit/metainterp/optimizeopt/optimizer.py
@@ -401,7 +401,7 @@
             o.turned_constant(value)
 
     def forget_numberings(self, virtualbox):
-        self.metainterp_sd.profiler.count(jitprof.OPT_FORCINGS)
+        self.metainterp_sd.profiler.count(jitprof.Counters.OPT_FORCINGS)
         self.resumedata_memo.forget_numberings(virtualbox)
 
     def getinterned(self, box):
@@ -535,9 +535,9 @@
             else:
                 self.ensure_imported(value)
                 op.setarg(i, value.force_box(self))
-        self.metainterp_sd.profiler.count(jitprof.OPT_OPS)
+        self.metainterp_sd.profiler.count(jitprof.Counters.OPT_OPS)
         if op.is_guard():
-            self.metainterp_sd.profiler.count(jitprof.OPT_GUARDS)
+            self.metainterp_sd.profiler.count(jitprof.Counters.OPT_GUARDS)
             if self.replaces_guard and op in self.replaces_guard:
                 self.replace_op(self.replaces_guard[op], op)
                 del self.replaces_guard[op]
diff --git a/pypy/jit/metainterp/optimizeopt/rewrite.py b/pypy/jit/metainterp/optimizeopt/rewrite.py
--- a/pypy/jit/metainterp/optimizeopt/rewrite.py
+++ b/pypy/jit/metainterp/optimizeopt/rewrite.py
@@ -241,6 +241,16 @@
             # guard_nonnull_class on this value, which is rather silly.
             # replace the original guard with a guard_value
             old_guard_op = value.last_guard
+            if old_guard_op.getopnum() != rop.GUARD_NONNULL:
+                # This is only safe if the class of the guard_value matches the
+                # class of the guard_*_class, otherwise the intermediate ops might
+                # be executed with wrong classes.
+                previous_classbox = value.get_constant_class(self.optimizer.cpu)            
+                expected_classbox = self.optimizer.cpu.ts.cls_of_box(op.getarg(1))
+                assert previous_classbox is not None
+                assert expected_classbox is not None
+                if not previous_classbox.same_constant(expected_classbox):
+                    raise InvalidLoop('A GUARD_VALUE was proven to always fail')
             op = old_guard_op.copy_and_change(rop.GUARD_VALUE,
                                       args = [old_guard_op.getarg(0), op.getarg(1)])
             self.optimizer.replaces_guard[op] = old_guard_op
@@ -251,6 +261,8 @@
             assert isinstance(descr, compile.ResumeGuardDescr)
             descr.guard_opnum = rop.GUARD_VALUE
             descr.make_a_counter_per_value(op)
+            # to be safe
+            value.last_guard = None
         constbox = op.getarg(1)
         assert isinstance(constbox, Const)
         self.optimize_guard(op, constbox)
diff --git a/pypy/jit/metainterp/optimizeopt/test/test_optimizeopt.py b/pypy/jit/metainterp/optimizeopt/test/test_optimizeopt.py
--- a/pypy/jit/metainterp/optimizeopt/test/test_optimizeopt.py
+++ b/pypy/jit/metainterp/optimizeopt/test/test_optimizeopt.py
@@ -7862,6 +7862,17 @@
         """
         self.optimize_loop(ops, expected)
 
+    def test_only_strengthen_guard_if_class_matches(self):
+        ops = """
+        [p1]
+        guard_class(p1, ConstClass(node_vtable2)) []
+        guard_value(p1, ConstPtr(myptr)) []
+        jump(p1)
+        """
+        self.raises(InvalidLoop, self.optimize_loop,
+                       ops, ops)
+
+
 class TestLLtype(OptimizeOptTest, LLtypeMixin):
     pass
 
diff --git a/pypy/jit/metainterp/optimizeopt/unroll.py b/pypy/jit/metainterp/optimizeopt/unroll.py
--- a/pypy/jit/metainterp/optimizeopt/unroll.py
+++ b/pypy/jit/metainterp/optimizeopt/unroll.py
@@ -120,9 +120,9 @@
                 limit = self.optimizer.metainterp_sd.warmrunnerdesc.memory_manager.retrace_limit
                 if cell_token.retraced_count < limit:
                     cell_token.retraced_count += 1
-                    #debug_print('Retracing (%d/%d)' % (cell_token.retraced_count, limit))
+                    debug_print('Retracing (%d/%d)' % (cell_token.retraced_count, limit))
                 else:
-                    #debug_print("Retrace count reached, jumping to preamble")
+                    debug_print("Retrace count reached, jumping to preamble")
                     assert cell_token.target_tokens[0].virtual_state is None
                     jumpop.setdescr(cell_token.target_tokens[0])
                     self.optimizer.send_extra_operation(jumpop)
diff --git a/pypy/jit/metainterp/pyjitpl.py b/pypy/jit/metainterp/pyjitpl.py
--- a/pypy/jit/metainterp/pyjitpl.py
+++ b/pypy/jit/metainterp/pyjitpl.py
@@ -13,9 +13,7 @@
 from pypy.jit.metainterp import executor
 from pypy.jit.metainterp.logger import Logger
 from pypy.jit.metainterp.jitprof import EmptyProfiler
-from pypy.jit.metainterp.jitprof import GUARDS, RECORDED_OPS, ABORT_ESCAPE
-from pypy.jit.metainterp.jitprof import ABORT_TOO_LONG, ABORT_BRIDGE, \
-                                        ABORT_FORCE_QUASIIMMUT, ABORT_BAD_LOOP
+from pypy.rlib.jit import Counters
 from pypy.jit.metainterp.jitexc import JitException, get_llexception
 from pypy.jit.metainterp.heapcache import HeapCache
 from pypy.rlib.objectmodel import specialize
@@ -675,7 +673,7 @@
             from pypy.jit.metainterp.quasiimmut import do_force_quasi_immutable
             do_force_quasi_immutable(self.metainterp.cpu, box.getref_base(),
                                      mutatefielddescr)
-            raise SwitchToBlackhole(ABORT_FORCE_QUASIIMMUT)
+            raise SwitchToBlackhole(Counters.ABORT_FORCE_QUASIIMMUT)
         self.generate_guard(rop.GUARD_ISNULL, mutatebox, resumepc=orgpc)
 
     def _nonstandard_virtualizable(self, pc, box):
@@ -1255,7 +1253,7 @@
         guard_op = metainterp.history.record(opnum, moreargs, None,
                                              descr=resumedescr)
         self.capture_resumedata(resumedescr, resumepc)
-        self.metainterp.staticdata.profiler.count_ops(opnum, GUARDS)
+        self.metainterp.staticdata.profiler.count_ops(opnum, Counters.GUARDS)
         # count
         metainterp.attach_debug_info(guard_op)
         return guard_op
@@ -1776,7 +1774,7 @@
             return resbox.constbox()
         # record the operation
         profiler = self.staticdata.profiler
-        profiler.count_ops(opnum, RECORDED_OPS)
+        profiler.count_ops(opnum, Counters.RECORDED_OPS)
         self.heapcache.invalidate_caches(opnum, descr, argboxes)
         op = self.history.record(opnum, argboxes, resbox, descr)
         self.attach_debug_info(op)
@@ -1837,7 +1835,7 @@
             if greenkey_of_huge_function is not None:
                 warmrunnerstate.disable_noninlinable_function(
                     greenkey_of_huge_function)
-            raise SwitchToBlackhole(ABORT_TOO_LONG)
+            raise SwitchToBlackhole(Counters.ABORT_TOO_LONG)
 
     def _interpret(self):
         # Execute the frames forward until we raise a DoneWithThisFrame,
@@ -1921,7 +1919,7 @@
         try:
             self.prepare_resume_from_failure(key.guard_opnum, dont_change_position)
             if self.resumekey_original_loop_token is None:   # very rare case
-                raise SwitchToBlackhole(ABORT_BRIDGE)
+                raise SwitchToBlackhole(Counters.ABORT_BRIDGE)
             self.interpret()
         except SwitchToBlackhole, stb:
             self.run_blackhole_interp_to_cancel_tracing(stb)
@@ -1996,7 +1994,7 @@
                 # raises in case it works -- which is the common case
                 if self.partial_trace:
                     if  start != self.retracing_from:
-                        raise SwitchToBlackhole(ABORT_BAD_LOOP) # For now
+                        raise SwitchToBlackhole(Counters.ABORT_BAD_LOOP) # For now
                 self.compile_loop(original_boxes, live_arg_boxes, start, resumedescr)
                 # creation of the loop was cancelled!
                 self.cancel_count += 1
@@ -2005,7 +2003,7 @@
                     if memmgr:
                         if self.cancel_count > memmgr.max_unroll_loops:
                             self.staticdata.log('cancelled too many times!')
-                            raise SwitchToBlackhole(ABORT_BAD_LOOP)
+                            raise SwitchToBlackhole(Counters.ABORT_BAD_LOOP)
                 self.staticdata.log('cancelled, tracing more...')
 
         # Otherwise, no loop found so far, so continue tracing.
@@ -2299,7 +2297,8 @@
             if vinfo.tracing_after_residual_call(virtualizable):
                 # the virtualizable escaped during CALL_MAY_FORCE.
                 self.load_fields_from_virtualizable()
-                raise SwitchToBlackhole(ABORT_ESCAPE, raising_exception=True)
+                raise SwitchToBlackhole(Counters.ABORT_ESCAPE,
+                                        raising_exception=True)
                 # ^^^ we set 'raising_exception' to True because we must still
                 # have the eventual exception raised (this is normally done
                 # after the call to vable_after_residual_call()).
diff --git a/pypy/jit/metainterp/resume.py b/pypy/jit/metainterp/resume.py
--- a/pypy/jit/metainterp/resume.py
+++ b/pypy/jit/metainterp/resume.py
@@ -254,9 +254,9 @@
         self.cached_virtuals.clear()
 
     def update_counters(self, profiler):
-        profiler.count(jitprof.NVIRTUALS, self.nvirtuals)
-        profiler.count(jitprof.NVHOLES, self.nvholes)
-        profiler.count(jitprof.NVREUSED, self.nvreused)
+        profiler.count(jitprof.Counters.NVIRTUALS, self.nvirtuals)
+        profiler.count(jitprof.Counters.NVHOLES, self.nvholes)
+        profiler.count(jitprof.Counters.NVREUSED, self.nvreused)
 
 _frame_info_placeholder = (None, 0, 0)
 
diff --git a/pypy/jit/metainterp/test/test_jitiface.py b/pypy/jit/metainterp/test/test_jitiface.py
--- a/pypy/jit/metainterp/test/test_jitiface.py
+++ b/pypy/jit/metainterp/test/test_jitiface.py
@@ -1,13 +1,15 @@
 
-from pypy.rlib.jit import JitDriver, JitHookInterface
+from pypy.rlib.jit import JitDriver, JitHookInterface, Counters
 from pypy.rlib import jit_hooks
 from pypy.jit.metainterp.test.support import LLJitMixin
 from pypy.jit.codewriter.policy import JitPolicy
-from pypy.jit.metainterp.jitprof import ABORT_FORCE_QUASIIMMUT
 from pypy.jit.metainterp.resoperation import rop
 from pypy.rpython.annlowlevel import hlstr
+from pypy.jit.metainterp.jitprof import Profiler
 
-class TestJitHookInterface(LLJitMixin):
+class JitHookInterfaceTests(object):
+    # !!!note!!! - don't subclass this directly from a backend test.
+    # Subclass the LL class below instead.
     def test_abort_quasi_immut(self):
         reasons = []
         
@@ -41,7 +43,7 @@
         assert f(100, 7) == 721
         res = self.meta_interp(f, [100, 7], policy=JitPolicy(iface))
         assert res == 721
-        assert reasons == [ABORT_FORCE_QUASIIMMUT] * 2
+        assert reasons == [Counters.ABORT_FORCE_QUASIIMMUT] * 2
 
     def test_on_compile(self):
         called = []
@@ -146,3 +148,74 @@
             assert jit_hooks.resop_getresult(op) == box5
 
         self.meta_interp(main, [])
+
+    def test_get_stats(self):
+        driver = JitDriver(greens = [], reds = ['i', 's'])
+
+        def loop(i):
+            s = 0
+            while i > 0:
+                driver.jit_merge_point(i=i, s=s)
+                if i % 2:
+                    s += 1
+                i -= 1
+                s += 2
+            return s
+
+        def main():
+            loop(30)
+            assert jit_hooks.stats_get_counter_value(None,
+                                           Counters.TOTAL_COMPILED_LOOPS) == 1
+            assert jit_hooks.stats_get_counter_value(None,
+                                           Counters.TOTAL_COMPILED_BRIDGES) == 1
+            assert jit_hooks.stats_get_counter_value(None,
+                                                     Counters.TRACING) == 2
+            assert jit_hooks.stats_get_times_value(None, Counters.TRACING) >= 0
+
+        self.meta_interp(main, [], ProfilerClass=Profiler)
+
+class LLJitHookInterfaceTests(JitHookInterfaceTests):
+    # use this class for any backend, instead of the superclass above
+    
+    def test_ll_get_stats(self):
+        driver = JitDriver(greens = [], reds = ['i', 's'])
+
+        def loop(i):
+            s = 0
+            while i > 0:
+                driver.jit_merge_point(i=i, s=s)
+                if i % 2:
+                    s += 1
+                i -= 1
+                s += 2
+            return s
+
+        def main(b):
+            jit_hooks.stats_set_debug(None, b)
+            loop(30)
+            l = jit_hooks.stats_get_loop_run_times(None)
+            if b:
+                assert len(l) == 4
+                # a very specific test that will fail whenever anything
+                # major changes; for now it's 4
+                # (loop, bridge, 2 entry points)
+                assert l[0].type == 'e'
+                assert l[0].number == 0
+                assert l[0].counter == 4
+                assert l[1].type == 'l'
+                assert l[1].counter == 4
+                assert l[2].type == 'l'
+                assert l[2].counter == 23
+                assert l[3].type == 'b'
+                assert l[3].number == 4
+                assert l[3].counter == 11
+            else:
+                assert len(l) == 0
+        self.meta_interp(main, [True], ProfilerClass=Profiler)
+        # this does not work so far because of the way setup_once is done,
+        # but that's fine, it only affects the untranslated version anyway
+        #self.meta_interp(main, [False], ProfilerClass=Profiler)
+        
+
+class TestJitHookInterface(JitHookInterfaceTests, LLJitMixin):
+    pass
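
Per the note at the top of JitHookInterfaceTests, a backend is expected to mix
in LLJitHookInterfaceTests rather than the base class.  A hypothetical backend
test module (class name and mixin choice are assumptions, not part of this
patch) could look like:

    # Hypothetical backend test; CCompiledMixin and getcpuclass are the same
    # helpers already imported by test_ztranslation.py earlier in this diff.
    from pypy.jit.backend.detect_cpu import getcpuclass
    from pypy.jit.backend.test.support import CCompiledMixin
    from pypy.jit.metainterp.test.test_jitiface import LLJitHookInterfaceTests

    class TestTranslatedJitHookInterface(LLJitHookInterfaceTests, CCompiledMixin):
        CPUClass = getcpuclass()
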
diff --git a/pypy/jit/metainterp/test/test_jitprof.py b/pypy/jit/metainterp/test/test_jitprof.py
--- a/pypy/jit/metainterp/test/test_jitprof.py
+++ b/pypy/jit/metainterp/test/test_jitprof.py
@@ -1,9 +1,9 @@
 
 from pypy.jit.metainterp.warmspot import ll_meta_interp
-from pypy.rlib.jit import JitDriver, dont_look_inside, elidable
+from pypy.rlib.jit import JitDriver, dont_look_inside, elidable, Counters
 from pypy.jit.metainterp.test.support import LLJitMixin
 from pypy.jit.metainterp import pyjitpl
-from pypy.jit.metainterp.jitprof import *
+from pypy.jit.metainterp.jitprof import Profiler
 
 class FakeProfiler(Profiler):
     def start(self):
@@ -46,10 +46,10 @@
         assert res == 84
         profiler = pyjitpl._warmrunnerdesc.metainterp_sd.profiler
         expected = [
-            TRACING,
-            BACKEND,
-            ~ BACKEND,
-            ~ TRACING,
+            Counters.TRACING,
+            Counters.BACKEND,
+            ~ Counters.BACKEND,
+            ~ Counters.TRACING,
             ]
         assert profiler.events == expected
         assert profiler.times == [2, 1]
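
The Counters namespace imported from pypy.rlib.jit is not shown in this diff.
Judging from its use above (Counters.TRACING, Counters.ncounters,
Counters.counter_names), it replaces the module-level constants removed from
jitprof.py; a hedged reconstruction, which may differ from the real definition
in pypy/rlib/jit.py, looks roughly like this:

    # Reconstruction for illustration only; it mirrors the _setup() code
    # deleted from jitprof.py in this changeset.
    class Counters(object):
        counters = """
        TRACING
        BACKEND
        OPS
        RECORDED_OPS
        GUARDS
        OPT_OPS
        OPT_GUARDS
        OPT_FORCINGS
        ABORT_TOO_LONG
        ABORT_BRIDGE
        ABORT_BAD_LOOP
        ABORT_ESCAPE
        ABORT_FORCE_QUASIIMMUT
        NVIRTUALS
        NVHOLES
        NVREUSED
        TOTAL_COMPILED_LOOPS
        TOTAL_COMPILED_BRIDGES
        TOTAL_FREED_LOOPS
        TOTAL_FREED_BRIDGES
        """
        counter_names = []

    _names = Counters.counters.split()
    for _i, _name in enumerate(_names):
        setattr(Counters, _name, _i)
        Counters.counter_names.append(_name)
    Counters.ncounters = len(_names)
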
diff --git a/pypy/jit/metainterp/warmspot.py b/pypy/jit/metainterp/warmspot.py
--- a/pypy/jit/metainterp/warmspot.py
+++ b/pypy/jit/metainterp/warmspot.py
@@ -6,6 +6,7 @@
 from pypy.annotation import model as annmodel
 from pypy.rpython.llinterp import LLException
 from pypy.rpython.test.test_llinterp import get_interpreter, clear_tcache
+from pypy.rpython.annlowlevel import cast_instance_to_base_ptr
 from pypy.objspace.flow.model import SpaceOperation, Variable, Constant
 from pypy.objspace.flow.model import checkgraph, Link, copygraph
 from pypy.rlib.objectmodel import we_are_translated
@@ -221,7 +222,7 @@
         self.rewrite_access_helpers()
         self.codewriter.make_jitcodes(verbose=verbose)
         self.rewrite_can_enter_jits()
-        self.rewrite_set_param()
+        self.rewrite_set_param_and_get_stats()
         self.rewrite_force_virtual(vrefinfo)
         self.rewrite_force_quasi_immutable()
         self.add_finish()
@@ -632,14 +633,22 @@
             self.rewrite_access_helper(op)
 
     def rewrite_access_helper(self, op):
-        ARGS = [arg.concretetype for arg in op.args[2:]]
-        RESULT = op.result.concretetype
-        FUNCPTR = lltype.Ptr(lltype.FuncType(ARGS, RESULT))
         # make sure we make a copy of function so it no longer belongs
         # to extregistry
         func = op.args[1].value
-        func = func_with_new_name(func, func.func_name + '_compiled')
-        ptr = self.helper_func(FUNCPTR, func)
+        if func.func_name.startswith('stats_'):
+            # gets special treatment since we rewrite it to a call that
+            # accepts the jit driver
+            func = func_with_new_name(func, func.func_name + '_compiled')
+            def new_func(ignored, *args):
+                return func(self, *args)
+            ARGS = [lltype.Void] + [arg.concretetype for arg in op.args[3:]]
+        else:
+            ARGS = [arg.concretetype for arg in op.args[2:]]
+            new_func = func_with_new_name(func, func.func_name + '_compiled')
+        RESULT = op.result.concretetype
+        FUNCPTR = lltype.Ptr(lltype.FuncType(ARGS, RESULT))
+        ptr = self.helper_func(FUNCPTR, new_func)
         op.opname = 'direct_call'
         op.args = [Constant(ptr, FUNCPTR)] + op.args[2:]
 
@@ -859,7 +868,7 @@
             call_final_function(self.translator, finish,
                                 annhelper = self.annhelper)
 
-    def rewrite_set_param(self):
+    def rewrite_set_param_and_get_stats(self):
         from pypy.rpython.lltypesystem.rstr import STR
 
         closures = {}
diff --git a/pypy/module/pypyjit/__init__.py b/pypy/module/pypyjit/__init__.py
--- a/pypy/module/pypyjit/__init__.py
+++ b/pypy/module/pypyjit/__init__.py
@@ -10,8 +10,12 @@
         'set_compile_hook': 'interp_resop.set_compile_hook',
         'set_optimize_hook': 'interp_resop.set_optimize_hook',
         'set_abort_hook': 'interp_resop.set_abort_hook',
+        'get_stats_snapshot': 'interp_resop.get_stats_snapshot',
+        'enable_debug': 'interp_resop.enable_debug',
+        'disable_debug': 'interp_resop.disable_debug',
         'ResOperation': 'interp_resop.WrappedOp',
         'DebugMergePoint': 'interp_resop.DebugMergePoint',
+        'JitLoopInfo': 'interp_resop.W_JitLoopInfo',
         'Box': 'interp_resop.WrappedBox',
         'PARAMETER_DOCS': 'space.wrap(pypy.rlib.jit.PARAMETER_DOCS)',
     }
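
A minimal app-level sketch (assuming a translated PyPy where the JIT is
actually running) of how these newly exposed entry points fit together; the
attribute names follow the snapshot code later in this diff:

    import pypyjit

    pypyjit.enable_debug()         # needed for the per-loop assembler counters
    # ... run some workload so that loops get compiled ...
    snap = pypyjit.get_stats_snapshot()
    print snap.counters            # dict of JIT counters, keyed by counter name
    print snap.counter_times       # 'TRACING' and 'BACKEND' timings
    print snap.loop_run_times      # dict {loop number: run counter}
    pypyjit.disable_debug()
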
diff --git a/pypy/module/pypyjit/interp_resop.py b/pypy/module/pypyjit/interp_resop.py
--- a/pypy/module/pypyjit/interp_resop.py
+++ b/pypy/module/pypyjit/interp_resop.py
@@ -11,16 +11,23 @@
 from pypy.jit.metainterp.resoperation import rop, AbstractResOp
 from pypy.rlib.nonconst import NonConstant
 from pypy.rlib import jit_hooks
+from pypy.rlib.jit import Counters
+from pypy.rlib.rarithmetic import r_uint
 from pypy.module.pypyjit.interp_jit import pypyjitdriver
 
 class Cache(object):
     in_recursion = False
+    no = 0
 
     def __init__(self, space):
         self.w_compile_hook = space.w_None
         self.w_abort_hook = space.w_None
         self.w_optimize_hook = space.w_None
 
+    def getno(self):
+        self.no += 1
+        return self.no - 1
+
 def wrap_greenkey(space, jitdriver, greenkey, greenkey_repr):
     if greenkey is None:
         return space.w_None
@@ -40,23 +47,9 @@
     """ set_compile_hook(hook)
 
     Set a compiling hook that will be called each time a loop is compiled.
-    The hook will be called with the following signature:
-    hook(jitdriver_name, loop_type, greenkey or guard_number, operations,
-         assembler_addr, assembler_length)
 
-    jitdriver_name is the name of this particular jitdriver, 'pypyjit' is
-    the main interpreter loop
-
-    loop_type can be either `loop` `entry_bridge` or `bridge`
-    in case loop is not `bridge`, greenkey will be a tuple of constants
-    or a string describing it.
-
-    for the interpreter loop` it'll be a tuple
-    (code, offset, is_being_profiled)
-
-    assembler_addr is an integer describing where assembler starts,
-    can be accessed via ctypes, assembler_lenght is the lenght of compiled
-    asm
+    The hook will be called with a pypyjit.JitLoopInfo object. Refer to its
+    docstring for details.
 
     Note that jit hook is not reentrant. It means that if the code
     inside the jit hook is itself jitted, it will get compiled, but the
@@ -73,22 +66,8 @@
     but before assembler compilation. This allows to add additional
     optimizations on Python level.
 
-    The hook will be called with the following signature:
-    hook(jitdriver_name, loop_type, greenkey or guard_number, operations)
-
-    jitdriver_name is the name of this particular jitdriver, 'pypyjit' is
-    the main interpreter loop
-
-    loop_type can be either `loop` `entry_bridge` or `bridge`
-    in case loop is not `bridge`, greenkey will be a tuple of constants
-    or a string describing it.
-
-    for the interpreter loop` it'll be a tuple
-    (code, offset, is_being_profiled)
-
-    Note that jit hook is not reentrant. It means that if the code
-    inside the jit hook is itself jitted, it will get compiled, but the
-    jit hook won't be called for that.
+    The hook will be called with a pypyjit.JitLoopInfo object. Refer to its
+    docstring for details.
 
     Result value will be the resulting list of operations, or None
     """
@@ -209,6 +188,10 @@
         jit_hooks.resop_setresult(self.op, box.llbox)
 
 class DebugMergePoint(WrappedOp):
+    """ A class representing Debug Merge Point - the entry point
+    to a jitted loop.
+    """
+    
     def __init__(self, space, op, repr_of_resop, jd_name, call_depth, call_id,
         w_greenkey):
 
@@ -248,13 +231,149 @@
 DebugMergePoint.typedef = TypeDef(
     'DebugMergePoint', WrappedOp.typedef,
     __new__ = interp2app(descr_new_dmp),
-    greenkey = interp_attrproperty_w("w_greenkey", cls=DebugMergePoint),
+    __doc__ = DebugMergePoint.__doc__,
+    greenkey = interp_attrproperty_w("w_greenkey", cls=DebugMergePoint,
+               doc="Representation of place where the loop was compiled. "
+                    "In the case of the main interpreter loop, it's a triplet "
+                    "(code, ofs, is_profiled)"),
     pycode = GetSetProperty(DebugMergePoint.get_pycode),
-    bytecode_no = GetSetProperty(DebugMergePoint.get_bytecode_no),
-    call_depth = interp_attrproperty("call_depth", cls=DebugMergePoint),
-    call_id = interp_attrproperty("call_id", cls=DebugMergePoint),
-    jitdriver_name = GetSetProperty(DebugMergePoint.get_jitdriver_name),
+    bytecode_no = GetSetProperty(DebugMergePoint.get_bytecode_no,
+                                 doc="offset in the bytecode"),
+    call_depth = interp_attrproperty("call_depth", cls=DebugMergePoint,
+                                     doc="Depth of calls within this loop"),
+    call_id = interp_attrproperty("call_id", cls=DebugMergePoint,
+                     doc="Number of applevel function traced in this loop"),
+    jitdriver_name = GetSetProperty(DebugMergePoint.get_jitdriver_name,
+                     doc="Name of the jitdriver; 'pypyjit' in the case "
+                                    "of the main interpreter loop"),
 )
 DebugMergePoint.acceptable_as_base_class = False
 
+class W_JitLoopInfo(Wrappable):
+    """ Loop debug information
+    """
+    
+    w_green_key = None
+    bridge_no   = 0
+    asmaddr     = 0
+    asmlen      = 0
+    
+    def __init__(self, space, debug_info, is_bridge=False):
+        logops = debug_info.logger._make_log_operations()
+        if debug_info.asminfo is not None:
+            ofs = debug_info.asminfo.ops_offset
+        else:
+            ofs = {}
+        self.w_ops = space.newlist(
+            wrap_oplist(space, logops, debug_info.operations, ofs))
+        
+        self.jd_name = debug_info.get_jitdriver().name
+        self.type = debug_info.type
+        if is_bridge:
+            self.bridge_no = debug_info.fail_descr_no
+            self.w_green_key = space.w_None
+        else:
+            self.w_green_key = wrap_greenkey(space,
+                                             debug_info.get_jitdriver(),
+                                             debug_info.greenkey,
+                                             debug_info.get_greenkey_repr())
+        self.loop_no = debug_info.looptoken.number
+        asminfo = debug_info.asminfo
+        if asminfo is not None:
+            self.asmaddr = asminfo.asmaddr
+            self.asmlen = asminfo.asmlen
 
+    def descr_repr(self, space):
+        lgt = space.int_w(space.len(self.w_ops))
+        if self.type == "bridge":
+            code_repr = 'bridge no %d' % self.bridge_no
+        else:
+            code_repr = space.str_w(space.repr(self.w_green_key))
+        return space.wrap('<JitLoopInfo %s, %d operations, starting at <%s>>' %
+                          (self.jd_name, lgt, code_repr))
+
+@unwrap_spec(loopno=int, asmaddr=int, asmlen=int, loop_no=int,
+             type=str, jd_name=str, bridge_no=int)
+def descr_new_jit_loop_info(space, w_subtype, w_greenkey, w_ops, loopno,
+                            asmaddr, asmlen, loop_no, type, jd_name, bridge_no):
+    w_info = space.allocate_instance(W_JitLoopInfo, w_subtype)
+    w_info.w_green_key = w_greenkey
+    w_info.w_ops = w_ops
+    w_info.asmaddr = asmaddr
+    w_info.asmlen = asmlen
+    w_info.loop_no = loop_no
+    w_info.type = type
+    w_info.jd_name = jd_name
+    w_info.bridge_no = bridge_no
+    return w_info
+
+W_JitLoopInfo.typedef = TypeDef(
+    'JitLoopInfo',
+    __doc__ = W_JitLoopInfo.__doc__,
+    __new__ = interp2app(descr_new_jit_loop_info),
+    jitdriver_name = interp_attrproperty('jd_name', cls=W_JitLoopInfo,
+                       doc="Name of the JitDriver, pypyjit for the main one"),
+    greenkey = interp_attrproperty_w('w_green_key', cls=W_JitLoopInfo,
+               doc="Representation of place where the loop was compiled. "
+                    "In the case of the main interpreter loop, it's a triplet "
+                    "(code, ofs, is_profiled)"),
+    operations = interp_attrproperty_w('w_ops', cls=W_JitLoopInfo, doc=
+                                       "List of operations in this loop."),
+    loop_no = interp_attrproperty('loop_no', cls=W_JitLoopInfo, doc=
+                                  "Loop cardinal number"),
+    __repr__ = interp2app(W_JitLoopInfo.descr_repr),
+)
+W_JitLoopInfo.acceptable_as_base_class = False
+
+class W_JitInfoSnapshot(Wrappable):
+    def __init__(self, space, w_times, w_counters, w_counter_times):
+        self.w_loop_run_times = w_times
+        self.w_counters = w_counters
+        self.w_counter_times = w_counter_times
+
+W_JitInfoSnapshot.typedef = TypeDef(
+    "JitInfoSnapshot",
+    loop_run_times = interp_attrproperty_w("w_loop_run_times",
+                                             cls=W_JitInfoSnapshot),
+    counters = interp_attrproperty_w("w_counters",
+                                       cls=W_JitInfoSnapshot,
+                                       doc="various JIT counters"),
+    counter_times = interp_attrproperty_w("w_counter_times",
+                                            cls=W_JitInfoSnapshot,
+                                            doc="various JIT timers")
+)
+W_JitInfoSnapshot.acceptable_as_base_class = False
+
+def get_stats_snapshot(space):
+    """ Get the JIT status at a specific moment in time. Note that this
+    is eager - the attribute access is not lazy; if you need fresh stats
+    you need to call this function again.
+    """
+    ll_times = jit_hooks.stats_get_loop_run_times(None)
+    w_times = space.newdict()
+    for i in range(len(ll_times)):
+        space.setitem(w_times, space.wrap(ll_times[i].number),
+                      space.wrap(ll_times[i].counter))
+    w_counters = space.newdict()
+    for i, counter_name in enumerate(Counters.counter_names):
+        v = jit_hooks.stats_get_counter_value(None, i)
+        space.setitem_str(w_counters, counter_name, space.wrap(v))
+    w_counter_times = space.newdict()
+    tr_time = jit_hooks.stats_get_times_value(None, Counters.TRACING)
+    space.setitem_str(w_counter_times, 'TRACING', space.wrap(tr_time))
+    b_time = jit_hooks.stats_get_times_value(None, Counters.BACKEND)
+    space.setitem_str(w_counter_times, 'BACKEND', space.wrap(b_time))
+    return space.wrap(W_JitInfoSnapshot(space, w_times, w_counters,
+                                        w_counter_times))
+
+def enable_debug(space):
+    """ Enable JIT debugging. This is necessary for some stats to work,
+    most notably the assembler counters.
+    """
+    jit_hooks.stats_set_debug(None, True)
+
+def disable_debug(space):
+    """ Disable JIT debugging. This makes some very small loops marginally
+    faster, but the counters will stop working.
+    """
+    jit_hooks.stats_set_debug(None, False)
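
Since the compile hook now receives a single pypyjit.JitLoopInfo object instead
of the old six-argument call, a hook written against the new interface might
look like the following sketch (attribute names taken from the W_JitLoopInfo
TypeDef above):

    import pypyjit

    def compile_hook(info):
        # info is a pypyjit.JitLoopInfo instance
        print info.jitdriver_name, info.loop_no, len(info.operations)

    pypyjit.set_compile_hook(compile_hook)
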
diff --git a/pypy/module/pypyjit/policy.py b/pypy/module/pypyjit/policy.py
--- a/pypy/module/pypyjit/policy.py
+++ b/pypy/module/pypyjit/policy.py
@@ -1,10 +1,9 @@
 from pypy.jit.codewriter.policy import JitPolicy
-from pypy.rlib.jit import JitHookInterface
+from pypy.rlib.jit import JitHookInterface, Counters
 from pypy.rlib import jit_hooks
 from pypy.interpreter.error import OperationError
-from pypy.jit.metainterp.jitprof import counter_names
-from pypy.module.pypyjit.interp_resop import wrap_oplist, Cache, wrap_greenkey,\
-     WrappedOp
+from pypy.module.pypyjit.interp_resop import Cache, wrap_greenkey,\
+     WrappedOp, W_JitLoopInfo
 
 class PyPyJitIface(JitHookInterface):
     def on_abort(self, reason, jitdriver, greenkey, greenkey_repr):
@@ -20,75 +19,54 @@
                                         space.wrap(jitdriver.name),
                                         wrap_greenkey(space, jitdriver,
                                                       greenkey, greenkey_repr),
-                                        space.wrap(counter_names[reason]))
+                                        space.wrap(
+                                            Counters.counter_names[reason]))
                 except OperationError, e:
                     e.write_unraisable(space, "jit hook ", cache.w_abort_hook)
             finally:
                 cache.in_recursion = False
 
     def after_compile(self, debug_info):
-        w_greenkey = wrap_greenkey(self.space, debug_info.get_jitdriver(),
-                                   debug_info.greenkey,
-                                   debug_info.get_greenkey_repr())
-        self._compile_hook(debug_info, w_greenkey)
+        self._compile_hook(debug_info, is_bridge=False)
 
     def after_compile_bridge(self, debug_info):
-        self._compile_hook(debug_info,
-                           self.space.wrap(debug_info.fail_descr_no))
+        self._compile_hook(debug_info, is_bridge=True)
 
     def before_compile(self, debug_info):
-        w_greenkey = wrap_greenkey(self.space, debug_info.get_jitdriver(),
-                                   debug_info.greenkey,
-                                   debug_info.get_greenkey_repr())
-        self._optimize_hook(debug_info, w_greenkey)
+        self._optimize_hook(debug_info, is_bridge=False)
 
     def before_compile_bridge(self, debug_info):
-        self._optimize_hook(debug_info,
-                            self.space.wrap(debug_info.fail_descr_no))
+        self._optimize_hook(debug_info, is_bridge=True)
 
-    def _compile_hook(self, debug_info, w_arg):
+    def _compile_hook(self, debug_info, is_bridge):
         space = self.space
         cache = space.fromcache(Cache)
         if cache.in_recursion:
             return
         if space.is_true(cache.w_compile_hook):
-            logops = debug_info.logger._make_log_operations()
-            list_w = wrap_oplist(space, logops, debug_info.operations,
-                                 debug_info.asminfo.ops_offset)
+            w_debug_info = W_JitLoopInfo(space, debug_info, is_bridge)
             cache.in_recursion = True
             try:
                 try:
-                    jd_name = debug_info.get_jitdriver().name
-                    asminfo = debug_info.asminfo
                     space.call_function(cache.w_compile_hook,
-                                        space.wrap(jd_name),
-                                        space.wrap(debug_info.type),
-                                        w_arg,
-                                        space.newlist(list_w),
-                                        space.wrap(asminfo.asmaddr),
-                                        space.wrap(asminfo.asmlen))
+                                        space.wrap(w_debug_info))
                 except OperationError, e:
                     e.write_unraisable(space, "jit hook ", cache.w_compile_hook)
             finally:
                 cache.in_recursion = False
 
-    def _optimize_hook(self, debug_info, w_arg):
+    def _optimize_hook(self, debug_info, is_bridge=False):
         space = self.space
         cache = space.fromcache(Cache)
         if cache.in_recursion:
             return
         if space.is_true(cache.w_optimize_hook):
-            logops = debug_info.logger._make_log_operations()
-            list_w = wrap_oplist(space, logops, debug_info.operations)
+            w_debug_info = W_JitLoopInfo(space, debug_info, is_bridge)
             cache.in_recursion = True
             try:
                 try:
-                    jd_name = debug_info.get_jitdriver().name
                     w_res = space.call_function(cache.w_optimize_hook,
-                                                space.wrap(jd_name),
-                                                space.wrap(debug_info.type),
-                                                w_arg,
-                                                space.newlist(list_w))
+                                                space.wrap(w_debug_info))
                     if space.is_w(w_res, space.w_None):
                         return
                     l = []
diff --git a/pypy/module/pypyjit/test/test_jit_hook.py b/pypy/module/pypyjit/test/test_jit_hook.py
--- a/pypy/module/pypyjit/test/test_jit_hook.py
+++ b/pypy/module/pypyjit/test/test_jit_hook.py
@@ -14,8 +14,7 @@
 from pypy.module.pypyjit.policy import pypy_hooks
 from pypy.jit.tool.oparser import parse
 from pypy.jit.metainterp.typesystem import llhelper
-from pypy.jit.metainterp.jitprof import ABORT_TOO_LONG
-from pypy.rlib.jit import JitDebugInfo, AsmInfo
+from pypy.rlib.jit import JitDebugInfo, AsmInfo, Counters
 
 class MockJitDriverSD(object):
     class warmstate(object):
@@ -64,8 +63,10 @@
             if i != 1:
                offset[op] = i
 
-        di_loop = JitDebugInfo(MockJitDriverSD, logger, JitCellToken(),
-                               oplist, 'loop', greenkey)
+        token = JitCellToken()
+        token.number = 0
+        di_loop = JitDebugInfo(MockJitDriverSD, logger, token, oplist, 'loop',
+                   greenkey)
         di_loop_optimize = JitDebugInfo(MockJitDriverSD, logger, JitCellToken(),
                                         oplist, 'loop', greenkey)
         di_loop.asminfo = AsmInfo(offset, 0, 0)
@@ -85,8 +86,8 @@
             pypy_hooks.before_compile(di_loop_optimize)
 
         def interp_on_abort():
-            pypy_hooks.on_abort(ABORT_TOO_LONG, pypyjitdriver, greenkey,
-                                'blah')
+            pypy_hooks.on_abort(Counters.ABORT_TOO_LONG, pypyjitdriver,
+                                greenkey, 'blah')
 
         cls.w_on_compile = space.wrap(interp2app(interp_on_compile))
         cls.w_on_compile_bridge = space.wrap(interp2app(interp_on_compile_bridge))
@@ -95,6 +96,7 @@
         cls.w_dmp_num = space.wrap(rop.DEBUG_MERGE_POINT)
         cls.w_on_optimize = space.wrap(interp2app(interp_on_optimize))
         cls.orig_oplist = oplist
+        cls.w_sorted_keys = space.wrap(sorted(Counters.counter_names))
 
     def setup_method(self, meth):
         self.__class__.oplist = self.orig_oplist[:]
@@ -103,22 +105,23 @@
         import pypyjit
         all = []
 
-        def hook(name, looptype, tuple_or_guard_no, ops, asmstart, asmlen):
-            all.append((name, looptype, tuple_or_guard_no, ops))
+        def hook(info):
+            all.append(info)
 
         self.on_compile()
         pypyjit.set_compile_hook(hook)
         assert not all
         self.on_compile()
         assert len(all) == 1
-        elem = all[0]
-        assert elem[0] == 'pypyjit'
-        assert elem[2][0].co_name == 'function'
-        assert elem[2][1] == 0
-        assert elem[2][2] == False
-        assert len(elem[3]) == 4
-        int_add = elem[3][0]
-        dmp = elem[3][1]
+        info = all[0]
+        assert info.jitdriver_name == 'pypyjit'
+        assert info.greenkey[0].co_name == 'function'
+        assert info.greenkey[1] == 0
+        assert info.greenkey[2] == False
+        assert info.loop_no == 0
+        assert len(info.operations) == 4
+        int_add = info.operations[0]
+        dmp = info.operations[1]
         assert isinstance(dmp, pypyjit.DebugMergePoint)
         assert dmp.pycode is self.f.func_code
         assert dmp.greenkey == (self.f.func_code, 0, False)
@@ -127,6 +130,8 @@
         assert int_add.name == 'int_add'
         assert int_add.num == self.int_add_num
         self.on_compile_bridge()
+        code_repr = "(<code object function, file '?', line 2>, 0, False)"
+        assert repr(all[0]) == '<JitLoopInfo pypyjit, 4 operations, starting at <%s>>' % code_repr
         assert len(all) == 2
         pypyjit.set_compile_hook(None)
         self.on_compile()
@@ -168,12 +173,12 @@
         import pypyjit
         l = []
 
-        def hook(*args):
-            l.append(args)
+        def hook(info):
+            l.append(info)
 
         pypyjit.set_compile_hook(hook)
         self.on_compile()
-        op = l[0][3][1]
+        op = l[0].operations[1]
         assert isinstance(op, pypyjit.ResOperation)
         assert 'function' in repr(op)
 
@@ -192,17 +197,17 @@
         import pypyjit
         l = []
 
-        def hook(name, looptype, tuple_or_guard_no, ops, *args):
-            l.append(ops)
+        def hook(info):
+            l.append(info.jitdriver_name)
 
-        def optimize_hook(name, looptype, tuple_or_guard_no, ops):
+        def optimize_hook(info):
             return []
 
         pypyjit.set_compile_hook(hook)
         pypyjit.set_optimize_hook(optimize_hook)
         self.on_optimize()
         self.on_compile()
-        assert l == [[]]
+        assert l == ['pypyjit']
 
     def test_creation(self):
         from pypyjit import Box, ResOperation
@@ -236,3 +241,13 @@
         op = DebugMergePoint([Box(0)], 'repr', 'notmain', 5, 4, ('str',))
         raises(AttributeError, 'op.pycode')
         assert op.call_depth == 5
+
+    def test_get_stats_snapshot(self):
+        skip("no good idea yet how to test this")
+        from pypyjit import get_stats_snapshot
+
+        stats = get_stats_snapshot() # we can't do much here, unfortunately
+        assert stats.w_loop_run_times == []
+        assert isinstance(stats.w_counters, dict)
+        assert sorted(stats.w_counters.keys()) == self.sorted_keys
+
diff --git a/pypy/module/test_lib_pypy/test_distributed/__init__.py b/pypy/module/test_lib_pypy/test_distributed/__init__.py
deleted file mode 100644
diff --git a/pypy/module/test_lib_pypy/test_distributed/test_distributed.py b/pypy/module/test_lib_pypy/test_distributed/test_distributed.py
deleted file mode 100644
--- a/pypy/module/test_lib_pypy/test_distributed/test_distributed.py
+++ /dev/null
@@ -1,305 +0,0 @@
-import py; py.test.skip("xxx remove")
-
-""" Controllers tests
-"""
-
-from pypy.conftest import gettestobjspace
-import sys
-
-class AppTestDistributed(object):
-    def setup_class(cls):
-        cls.space = gettestobjspace(**{"objspace.std.withtproxy": True,
-            "usemodules":("_continuation",)})
-
-    def test_init(self):
-        import distributed
-
-    def test_protocol(self):
-        from distributed.protocol import AbstractProtocol
-        protocol = AbstractProtocol()
-        for item in ("aaa", 3, u"aa", 344444444444444444L, 1.2, (1, "aa")):
-            assert protocol.unwrap(protocol.wrap(item)) == item
-        assert type(protocol.unwrap(protocol.wrap([1,2,3]))) is list
-        assert type(protocol.unwrap(protocol.wrap({"a":3}))) is dict
-        
-        def f():
-            pass
-        
-        assert type(protocol.unwrap(protocol.wrap(f))) is type(f)
-
-    def test_method_of_false_obj(self):
-        from distributed.protocol import AbstractProtocol
-        protocol = AbstractProtocol()
-        lst = []
-        m = lst.append
-        assert type(protocol.unwrap(protocol.wrap(m))) is type(m)
-
-    def test_protocol_run(self):
-        l = [1,2,3]
-        from distributed.protocol import LocalProtocol
-        protocol = LocalProtocol()
-        wrap = protocol.wrap
-        unwrap = protocol.unwrap
-        item = unwrap(wrap(l))
-        assert len(item) == 3
-        assert item[2] == 3
-        item += [1,1,1]
-        assert len(item) == 6
-
-    def test_protocol_call(self):
-        def f(x, y):
-            return x + y
-        
-        from distributed.protocol import LocalProtocol
-        protocol = LocalProtocol()
-        wrap = protocol.wrap
-        unwrap = protocol.unwrap
-        item = unwrap(wrap(f))
-        assert item(3, 2) == 5
-
-    def test_simulation_call(self):
-        def f(x, y):
-            return x + y
-        
-        import types
-        from distributed import RemoteProtocol
-        import sys
-
-        data = []
-        result = []
-        protocol = RemoteProtocol(result.append, data.pop)
-        data += [("finished", protocol.wrap(5)), ("finished", protocol.wrap(f))]
-        fun = protocol.get_remote("f")
-        assert isinstance(fun, types.FunctionType)
-        assert fun(2, 3) == 5
-
-    def test_local_obj(self):
-        class A(object):
-            def __init__(self, x):
-                self.x = x
-            
-            def __len__(self):
-                return self.x + 8
-        
-        from distributed.protocol import LocalProtocol
-        protocol = LocalProtocol()
-        wrap = protocol.wrap
-        unwrap = protocol.unwrap
-        item = unwrap(wrap(A(3)))
-        assert item.x == 3
-        assert len(item) == 11
-
-class AppTestDistributedTasklets(object):
-    spaceconfig = {"objspace.std.withtproxy": True,
-                   "objspace.usemodules._continuation": True}
-    reclimit = sys.getrecursionlimit()
-
-    def setup_class(cls):
-        import py.test
-        py.test.importorskip('greenlet')
-        cls.w_test_env_ = cls.space.appexec([], """():
-        from distributed import test_env
-        return (test_env,)
-        """)
-        sys.setrecursionlimit(100000)
-
-    def teardown_class(cls):
-        sys.setrecursionlimit(cls.reclimit)
-
-    def test_remote_protocol_call(self):
-        def f(x, y):
-            return x + y
-        
-        protocol = self.test_env_[0]({"f": f})
-        fun = protocol.get_remote("f")
-        assert fun(2, 3) == 5
-
-    def test_callback(self):
-        def g():
-            return 8
-        
-        def f(x):
-            return x + g()
-        
-        protocol = self.test_env_[0]({"f":f})
-        fun = protocol.get_remote("f")
-        assert fun(8) == 16
-    
-    def test_remote_dict(self):
-        #skip("Land of infinite recursion")
-        d = {'a':3}
-        protocol = self.test_env_[0]({'d':d})
-        xd = protocol.get_remote('d')
-        #assert d['a'] == xd['a']
-        assert d.keys() == xd.keys()
-        assert d.values() == xd.values()
-        assert d == xd
-        
-    def test_remote_obj(self):
-        class A(object):
-            def __init__(self, x):
-                self.x = x
-            
-            def __len__(self):
-                return self.x + 8
-        a = A(3)
-        
-        protocol = self.test_env_[0]({'a':a})
-        xa = protocol.get_remote("a")
-        assert xa.x == 3
-        assert len(xa) == 11
-    
-    def test_remote_doc_and_callback(self):
-        class A(object):
-            """xxx"""
-            def __init__(self):
-                pass
-
-            def meth(self, x):
-                return x() + 3
-        
-        def x():
-            return 1
-        
-        a = A()
-        
-        protocol = self.test_env_[0]({'a':a})
-        xa = protocol.get_remote('a')
-        assert xa.__class__.__doc__ == 'xxx'
-        assert xa.meth(x) == 4
-
-    def test_double_reference(self):
-        class A(object):
-            def meth(self, one):
-                self.one = one
-            
-            def perform(self):
-                return 1 + len(self.one())
-        
-        class B(object):
-            def __call__(self):
-                return [1,2,3]
-        
-        a = A()
-        protocol = self.test_env_[0]({'a': a})
-        xa = protocol.get_remote('a')
-        xa.meth(B())
-        assert xa.perform() == 4
-
-    def test_frame(self):
-        #skip("Land of infinite recursion")
-        import sys
-        f = sys._getframe()
-        protocol = self.test_env_[0]({'f':f})
-        xf = protocol.get_remote('f')
-        assert f.f_globals.keys() == xf.f_globals.keys()
-        assert f.f_locals.keys() == xf.f_locals.keys()
-
-    def test_remote_exception(self):
-        def raising():
-            1/0
-        
-        protocol = self.test_env_[0]({'raising':raising})
-        xr = protocol.get_remote('raising')
-        try:
-            xr()
-        except ZeroDivisionError:
-            import sys
-            exc_info, val, tb  = sys.exc_info()
-            #assert tb.tb_next is None
-        else:
-            raise AssertionError("Did not raise")
-
-    def test_remote_classmethod(self):
-        class A(object):
-            z = 8
-
-            @classmethod
-            def x(cls):
-                return cls.z
-
-        a = A()
-        protocol = self.test_env_[0]({'a':a})
-        xa = protocol.get_remote("a")
-        res = xa.x()
-        assert res == 8
-
-    def test_types_reverse_mapping(self):
-        class A(object):
-            def m(self, tp):
-                assert type(self) is tp
-
-        a = A()
-        protocol = self.test_env_[0]({'a':a, 'A':A})
-        xa = protocol.get_remote('a')
-        xA = protocol.get_remote('A')
-        xa.m(xA)
-
-    def test_instantiate_remote_type(self):
-        class C(object):
-            def __init__(self, y):
-                self.y = y
-            
-            def x(self):
-                return self.y
-
-        protocol = self.test_env_[0]({'C':C})
-        xC = protocol.get_remote('C')
-        xc = xC(3)
-        res = xc.x()
-        assert res == 3
-
-    def test_remote_sys(self):
-        skip("Fix me some day maybe")
-        import sys
-
-        protocol = self.test_env_[0]({'sys':sys})
-        s = protocol.get_remote('sys')
-        l = dir(s)
-        assert l
-
-    def test_remote_file_access(self):
-        skip("Descriptor logic seems broken")
-        protocol = self.test_env_[0]({'f':open})
-        xf = protocol.get_remote('f')
-        data = xf('/etc/passwd').read()
-        assert data
-
-    def test_real_descriptor(self):
-        class getdesc(object):
-            def __get__(self, obj, val=None):
-                if obj is not None:
-                    assert type(obj) is X
-                return 3
-
-        class X(object):
-            x = getdesc()
-
-        x = X()
-
-        protocol = self.test_env_[0]({'x':x})
-        xx = protocol.get_remote('x')
-        assert xx.x == 3
-    
-    def test_bases(self):
-        class X(object):
-            pass
-
-        class Y(X):
-            pass
-
-        y = Y()
-        protocol = self.test_env_[0]({'y':y, 'X':X})
-        xy = protocol.get_remote('y')
-        xX = protocol.get_remote('X')
-        assert isinstance(xy, xX)
-
-    def test_key_error(self):
-        from distributed import ObjectNotFound
-        protocol = self.test_env_[0]({})
-        raises(ObjectNotFound, "protocol.get_remote('x')")
-
-    def test_list_items(self):
-        protocol = self.test_env_[0]({'x':3, 'y':8})
-        assert sorted(protocol.remote_keys()) == ['x', 'y']
-
diff --git a/pypy/module/test_lib_pypy/test_distributed/test_greensock.py b/pypy/module/test_lib_pypy/test_distributed/test_greensock.py
deleted file mode 100644
--- a/pypy/module/test_lib_pypy/test_distributed/test_greensock.py
+++ /dev/null
@@ -1,61 +0,0 @@
-import py; py.test.skip("xxx remove")
-from pypy.conftest import gettestobjspace, option
-
-def setup_module(mod):
-    py.test.importorskip("pygreen")   # found e.g. in py/trunk/contrib 
-
-class AppTestDistributedGreensock(object):
-    def setup_class(cls):
-        if not option.runappdirect:
-            py.test.skip("Cannot run this on top of py.py because of PopenGateway")
-        cls.space = gettestobjspace(**{"objspace.std.withtproxy": True,
-                                       "usemodules":("_continuation",)})
-        cls.w_remote_side_code = cls.space.appexec([], """():
-        import sys
-        sys.path.insert(0, '%s')
-        remote_side_code = '''
-class A:
-   def __init__(self, x):
-       self.x = x
-            
-   def __len__(self):
-       return self.x + 8
-
-   def raising(self):
-       1/0
-
-   def method(self, x):
-       return x() + self.x
-
-a = A(3)
-
-def count():
-    x = 10
-    # naive counting :)
-    result = 1
-    for i in range(x):
-        result += 1
-    return result
-'''
-        return remote_side_code
-        """ % str(py.path.local(__file__).dirpath().dirpath().dirpath().dirpath()))
-
-    def test_remote_call(self):
-        from distributed import socklayer
-        import sys
-        from pygreen.greenexecnet import PopenGateway
-        gw = PopenGateway()
-        rp = socklayer.spawn_remote_side(self.remote_side_code, gw)
-        a = rp.get_remote("a")
-        assert a.method(lambda : 13) == 16
-    
-    def test_remote_counting(self):
-        from distributed import socklayer
-        from pygreen.greensock2 import allof
-        from pygreen.greenexecnet import PopenGateway
-        gws = [PopenGateway() for i in range(3)]
-        rps = [socklayer.spawn_remote_side(self.remote_side_code, gw)
-               for gw in gws]
-        counters = [rp.get_remote("count") for rp in rps]
-        assert allof(*counters) == (11, 11, 11)
-
diff --git a/pypy/module/test_lib_pypy/test_distributed/test_socklayer.py b/pypy/module/test_lib_pypy/test_distributed/test_socklayer.py
deleted file mode 100644
--- a/pypy/module/test_lib_pypy/test_distributed/test_socklayer.py
+++ /dev/null
@@ -1,36 +0,0 @@
-import py; py.test.skip("xxx remove")
-from pypy.conftest import gettestobjspace
-
-def setup_module(mod):
-    py.test.importorskip("pygreen")   # found e.g. in py/trunk/contrib 
-
-# XXX think how to close the socket
-
-class AppTestSocklayer:
-    def setup_class(cls):
-        cls.space = gettestobjspace(**{"objspace.std.withtproxy": True,
-                                       "usemodules":("_continuation",
-                                                     "_socket", "select")})
-    
-    def test_socklayer(self):
-        class X(object):
-            z = 3
-
-        x = X()
-
-        try:
-            import py
-        except ImportError:
-            skip("pylib not importable")
-        from pygreen.pipe.gsocke import GreenSocket
-        from distributed.socklayer import socket_loop, connect
-        from pygreen.greensock2 import oneof, allof
-
-        def one():
-            socket_loop(('127.0.0.1', 21211), {'x':x}, socket=GreenSocket)
-
-        def two():
-            rp = connect(('127.0.0.1', 21211), GreenSocket)
-            assert rp.x.z == 3
-
-        oneof(one, two)
diff --git a/pypy/objspace/std/strutil.py b/pypy/objspace/std/strutil.py
--- a/pypy/objspace/std/strutil.py
+++ b/pypy/objspace/std/strutil.py
@@ -185,4 +185,4 @@
     try:
         return rstring_to_float(s)
     except ValueError:
-        raise ParseStringError("invalid literal for float()")
+        raise ParseStringError("invalid literal for float(): '%s'" % s)
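For illustration, the effect of this one-line change is that the parse error now carries the rejected
literal. A rough sketch of the expected behaviour, assuming the surrounding helper in strutil.py is
reachable as string_to_float (the enclosing function name is not shown in this hunk) and that
ParseStringError stringifies to its message:

    # hypothetical usage sketch -- not part of the commit
    from pypy.objspace.std.strutil import string_to_float, ParseStringError

    try:
        string_to_float("abcdef")
    except ParseStringError, e:
        # the message now embeds the offending literal
        assert "invalid literal for float(): 'abcdef'" in str(e)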
diff --git a/pypy/objspace/std/test/test_floatobject.py b/pypy/objspace/std/test/test_floatobject.py
--- a/pypy/objspace/std/test/test_floatobject.py
+++ b/pypy/objspace/std/test/test_floatobject.py
@@ -441,6 +441,13 @@
         b = A(5).real
         assert type(b) is float
 
+    def test_invalid_literal_message(self):
+        try:
+            float('abcdef')
+        except ValueError, e:
+            assert 'abcdef' in e.message
+        else:
+            assert False, 'did not raise'
 
 class AppTestFloatHex:
     def w_identical(self, x, y):
diff --git a/pypy/objspace/std/test/test_methodcache.py b/pypy/objspace/std/test/test_methodcache.py
--- a/pypy/objspace/std/test/test_methodcache.py
+++ b/pypy/objspace/std/test/test_methodcache.py
@@ -1,8 +1,8 @@
 from pypy.conftest import gettestobjspace
-from pypy.objspace.std.test.test_typeobject import AppTestTypeObject
+from pypy.objspace.std.test import test_typeobject
 
 
-class AppTestMethodCaching(AppTestTypeObject):
+class AppTestMethodCaching(test_typeobject.AppTestTypeObject):
     def setup_class(cls):
         cls.space = gettestobjspace(
             **{"objspace.std.withmethodcachecounter": True})
diff --git a/pypy/rlib/jit.py b/pypy/rlib/jit.py
--- a/pypy/rlib/jit.py
+++ b/pypy/rlib/jit.py
@@ -600,7 +600,6 @@
                 raise ValueError
 set_user_param._annspecialcase_ = 'specialize:arg(0)'
 
-
 # ____________________________________________________________
 #
 # Annotation and rtyping of some of the JitDriver methods
@@ -901,11 +900,6 @@
         instance, overwrite for custom behavior
         """
 
-    def get_stats(self):
-        """ Returns various statistics
-        """
-        raise NotImplementedError
-
 def record_known_class(value, cls):
     """
     Assure the JIT that value is an instance of cls. This is not a precise
@@ -932,3 +926,39 @@
         v_cls = hop.inputarg(classrepr, arg=1)
         return hop.genop('jit_record_known_class', [v_inst, v_cls],
                          resulttype=lltype.Void)
+
+class Counters(object):
+    counters="""
+    TRACING
+    BACKEND
+    OPS
+    RECORDED_OPS
+    GUARDS
+    OPT_OPS
+    OPT_GUARDS
+    OPT_FORCINGS
+    ABORT_TOO_LONG
+    ABORT_BRIDGE
+    ABORT_BAD_LOOP
+    ABORT_ESCAPE
+    ABORT_FORCE_QUASIIMMUT
+    NVIRTUALS
+    NVHOLES
+    NVREUSED
+    TOTAL_COMPILED_LOOPS
+    TOTAL_COMPILED_BRIDGES
+    TOTAL_FREED_LOOPS
+    TOTAL_FREED_BRIDGES
+    """
+
+    counter_names = []
+
+    @staticmethod
+    def _setup():
+        names = Counters.counters.split()
+        for i, name in enumerate(names):
+            setattr(Counters, name, i)
+            Counters.counter_names.append(name)
+        Counters.ncounters = len(names)
+
+Counters._setup()
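The new Counters class is a small enum substitute: _setup() walks the whitespace-separated names and
turns each one into a class attribute holding its index, with a parallel counter_names list. A minimal
sketch of what that yields after import (illustrative only, using just the attributes defined above):

    from pypy.rlib.jit import Counters

    # _setup() already ran at import time
    assert Counters.TRACING == 0                       # first name -> index 0
    assert Counters.BACKEND == 1
    assert Counters.counter_names[Counters.GUARDS] == 'GUARDS'
    assert Counters.ncounters == len(Counters.counter_names)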
diff --git a/pypy/rlib/jit_hooks.py b/pypy/rlib/jit_hooks.py
--- a/pypy/rlib/jit_hooks.py
+++ b/pypy/rlib/jit_hooks.py
@@ -13,7 +13,10 @@
             _about_ = helper
 
             def compute_result_annotation(self, *args):
-                return s_result
+                if (isinstance(s_result, annmodel.SomeObject) or
+                    s_result is None):
+                    return s_result
+                return annmodel.lltype_to_annotation(s_result)
 
             def specialize_call(self, hop):
                 from pypy.rpython.lltypesystem import lltype
@@ -108,3 +111,26 @@
 def box_isconst(llbox):
     from pypy.jit.metainterp.history import Const
     return isinstance(_cast_to_box(llbox), Const)
+
+# ------------------------- stats interface ---------------------------
+
+@register_helper(annmodel.SomeBool())
+def stats_set_debug(warmrunnerdesc, flag):
+    return warmrunnerdesc.metainterp_sd.cpu.set_debug(flag)
+
+@register_helper(annmodel.SomeInteger())
+def stats_get_counter_value(warmrunnerdesc, no):
+    return warmrunnerdesc.metainterp_sd.profiler.get_counter(no)
+
+@register_helper(annmodel.SomeFloat())
+def stats_get_times_value(warmrunnerdesc, no):
+    return warmrunnerdesc.metainterp_sd.profiler.times[no]
+
+LOOP_RUN_CONTAINER = lltype.GcArray(lltype.Struct('elem',
+                                                  ('type', lltype.Char),
+                                                  ('number', lltype.Signed),
+                                                  ('counter', lltype.Signed)))
+
+@register_helper(lltype.Ptr(LOOP_RUN_CONTAINER))
+def stats_get_loop_run_times(warmrunnerdesc):
+    return warmrunnerdesc.metainterp_sd.cpu.get_all_loop_runs()
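Together with the Counters table added to pypy/rlib/jit.py above, these helpers expose the JIT profiler
counters by index. A hedged sketch of how a caller might combine the two, assuming the helpers stay
directly callable on a warmrunnerdesc when running untranslated (this is not code from the commit):

    from pypy.rlib import jit_hooks
    from pypy.rlib.jit import Counters

    def dump_counters(warmrunnerdesc):
        # walk the counter indices and print each raw value
        for no in range(Counters.ncounters):
            name = Counters.counter_names[no]
            value = jit_hooks.stats_get_counter_value(warmrunnerdesc, no)
            print name, value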
diff --git a/pypy/rlib/objectmodel.py b/pypy/rlib/objectmodel.py
--- a/pypy/rlib/objectmodel.py
+++ b/pypy/rlib/objectmodel.py
@@ -3,9 +3,11 @@
 RPython-compliant way.
 """
 
+import py
 import sys
 import types
 import math
+import inspect
 
 # specialize is a decorator factory for attaching _annspecialcase_
 # attributes to functions: for example
@@ -106,15 +108,68 @@
 
 specialize = _Specialize()
 
-def enforceargs(*args):
+def enforceargs(*types, **kwds):
     """ Decorate a function with forcing of RPython-level types on arguments.
     None means no enforcing.
 
-    XXX shouldn't we also add asserts in function body?
+    When not translated, the type of the actual arguments are checked against
+    the enforced types every time the function is called. You can disable the
+    typechecking by passing ``typecheck=False`` to @enforceargs.
     """
+    typecheck = kwds.pop('typecheck', True)
+    if kwds:
+        raise TypeError, 'got an unexpected keyword argument: %s' % kwds.keys()
+    if not typecheck:
+        def decorator(f):
+            f._annenforceargs_ = types
+            return f
+        return decorator
+    #
+    from pypy.annotation.signature import annotationoftype
+    from pypy.annotation.model import SomeObject
     def decorator(f):
-        f._annenforceargs_ = args
-        return f
+        def get_annotation(t):
+            if isinstance(t, SomeObject):
+                return t
+            return annotationoftype(t)
+        def typecheck(*args):
+            for i, (expected_type, arg) in enumerate(zip(types, args)):
+                if expected_type is None:
+                    continue
+                s_expected = get_annotation(expected_type)
+                s_argtype = get_annotation(type(arg))
+                if not s_expected.contains(s_argtype):
+                    msg = "%s argument number %d must be of type %s" % (
+                        f.func_name, i+1, expected_type)
+                    raise TypeError, msg
+        #
+        # we cannot simply wrap the function using *args, **kwds, because it's
+        # not RPython. Instead, we generate a function with exactly the same
+        # argument list
+        argspec = inspect.getargspec(f)
+        assert len(argspec.args) == len(types), (
+            'not enough types provided: expected %d, got %d' %
+            (len(types), len(argspec.args)))
+        assert not argspec.varargs, '*args not supported by enforceargs'
+        assert not argspec.keywords, '**kwargs not supported by enforceargs'
+        #
+        arglist = ', '.join(argspec.args)
+        src = py.code.Source("""
+            def {name}({arglist}):
+                if not we_are_translated():
+                    typecheck({arglist})
+                return {name}_original({arglist})
+        """.format(name=f.func_name, arglist=arglist))
+        #
+        mydict = {f.func_name + '_original': f,
+                  'typecheck': typecheck,
+                  'we_are_translated': we_are_translated}
+        exec src.compile() in mydict
+        result = mydict[f.func_name]
+        result.func_defaults = f.func_defaults
+        result.func_dict.update(f.func_dict)
+        result._annenforceargs_ = types
+        return result
     return decorator
 
 # ____________________________________________________________
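A short usage sketch of the rewritten decorator, mirroring the tests further down in this diff: when
running untranslated the argument types are checked on every call, None skips a position, and
typecheck=False restores the old behaviour of only recording _annenforceargs_.

    from pypy.rlib.objectmodel import enforceargs

    @enforceargs(int, str, None)           # None: do not check this argument
    def greet(n, name, anything):
        return '%s x%d' % (name, n)

    greet(3, 'hello', object())            # passes the untranslated typecheck
    try:
        greet('3', 'hello', None)          # first argument is not an int
    except TypeError:
        pass

    @enforceargs(int, typecheck=False)     # annotation hint only, no wrapper
    def ident(x):
        return x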
diff --git a/pypy/rlib/rgc.py b/pypy/rlib/rgc.py
--- a/pypy/rlib/rgc.py
+++ b/pypy/rlib/rgc.py
@@ -138,8 +138,8 @@
         return hop.genop(opname, vlist, resulttype = hop.r_result.lowleveltype)
 
 @jit.oopspec('list.ll_arraycopy(source, dest, source_start, dest_start, length)')
+@enforceargs(None, None, int, int, int)
 @specialize.ll()
- at enforceargs(None, None, int, int, int)
 def ll_arraycopy(source, dest, source_start, dest_start, length):
     from pypy.rpython.lltypesystem.lloperation import llop
     from pypy.rlib.objectmodel import keepalive_until_here
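The only change to ll_arraycopy is the decorator order. Python applies decorators bottom-up, so
@specialize.ll() now runs on the plain function and @enforceargs (which, after the objectmodel.py change
above, may return a generated wrapper) wraps the already-specialized one. A generic illustration of that
evaluation order, unrelated to the real decorators:

    applied = []

    def deco(tag):
        def wrap(fn):
            applied.append(tag)   # record when this decorator runs
            return fn
        return wrap

    @deco('outer')                # applied last
    @deco('inner')                # applied first
    def g():
        pass

    assert applied == ['inner', 'outer']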
diff --git a/pypy/rlib/rsre/rpy.py b/pypy/rlib/rsre/rpy.py
--- a/pypy/rlib/rsre/rpy.py
+++ b/pypy/rlib/rsre/rpy.py
@@ -1,6 +1,7 @@
 
 from pypy.rlib.rsre import rsre_char
 from pypy.rlib.rsre.rsre_core import match
+from pypy.rlib.rarithmetic import intmask
 
 def get_hacked_sre_compile(my_compile):
     """Return a copy of the sre_compile module for which the _sre
@@ -33,7 +34,7 @@
 class GotIt(Exception):
     pass
 def my_compile(pattern, flags, code, *args):
-    raise GotIt(code, flags, args)
+    raise GotIt([intmask(i) for i in code], flags, args)
 sre_compile_hacked = get_hacked_sre_compile(my_compile)
 
 def get_code(regexp, flags=0, allargs=False):
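intmask() from pypy.rlib.rarithmetic folds an arbitrary Python integer into the signed machine-word
range, so the captured opcode list contains only plain machine-sized ints; presumably that is why it is
applied here to every element of code. A rough pure-Python sketch of the semantics, for illustration only
(the real helper uses the host's word size):

    def intmask_sketch(n, bits=64):
        # wrap n into [-2**(bits-1), 2**(bits-1) - 1], like a C signed word
        n &= (1 << bits) - 1
        if n >= 1 << (bits - 1):
            n -= 1 << bits
        return n

    assert intmask_sketch(5) == 5
    assert intmask_sketch(2**63) == -2**63        # top bit set -> negative
    assert intmask_sketch(2**64 - 1) == -1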
diff --git a/pypy/rlib/test/test_objectmodel.py b/pypy/rlib/test/test_objectmodel.py
--- a/pypy/rlib/test/test_objectmodel.py
+++ b/pypy/rlib/test/test_objectmodel.py
@@ -420,9 +420,45 @@
 def test_enforceargs_decorator():
     @enforceargs(int, str, None)
     def f(a, b, c):
-        pass
+        return a, b, c
+    f.foo = 'foo'
+    assert f._annenforceargs_ == (int, str, None)
+    assert f.func_name == 'f'
+    assert f.foo == 'foo'
+    assert f(1, 'hello', 42) == (1, 'hello', 42)
+    exc = py.test.raises(TypeError, "f(1, 2, 3)")
+    assert exc.value.message == "f argument number 2 must be of type <type 'str'>"
+    py.test.raises(TypeError, "f('hello', 'world', 3)")
+    
 
+def test_enforceargs_defaults():
+    @enforceargs(int, int)
+    def f(a, b=40):
+        return a+b
+    assert f(2) == 42
+
+def test_enforceargs_int_float_promotion():
+    @enforceargs(float)
+    def f(x):
+        return x
+    # in RPython there is an implicit int->float promotion
+    assert f(42) == 42
+
+def test_enforceargs_no_typecheck():
+    @enforceargs(int, str, None, typecheck=False)
+    def f(a, b, c):
+        return a, b, c
     assert f._annenforceargs_ == (int, str, None)
+    assert f(1, 2, 3) == (1, 2, 3) # no typecheck
+
+def test_enforceargs_translates():
+    from pypy.rpython.lltypesystem import lltype
+    @enforceargs(int, float)
+    def f(a, b):
+        return a, b
+    graph = getgraph(f, [int, int])
+    TYPES = [v.concretetype for v in graph.getargs()]
+    assert TYPES == [lltype.Signed, lltype.Float]
 
 def getgraph(f, argtypes):
     from pypy.translator.translator import TranslationContext, graphof
diff --git a/pypy/rpython/annlowlevel.py b/pypy/rpython/annlowlevel.py
--- a/pypy/rpython/annlowlevel.py
+++ b/pypy/rpython/annlowlevel.py
@@ -12,6 +12,7 @@
 from pypy.rpython import extregistry
 from pypy.objspace.flow.model import Constant
 from pypy.translator.simplify import get_functype
+from pypy.rpython.rmodel import warning
 
 class KeyComp(object):
     def __init__(self, val):
@@ -483,6 +484,8 @@
     """NOT_RPYTHON: hack. The object may be disguised as a PTR now.
     Limited to casting a given object to a single type.
     """
+    if hasattr(object, '_freeze_'):
+        warning("Trying to cast a frozen object to pointer")
     if isinstance(PTR, lltype.Ptr):
         TO = PTR.TO
     else:
diff --git a/pypy/rpython/lltypesystem/rstr.py b/pypy/rpython/lltypesystem/rstr.py
--- a/pypy/rpython/lltypesystem/rstr.py
+++ b/pypy/rpython/lltypesystem/rstr.py
@@ -1,5 +1,6 @@
 from weakref import WeakValueDictionary
 from pypy.tool.pairtype import pairtype
+from pypy.annotation import model as annmodel
 from pypy.rpython.error import TyperError
 from pypy.rlib.objectmodel import malloc_zero_filled, we_are_translated
 from pypy.rlib.objectmodel import _hash_string, enforceargs
@@ -169,6 +170,13 @@
         return result
 
     @jit.elidable
+    def ll_unicode(self, s):
+        if s:
+            return s
+        else:
+            return self.ll.ll_constant(u'None')
+
+    @jit.elidable
     def ll_encode_latin1(self, s):
         length = len(s.chars)
         result = mallocstr(length)
@@ -956,19 +964,29 @@
         return LLHelpers.ll_join_strs(len(builder), builder)
 
     def ll_constant(s):
-        return string_repr.convert_const(s)
+        if isinstance(s, str):
+            return string_repr.convert_const(s)
+        elif isinstance(s, unicode):
+            return unicode_repr.convert_const(s)
+        else:
+            assert False
     ll_constant._annspecialcase_ = 'specialize:memo'
 
     def do_stringformat(cls, hop, sourcevarsrepr):
         s_str = hop.args_s[0]
         assert s_str.is_constant()
+        is_unicode = isinstance(s_str, annmodel.SomeUnicodeString)
+        if is_unicode:
+            TEMPBUF = TEMP_UNICODE
+        else:
+            TEMPBUF = TEMP
         s = s_str.const
         things = cls.parse_fmt_string(s)
         size = inputconst(Signed, len(things)) # could be unsigned?
-        cTEMP = inputconst(Void, TEMP)
+        cTEMP = inputconst(Void, TEMPBUF)
         cflags = inputconst(Void, {'flavor': 'gc'})
         vtemp = hop.genop("malloc_varsize", [cTEMP, cflags, size],
-                          resulttype=Ptr(TEMP))
+                          resulttype=Ptr(TEMPBUF))
 
         argsiter = iter(sourcevarsrepr)
 
@@ -979,7 +997,13 @@
                 vitem, r_arg = argsiter.next()
                 if not hasattr(r_arg, 'll_str'):
                     raise TyperError("ll_str unsupported for: %r" % r_arg)
-                if code == 's' or (code == 'r' and isinstance(r_arg, InstanceRepr)):
+                if code == 's':
+                    if is_unicode:
+                        # only UniCharRepr and UnicodeRepr has it so far
+                        vchunk = hop.gendirectcall(r_arg.ll_unicode, vitem)
+                    else:
+                        vchunk = hop.gendirectcall(r_arg.ll_str, vitem)
+                elif code == 'r' and isinstance(r_arg, InstanceRepr):
                     vchunk = hop.gendirectcall(r_arg.ll_str, vitem)
                 elif code == 'd':
                     assert isinstance(r_arg, IntegerRepr)
@@ -999,9 +1023,17 @@
                 else:
                     raise TyperError, "%%%s is not RPython" % (code, )
             else:
-                from pypy.rpython.lltypesystem.rstr import string_repr
-                vchunk = inputconst(string_repr, thing)
+                from pypy.rpython.lltypesystem.rstr import string_repr, unicode_repr
+                if is_unicode:
+                    vchunk = inputconst(unicode_repr, thing)
+                else:
+                    vchunk = inputconst(string_repr, thing)
             i = inputconst(Signed, i)
+            if is_unicode and vchunk.concretetype != Ptr(UNICODE):
+                # if we are here, one of the ll_str.* functions returned some
+                # STR, so we convert it to unicode. It's a bit suboptimal
+                # because we do one extra copy.
+                vchunk = hop.gendirectcall(cls.ll_str2unicode, vchunk)
             hop.genop('setarrayitem', [vtemp, i, vchunk])
 
         hop.exception_cannot_occur()   # to ignore the ZeroDivisionError of '%'
@@ -1009,6 +1041,7 @@
     do_stringformat = classmethod(do_stringformat)
 
 TEMP = GcArray(Ptr(STR))
+TEMP_UNICODE = GcArray(Ptr(UNICODE))
 
 # ____________________________________________________________
 
diff --git a/pypy/rpython/ootypesystem/ooregistry.py b/pypy/rpython/ootypesystem/ooregistry.py
--- a/pypy/rpython/ootypesystem/ooregistry.py
+++ b/pypy/rpython/ootypesystem/ooregistry.py
@@ -47,7 +47,7 @@
     _type_ = ootype._string
 
     def compute_annotation(self):
-        return annmodel.SomeOOInstance(ootype=ootype.String)
+        return annmodel.SomeOOInstance(ootype=ootype.typeOf(self.instance))
 
 
 class Entry_ooparse_int(ExtRegistryEntry):
diff --git a/pypy/rpython/ootypesystem/rstr.py b/pypy/rpython/ootypesystem/rstr.py
--- a/pypy/rpython/ootypesystem/rstr.py
+++ b/pypy/rpython/ootypesystem/rstr.py
@@ -1,4 +1,5 @@
 from pypy.tool.pairtype import pairtype
+from pypy.annotation import model as annmodel
 from pypy.rlib.rarithmetic import ovfcheck
 from pypy.rpython.error import TyperError
 from pypy.rpython.rstr import AbstractStringRepr,AbstractCharRepr,\
@@ -79,6 +80,12 @@
             sb.ll_append_char(cast_primitive(Char, c))
         return sb.ll_build()
 
+    def ll_unicode(self, s):
+        if s:
+            return s
+        else:
+            return self.ll.ll_constant(u'None')
+
     def ll_encode_latin1(self, value):
         sb = ootype.new(ootype.StringBuilder)
         length = value.ll_strlen()
@@ -304,7 +311,12 @@
         return buf.ll_build()
 
     def ll_constant(s):
-        return ootype.make_string(s)
+        if isinstance(s, str):
+            return ootype.make_string(s)
+        elif isinstance(s, unicode):
+            return ootype.make_unicode(s)
+        else:
+            assert False
     ll_constant._annspecialcase_ = 'specialize:memo'
 
     def do_stringformat(cls, hop, sourcevarsrepr):
@@ -312,6 +324,7 @@
         string_repr = hop.rtyper.type_system.rstr.string_repr
         s_str = hop.args_s[0]
         assert s_str.is_constant()
+        is_unicode = isinstance(s_str, annmodel.SomeUnicodeString)
         s = s_str.const
 
         c_append = hop.inputconst(ootype.Void, 'll_append')
@@ -320,8 +333,15 @@
         c8 = hop.inputconst(ootype.Signed, 8)
         c10 = hop.inputconst(ootype.Signed, 10)
         c16 = hop.inputconst(ootype.Signed, 16)
-        c_StringBuilder = hop.inputconst(ootype.Void, ootype.StringBuilder)
-        v_buf = hop.genop("new", [c_StringBuilder], resulttype=ootype.StringBuilder)
+        if is_unicode:
+            StringBuilder = ootype.UnicodeBuilder
+            RESULT = ootype.Unicode
+        else:
+            StringBuilder = ootype.StringBuilder
+            RESULT = ootype.String
+            
+        c_StringBuilder = hop.inputconst(ootype.Void, StringBuilder)
+        v_buf = hop.genop("new", [c_StringBuilder], resulttype=StringBuilder)
 
         things = cls.parse_fmt_string(s)
         argsiter = iter(sourcevarsrepr)
@@ -331,7 +351,12 @@
                 vitem, r_arg = argsiter.next()
                 if not hasattr(r_arg, 'll_str'):
                     raise TyperError("ll_str unsupported for: %r" % r_arg)
-                if code == 's' or (code == 'r' and isinstance(r_arg, InstanceRepr)):
+                if code == 's':
+                    if is_unicode:
+                        vchunk = hop.gendirectcall(r_arg.ll_unicode, vitem)
+                    else:
+                        vchunk = hop.gendirectcall(r_arg.ll_str, vitem)
+                elif code == 'r' and isinstance(r_arg, InstanceRepr):
                     vchunk = hop.gendirectcall(r_arg.ll_str, vitem)
                 elif code == 'd':
                     assert isinstance(r_arg, IntegerRepr)
@@ -348,13 +373,19 @@
                 else:
                     raise TyperError, "%%%s is not RPython" % (code, )
             else:
-                vchunk = hop.inputconst(string_repr, thing)
-            #i = inputconst(Signed, i)
-            #hop.genop('setarrayitem', [vtemp, i, vchunk])
+                if is_unicode:
+                    vchunk = hop.inputconst(unicode_repr, thing)
+                else:
+                    vchunk = hop.inputconst(string_repr, thing)
+            if is_unicode and vchunk.concretetype != ootype.Unicode:
+                # if we are here, one of the ll_str.* functions returned some
+                # STR, so we convert it to unicode. It's a bit suboptimal
+                # because we do one extra copy.
+                vchunk = hop.gendirectcall(cls.ll_str2unicode, vchunk)
             hop.genop('oosend', [c_append, v_buf, vchunk], resulttype=ootype.Void)
 
         hop.exception_cannot_occur()   # to ignore the ZeroDivisionError of '%'
-        return hop.genop('oosend', [c_build, v_buf], resulttype=ootype.String)
+        return hop.genop('oosend', [c_build, v_buf], resulttype=RESULT)
     do_stringformat = classmethod(do_stringformat)
 
 
diff --git a/pypy/rpython/rclass.py b/pypy/rpython/rclass.py
--- a/pypy/rpython/rclass.py
+++ b/pypy/rpython/rclass.py
@@ -378,6 +378,30 @@
     def rtype_is_true(self, hop):
         raise NotImplementedError
 
+    def _emulate_call(self, hop, meth_name):
+        vinst, = hop.inputargs(self)
+        clsdef = hop.args_s[0].classdef
+        s_unbound_attr = clsdef.find_attribute(meth_name).getvalue()
+        s_attr = clsdef.lookup_filter(s_unbound_attr, meth_name,
+                                      hop.args_s[0].flags)
+        if s_attr.is_constant():
+            xxx # does that even happen?
+        if '__iter__' in self.allinstancefields:
+            raise Exception("__iter__ on instance disallowed")
+        r_method = self.rtyper.makerepr(s_attr)
+        r_method.get_method_from_instance(self, vinst, hop.llops)
+        hop2 = hop.copy()
+        hop2.spaceop.opname = 'simple_call'
+        hop2.args_r = [r_method]
+        hop2.args_s = [s_attr]
+        return hop2.dispatch()
+
+    def rtype_iter(self, hop):
+        return self._emulate_call(hop, '__iter__')
+
+    def rtype_next(self, hop):
+        return self._emulate_call(hop, 'next')
+
     def ll_str(self, i):
         raise NotImplementedError
 
diff --git a/pypy/rpython/rpbc.py b/pypy/rpython/rpbc.py
--- a/pypy/rpython/rpbc.py
+++ b/pypy/rpython/rpbc.py
@@ -11,7 +11,7 @@
         mangle, inputdesc, warning, impossible_repr
 from pypy.rpython import rclass
 from pypy.rpython import robject
-from pypy.rpython.annlowlevel import llstr
+from pypy.rpython.annlowlevel import llstr, llunicode
 
 from pypy.rpython import callparse
 
diff --git a/pypy/rpython/rstr.py b/pypy/rpython/rstr.py
--- a/pypy/rpython/rstr.py
+++ b/pypy/rpython/rstr.py
@@ -483,6 +483,8 @@
         # xxx suboptimal, maybe
         return str(unicode(ch))
 
+    def ll_unicode(self, ch):
+        return unicode(ch)
 
 class __extend__(AbstractCharRepr,
                  AbstractUniCharRepr):
diff --git a/pypy/rpython/test/test_rclass.py b/pypy/rpython/test/test_rclass.py
--- a/pypy/rpython/test/test_rclass.py
+++ b/pypy/rpython/test/test_rclass.py
@@ -1143,6 +1143,62 @@
                                       'cast_pointer': 1,
                                       'setfield': 1}
 
+    def test_iter(self):
+        class Iterable(object):
+            def __init__(self):
+                self.counter = 0
+            
+            def __iter__(self):
+                return self
+
+            def next(self):
+                if self.counter == 5:
+                    raise StopIteration
+                self.counter += 1
+                return self.counter - 1
+
+        def f():
+            i = Iterable()
+            s = 0
+            for elem in i:
+                s += elem
+            return s
+
+        assert self.interpret(f, []) == f()
+
+    def test_iter_2_kinds(self):
+        class BaseIterable(object):
+            def __init__(self):
+                self.counter = 0
+            
+            def __iter__(self):
+                return self
+
+            def next(self):
+                if self.counter >= 5:
+                    raise StopIteration
+                self.counter += self.step
+                return self.counter - 1
+        
+        class Iterable(BaseIterable):
+            step = 1
+
+        class OtherIter(BaseIterable):
+            step = 2
+
+        def f(k):
+            if k:
+                i = Iterable()
+            else:
+                i = OtherIter()
+            s = 0
+            for elem in i:
+                s += elem
+            return s
+
+        assert self.interpret(f, [True]) == f(True)
+        assert self.interpret(f, [False]) == f(False)
+
 
 class TestOOtype(BaseTestRclass, OORtypeMixin):
 
diff --git a/pypy/rpython/test/test_runicode.py b/pypy/rpython/test/test_runicode.py
--- a/pypy/rpython/test/test_runicode.py
+++ b/pypy/rpython/test/test_runicode.py
@@ -1,3 +1,4 @@
+# -*- encoding: utf-8 -*-
 
 from pypy.rpython.lltypesystem.lltype import malloc
 from pypy.rpython.lltypesystem.rstr import LLHelpers, UNICODE
@@ -194,7 +195,20 @@
         assert self.interpret(fn, [u'(']) == False
         assert self.interpret(fn, [u'\u1058']) == False
         assert self.interpret(fn, [u'X']) == True
-    
+
+    def test_strformat_unicode_arg(self):
+        const = self.const
+        def percentS(s, i):
+            s = [s, None][i]
+            return const("before %s after") % (s,)
+        #
+        res = self.interpret(percentS, [const(u'à'), 0])
+        assert self.ll_to_string(res) == const(u'before à after')
+        #
+        res = self.interpret(percentS, [const(u'à'), 1])
+        assert self.ll_to_string(res) == const(u'before None after')
+        #
+
     def unsupported(self):
         py.test.skip("not supported")
 
@@ -202,12 +216,6 @@
     test_upper = unsupported
     test_lower = unsupported
     test_splitlines = unsupported
-    test_strformat = unsupported
-    test_strformat_instance = unsupported
-    test_strformat_nontuple = unsupported
-    test_percentformat_instance = unsupported
-    test_percentformat_tuple = unsupported
-    test_percentformat_list = unsupported
     test_int = unsupported
     test_int_valueerror = unsupported
     test_float = unsupported
diff --git a/pypy/translator/goal/richards.py b/pypy/translator/goal/richards.py
--- a/pypy/translator/goal/richards.py
+++ b/pypy/translator/goal/richards.py
@@ -343,8 +343,6 @@
 
 import time
 
-
-
 def schedule():
     t = taskWorkArea.taskList
     while t is not None:

