[pypy-commit] pypy cpyext-ext: merge default into branch

mattip pypy.commits at gmail.com
Wed May 11 03:15:50 EDT 2016


Author: Matti Picus <matti.picus at gmail.com>
Branch: cpyext-ext
Changeset: r84356:f5d55063ed2d
Date: 2016-05-09 21:00 +0300
http://bitbucket.org/pypy/pypy/changeset/f5d55063ed2d/

Log:	merge default into branch

diff too long, truncating to 2000 out of 24254 lines

diff --git a/dotviewer/graphserver.py b/dotviewer/graphserver.py
--- a/dotviewer/graphserver.py
+++ b/dotviewer/graphserver.py
@@ -143,6 +143,11 @@
 
 if __name__ == '__main__':
     if len(sys.argv) != 2:
+        if len(sys.argv) == 1:
+            # start locally
+            import sshgraphserver
+            sshgraphserver.ssh_graph_server(['LOCAL'])
+            sys.exit(0)
         print >> sys.stderr, __doc__
         sys.exit(2)
     if sys.argv[1] == '--stdio':
diff --git a/dotviewer/sshgraphserver.py b/dotviewer/sshgraphserver.py
--- a/dotviewer/sshgraphserver.py
+++ b/dotviewer/sshgraphserver.py
@@ -4,11 +4,14 @@
 
 Usage:
     sshgraphserver.py  hostname  [more args for ssh...]
+    sshgraphserver.py  LOCAL
 
 This logs in to 'hostname' by passing the arguments on the command-line
 to ssh.  No further configuration is required: it works for all programs
 using the dotviewer library as long as they run on 'hostname' under the
 same username as the one sshgraphserver logs as.
+
+If 'hostname' is the string 'LOCAL', then it starts locally without ssh.
 """
 
 import graphserver, socket, subprocess, random
@@ -18,12 +21,19 @@
     s1 = socket.socket()
     s1.bind(('127.0.0.1', socket.INADDR_ANY))
     localhost, localport = s1.getsockname()
-    remoteport = random.randrange(10000, 20000)
-    #  ^^^ and just hope there is no conflict
 
-    args = ['ssh', '-S', 'none', '-C', '-R%d:127.0.0.1:%d' % (remoteport, localport)]
-    args = args + sshargs + ['python -u -c "exec input()"']
-    print ' '.join(args[:-1])
+    if sshargs[0] != 'LOCAL':
+        remoteport = random.randrange(10000, 20000)
+        #  ^^^ and just hope there is no conflict
+
+        args = ['ssh', '-S', 'none', '-C', '-R%d:127.0.0.1:%d' % (
+            remoteport, localport)]
+        args = args + sshargs + ['python -u -c "exec input()"']
+    else:
+        remoteport = localport
+        args = ['python', '-u', '-c', 'exec input()']
+
+    print ' '.join(args)
     p = subprocess.Popen(args, bufsize=0,
                          stdin=subprocess.PIPE,
                          stdout=subprocess.PIPE)
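
For reference, a small usage sketch of the new LOCAL mode (it is the
same call that graphserver.py now makes when started with no
arguments):

    # Start the dot viewer graph server locally, without going through ssh.
    import sshgraphserver
    sshgraphserver.ssh_graph_server(['LOCAL'])
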
diff --git a/lib-python/2.7/test/test_descr.py b/lib-python/2.7/test/test_descr.py
--- a/lib-python/2.7/test/test_descr.py
+++ b/lib-python/2.7/test/test_descr.py
@@ -1735,7 +1735,6 @@
             ("__reversed__", reversed, empty_seq, set(), {}),
             ("__length_hint__", list, zero, set(),
              {"__iter__" : iden, "next" : stop}),
-            ("__sizeof__", sys.getsizeof, zero, set(), {}),
             ("__instancecheck__", do_isinstance, return_true, set(), {}),
             ("__missing__", do_dict_missing, some_number,
              set(("__class__",)), {}),
@@ -1747,6 +1746,8 @@
             ("__format__", format, format_impl, set(), {}),
             ("__dir__", dir, empty_seq, set(), {}),
             ]
+        if test_support.check_impl_detail():
+            specials.append(("__sizeof__", sys.getsizeof, zero, set(), {}))
 
         class Checker(object):
             def __getattr__(self, attr, test=self):
@@ -1768,10 +1769,6 @@
                 raise MyException
 
         for name, runner, meth_impl, ok, env in specials:
-            if name == '__length_hint__' or name == '__sizeof__':
-                if not test_support.check_impl_detail():
-                    continue
-
             class X(Checker):
                 pass
             for attr, obj in env.iteritems():
diff --git a/lib_pypy/cffi/api.py b/lib_pypy/cffi/api.py
--- a/lib_pypy/cffi/api.py
+++ b/lib_pypy/cffi/api.py
@@ -397,20 +397,7 @@
         data.  Later, when this new cdata object is garbage-collected,
         'destructor(old_cdata_object)' will be called.
         """
-        try:
-            gcp = self._backend.gcp
-        except AttributeError:
-            pass
-        else:
-            return gcp(cdata, destructor)
-        #
-        with self._lock:
-            try:
-                gc_weakrefs = self.gc_weakrefs
-            except AttributeError:
-                from .gc_weakref import GcWeakrefs
-                gc_weakrefs = self.gc_weakrefs = GcWeakrefs(self)
-            return gc_weakrefs.build(cdata, destructor)
+        return self._backend.gcp(cdata, destructor)
 
     def _get_cached_btype(self, type):
         assert self._lock.acquire(False) is False
diff --git a/lib_pypy/cffi/backend_ctypes.py b/lib_pypy/cffi/backend_ctypes.py
--- a/lib_pypy/cffi/backend_ctypes.py
+++ b/lib_pypy/cffi/backend_ctypes.py
@@ -460,6 +460,11 @@
                         return x._value
                     raise TypeError("character expected, got %s" %
                                     type(x).__name__)
+                def __nonzero__(self):
+                    return ord(self._value) != 0
+            else:
+                def __nonzero__(self):
+                    return self._value != 0
 
             if kind == 'float':
                 @staticmethod
@@ -993,6 +998,31 @@
         assert onerror is None   # XXX not implemented
         return BType(source, error)
 
+    def gcp(self, cdata, destructor):
+        BType = self.typeof(cdata)
+
+        if destructor is None:
+            if not (hasattr(BType, '_gcp_type') and
+                    BType._gcp_type is BType):
+                raise TypeError("Can remove destructor only on a object "
+                                "previously returned by ffi.gc()")
+            cdata._destructor = None
+            return None
+
+        try:
+            gcp_type = BType._gcp_type
+        except AttributeError:
+            class CTypesDataGcp(BType):
+                __slots__ = ['_orig', '_destructor']
+                def __del__(self):
+                    if self._destructor is not None:
+                        self._destructor(self._orig)
+            gcp_type = BType._gcp_type = CTypesDataGcp
+        new_cdata = self.cast(gcp_type, cdata)
+        new_cdata._orig = cdata
+        new_cdata._destructor = destructor
+        return new_cdata
+
     typeof = type
 
     def getcname(self, BType, replace_with):
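
For context, the ``gcp()`` method added above is what backs the public
``ffi.gc()`` call in the ctypes backend.  A minimal sketch of that API,
using the classic malloc/free pairing (``dlopen(None)`` assumes a POSIX
C library is available):

    import cffi

    ffi = cffi.FFI()
    ffi.cdef("void *malloc(size_t); void free(void *);")
    lib = ffi.dlopen(None)             # the C standard library

    p = lib.malloc(42)
    p = ffi.gc(p, lib.free)            # free(p) runs when 'p' is collected
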
diff --git a/pypy/doc/coding-guide.rst b/pypy/doc/coding-guide.rst
--- a/pypy/doc/coding-guide.rst
+++ b/pypy/doc/coding-guide.rst
@@ -266,7 +266,13 @@
 
 To raise an application-level exception::
 
-    raise OperationError(space.w_XxxError, space.wrap("message"))
+    from pypy.interpreter.error import oefmt
+
+    raise oefmt(space.w_XxxError, "message")
+
+    raise oefmt(space.w_XxxError, "file '%s' not found in '%s'", filename, dir)
+
+    raise oefmt(space.w_XxxError, "file descriptor '%d' not open", fd)
 
 To catch a specific application-level exception::
 
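
As a side illustration, a hypothetical interp-level helper that combines
the ``oefmt`` pattern above with catching a specific app-level exception
through ``OperationError.match()`` (the function name and message are
invented):

    from pypy.interpreter.error import OperationError, oefmt

    def getitem_or_complain(space, w_obj, w_key):
        try:
            return space.getitem(w_obj, w_key)
        except OperationError as e:
            if not e.match(space, space.w_KeyError):
                raise
            raise oefmt(space.w_TypeError,
                        "expected %T to contain the given key", w_obj)
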
diff --git a/pypy/doc/discussion/finalizer-order.rst b/pypy/doc/discussion/finalizer-order.rst
--- a/pypy/doc/discussion/finalizer-order.rst
+++ b/pypy/doc/discussion/finalizer-order.rst
@@ -1,19 +1,127 @@
-.. XXX armin, what do we do with this?
+Ordering finalizers in the MiniMark GC
+======================================
 
 
-Ordering finalizers in the SemiSpace GC
-=======================================
+RPython interface
+-----------------
 
-Goal
-----
+In RPython programs like PyPy, we need fine-grained control over both
+the RPython-level and the app-level ``__del__()`` methods.  To make
+this possible, the RPython interface is now as follows (as of May
+2016):
 
-After a collection, the SemiSpace GC should call the finalizers on
+* RPython objects can have ``__del__()``.  These are called
+  immediately by the GC when the last reference to the object goes
+  away, like in CPython.  However, the long-term goal is that all
+  ``__del__()`` methods should only contain simple enough code.  If
+  they do, we call them "destructors".  They can't use operations that
+  would resurrect the object, for example.  Use the decorator
+  ``@rgc.must_be_light_finalizer`` to ensure they are destructors.
+
+* RPython-level ``__del__()`` methods that do not pass the destructor test
+  are supported for backward compatibility, but deprecated.  The rest of
+  this document assumes that all ``__del__()`` methods are destructors.
+
+* For any more advanced usage --- in particular for any app-level
+  object with a ``__del__()`` --- we don't use the RPython-level
+  ``__del__()`` method.  Instead we use
+  ``rgc.FinalizerQueue.register_finalizer()``.  This allows us to
+  attach a finalizer method to the object, giving more control over
+  the ordering than just an RPython ``__del__()``.
+
+We try to consistently call ``__del__()`` a destructor, to distinguish
+it from a finalizer.  A finalizer runs earlier, and in topological
+order; with some care, the object can still be reachable at that point.
+A destructor, on the other hand, runs last; nothing can be done with
+the object any more, and the GC frees it immediately.
+
+
+Destructors
+-----------
+
+A destructor is an RPython ``__del__()`` method that is called directly
+by the GC when it is about to free the memory.  It is intended for
+objects that just need to free an extra block of raw memory.
+
+There are restrictions on the kind of code you can put in ``__del__()``,
+including all other functions called by it.  These restrictions are
+checked.  In particular you cannot access fields containing GC objects.
+Right now you can't call any external C function either.
+
+Destructors are called precisely when the GC frees the memory of the
+object.  As long as the object exists (even in some finalizer queue or
+anywhere), its destructor is not called.
+
+
+Register_finalizer
+------------------
+
+The interface for full finalizers is made with PyPy in mind, but should
+be generally useful.
+
+The idea is that you subclass the ``rgc.FinalizerQueue`` class:
+
+* You must give a class-level attribute ``base_class``, which is the
+  base class of all instances with a finalizer.  (If you need
+  finalizers on several unrelated classes, you need several unrelated
+  ``FinalizerQueue`` subclasses.)
+
+* You override the ``finalizer_trigger()`` method; see below.
+
+Then you create one global (or space-specific) instance of this
+subclass; call it ``fin``.  At runtime, you call
+``fin.register_finalizer(obj)`` for every instance ``obj`` that needs
+a finalizer.  Each ``obj`` must be an instance of ``fin.base_class``,
+but not every such instance needs to have a finalizer registered;
+typically we try to register a finalizer on as few objects as possible
+(e.g. only if it is an object which has an app-level ``__del__()``
+method).
+
+After a major collection, the GC finds all objects ``obj`` on which a
+finalizer was registered and which are unreachable, and marks them as
+reachable again, as well as all objects they depend on.  It then picks
+a topological ordering (breaking cycles randomly, if any) and enqueues
+the objects and their registered finalizer functions in that order, in
+a queue specific to the prebuilt ``fin`` instance.  Finally, when the
+major collection is done, it calls ``fin.finalizer_trigger()``.
+
+This method ``finalizer_trigger()`` can either do some work directly,
+or delay it to be done later (e.g. between two bytecodes).  If it does
+work directly, note that it cannot (directly or indirectly) cause the
+GIL to be released.
+
+To find the queued items, call ``fin.next_dead()`` repeatedly.  It
+returns the next queued item, or ``None`` when the queue is empty.
+
+In theory, it would more or less work if you used several different
+``FinalizerQueue`` instances for objects of the same class, and
+(again in theory) the same ``obj`` could be registered several times
+in the same queue, or in several queues.  This is not tested, though.
+For now the untranslated emulation does not support registering the
+same object several times.
+
+Note that the Boehm garbage collector, used in ``rpython -O0``,
+completely ignores ``register_finalizer()``.
+
+
+Ordering of finalizers
+----------------------
+
+After a collection, the MiniMark GC should call the finalizers on
 *some* of the objects that have one and that have become unreachable.
 Basically, if there is a reference chain from an object a to an object b
 then it should not call the finalizer for b immediately, but just keep b
 alive and try again to call its finalizer after the next collection.
 
-This basic idea fails when there are cycles.  It's not a good idea to
+(Note that this creates rare but annoying issues as soon as the program
+creates chains of objects with finalizers more quickly than the rate at
+which major collections go (which is very slow).  In August 2013 we tried
+instead to call all finalizers of all objects found unreachable at a major
+collection.  That branch, ``gc-del``, was never merged.  It is still
+unclear what the real consequences would be on programs in the wild.)
+
+The basic idea fails in the presence of cycles.  It's not a good idea to
 keep the objects alive forever or to never call any of the finalizers.
 The model we came up with is that in this case, we could just call the
 finalizer of one of the objects in the cycle -- but only, of course, if
@@ -33,6 +141,7 @@
         detach the finalizer (so that it's not called more than once)
         call the finalizer
 
+
 Algorithm
 ---------
 
@@ -136,28 +245,8 @@
 that doesn't change the state of an object, we don't follow its children
 recursively.
 
-In practice, in the SemiSpace, Generation and Hybrid GCs, we can encode
-the 4 states with a single extra bit in the header:
-
-      =====  =============  ========  ====================
-      state  is_forwarded?  bit set?  bit set in the copy?
-      =====  =============  ========  ====================
-        0      no             no        n/a
-        1      no             yes       n/a
-        2      yes            yes       yes
-        3      yes          whatever    no
-      =====  =============  ========  ====================
-
-So the loop above that does the transition from state 1 to state 2 is
-really just a copy(x) followed by scan_copied().  We must also clear the
-bit in the copy at the end, to clean up before the next collection
-(which means recursively bumping the state from 2 to 3 in the final
-loop).
-
-In the MiniMark GC, the objects don't move (apart from when they are
-copied out of the nursery), but we use the flag GCFLAG_VISITED to mark
-objects that survive, so we can also have a single extra bit for
-finalizers:
+In practice, in the MiniMark GC, we can encode the 4 states with a
+combination of two bits in the header:
 
       =====  ==============  ============================
       state  GCFLAG_VISITED  GCFLAG_FINALIZATION_ORDERING
@@ -167,3 +256,8 @@
         2        yes             yes
         3        yes             no
       =====  ==============  ============================
+
+So the loop above that does the transition from state 1 to state 2 is
+really just a recursive visit.  We must also clear the
+FINALIZATION_ORDERING bit at the end (state 2 to state 3) to clean up
+before the next collection.
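
To make the ``register_finalizer`` interface described above more
concrete, here is a minimal, hypothetical sketch.  The class names are
invented; the ``Class`` attribute follows the ``WRootFinalizerQueue``
code further down in this diff (the prose above calls it
``base_class``), and the trigger drains the queue immediately rather
than deferring the work as PyPy's UserDelAction does:

    from rpython.rlib import rgc

    class W_Stream(object):               # hypothetical finalizable class
        def __init__(self, name):
            self.name = name
        def _finalize_(self):
            print 'closing', self.name    # release external resources here

    class StreamFinalizerQueue(rgc.FinalizerQueue):
        Class = W_Stream

        def finalizer_trigger(self):
            # Called after a major collection has queued dead objects;
            # here we simply drain the queue right away.
            while True:
                obj = self.next_dead()
                if obj is None:
                    break
                obj._finalize_()

    fin = StreamFinalizerQueue()
    fin.register_finalizer(W_Stream('example'))
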
diff --git a/pypy/doc/index-of-release-notes.rst b/pypy/doc/index-of-release-notes.rst
--- a/pypy/doc/index-of-release-notes.rst
+++ b/pypy/doc/index-of-release-notes.rst
@@ -6,6 +6,7 @@
 
 .. toctree::
 
+   release-5.1.1.rst
    release-5.1.0.rst
    release-5.0.1.rst
    release-5.0.0.rst
diff --git a/pypy/doc/tool/mydot.py b/pypy/doc/tool/mydot.py
--- a/pypy/doc/tool/mydot.py
+++ b/pypy/doc/tool/mydot.py
@@ -68,7 +68,7 @@
                       help="output format")
     options, args = parser.parse_args()
     if len(args) != 1:
-        raise ValueError, "need exactly one argument"
+        raise ValueError("need exactly one argument")
     epsfile = process_dot(py.path.local(args[0]))
     if options.format == "ps" or options.format == "eps":
         print epsfile.read()
diff --git a/pypy/doc/whatsnew-head.rst b/pypy/doc/whatsnew-head.rst
--- a/pypy/doc/whatsnew-head.rst
+++ b/pypy/doc/whatsnew-head.rst
@@ -61,3 +61,31 @@
 calls PyXxx", we now silently acquire/release the GIL.  Helps with
 CPython C extension modules that call some PyXxx() functions without
 holding the GIL (arguably, they are theoretically buggy).
+
+.. branch: cpyext-test-A
+
+Get the cpyext tests to pass with "-A" (i.e. when tested directly with
+CPython).
+
+.. branch: oefmt
+
+.. branch: cpyext-werror
+
+Compile C snippets with -Werror in cpyext
+
+.. branch: gc-del-3
+
+Add rgc.FinalizerQueue, documented in pypy/doc/discussion/finalizer-order.rst.
+It is a more flexible way to make RPython finalizers.
+
+.. branch: unpacking-cpython-shortcut
+
+.. branch: cleanups
+
+.. branch: cpyext-more-slots
+
+.. branch: use-gc-del-3
+
+Use the new rgc.FinalizerQueue mechanism to clean up the handling of
+``__del__`` methods.  Notably fixes issue #2287.  (All RPython
+subclasses of W_Root need to use FinalizerQueue now.)
diff --git a/pypy/goal/targetpypystandalone.py b/pypy/goal/targetpypystandalone.py
--- a/pypy/goal/targetpypystandalone.py
+++ b/pypy/goal/targetpypystandalone.py
@@ -63,7 +63,7 @@
             ##    from pypy.interpreter import main, interactive, error
             ##    con = interactive.PyPyConsole(space)
             ##    con.interact()
-            except OperationError, e:
+            except OperationError as e:
                 debug("OperationError:")
                 debug(" operror-type: " + e.w_type.getname(space))
                 debug(" operror-value: " + space.str_w(space.str(e.get_w_value(space))))
@@ -71,7 +71,7 @@
         finally:
             try:
                 space.finish()
-            except OperationError, e:
+            except OperationError as e:
                 debug("OperationError:")
                 debug(" operror-type: " + e.w_type.getname(space))
                 debug(" operror-value: " + space.str_w(space.str(e.get_w_value(space))))
@@ -115,7 +115,7 @@
                                     space.wrap('__import__'))
             space.call_function(import_, space.wrap('site'))
             return rffi.cast(rffi.INT, 0)
-        except OperationError, e:
+        except OperationError as e:
             if verbose:
                 debug("OperationError:")
                 debug(" operror-type: " + e.w_type.getname(space))
@@ -167,7 +167,7 @@
                 sys._pypy_execute_source.append(glob)
                 exec stmt in glob
             """)
-        except OperationError, e:
+        except OperationError as e:
             debug("OperationError:")
             debug(" operror-type: " + e.w_type.getname(space))
             debug(" operror-value: " + space.str_w(space.str(e.get_w_value(space))))
diff --git a/pypy/interpreter/argument.py b/pypy/interpreter/argument.py
--- a/pypy/interpreter/argument.py
+++ b/pypy/interpreter/argument.py
@@ -84,7 +84,7 @@
         space = self.space
         try:
             args_w = space.fixedview(w_stararg)
-        except OperationError, e:
+        except OperationError as e:
             if e.match(space, space.w_TypeError):
                 raise oefmt(space.w_TypeError,
                             "argument after * must be a sequence, not %T",
@@ -111,7 +111,7 @@
         else:
             try:
                 w_keys = space.call_method(w_starstararg, "keys")
-            except OperationError, e:
+            except OperationError as e:
                 if e.match(space, space.w_AttributeError):
                     raise oefmt(space.w_TypeError,
                                 "argument after ** must be a mapping, not %T",
@@ -134,11 +134,11 @@
         """The simplest argument parsing: get the 'argcount' arguments,
         or raise a real ValueError if the length is wrong."""
         if self.keywords:
-            raise ValueError, "no keyword arguments expected"
+            raise ValueError("no keyword arguments expected")
         if len(self.arguments_w) > argcount:
-            raise ValueError, "too many arguments (%d expected)" % argcount
+            raise ValueError("too many arguments (%d expected)" % argcount)
         elif len(self.arguments_w) < argcount:
-            raise ValueError, "not enough arguments (%d expected)" % argcount
+            raise ValueError("not enough arguments (%d expected)" % argcount)
         return self.arguments_w
 
     def firstarg(self):
@@ -279,7 +279,7 @@
         try:
             self._match_signature(w_firstarg,
                                   scope_w, signature, defaults_w, 0)
-        except ArgErr, e:
+        except ArgErr as e:
             raise oefmt(self.space.w_TypeError, "%s() %s", fnname, e.getmsg())
         return signature.scope_length()
 
@@ -301,7 +301,7 @@
         """
         try:
             return self._parse(w_firstarg, signature, defaults_w, blindargs)
-        except ArgErr, e:
+        except ArgErr as e:
             raise oefmt(self.space.w_TypeError, "%s() %s", fnname, e.getmsg())
 
     @staticmethod
@@ -352,11 +352,9 @@
     for w_key in keys_w:
         try:
             key = space.str_w(w_key)
-        except OperationError, e:
+        except OperationError as e:
             if e.match(space, space.w_TypeError):
-                raise OperationError(
-                    space.w_TypeError,
-                    space.wrap("keywords must be strings"))
+                raise oefmt(space.w_TypeError, "keywords must be strings")
             if e.match(space, space.w_UnicodeEncodeError):
                 # Allow this to pass through
                 key = None
diff --git a/pypy/interpreter/astcompiler/ast.py b/pypy/interpreter/astcompiler/ast.py
--- a/pypy/interpreter/astcompiler/ast.py
+++ b/pypy/interpreter/astcompiler/ast.py
@@ -16,8 +16,8 @@
 def check_string(space, w_obj):
     if not (space.isinstance_w(w_obj, space.w_str) or
             space.isinstance_w(w_obj, space.w_unicode)):
-        raise OperationError(space.w_TypeError, space.wrap(
-                'AST string must be of type str or unicode'))
+        raise oefmt(space.w_TypeError,
+                    "AST string must be of type str or unicode")
     return w_obj
 
 def get_field(space, w_node, name, optional):
diff --git a/pypy/interpreter/astcompiler/astbuilder.py b/pypy/interpreter/astcompiler/astbuilder.py
--- a/pypy/interpreter/astcompiler/astbuilder.py
+++ b/pypy/interpreter/astcompiler/astbuilder.py
@@ -115,16 +115,16 @@
     def check_forbidden_name(self, name, node):
         try:
             misc.check_forbidden_name(name)
-        except misc.ForbiddenNameAssignment, e:
+        except misc.ForbiddenNameAssignment as e:
             self.error("cannot assign to %s" % (e.name,), node)
 
     def set_context(self, expr, ctx):
         """Set the context of an expression to Store or Del if possible."""
         try:
             expr.set_context(ctx)
-        except ast.UnacceptableExpressionContext, e:
+        except ast.UnacceptableExpressionContext as e:
             self.error_ast(e.msg, e.node)
-        except misc.ForbiddenNameAssignment, e:
+        except misc.ForbiddenNameAssignment as e:
             self.error_ast("cannot assign to %s" % (e.name,), e.node)
 
     def handle_print_stmt(self, print_node):
@@ -1080,7 +1080,7 @@
             return self.space.call_function(tp, w_num_str)
         try:
             return self.space.call_function(self.space.w_int, w_num_str, w_base)
-        except error.OperationError, e:
+        except error.OperationError as e:
             if not e.match(self.space, self.space.w_ValueError):
                 raise
             return self.space.call_function(self.space.w_float, w_num_str)
@@ -1100,7 +1100,7 @@
                 sub_strings_w = [parsestring.parsestr(space, encoding, atom_node.get_child(i).get_value(),
                                                       unicode_literals)
                                  for i in range(atom_node.num_children())]
-            except error.OperationError, e:
+            except error.OperationError as e:
                 if not e.match(space, space.w_UnicodeError):
                     raise
                 # UnicodeError in literal: turn into SyntaxError
diff --git a/pypy/interpreter/astcompiler/symtable.py b/pypy/interpreter/astcompiler/symtable.py
--- a/pypy/interpreter/astcompiler/symtable.py
+++ b/pypy/interpreter/astcompiler/symtable.py
@@ -325,7 +325,7 @@
         try:
             module.walkabout(self)
             top.finalize(None, {}, {})
-        except SyntaxError, e:
+        except SyntaxError as e:
             e.filename = compile_info.filename
             raise
         self.pop_scope()
diff --git a/pypy/interpreter/astcompiler/test/test_compiler.py b/pypy/interpreter/astcompiler/test/test_compiler.py
--- a/pypy/interpreter/astcompiler/test/test_compiler.py
+++ b/pypy/interpreter/astcompiler/test/test_compiler.py
@@ -705,7 +705,7 @@
         """)
         try:
             self.simple_test(source, None, None)
-        except IndentationError, e:
+        except IndentationError as e:
             assert e.msg == 'unexpected indent'
         else:
             raise Exception("DID NOT RAISE")
@@ -717,7 +717,7 @@
         """)
         try:
             self.simple_test(source, None, None)
-        except IndentationError, e:
+        except IndentationError as e:
             assert e.msg == 'expected an indented block'
         else:
             raise Exception("DID NOT RAISE")
@@ -969,7 +969,7 @@
     def test_assert_with_tuple_arg(self):
         try:
             assert False, (3,)
-        except AssertionError, e:
+        except AssertionError as e:
             assert str(e) == "(3,)"
 
     # BUILD_LIST_FROM_ARG is PyPy specific
diff --git a/pypy/interpreter/astcompiler/tools/asdl.py b/pypy/interpreter/astcompiler/tools/asdl.py
--- a/pypy/interpreter/astcompiler/tools/asdl.py
+++ b/pypy/interpreter/astcompiler/tools/asdl.py
@@ -96,7 +96,7 @@
 
     def t_default(self, s):
         r" . +"
-        raise ValueError, "unmatched input: %s" % `s`
+        raise ValueError("unmatched input: %s" % `s`)
 
 class ASDLParser(spark.GenericParser, object):
     def __init__(self):
@@ -377,7 +377,7 @@
     tokens = scanner.tokenize(buf)
     try:
         return parser.parse(tokens)
-    except ASDLSyntaxError, err:
+    except ASDLSyntaxError as err:
         print err
         lines = buf.split("\n")
         print lines[err.lineno - 1] # lines starts at 0, files at 1
diff --git a/pypy/interpreter/astcompiler/tools/asdl_py.py b/pypy/interpreter/astcompiler/tools/asdl_py.py
--- a/pypy/interpreter/astcompiler/tools/asdl_py.py
+++ b/pypy/interpreter/astcompiler/tools/asdl_py.py
@@ -399,8 +399,8 @@
 def check_string(space, w_obj):
     if not (space.isinstance_w(w_obj, space.w_str) or
             space.isinstance_w(w_obj, space.w_unicode)):
-        raise OperationError(space.w_TypeError, space.wrap(
-                'AST string must be of type str or unicode'))
+        raise oefmt(space.w_TypeError,
+                   "AST string must be of type str or unicode")
     return w_obj
 
 def get_field(space, w_node, name, optional):
diff --git a/pypy/interpreter/baseobjspace.py b/pypy/interpreter/baseobjspace.py
--- a/pypy/interpreter/baseobjspace.py
+++ b/pypy/interpreter/baseobjspace.py
@@ -11,7 +11,7 @@
     INT_MIN, INT_MAX, UINT_MAX, USHRT_MAX
 
 from pypy.interpreter.executioncontext import (ExecutionContext, ActionFlag,
-    UserDelAction)
+    make_finalizer_queue)
 from pypy.interpreter.error import OperationError, new_exception_class, oefmt
 from pypy.interpreter.argument import Arguments
 from pypy.interpreter.miscutils import ThreadLocals, make_weak_value_dictionary
@@ -28,6 +28,7 @@
     """This is the abstract root class of all wrapped objects that live
     in a 'normal' object space like StdObjSpace."""
     __slots__ = ('__weakref__',)
+    _must_be_light_finalizer_ = True
     user_overridden_class = False
 
     def getdict(self, space):
@@ -52,7 +53,7 @@
             try:
                 space.delitem(w_dict, space.wrap(attr))
                 return True
-            except OperationError, ex:
+            except OperationError as ex:
                 if not ex.match(space, space.w_KeyError):
                     raise
         return False
@@ -67,8 +68,8 @@
         return space.gettypeobject(self.typedef)
 
     def setclass(self, space, w_subtype):
-        raise OperationError(space.w_TypeError,
-                             space.wrap("__class__ assignment: only for heap types"))
+        raise oefmt(space.w_TypeError,
+                    "__class__ assignment: only for heap types")
 
     def user_setup(self, space, w_subtype):
         raise NotImplementedError("only for interp-level user subclasses "
@@ -77,7 +78,7 @@
     def getname(self, space):
         try:
             return space.str_w(space.getattr(self, space.wrap('__name__')))
-        except OperationError, e:
+        except OperationError as e:
             if e.match(space, space.w_TypeError) or e.match(space, space.w_AttributeError):
                 return '?'
             raise
@@ -136,9 +137,8 @@
         pass
 
     def clear_all_weakrefs(self):
-        """Call this at the beginning of interp-level __del__() methods
-        in subclasses.  It ensures that weakrefs (if any) are cleared
-        before the object is further destroyed.
+        """Ensures that weakrefs (if any) are cleared now.  This is
+        called by UserDelAction before the object is finalized further.
         """
         lifeline = self.getweakref()
         if lifeline is not None:
@@ -151,25 +151,37 @@
             self.delweakref()
             lifeline.clear_all_weakrefs()
 
-    __already_enqueued_for_destruction = ()
+    def _finalize_(self):
+        """The RPython-level finalizer.
 
-    def enqueue_for_destruction(self, space, callback, descrname):
-        """Put the object in the destructor queue of the space.
-        At a later, safe point in time, UserDelAction will call
-        callback(self).  If that raises OperationError, prints it
-        to stderr with the descrname string.
+        By default, it is *not called*.  See self.register_finalizer().
+        Be ready to handle the case where the object is only half
+        initialized.  Also, in some cases the object might still be
+        visible to app-level after _finalize_() is called (e.g. if
+        there is a __del__ that resurrects).
+        """
 
-        Note that 'callback' will usually need to start with:
-            assert isinstance(self, W_SpecificClass)
+    def register_finalizer(self, space):
+        """Register a finalizer for this object, so that
+        self._finalize_() will be called.  You must call this method at
+        most once.  Be ready to handle in _finalize_() the case where
+        the object is half-initialized, even if you only call
+        self.register_finalizer() at the end of the initialization.
+        This is because there are cases where the finalizer is already
+        registered before: if the user makes an app-level subclass with
+        a __del__.  (In that case only, self.register_finalizer() does
+        nothing, because the finalizer is already registered in
+        allocate_instance().)
         """
-        # this function always resurect the object, so when
-        # running on top of CPython we must manually ensure that
-        # we enqueue it only once
-        if not we_are_translated():
-            if callback in self.__already_enqueued_for_destruction:
-                return
-            self.__already_enqueued_for_destruction += (callback,)
-        space.user_del_action.register_callback(self, callback, descrname)
+        if self.user_overridden_class and self.getclass(space).hasuserdel:
+            # already registered by space.allocate_instance()
+            if not we_are_translated():
+                assert space.finalizer_queue._already_registered(self)
+        else:
+            if not we_are_translated():
+                # does not make sense if _finalize_ is not overridden
+                assert self._finalize_.im_func is not W_Root._finalize_.im_func
+            space.finalizer_queue.register_finalizer(self)
 
     # hooks that the mapdict implementations needs:
     def _get_mapdict_map(self):
@@ -318,7 +330,7 @@
         space = self.space
         try:
             return space.next(self.w_iter)
-        except OperationError, e:
+        except OperationError as e:
             if not e.match(space, space.w_StopIteration):
                 raise
             raise StopIteration
@@ -389,9 +401,9 @@
         self.interned_strings = make_weak_value_dictionary(self, str, W_Root)
         self.actionflag = ActionFlag()    # changed by the signal module
         self.check_signal_action = None   # changed by the signal module
-        self.user_del_action = UserDelAction(self)
+        make_finalizer_queue(W_Root, self)
         self._code_of_sys_exc_info = None
-        
+
         # can be overridden to a subclass
         self.initialize()
 
@@ -406,7 +418,7 @@
                                 self.sys.get('builtin_module_names')):
             try:
                 w_mod = self.getitem(w_modules, w_modname)
-            except OperationError, e:
+            except OperationError as e:
                 if e.match(self, self.w_KeyError):
                     continue
                 raise
@@ -440,7 +452,7 @@
 
         try:
             self.call_method(w_mod, "_shutdown")
-        except OperationError, e:
+        except OperationError as e:
             e.write_unraisable(self, "threading._shutdown()")
 
     def __repr__(self):
@@ -476,7 +488,7 @@
             assert reuse
             try:
                 return self.getitem(w_modules, w_name)
-            except OperationError, e:
+            except OperationError as e:
                 if not e.match(self, self.w_KeyError):
                     raise
 
@@ -706,8 +718,7 @@
         try:
             return rthread.allocate_lock()
         except rthread.error:
-            raise OperationError(self.w_RuntimeError,
-                                 self.wrap("out of resources"))
+            raise oefmt(self.w_RuntimeError, "out of resources")
 
     # Following is a friendly interface to common object space operations
     # that can be defined in term of more primitive ones.  Subclasses
@@ -764,7 +775,7 @@
     def finditem(self, w_obj, w_key):
         try:
             return self.getitem(w_obj, w_key)
-        except OperationError, e:
+        except OperationError as e:
             if e.match(self, self.w_KeyError):
                 return None
             raise
@@ -772,7 +783,7 @@
     def findattr(self, w_object, w_name):
         try:
             return self.getattr(w_object, w_name)
-        except OperationError, e:
+        except OperationError as e:
             # a PyPy extension: let SystemExit and KeyboardInterrupt go through
             if e.async(self):
                 raise
@@ -872,7 +883,7 @@
                                                   items=items)
             try:
                 w_item = self.next(w_iterator)
-            except OperationError, e:
+            except OperationError as e:
                 if not e.match(self, self.w_StopIteration):
                     raise
                 break  # done
@@ -896,13 +907,12 @@
         while True:
             try:
                 w_item = self.next(w_iterator)
-            except OperationError, e:
+            except OperationError as e:
                 if not e.match(self, self.w_StopIteration):
                     raise
                 break  # done
             if idx == expected_length:
-                raise OperationError(self.w_ValueError,
-                                    self.wrap("too many values to unpack"))
+                raise oefmt(self.w_ValueError, "too many values to unpack")
             items[idx] = w_item
             idx += 1
         if idx < expected_length:
@@ -942,7 +952,7 @@
         """
         try:
             return self.len_w(w_obj)
-        except OperationError, e:
+        except OperationError as e:
             if not (e.match(self, self.w_TypeError) or
                     e.match(self, self.w_AttributeError)):
                 raise
@@ -952,7 +962,7 @@
             return default
         try:
             w_hint = self.get_and_call_function(w_descr, w_obj)
-        except OperationError, e:
+        except OperationError as e:
             if not (e.match(self, self.w_TypeError) or
                     e.match(self, self.w_AttributeError)):
                 raise
@@ -962,8 +972,8 @@
 
         hint = self.int_w(w_hint)
         if hint < 0:
-            raise OperationError(self.w_ValueError, self.wrap(
-                    "__length_hint__() should return >= 0"))
+            raise oefmt(self.w_ValueError,
+                        "__length_hint__() should return >= 0")
         return hint
 
     def fixedview(self, w_iterable, expected_length=-1):
@@ -1049,7 +1059,7 @@
                 else:
                     return False
             return self.exception_issubclass_w(w_exc_type, w_check_class)
-        except OperationError, e:
+        except OperationError as e:
             if e.match(self, self.w_TypeError):   # string exceptions maybe
                 return False
             raise
@@ -1167,7 +1177,7 @@
                 try:
                     self.getattr(w_obj, self.wrap("__call__"))
                     return self.w_True
-                except OperationError, e:
+                except OperationError as e:
                     if not e.match(self, self.w_AttributeError):
                         raise
                     return self.w_False
@@ -1287,7 +1297,7 @@
     def _next_or_none(self, w_it):
         try:
             return self.next(w_it)
-        except OperationError, e:
+        except OperationError as e:
             if not e.match(self, self.w_StopIteration):
                 raise
             return None
@@ -1330,8 +1340,7 @@
             if start < 0:
                 start += seqlength
             if not (0 <= start < seqlength):
-                raise OperationError(self.w_IndexError,
-                                     self.wrap("index out of range"))
+                raise oefmt(self.w_IndexError, "index out of range")
             stop = 0
             step = 0
         return start, stop, step
@@ -1351,8 +1360,7 @@
             if start < 0:
                 start += seqlength
             if not (0 <= start < seqlength):
-                raise OperationError(self.w_IndexError,
-                                     self.wrap("index out of range"))
+                raise oefmt(self.w_IndexError, "index out of range")
             stop = 0
             step = 0
             length = 1
@@ -1365,7 +1373,7 @@
         """
         try:
             w_index = self.index(w_obj)
-        except OperationError, err:
+        except OperationError as err:
             if objdescr is None or not err.match(self, self.w_TypeError):
                 raise
             raise oefmt(self.w_TypeError, "%s must be an integer, not %T",
@@ -1375,7 +1383,7 @@
             # return type of __index__ is already checked by space.index(),
             # but there is no reason to allow conversions anyway
             index = self.int_w(w_index, allow_conversion=False)
-        except OperationError, err:
+        except OperationError as err:
             if not err.match(self, self.w_OverflowError):
                 raise
             if not w_exception:
@@ -1396,20 +1404,17 @@
         try:
             return bigint.tolonglong()
         except OverflowError:
-            raise OperationError(self.w_OverflowError,
-                                 self.wrap('integer too large'))
+            raise oefmt(self.w_OverflowError, "integer too large")
 
     def r_ulonglong_w(self, w_obj, allow_conversion=True):
         bigint = self.bigint_w(w_obj, allow_conversion)
         try:
             return bigint.toulonglong()
         except OverflowError:
-            raise OperationError(self.w_OverflowError,
-                                 self.wrap('integer too large'))
+            raise oefmt(self.w_OverflowError, "integer too large")
         except ValueError:
-            raise OperationError(self.w_ValueError,
-                                 self.wrap('cannot convert negative integer '
-                                           'to unsigned int'))
+            raise oefmt(self.w_ValueError,
+                        "cannot convert negative integer to unsigned int")
 
     BUF_SIMPLE   = 0x0000
     BUF_WRITABLE = 0x0001
@@ -1526,7 +1531,7 @@
         # the unicode buffer.)
         try:
             return self.str_w(w_obj)
-        except OperationError, e:
+        except OperationError as e:
             if not e.match(self, self.w_TypeError):
                 raise
         try:
@@ -1555,8 +1560,8 @@
         from rpython.rlib import rstring
         result = w_obj.str_w(self)
         if '\x00' in result:
-            raise OperationError(self.w_TypeError, self.wrap(
-                    'argument must be a string without NUL characters'))
+            raise oefmt(self.w_TypeError,
+                        "argument must be a string without NUL characters")
         return rstring.assert_str0(result)
 
     def int_w(self, w_obj, allow_conversion=True):
@@ -1596,8 +1601,7 @@
     def realstr_w(self, w_obj):
         # Like str_w, but only works if w_obj is really of type 'str'.
         if not self.isinstance_w(w_obj, self.w_str):
-            raise OperationError(self.w_TypeError,
-                                 self.wrap('argument must be a string'))
+            raise oefmt(self.w_TypeError, "argument must be a string")
         return self.str_w(w_obj)
 
     def unicode_w(self, w_obj):
@@ -1608,16 +1612,16 @@
         from rpython.rlib import rstring
         result = w_obj.unicode_w(self)
         if u'\x00' in result:
-            raise OperationError(self.w_TypeError, self.wrap(
-                    'argument must be a unicode string without NUL characters'))
+            raise oefmt(self.w_TypeError,
+                        "argument must be a unicode string without NUL "
+                        "characters")
         return rstring.assert_str0(result)
 
     def realunicode_w(self, w_obj):
         # Like unicode_w, but only works if w_obj is really of type
         # 'unicode'.
         if not self.isinstance_w(w_obj, self.w_unicode):
-            raise OperationError(self.w_TypeError,
-                                 self.wrap('argument must be a unicode'))
+            raise oefmt(self.w_TypeError, "argument must be a unicode")
         return self.unicode_w(w_obj)
 
     def bool_w(self, w_obj):
@@ -1636,8 +1640,8 @@
 
     def gateway_r_uint_w(self, w_obj):
         if self.isinstance_w(w_obj, self.w_float):
-            raise OperationError(self.w_TypeError,
-                            self.wrap("integer argument expected, got float"))
+            raise oefmt(self.w_TypeError,
+                        "integer argument expected, got float")
         return self.uint_w(self.int(w_obj))
 
     def gateway_nonnegint_w(self, w_obj):
@@ -1645,8 +1649,7 @@
         # the integer is negative.  Here for gateway.py.
         value = self.gateway_int_w(w_obj)
         if value < 0:
-            raise OperationError(self.w_ValueError,
-                                 self.wrap("expected a non-negative integer"))
+            raise oefmt(self.w_ValueError, "expected a non-negative integer")
         return value
 
     def c_int_w(self, w_obj):
@@ -1654,8 +1657,7 @@
         # the integer does not fit in 32 bits.  Here for gateway.py.
         value = self.gateway_int_w(w_obj)
         if value < INT_MIN or value > INT_MAX:
-            raise OperationError(self.w_OverflowError,
-                                 self.wrap("expected a 32-bit integer"))
+            raise oefmt(self.w_OverflowError, "expected a 32-bit integer")
         return value
 
     def c_uint_w(self, w_obj):
@@ -1663,8 +1665,8 @@
         # the integer does not fit in 32 bits.  Here for gateway.py.
         value = self.uint_w(w_obj)
         if value > UINT_MAX:
-            raise OperationError(self.w_OverflowError,
-                              self.wrap("expected an unsigned 32-bit integer"))
+            raise oefmt(self.w_OverflowError,
+                        "expected an unsigned 32-bit integer")
         return value
 
     def c_nonnegint_w(self, w_obj):
@@ -1673,11 +1675,9 @@
         # for gateway.py.
         value = self.int_w(w_obj)
         if value < 0:
-            raise OperationError(self.w_ValueError,
-                                 self.wrap("expected a non-negative integer"))
+            raise oefmt(self.w_ValueError, "expected a non-negative integer")
         if value > INT_MAX:
-            raise OperationError(self.w_OverflowError,
-                                 self.wrap("expected a 32-bit integer"))
+            raise oefmt(self.w_OverflowError, "expected a 32-bit integer")
         return value
 
     def c_short_w(self, w_obj):
@@ -1705,7 +1705,7 @@
         # instead of raising OverflowError.  For obscure cases only.
         try:
             return self.int_w(w_obj, allow_conversion)
-        except OperationError, e:
+        except OperationError as e:
             if not e.match(self, self.w_OverflowError):
                 raise
             from rpython.rlib.rarithmetic import intmask
@@ -1716,7 +1716,7 @@
         # instead of raising OverflowError.
         try:
             return self.r_longlong_w(w_obj, allow_conversion)
-        except OperationError, e:
+        except OperationError as e:
             if not e.match(self, self.w_OverflowError):
                 raise
             from rpython.rlib.rarithmetic import longlongmask
@@ -1731,22 +1731,20 @@
             not self.isinstance_w(w_fd, self.w_long)):
             try:
                 w_fileno = self.getattr(w_fd, self.wrap("fileno"))
-            except OperationError, e:
+            except OperationError as e:
                 if e.match(self, self.w_AttributeError):
-                    raise OperationError(self.w_TypeError,
-                        self.wrap("argument must be an int, or have a fileno() "
-                            "method.")
-                    )
+                    raise oefmt(self.w_TypeError,
+                                "argument must be an int, or have a fileno() "
+                                "method.")
                 raise
             w_fd = self.call_function(w_fileno)
             if (not self.isinstance_w(w_fd, self.w_int) and
                 not self.isinstance_w(w_fd, self.w_long)):
-                raise OperationError(self.w_TypeError,
-                    self.wrap("fileno() returned a non-integer")
-                )
+                raise oefmt(self.w_TypeError,
+                            "fileno() returned a non-integer")
         try:
             fd = self.c_int_w(w_fd)
-        except OperationError, e:
+        except OperationError as e:
             if e.match(self, self.w_OverflowError):
                 fd = -1
             else:
@@ -1858,7 +1856,6 @@
     ('get',             'get',       3, ['__get__']),
     ('set',             'set',       3, ['__set__']),
     ('delete',          'delete',    2, ['__delete__']),
-    ('userdel',         'del',       1, ['__del__']),
 ]
 
 ObjSpace.BuiltinModuleTable = [
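
To illustrate the register_finalizer()/_finalize_() protocol documented
in the W_Root docstrings above, a hypothetical interp-level class (the
class name and the fd handling are invented):

    import os
    from pypy.interpreter.baseobjspace import W_Root

    class W_RawFile(W_Root):
        def __init__(self, space, fd):
            self.fd = fd
            # ask the GC to call self._finalize_() when the object dies
            self.register_finalizer(space)

        def _finalize_(self):
            # may run on a half-initialized object, so stay defensive
            fd = self.fd
            if fd >= 0:
                self.fd = -1
                os.close(fd)      # release the OS-level resource
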
diff --git a/pypy/interpreter/error.py b/pypy/interpreter/error.py
--- a/pypy/interpreter/error.py
+++ b/pypy/interpreter/error.py
@@ -214,9 +214,8 @@
             w_inst = w_type
             w_instclass = self._exception_getclass(space, w_inst)
             if not space.is_w(w_value, space.w_None):
-                raise OperationError(space.w_TypeError,
-                                     space.wrap("instance exception may not "
-                                                "have a separate value"))
+                raise oefmt(space.w_TypeError,
+                            "instance exception may not have a separate value")
             w_value = w_inst
             w_type = w_instclass
 
diff --git a/pypy/interpreter/executioncontext.py b/pypy/interpreter/executioncontext.py
--- a/pypy/interpreter/executioncontext.py
+++ b/pypy/interpreter/executioncontext.py
@@ -2,7 +2,7 @@
 from pypy.interpreter.error import OperationError, get_cleared_operation_error
 from rpython.rlib.unroll import unrolling_iterable
 from rpython.rlib.objectmodel import specialize
-from rpython.rlib import jit
+from rpython.rlib import jit, rgc
 
 TICK_COUNTER_STEP = 100
 
@@ -141,6 +141,12 @@
             actionflag.action_dispatcher(self, frame)     # slow path
     bytecode_trace._always_inline_ = True
 
+    def _run_finalizers_now(self):
+        # Tests only: run the actions now, to ensure that the
+        # finalizable objects are really finalized.  Used notably by
+        # pypy.tool.pytest.apptest.
+        self.space.actionflag.action_dispatcher(self, None)
+
     def bytecode_only_trace(self, frame):
         """
         Like bytecode_trace() but doesn't invoke any other events besides the
@@ -515,75 +521,98 @@
     """
 
 
-class UserDelCallback(object):
-    def __init__(self, w_obj, callback, descrname):
-        self.w_obj = w_obj
-        self.callback = callback
-        self.descrname = descrname
-        self.next = None
-
 class UserDelAction(AsyncAction):
     """An action that invokes all pending app-level __del__() method.
     This is done as an action instead of immediately when the
-    interp-level __del__() is invoked, because the latter can occur more
+    WRootFinalizerQueue is triggered, because the latter can occur more
     or less anywhere in the middle of code that might not be happy with
     random app-level code mutating data structures under its feet.
     """
 
     def __init__(self, space):
         AsyncAction.__init__(self, space)
-        self.dying_objects = None
-        self.dying_objects_last = None
-        self.finalizers_lock_count = 0
-        self.enabled_at_app_level = True
-
-    def register_callback(self, w_obj, callback, descrname):
-        cb = UserDelCallback(w_obj, callback, descrname)
-        if self.dying_objects_last is None:
-            self.dying_objects = cb
-        else:
-            self.dying_objects_last.next = cb
-        self.dying_objects_last = cb
-        self.fire()
+        self.finalizers_lock_count = 0        # see pypy/module/gc
+        self.enabled_at_app_level = True      # see pypy/module/gc
+        self.pending_with_disabled_del = None
 
     def perform(self, executioncontext, frame):
-        if self.finalizers_lock_count > 0:
-            return
         self._run_finalizers()
 
+    @jit.dont_look_inside
     def _run_finalizers(self):
-        # Each call to perform() first grabs the self.dying_objects
-        # and replaces it with an empty list.  We do this to try to
-        # avoid too deep recursions of the kind of __del__ being called
-        # while in the middle of another __del__ call.
-        pending = self.dying_objects
-        self.dying_objects = None
-        self.dying_objects_last = None
+        while True:
+            w_obj = self.space.finalizer_queue.next_dead()
+            if w_obj is None:
+                break
+            self._call_finalizer(w_obj)
+
+    def gc_disabled(self, w_obj):
+        # If we're running in 'gc.disable()' mode, record w_obj in the
+        # "call me later" list and return True.  In normal mode, return
+        # False.  Use this function from some _finalize_() methods:
+        # if a _finalize_() method would call some user-defined
+        # app-level function, like a weakref callback, then first do
+        # 'if gc.disabled(self): return'.  Another attempt at
+        # calling _finalize_() will be made after 'gc.enable()'.
+        # (The exact rule for when to use gc_disabled() or not is a bit
+        # vague, but most importantly this includes all user-level
+        # __del__().)
+        pdd = self.pending_with_disabled_del
+        if pdd is None:
+            return False
+        else:
+            pdd.append(w_obj)
+            return True
+
+    def _call_finalizer(self, w_obj):
+        # Before calling the finalizers, clear the weakrefs, if any.
+        w_obj.clear_all_weakrefs()
+
+        # Look up and call the app-level __del__, if any.
         space = self.space
-        while pending is not None:
+        if w_obj.typedef is None:
+            w_del = None       # obscure case: for WeakrefLifeline
+        else:
+            w_del = space.lookup(w_obj, '__del__')
+        if w_del is not None:
+            if self.gc_disabled(w_obj):
+                return
             try:
-                pending.callback(pending.w_obj)
-            except OperationError, e:
-                e.write_unraisable(space, pending.descrname, pending.w_obj)
-                e.clear(space)   # break up reference cycles
-            pending = pending.next
-        #
-        # Note: 'dying_objects' used to be just a regular list instead
-        # of a chained list.  This was the cause of "leaks" if we have a
-        # program that constantly creates new objects with finalizers.
-        # Here is why: say 'dying_objects' is a long list, and there
-        # are n instances in it.  Then we spend some time in this
-        # function, possibly triggering more GCs, but keeping the list
-        # of length n alive.  Then the list is suddenly freed at the
-        # end, and we return to the user program.  At this point the
-        # GC limit is still very high, because just before, there was
-        # a list of length n alive.  Assume that the program continues
-        # to allocate a lot of instances with finalizers.  The high GC
-        # limit means that it could allocate a lot of instances before
-        # reaching it --- possibly more than n.  So the whole procedure
-        # repeats with higher and higher values of n.
-        #
-        # This does not occur in the current implementation because
-        # there is no list of length n: if n is large, then the GC
-        # will run several times while walking the list, but it will
-        # see lower and lower memory usage, with no lower bound of n.
+                space.get_and_call_function(w_del, w_obj)
+            except Exception as e:
+                report_error(space, e, "method __del__ of ", w_obj)
+
+        # Call the RPython-level _finalize_() method.
+        try:
+            w_obj._finalize_()
+        except Exception as e:
+            report_error(space, e, "finalizer of ", w_obj)
+
+
+def report_error(space, e, where, w_obj):
+    if isinstance(e, OperationError):
+        e.write_unraisable(space, where, w_obj)
+        e.clear(space)   # break up reference cycles
+    else:
+        addrstring = w_obj.getaddrstring(space)
+        msg = ("RPython exception %s in %s<%s at 0x%s> ignored\n" % (
+                   str(e), where, space.type(w_obj).name, addrstring))
+        space.call_method(space.sys.get('stderr'), 'write',
+                          space.wrap(msg))
+
+
+def make_finalizer_queue(W_Root, space):
+    """Make a FinalizerQueue subclass which responds to GC finalizer
+    events by 'firing' the UserDelAction class above.  It does not
+    directly fetch the objects to finalize at all; they stay in the
+    GC-managed queue, and will only be fetched by UserDelAction
+    (between bytecodes)."""
+
+    class WRootFinalizerQueue(rgc.FinalizerQueue):
+        Class = W_Root
+
+        def finalizer_trigger(self):
+            space.user_del_action.fire()
+
+    space.user_del_action = UserDelAction(space)
+    space.finalizer_queue = WRootFinalizerQueue()
diff --git a/pypy/interpreter/function.py b/pypy/interpreter/function.py
--- a/pypy/interpreter/function.py
+++ b/pypy/interpreter/function.py
@@ -202,16 +202,15 @@
 
     def setdict(self, space, w_dict):
         if not space.isinstance_w(w_dict, space.w_dict):
-            raise OperationError(space.w_TypeError,
-                space.wrap("setting function's dictionary to a non-dict")
-            )
+            raise oefmt(space.w_TypeError,
+                        "setting function's dictionary to a non-dict")
         self.w_func_dict = w_dict
 
     def descr_function__new__(space, w_subtype, w_code, w_globals,
                               w_name=None, w_argdefs=None, w_closure=None):
         code = space.interp_w(Code, w_code)
         if not space.isinstance_w(w_globals, space.w_dict):
-            raise OperationError(space.w_TypeError, space.wrap("expected dict"))
+            raise oefmt(space.w_TypeError, "expected dict")
         if not space.is_none(w_name):
             name = space.str_w(w_name)
         else:
@@ -227,15 +226,15 @@
         if space.is_none(w_closure) and nfreevars == 0:
             closure = None
         elif not space.is_w(space.type(w_closure), space.w_tuple):
-            raise OperationError(space.w_TypeError, space.wrap("invalid closure"))
+            raise oefmt(space.w_TypeError, "invalid closure")
         else:
             from pypy.interpreter.nestedscope import Cell
             closure_w = space.unpackiterable(w_closure)
             n = len(closure_w)
             if nfreevars == 0:
-                raise OperationError(space.w_ValueError, space.wrap("no closure needed"))
+                raise oefmt(space.w_ValueError, "no closure needed")
             elif nfreevars != n:
-                raise OperationError(space.w_ValueError, space.wrap("closure is wrong size"))
+                raise oefmt(space.w_ValueError, "closure is wrong size")
             closure = [space.interp_w(Cell, w_cell) for w_cell in closure_w]
         func = space.allocate_instance(Function, w_subtype)
         Function.__init__(func, space, code, w_globals, defs_w, closure, name)
@@ -321,8 +320,8 @@
              w_func_dict, w_module) = args_w
         except ValueError:
             # wrong args
-            raise OperationError(space.w_ValueError,
-                         space.wrap("Wrong arguments to function.__setstate__"))
+            raise oefmt(space.w_ValueError,
+                        "Wrong arguments to function.__setstate__")
 
         self.space = space
         self.name = space.str_w(w_name)
@@ -359,7 +358,8 @@
             self.defs_w = []
             return
         if not space.isinstance_w(w_defaults, space.w_tuple):
-            raise OperationError(space.w_TypeError, space.wrap("func_defaults must be set to a tuple object or None"))
+            raise oefmt(space.w_TypeError,
+                        "func_defaults must be set to a tuple object or None")
         self.defs_w = space.fixedview(w_defaults)
 
     def fdel_func_defaults(self, space):
@@ -380,8 +380,8 @@
         if space.isinstance_w(w_name, space.w_str):
             self.name = space.str_w(w_name)
         else:
-            raise OperationError(space.w_TypeError,
-                space.wrap("__name__ must be set to a string object"))
+            raise oefmt(space.w_TypeError,
+                        "__name__ must be set to a string object")
 
     def fdel_func_doc(self, space):
         self.w_doc = space.w_None
@@ -406,8 +406,8 @@
     def fset_func_code(self, space, w_code):
         from pypy.interpreter.pycode import PyCode
         if not self.can_change_code:
-            raise OperationError(space.w_TypeError,
-                    space.wrap("Cannot change code attribute of builtin functions"))
+            raise oefmt(space.w_TypeError,
+                        "Cannot change code attribute of builtin functions")
         code = space.interp_w(Code, w_code)
         closure_len = 0
         if self.closure:
@@ -457,8 +457,7 @@
         if space.is_w(w_instance, space.w_None):
             w_instance = None
         if w_instance is None and space.is_none(w_class):
-            raise OperationError(space.w_TypeError,
-                                 space.wrap("unbound methods must have class"))
+            raise oefmt(space.w_TypeError, "unbound methods must have class")
         method = space.allocate_instance(Method, w_subtype)
         Method.__init__(method, space, w_function, w_instance, w_class)
         return space.wrap(method)
@@ -540,7 +539,7 @@
             try:
                 return space.call_method(space.w_object, '__getattribute__',
                                          space.wrap(self), w_attr)
-            except OperationError, e:
+            except OperationError as e:
                 if not e.match(space, space.w_AttributeError):
                     raise
         # fall-back to the attribute of the underlying 'im_func'
@@ -659,8 +658,8 @@
         self.w_module = func.w_module
 
     def descr_builtinfunction__new__(space, w_subtype):
-        raise OperationError(space.w_TypeError,
-                     space.wrap("cannot create 'builtin_function' instances"))
+        raise oefmt(space.w_TypeError,
+                    "cannot create 'builtin_function' instances")
 
     def descr_function_repr(self):
         return self.space.wrap('<built-in function %s>' % (self.name,))
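The recurring rewrite in this file (and the files below), OperationError(w_type, space.wrap(msg)) becoming oefmt(w_type, msg), relies on oefmt returning the exception object, so that 'raise oefmt(...)' works at the call site. Below is a toy stand-in assuming nothing about PyPy beyond what the diff shows; ToyOperationError and toy_oefmt are invented names, and the real oefmt has a richer %-style formatting language than plain string interpolation.

class ToyOperationError(Exception):
    """Invented stand-in for pypy.interpreter.error.OperationError."""
    def __init__(self, w_type, msg):
        Exception.__init__(self, msg)
        self.w_type = w_type
        self.msg = msg

def toy_oefmt(w_type, fmt, *args):
    # Build and *return* the exception; the caller writes 'raise toy_oefmt(...)'.
    if args:
        fmt = fmt % args
    return ToyOperationError(w_type, fmt)

# Usage mirroring the call sites rewritten above:
#     raise toy_oefmt(some_w_TypeError, "expected dict")
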
diff --git a/pypy/interpreter/gateway.py b/pypy/interpreter/gateway.py
--- a/pypy/interpreter/gateway.py
+++ b/pypy/interpreter/gateway.py
@@ -21,7 +21,7 @@
 from pypy.interpreter.signature import Signature
 from pypy.interpreter.baseobjspace import (W_Root, ObjSpace, SpaceCache,
     DescrMismatch)
-from pypy.interpreter.error import OperationError
+from pypy.interpreter.error import OperationError, oefmt
 from pypy.interpreter.function import ClassMethod, FunctionWithFixedCode
 from rpython.rlib import rstackovf
 from rpython.rlib.objectmodel import we_are_translated
@@ -686,7 +686,7 @@
                                                   self.descrmismatch_op,
                                                   self.descr_reqcls,
                                                   args)
-        except Exception, e:
+        except Exception as e:
             self.handle_exception(space, e)
             w_result = None
         if w_result is None:
@@ -699,14 +699,13 @@
                 raise
             raise e
         except KeyboardInterrupt:
-            raise OperationError(space.w_KeyboardInterrupt,
-                                 space.w_None)
+            raise OperationError(space.w_KeyboardInterrupt, space.w_None)
         except MemoryError:
             raise OperationError(space.w_MemoryError, space.w_None)
-        except rstackovf.StackOverflow, e:
+        except rstackovf.StackOverflow as e:
             rstackovf.check_stack_overflow()
-            raise OperationError(space.w_RuntimeError,
-                                space.wrap("maximum recursion depth exceeded"))
+            raise oefmt(space.w_RuntimeError,
+                        "maximum recursion depth exceeded")
         except RuntimeError:   # not on top of py.py
             raise OperationError(space.w_RuntimeError, space.w_None)
 
@@ -725,7 +724,7 @@
                                                   self.descrmismatch_op,
                                                   self.descr_reqcls,
                                                   args)
-        except Exception, e:
+        except Exception as e:
             self.handle_exception(space, e)
             w_result = None
         if w_result is None:
@@ -746,7 +745,7 @@
                                                   self.descrmismatch_op,
                                                   self.descr_reqcls,
                                                   args.prepend(w_obj))
-        except Exception, e:
+        except Exception as e:
             self.handle_exception(space, e)
             w_result = None
         if w_result is None:
@@ -762,9 +761,8 @@
         try:
             w_result = self.fastfunc_0(space)
         except DescrMismatch:
-            raise OperationError(space.w_SystemError,
-                                 space.wrap("unexpected DescrMismatch error"))
-        except Exception, e:
+            raise oefmt(space.w_SystemError, "unexpected DescrMismatch error")
+        except Exception as e:
             self.handle_exception(space, e)
             w_result = None
         if w_result is None:
@@ -784,7 +782,7 @@
                                           self.descrmismatch_op,
                                           self.descr_reqcls,
                                           Arguments(space, [w1]))
-        except Exception, e:
+        except Exception as e:
             self.handle_exception(space, e)
             w_result = None
         if w_result is None:
@@ -804,7 +802,7 @@
                                           self.descrmismatch_op,
                                           self.descr_reqcls,
                                           Arguments(space, [w1, w2]))
-        except Exception, e:
+        except Exception as e:
             self.handle_exception(space, e)
             w_result = None
         if w_result is None:
@@ -824,7 +822,7 @@
                                           self.descrmismatch_op,
                                           self.descr_reqcls,
                                           Arguments(space, [w1, w2, w3]))
-        except Exception, e:
+        except Exception as e:
             self.handle_exception(space, e)
             w_result = None
         if w_result is None:
@@ -845,7 +843,7 @@
                                           self.descr_reqcls,
                                           Arguments(space,
                                                     [w1, w2, w3, w4]))
-        except Exception, e:
+        except Exception as e:
             self.handle_exception(space, e)
             w_result = None
         if w_result is None:
diff --git a/pypy/interpreter/generator.py b/pypy/interpreter/generator.py
--- a/pypy/interpreter/generator.py
+++ b/pypy/interpreter/generator.py
@@ -1,6 +1,7 @@
 from pypy.interpreter.baseobjspace import W_Root
-from pypy.interpreter.error import OperationError
+from pypy.interpreter.error import OperationError, oefmt
 from pypy.interpreter.pyopcode import LoopBlock
+from pypy.interpreter.pycode import CO_YIELD_INSIDE_TRY
 from rpython.rlib import jit
 
 
@@ -13,6 +14,8 @@
         self.frame = frame     # turned into None when frame_finished_execution
         self.pycode = frame.pycode
         self.running = False
+        if self.pycode.co_flags & CO_YIELD_INSIDE_TRY:
+            self.register_finalizer(self.space)
 
     def descr__repr__(self, space):
         if self.pycode is None:
@@ -76,8 +79,7 @@
     def _send_ex(self, w_arg, operr):
         space = self.space
         if self.running:
-            raise OperationError(space.w_ValueError,
-                                 space.wrap('generator already executing'))
+            raise oefmt(space.w_ValueError, "generator already executing")
         frame = self.frame
         if frame is None:
             # xxx a bit ad-hoc, but we don't want to go inside
@@ -89,8 +91,9 @@
         last_instr = jit.promote(frame.last_instr)
         if last_instr == -1:
             if w_arg and not space.is_w(w_arg, space.w_None):
-                msg = "can't send non-None value to a just-started generator"
-                raise OperationError(space.w_TypeError, space.wrap(msg))
+                raise oefmt(space.w_TypeError,
+                            "can't send non-None value to a just-started "
+                            "generator")
         else:
             if not w_arg:
                 w_arg = space.w_None
@@ -139,20 +142,19 @@
 
     def descr_close(self):
         """x.close(arg) -> raise GeneratorExit inside generator."""
-        assert isinstance(self, GeneratorIterator)
         space = self.space
         try:
             w_retval = self.throw(space.w_GeneratorExit, space.w_None,
                                   space.w_None)
-        except OperationError, e:
+        except OperationError as e:
             if e.match(space, space.w_StopIteration) or \
                     e.match(space, space.w_GeneratorExit):
                 return space.w_None
             raise
 
         if w_retval is not None:
-            msg = "generator ignored GeneratorExit"
-            raise OperationError(space.w_RuntimeError, space.wrap(msg))
+            raise oefmt(space.w_RuntimeError,
+                        "generator ignored GeneratorExit")
 
     def descr_gi_frame(self, space):
         if self.frame is not None and not self.frame.frame_finished_execution:
@@ -184,8 +186,7 @@
             # XXX copied and simplified version of send_ex()
             space = self.space
             if self.running:
-                raise OperationError(space.w_ValueError,
-                                     space.wrap('generator already executing'))
+                raise oefmt(space.w_ValueError, "generator already executing")
             frame = self.frame
             if frame is None:    # already finished
                 return
@@ -197,7 +198,7 @@
                                               results=results, pycode=pycode)
                     try:
                         w_result = frame.execute_frame(space.w_None)
-                    except OperationError, e:
+                    except OperationError as e:
                         if not e.match(space, space.w_StopIteration):
                             raise
                         break
@@ -213,25 +214,21 @@
     unpack_into = _create_unpack_into()
     unpack_into_w = _create_unpack_into()
 
-
-class GeneratorIteratorWithDel(GeneratorIterator):
-
-    def __del__(self):
-        # Only bother enqueuing self to raise an exception if the frame is
-        # still not finished and finally or except blocks are present.
-        self.clear_all_weakrefs()
+    def _finalize_(self):
+        # This is only called if the CO_YIELD_INSIDE_TRY flag is set
+        # on the code object.  If the frame is still not finished and
+        # finally or except blocks are present at the current
+        # position, then raise a GeneratorExit.  Otherwise, there is
+        # no point.
         if self.frame is not None:
             block = self.frame.lastblock
             while block is not None:
                 if not isinstance(block, LoopBlock):
-                    self.enqueue_for_destruction(self.space,
-                                                 GeneratorIterator.descr_close,
-                                                 "interrupting generator of ")
+                    self.descr_close()
                     break
                 block = block.previous
 
 
-
 def get_printable_location_genentry(bytecode):
     return '%s <generator>' % (bytecode.get_repr(),)
 generatorentry_driver = jit.JitDriver(greens=['pycode'],
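For reference, a small app-level example (plain Python, not part of the changeset) of the behaviour the new _finalize_() hook preserves: a generator abandoned while suspended inside a try/finally is closed during finalization, so its finally block still runs. The CO_YIELD_INSIDE_TRY check above simply avoids registering a finalizer for generators that cannot need one.

import gc

def gen():
    try:
        yield 1
    finally:
        # Reached when the abandoned generator is finalized: closing it
        # raises GeneratorExit at the 'yield', which runs this block.
        print('finally reached')

g = gen()
next(g)        # suspend inside the try block
del g          # drop the last reference; on PyPy the finalizer runs later,
gc.collect()   # after a collection, rather than immediately as on CPython
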
diff --git a/pypy/interpreter/main.py b/pypy/interpreter/main.py
--- a/pypy/interpreter/main.py
+++ b/pypy/interpreter/main.py
@@ -8,7 +8,7 @@
     w_modules = space.sys.get('modules')
     try:
         return space.getitem(w_modules, w_main)
-    except OperationError, e:
+    except OperationError as e:
         if not e.match(space, space.w_KeyError):
             raise
     mainmodule = module.Module(space, w_main)
@@ -52,7 +52,7 @@
         else:
             return
 
-    except OperationError, operationerr:
+    except OperationError as operationerr:
         operationerr.record_interpreter_traceback()
         raise
 
@@ -110,7 +110,7 @@
         try:
             w_stdout = space.sys.get('stdout')
             w_softspace = space.getattr(w_stdout, space.wrap('softspace'))
-        except OperationError, e:
+        except OperationError as e:
             if not e.match(space, space.w_AttributeError):
                 raise
             # Don't crash if user defined stdout doesn't have softspace
@@ -118,7 +118,7 @@
             if space.is_true(w_softspace):
                 space.call_method(w_stdout, 'write', space.wrap('\n'))
 
-    except OperationError, operationerr:
+    except OperationError as operationerr:
         operationerr.normalize_exception(space)
         w_type = operationerr.w_type
         w_value = operationerr.get_w_value(space)
@@ -162,7 +162,7 @@
                     space.call_function(w_hook, w_type, w_value, w_traceback)
                     return False   # done
 
-        except OperationError, err2:
+        except OperationError as err2:
             # XXX should we go through sys.get('stderr') ?
             print >> sys.stderr, 'Error calling sys.excepthook:'
             err2.print_application_traceback(space)
diff --git a/pypy/interpreter/mixedmodule.py b/pypy/interpreter/mixedmodule.py
--- a/pypy/interpreter/mixedmodule.py
+++ b/pypy/interpreter/mixedmodule.py
@@ -169,7 +169,7 @@
         while 1:
             try:
                 value = eval(spec, d)
-            except NameError, ex:
+            except NameError as ex:
                 name = ex.args[0].split("'")[1]  # super-Evil
                 if name in d:
                     raise   # propagate the NameError
diff --git a/pypy/interpreter/nestedscope.py b/pypy/interpreter/nestedscope.py
--- a/pypy/interpreter/nestedscope.py
+++ b/pypy/interpreter/nestedscope.py
@@ -1,7 +1,7 @@
 from rpython.tool.uid import uid
 
 from pypy.interpreter.baseobjspace import W_Root
-from pypy.interpreter.error import OperationError
+from pypy.interpreter.error import oefmt
 from pypy.interpreter.mixedmodule import MixedModule
 
 
@@ -78,4 +78,4 @@
         try:
             return self.get()
         except ValueError:
-            raise OperationError(space.w_ValueError, space.wrap("Cell is empty"))
+            raise oefmt(space.w_ValueError, "Cell is empty")
diff --git a/pypy/interpreter/pycode.py b/pypy/interpreter/pycode.py
--- a/pypy/interpreter/pycode.py
+++ b/pypy/interpreter/pycode.py
@@ -8,7 +8,7 @@
 
 from pypy.interpreter import eval
 from pypy.interpreter.signature import Signature
-from pypy.interpreter.error import OperationError
+from pypy.interpreter.error import OperationError, oefmt
 from pypy.interpreter.gateway import unwrap_spec
 from pypy.interpreter.astcompiler.consts import (
     CO_OPTIMIZED, CO_NEWLOCALS, CO_VARARGS, CO_VARKEYWORDS, CO_NESTED,
@@ -110,7 +110,7 @@
         if code_hook is not None:
             try:
                 self.space.call_function(code_hook, self)
-            except OperationError, e:
+            except OperationError as e:
                 e.write_unraisable(self.space, "new_code_hook()")
 
     def _initialize(self):
@@ -374,14 +374,13 @@
                           lnotab, w_freevars=None, w_cellvars=None,
                           magic=default_magic):
         if argcount < 0:
-            raise OperationError(space.w_ValueError,
-                                 space.wrap("code: argcount must not be negative"))
+            raise oefmt(space.w_ValueError,
+                        "code: argcount must not be negative")
         if nlocals < 0:
-            raise OperationError(space.w_ValueError,
-                                 space.wrap("code: nlocals must not be negative"))
+            raise oefmt(space.w_ValueError,
+                        "code: nlocals must not be negative")
         if not space.isinstance_w(w_constants, space.w_tuple):
-            raise OperationError(space.w_TypeError,
-                                 space.wrap("Expected tuple for constants"))
+            raise oefmt(space.w_TypeError, "Expected tuple for constants")
         consts_w = space.fixedview(w_constants)
         names = unpack_str_tuple(space, w_names)
         varnames = unpack_str_tuple(space, w_varnames)
diff --git a/pypy/interpreter/pycompiler.py b/pypy/interpreter/pycompiler.py
--- a/pypy/interpreter/pycompiler.py
+++ b/pypy/interpreter/pycompiler.py
@@ -7,7 +7,7 @@
 from pypy.interpreter.pyparser import future, pyparse, error as parseerror
 from pypy.interpreter.astcompiler import (astbuilder, codegen, consts, misc,
                                           optimize, ast)
-from pypy.interpreter.error import OperationError
+from pypy.interpreter.error import OperationError, oefmt
 
 
 class AbstractCompiler(object):
@@ -55,21 +55,21 @@
         try:
             code = self.compile(source, filename, mode, flags)
             return code   # success
-        except OperationError, err:
+        except OperationError as err:
             if not err.match(space, space.w_SyntaxError):
                 raise
 
         try:
             self.compile(source + "\n", filename, mode, flags)
             return None   # expect more
-        except OperationError, err1:
+        except OperationError as err1:
             if not err1.match(space, space.w_SyntaxError):
                 raise
 
         try:
             self.compile(source + "\n\n", filename, mode, flags)
             raise     # uh? no error with \n\n.  re-raise the previous error
-        except OperationError, err2:
+        except OperationError as err2:
             if not err2.match(space, space.w_SyntaxError):
                 raise
 
@@ -116,8 +116,7 @@
         else:
             check = True
         if not check:
-            raise OperationError(self.space.w_TypeError, self.space.wrap(
-                "invalid node type"))
+            raise oefmt(self.space.w_TypeError, "invalid node type")
 
         fut = misc.parse_future(node, self.future_flags.compiler_features)
         f_flags, f_lineno, f_col = fut
@@ -131,9 +130,8 @@
         try:
             mod = optimize.optimize_ast(space, node, info)
             code = codegen.compile_ast(space, mod, info)
-        except parseerror.SyntaxError, e:
-            raise OperationError(space.w_SyntaxError,
-                                 e.wrap_info(space))
+        except parseerror.SyntaxError as e:
+            raise OperationError(space.w_SyntaxError, e.wrap_info(space))
         return code
 
     def compile_to_ast(self, source, filename, mode, flags):
@@ -145,12 +143,10 @@
         try:
             parse_tree = self.parser.parse_source(source, info)
             mod = astbuilder.ast_from_node(space, parse_tree, info)
-        except parseerror.IndentationError, e:
-            raise OperationError(space.w_IndentationError,
-                                 e.wrap_info(space))
-        except parseerror.SyntaxError, e:
-            raise OperationError(space.w_SyntaxError,
-                                 e.wrap_info(space))
+        except parseerror.IndentationError as e:
+            raise OperationError(space.w_IndentationError, e.wrap_info(space))
+        except parseerror.SyntaxError as e:
+            raise OperationError(space.w_SyntaxError, e.wrap_info(space))
         return mod
 
     def compile(self, source, filename, mode, flags, hidden_applevel=False):
diff --git a/pypy/interpreter/pyframe.py b/pypy/interpreter/pyframe.py
--- a/pypy/interpreter/pyframe.py
+++ b/pypy/interpreter/pyframe.py
@@ -220,9 +220,9 @@
                 return            # no cells needed - fast path
         elif outer_func is None:
             space = self.space
-            raise OperationError(space.w_TypeError,
-                                 space.wrap("directly executed code object "
-                                            "may not contain free variables"))
+            raise oefmt(space.w_TypeError,
+                        "directly executed code object may not contain free "
+                        "variables")
         if outer_func and outer_func.closure:
             closure_size = len(outer_func.closure)
         else:
@@ -241,12 +241,8 @@
     def run(self):
         """Start this frame's execution."""
         if self.getcode().co_flags & pycode.CO_GENERATOR:
-            if self.getcode().co_flags & pycode.CO_YIELD_INSIDE_TRY:
-                from pypy.interpreter.generator import GeneratorIteratorWithDel
-                return self.space.wrap(GeneratorIteratorWithDel(self))
-            else:
-                from pypy.interpreter.generator import GeneratorIterator
-                return self.space.wrap(GeneratorIterator(self))
+            from pypy.interpreter.generator import GeneratorIterator
+            return self.space.wrap(GeneratorIterator(self))
         else:
             return self.execute_frame()
 
@@ -513,7 +509,7 @@
         self.locals_cells_stack_w = values_w[:]
         valuestackdepth = space.int_w(w_stackdepth)
         if not self._check_stack_index(valuestackdepth):
-            raise OperationError(space.w_ValueError, space.wrap("invalid stackdepth"))
+            raise oefmt(space.w_ValueError, "invalid stackdepth")
         assert valuestackdepth >= 0
         self.valuestackdepth = valuestackdepth
         if space.is_w(w_exc_value, space.w_None):
@@ -550,7 +546,7 @@
         where the order is according to self.pycode.signature()."""
         scope_len = len(scope_w)
         if scope_len > self.pycode.co_nlocals:
-            raise ValueError, "new fastscope is longer than the allocated area"
+            raise ValueError("new fastscope is longer than the allocated area")
         # don't assign directly to 'locals_cells_stack_w[:scope_len]' to be
         # virtualizable-friendly
         for i in range(scope_len):
@@ -686,12 +682,11 @@
         try:
             new_lineno = space.int_w(w_new_lineno)
         except OperationError:
-            raise OperationError(space.w_ValueError,
-                                 space.wrap("lineno must be an integer"))
+            raise oefmt(space.w_ValueError, "lineno must be an integer")
 
         if self.get_w_f_trace() is None:
-            raise OperationError(space.w_ValueError,
-                  space.wrap("f_lineno can only be set by a trace function."))
+            raise oefmt(space.w_ValueError,
+                        "f_lineno can only be set by a trace function.")
 
         line = self.pycode.co_firstlineno
         if new_lineno < line:
@@ -718,8 +713,8 @@
         # Don't jump to a line with an except in it.
         code = self.pycode.co_code
         if ord(code[new_lasti]) in (DUP_TOP, POP_TOP):
-            raise OperationError(space.w_ValueError,
-                  space.wrap("can't jump to 'except' line as there's no exception"))
+            raise oefmt(space.w_ValueError,
+                        "can't jump to 'except' line as there's no exception")
 
         # Don't jump into or out of a finally block.
         f_lasti_setup_addr = -1
@@ -800,8 +795,8 @@
             new_iblock = f_iblock - delta_iblock
 
         if new_iblock > min_iblock:
-            raise OperationError(space.w_ValueError,
-                                 space.wrap("can't jump into the middle of a block"))
+            raise oefmt(space.w_ValueError,
+                        "can't jump into the middle of a block")
 
         while f_iblock > new_iblock:
             block = self.pop_block()
diff --git a/pypy/interpreter/pyopcode.py b/pypy/interpreter/pyopcode.py
--- a/pypy/interpreter/pyopcode.py
+++ b/pypy/interpreter/pyopcode.py
@@ -67,9 +67,9 @@
     def handle_bytecode(self, co_code, next_instr, ec):
         try:
             next_instr = self.dispatch_bytecode(co_code, next_instr, ec)
-        except OperationError, operr:
+        except OperationError as operr:
             next_instr = self.handle_operation_error(ec, operr)
-        except RaiseWithExplicitTraceback, e:
+        except RaiseWithExplicitTraceback as e:
             next_instr = self.handle_operation_error(ec, e.operr,
                                                      attach_tb=False)
         except KeyboardInterrupt:
@@ -78,7 +78,7 @@
         except MemoryError:
             next_instr = self.handle_asynchronous_error(ec,
                 self.space.w_MemoryError)
-        except rstackovf.StackOverflow, e:
+        except rstackovf.StackOverflow as e:
             # Note that this case catches AttributeError!
             rstackovf.check_stack_overflow()

