[pypy-commit] pypy optresult-unroll: merge default

fijal noreply at buildbot.pypy.org
Fri Aug 28 16:06:53 CEST 2015


Author: Maciej Fijalkowski <fijall at gmail.com>
Branch: optresult-unroll
Changeset: r79279:a25583961987
Date: 2015-08-28 16:02 +0200
http://bitbucket.org/pypy/pypy/changeset/a25583961987/

Log:	merge default

diff too long, truncating to 2000 out of 17941 lines

diff --git a/LICENSE b/LICENSE
--- a/LICENSE
+++ b/LICENSE
@@ -352,8 +352,7 @@
 Except when otherwise stated (look for LICENSE files or copyright/license
 information at the beginning of each file) the files in the 'lib-python/2.7'
 directory are all copyrighted by the Python Software Foundation and licensed
-under the Python Software License of which you can find a copy here:
-http://www.python.org/doc/Copyright.html 
+under the terms that you can find here: https://docs.python.org/2/license.html
 
 License for 'pypy/module/unicodedata/'
 ======================================
@@ -435,4 +434,4 @@
 
 The code is based on gperftools. You may see a copy of the License for it at
 
-    https://code.google.com/p/gperftools/source/browse/COPYING
+    https://github.com/gperftools/gperftools/blob/master/COPYING
diff --git a/_pytest/assertion/rewrite.py b/_pytest/assertion/rewrite.py
--- a/_pytest/assertion/rewrite.py
+++ b/_pytest/assertion/rewrite.py
@@ -308,7 +308,10 @@
         if (len(data) != 8 or data[:4] != imp.get_magic() or
                 struct.unpack("<l", data[4:])[0] != mtime):
             return None
-        co = marshal.load(fp)
+        try:
+            co = marshal.load(fp)
+        except ValueError:
+            return None # e.g. bad marshal data because of pypy/cpython mix
         if not isinstance(co, types.CodeType):
             # That's interesting....
             return None
diff --git a/lib-python/2.7/test/test_urllib2.py b/lib-python/2.7/test/test_urllib2.py
--- a/lib-python/2.7/test/test_urllib2.py
+++ b/lib-python/2.7/test/test_urllib2.py
@@ -291,6 +291,7 @@
         self.req_headers = []
         self.data = None
         self.raise_on_endheaders = False
+        self.sock = None
         self._tunnel_headers = {}
 
     def __call__(self, host, timeout=socket._GLOBAL_DEFAULT_TIMEOUT):
diff --git a/lib-python/2.7/urllib2.py b/lib-python/2.7/urllib2.py
--- a/lib-python/2.7/urllib2.py
+++ b/lib-python/2.7/urllib2.py
@@ -1200,6 +1200,12 @@
                 r = h.getresponse(buffering=True)
             except TypeError: # buffering kw not supported
                 r = h.getresponse()
+            # If the server does not send us a 'Connection: close' header,
+            # HTTPConnection assumes the socket should be left open. Manually
+            # mark the socket to be closed when this response object goes away.
+            if h.sock:
+                h.sock.close()
+                h.sock = None
 
         # Pick apart the HTTPResponse object to get the addinfourl
         # object initialized properly.
diff --git a/lib_pypy/_sqlite3.py b/lib_pypy/_sqlite3.py
--- a/lib_pypy/_sqlite3.py
+++ b/lib_pypy/_sqlite3.py
@@ -345,7 +345,10 @@
 
     def _finalize_raw_statement(self, _statement):
         if self.__rawstatements is not None:
-            self.__rawstatements.remove(_statement)
+            try:
+                self.__rawstatements.remove(_statement)
+            except KeyError:
+                return    # rare case: already finalized, see issue #2097
             _lib.sqlite3_finalize(_statement)
 
     def __do_all_statements(self, action, reset_cursors):
diff --git a/lib_pypy/_tkinter/tclobj.py b/lib_pypy/_tkinter/tclobj.py
--- a/lib_pypy/_tkinter/tclobj.py
+++ b/lib_pypy/_tkinter/tclobj.py
@@ -108,6 +108,8 @@
         return value.internalRep.doubleValue
     if value.typePtr == typeCache.IntType:
         return value.internalRep.longValue
+    if value.typePtr == typeCache.WideIntType:
+        return FromWideIntObj(app, value)
     if value.typePtr == typeCache.BigNumType and tklib.HAVE_LIBTOMMATH:
         return FromBignumObj(app, value)
     if value.typePtr == typeCache.ListType:
diff --git a/lib_pypy/_tkinter/tklib_build.py b/lib_pypy/_tkinter/tklib_build.py
--- a/lib_pypy/_tkinter/tklib_build.py
+++ b/lib_pypy/_tkinter/tklib_build.py
@@ -179,6 +179,7 @@
 typedef int... Tcl_WideInt;
 
 int Tcl_GetWideIntFromObj(Tcl_Interp *interp, Tcl_Obj *obj, Tcl_WideInt *value);
+Tcl_Obj *Tcl_NewWideIntObj(Tcl_WideInt value);
 """)
 
 if HAVE_LIBTOMMATH:
diff --git a/lib_pypy/cffi.egg-info/PKG-INFO b/lib_pypy/cffi.egg-info/PKG-INFO
--- a/lib_pypy/cffi.egg-info/PKG-INFO
+++ b/lib_pypy/cffi.egg-info/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 1.1
 Name: cffi
-Version: 1.1.2
+Version: 1.2.1
 Summary: Foreign Function Interface for Python calling C code.
 Home-page: http://cffi.readthedocs.org
 Author: Armin Rigo, Maciej Fijalkowski
diff --git a/lib_pypy/cffi/__init__.py b/lib_pypy/cffi/__init__.py
--- a/lib_pypy/cffi/__init__.py
+++ b/lib_pypy/cffi/__init__.py
@@ -4,8 +4,8 @@
 from .api import FFI, CDefError, FFIError
 from .ffiplatform import VerificationError, VerificationMissing
 
-__version__ = "1.1.2"
-__version_info__ = (1, 1, 2)
+__version__ = "1.2.1"
+__version_info__ = (1, 2, 1)
 
 # The verifier module file names are based on the CRC32 of a string that
 # contains the following version number.  It may be older than __version__
diff --git a/lib_pypy/cffi/_cffi_include.h b/lib_pypy/cffi/_cffi_include.h
--- a/lib_pypy/cffi/_cffi_include.h
+++ b/lib_pypy/cffi/_cffi_include.h
@@ -46,7 +46,7 @@
 # endif
 #else
 # include <stdint.h>
-# if (defined (__SVR4) && defined (__sun)) || defined(_AIX)
+# if (defined (__SVR4) && defined (__sun)) || defined(_AIX) || defined(__hpux)
 #  include <alloca.h>
 # endif
 #endif
diff --git a/lib_pypy/cffi/api.py b/lib_pypy/cffi/api.py
--- a/lib_pypy/cffi/api.py
+++ b/lib_pypy/cffi/api.py
@@ -236,6 +236,30 @@
             cdecl = self._typeof(cdecl)
         return self._backend.newp(cdecl, init)
 
+    def new_allocator(self, alloc=None, free=None,
+                      should_clear_after_alloc=True):
+        """Return a new allocator, i.e. a function that behaves like ffi.new()
+        but uses the provided low-level 'alloc' and 'free' functions.
+
+        'alloc' is called with the size as argument.  If it returns NULL, a
+        MemoryError is raised.  'free' is called with the result of 'alloc'
+        as argument.  Both can be either Python function or directly C
+        functions.  If 'free' is None, then no free function is called.
+        If both 'alloc' and 'free' are None, the default is used.
+
+        If 'should_clear_after_alloc' is set to False, then the memory
+        returned by 'alloc' is assumed to be already cleared (or you are
+        fine with garbage); otherwise CFFI will clear it.
+        """
+        compiled_ffi = self._backend.FFI()
+        allocator = compiled_ffi.new_allocator(alloc, free,
+                                               should_clear_after_alloc)
+        def allocate(cdecl, init=None):
+            if isinstance(cdecl, basestring):
+                cdecl = self._typeof(cdecl)
+            return allocator(cdecl, init)
+        return allocate
+
     def cast(self, cdecl, source):
         """Similar to a C cast: returns an instance of the named C
         type initialized with the given 'source'.  The source is
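
As a minimal sketch of how the new ``ffi.new_allocator()`` above is meant to
be used (ABI-mode cffi on Linux, with libc's malloc/free standing in for
whatever real allocation functions an application would provide):

    import cffi

    ffi = cffi.FFI()
    ffi.cdef("""
        void *malloc(size_t size);
        void free(void *ptr);
    """)
    lib = ffi.dlopen(None)                 # libc, on Linux

    # behaves like ffi.new(), but allocates with malloc() and calls free()
    # when the returned cdata object is garbage-collected
    new_malloc = ffi.new_allocator(lib.malloc, lib.free)
    p = new_malloc("int[10]")
    p[0] = 42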
@@ -286,7 +310,7 @@
         """
         return self._backend.from_buffer(self.BCharA, python_buffer)
 
-    def callback(self, cdecl, python_callable=None, error=None):
+    def callback(self, cdecl, python_callable=None, error=None, onerror=None):
         """Return a callback object or a decorator making such a
         callback object.  'cdecl' must name a C function pointer type.
         The callback invokes the specified 'python_callable' (which may
@@ -298,7 +322,8 @@
             if not callable(python_callable):
                 raise TypeError("the 'python_callable' argument "
                                 "is not callable")
-            return self._backend.callback(cdecl, python_callable, error)
+            return self._backend.callback(cdecl, python_callable,
+                                          error, onerror)
         if isinstance(cdecl, basestring):
             cdecl = self._typeof(cdecl, consider_function_as_funcptr=True)
         if python_callable is None:
@@ -327,6 +352,13 @@
         data.  Later, when this new cdata object is garbage-collected,
         'destructor(old_cdata_object)' will be called.
         """
+        try:
+            gcp = self._backend.gcp
+        except AttributeError:
+            pass
+        else:
+            return gcp(cdata, destructor)
+        #
         with self._lock:
             try:
                 gc_weakrefs = self.gc_weakrefs
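
A rough reminder of the documented ``ffi.gc()`` semantics that this hunk
preserves, reusing the libc sketch above:

    raw = lib.malloc(100)
    ptr = ffi.gc(raw, lib.free)    # lib.free(raw) is called when 'ptr' dies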
@@ -428,6 +460,8 @@
             raise TypeError("ffi.include() expects an argument that is also of"
                             " type cffi.FFI, not %r" % (
                                 type(ffi_to_include).__name__,))
+        if ffi_to_include is self:
+            raise ValueError("self.include(self)")
         with ffi_to_include._lock:
             with self._lock:
                 self._parser.include(ffi_to_include._parser)
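
The new ``onerror`` hook accepted by ``ffi.callback()`` above can be used
like this (a sketch following the cffi 1.2 interface: the hook receives the
exception triple, and the C caller still sees the ``error`` return value):

    import cffi, traceback

    ffi = cffi.FFI()

    def log_error(exc_type, exc_value, tb):
        # called if do_div() raises; the C side still receives -1
        traceback.print_exception(exc_type, exc_value, tb)

    @ffi.callback("int(int)", error=-1, onerror=log_error)
    def do_div(x):
        return 100 // x            # raises ZeroDivisionError for x == 0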
diff --git a/lib_pypy/cffi/backend_ctypes.py b/lib_pypy/cffi/backend_ctypes.py
--- a/lib_pypy/cffi/backend_ctypes.py
+++ b/lib_pypy/cffi/backend_ctypes.py
@@ -989,7 +989,8 @@
     def cast(self, BType, source):
         return BType._cast_from(source)
 
-    def callback(self, BType, source, error):
+    def callback(self, BType, source, error, onerror):
+        assert onerror is None   # XXX not implemented
         return BType(source, error)
 
     typeof = type
diff --git a/lib_pypy/cffi/cffi_opcode.py b/lib_pypy/cffi/cffi_opcode.py
--- a/lib_pypy/cffi/cffi_opcode.py
+++ b/lib_pypy/cffi/cffi_opcode.py
@@ -53,6 +53,7 @@
 OP_GLOBAL_VAR      = 33
 OP_DLOPEN_FUNC     = 35
 OP_DLOPEN_CONST    = 37
+OP_GLOBAL_VAR_F    = 39
 
 PRIM_VOID          = 0
 PRIM_BOOL          = 1
diff --git a/lib_pypy/cffi/cparser.py b/lib_pypy/cffi/cparser.py
--- a/lib_pypy/cffi/cparser.py
+++ b/lib_pypy/cffi/cparser.py
@@ -15,9 +15,11 @@
 except ImportError:
     lock = None
 
-_r_comment = re.compile(r"/\*.*?\*/|//.*?$", re.DOTALL | re.MULTILINE)
-_r_define  = re.compile(r"^\s*#\s*define\s+([A-Za-z_][A-Za-z_0-9]*)\s+(.*?)$",
-                        re.MULTILINE)
+_r_comment = re.compile(r"/\*.*?\*/|//([^\n\\]|\\.)*?$",
+                        re.DOTALL | re.MULTILINE)
+_r_define  = re.compile(r"^\s*#\s*define\s+([A-Za-z_][A-Za-z_0-9]*)"
+                        r"\b((?:[^\n\\]|\\.)*?)$",
+                        re.DOTALL | re.MULTILINE)
 _r_partial_enum = re.compile(r"=\s*\.\.\.\s*[,}]|\.\.\.\s*\}")
 _r_enum_dotdotdot = re.compile(r"__dotdotdot\d+__$")
 _r_partial_array = re.compile(r"\[\s*\.\.\.\s*\]")
@@ -39,6 +41,7 @@
     macros = {}
     for match in _r_define.finditer(csource):
         macroname, macrovalue = match.groups()
+        macrovalue = macrovalue.replace('\\\n', '').strip()
         macros[macroname] = macrovalue
     csource = _r_define.sub('', csource)
     # Replace "[...]" with "[__dotdotdotarray__]"
@@ -423,13 +426,10 @@
                 raise api.CDefError(
                     "%s: a function with only '(...)' as argument"
                     " is not correct C" % (funcname or 'in expression'))
-        elif (len(params) == 1 and
-            isinstance(params[0].type, pycparser.c_ast.TypeDecl) and
-            isinstance(params[0].type.type, pycparser.c_ast.IdentifierType)
-                and list(params[0].type.type.names) == ['void']):
-            del params[0]
         args = [self._as_func_arg(self._get_type(argdeclnode.type))
                 for argdeclnode in params]
+        if not ellipsis and args == [model.void_type]:
+            args = []
         result = self._get_type(typenode.type)
         return model.RawFunctionType(tuple(args), result, ellipsis)
 
@@ -633,6 +633,8 @@
 
     def include(self, other):
         for name, tp in other._declarations.items():
+            if name.startswith('anonymous $enum_$'):
+                continue   # fix for test_anonymous_enum_include
             kind = name.split(' ', 1)[0]
             if kind in ('struct', 'union', 'enum', 'anonymous'):
                 self._declare(name, tp, included=True)
diff --git a/lib_pypy/cffi/model.py b/lib_pypy/cffi/model.py
--- a/lib_pypy/cffi/model.py
+++ b/lib_pypy/cffi/model.py
@@ -35,9 +35,6 @@
     def is_integer_type(self):
         return False
 
-    def sizeof_enabled(self):
-        return False
-
     def get_cached_btype(self, ffi, finishlist, can_delay=False):
         try:
             BType = ffi._cached_btypes[self]
@@ -80,8 +77,7 @@
 
 
 class BasePrimitiveType(BaseType):
-    def sizeof_enabled(self):
-        return True
+    pass
 
 
 class PrimitiveType(BasePrimitiveType):
@@ -205,9 +201,6 @@
 class FunctionPtrType(BaseFunctionType):
     _base_pattern = '(*&)(%s)'
 
-    def sizeof_enabled(self):
-        return True
-
     def build_backend_type(self, ffi, finishlist):
         result = self.result.get_cached_btype(ffi, finishlist)
         args = []
@@ -233,9 +226,6 @@
             extra = self._base_pattern
         self.c_name_with_marker = totype.c_name_with_marker.replace('&', extra)
 
-    def sizeof_enabled(self):
-        return True
-
     def build_backend_type(self, ffi, finishlist):
         BItem = self.totype.get_cached_btype(ffi, finishlist, can_delay=True)
         return global_cache(self, ffi, 'new_pointer_type', BItem)
@@ -276,9 +266,6 @@
         self.c_name_with_marker = (
             self.item.c_name_with_marker.replace('&', brackets))
 
-    def sizeof_enabled(self):
-        return self.item.sizeof_enabled() and self.length is not None
-
     def resolve_length(self, newlength):
         return ArrayType(self.item, newlength)
 
@@ -433,9 +420,6 @@
             from . import ffiplatform
             raise ffiplatform.VerificationMissing(self._get_c_name())
 
-    def sizeof_enabled(self):
-        return self.fldtypes is not None
-
     def build_backend_type(self, ffi, finishlist):
         self.check_not_partial()
         finishlist.append(self)
@@ -464,9 +448,6 @@
         self.baseinttype = baseinttype
         self.build_c_name_with_marker()
 
-    def sizeof_enabled(self):
-        return True     # not strictly true, but external enums are obscure
-
     def force_the_name(self, forcename):
         StructOrUnionOrEnum.force_the_name(self, forcename)
         if self.forcename is None:
diff --git a/lib_pypy/cffi/parse_c_type.h b/lib_pypy/cffi/parse_c_type.h
--- a/lib_pypy/cffi/parse_c_type.h
+++ b/lib_pypy/cffi/parse_c_type.h
@@ -26,6 +26,7 @@
 #define _CFFI_OP_GLOBAL_VAR     33
 #define _CFFI_OP_DLOPEN_FUNC    35
 #define _CFFI_OP_DLOPEN_CONST   37
+#define _CFFI_OP_GLOBAL_VAR_F   39
 
 #define _CFFI_PRIM_VOID          0
 #define _CFFI_PRIM_BOOL          1
diff --git a/lib_pypy/cffi/recompiler.py b/lib_pypy/cffi/recompiler.py
--- a/lib_pypy/cffi/recompiler.py
+++ b/lib_pypy/cffi/recompiler.py
@@ -4,11 +4,6 @@
 
 VERSION = "0x2601"
 
-try:
-    int_type = (int, long)
-except NameError:    # Python 3
-    int_type = int
-
 
 class GlobalExpr:
     def __init__(self, name, address, type_op, size=0, check_value=0):
@@ -981,10 +976,6 @@
         if not self.target_is_python and tp.is_integer_type():
             type_op = CffiOp(OP_CONSTANT_INT, -1)
         else:
-            if not tp.sizeof_enabled():
-                raise ffiplatform.VerificationError(
-                    "constant '%s' is of type '%s', whose size is not known"
-                    % (name, tp._get_c_name()))
             if self.target_is_python:
                 const_kind = OP_DLOPEN_CONST
             else:
@@ -1069,18 +1060,36 @@
         self._do_collect_type(self._global_type(tp, name))
 
     def _generate_cpy_variable_decl(self, tp, name):
-        pass
+        prnt = self._prnt
+        tp = self._global_type(tp, name)
+        if isinstance(tp, model.ArrayType) and tp.length is None:
+            tp = tp.item
+            ampersand = ''
+        else:
+            ampersand = '&'
+        # This code assumes that casts from "tp *" to "void *" is a
+        # no-op, i.e. a function that returns a "tp *" can be called
+        # as if it returned a "void *".  This should be generally true
+        # on any modern machine.  The only exception to that rule (on
+        # uncommon architectures, and as far as I can tell) might be
+        # if 'tp' were a function type, but that is not possible here.
+        # (If 'tp' is a function _pointer_ type, then casts from "fn_t
+        # **" to "void *" are again no-ops, as far as I can tell.)
+        prnt('static ' + tp.get_c_name('*_cffi_var_%s(void)' % (name,)))
+        prnt('{')
+        prnt('  return %s(%s);' % (ampersand, name))
+        prnt('}')
+        prnt()
 
     def _generate_cpy_variable_ctx(self, tp, name):
         tp = self._global_type(tp, name)
         type_index = self._typesdict[tp]
-        type_op = CffiOp(OP_GLOBAL_VAR, type_index)
-        if tp.sizeof_enabled():
-            size = "sizeof(%s)" % (name,)
+        if self.target_is_python:
+            op = OP_GLOBAL_VAR
         else:
-            size = 0
+            op = OP_GLOBAL_VAR_F
         self._lsts["global"].append(
-            GlobalExpr(name, '&%s' % name, type_op, size))
+            GlobalExpr(name, '_cffi_var_%s' % name, CffiOp(op, type_index)))
 
     # ----------
     # emitting the opcodes for individual types
diff --git a/lib_pypy/cffi/setuptools_ext.py b/lib_pypy/cffi/setuptools_ext.py
--- a/lib_pypy/cffi/setuptools_ext.py
+++ b/lib_pypy/cffi/setuptools_ext.py
@@ -81,10 +81,16 @@
     allsources.extend(kwds.pop('sources', []))
     ext = Extension(name=module_name, sources=allsources, **kwds)
 
-    def make_mod(tmpdir):
+    def make_mod(tmpdir, pre_run=None):
         c_file = os.path.join(tmpdir, module_name + source_extension)
         log.info("generating cffi module %r" % c_file)
         mkpath(tmpdir)
+        # a setuptools-only, API-only hook: called with the "ext" and "ffi"
+        # arguments just before we turn the ffi into C code.  To use it,
+        # subclass the 'distutils.command.build_ext.build_ext' class and
+        # add a method 'def pre_run(self, ext, ffi)'.
+        if pre_run is not None:
+            pre_run(ext, ffi)
         updated = recompiler.make_c_source(ffi, module_name, source, c_file)
         if not updated:
             log.info("already up-to-date")
@@ -98,7 +104,8 @@
     class build_ext_make_mod(base_class):
         def run(self):
             if ext.sources[0] == '$PLACEHOLDER':
-                ext.sources[0] = make_mod(self.build_temp)
+                pre_run = getattr(self, 'pre_run', None)
+                ext.sources[0] = make_mod(self.build_temp, pre_run)
             base_class.run(self)
     dist.cmdclass['build_ext'] = build_ext_make_mod
     # NB. multiple runs here will create multiple 'build_ext_make_mod'
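
For reference, a sketch of how the ``pre_run`` hook described in the comment
above is meant to be used from a project's setup.py (project and file names
here are made up):

    # setup.py
    from setuptools import setup
    from distutils.command.build_ext import build_ext

    class build_ext_with_hook(build_ext):
        def pre_run(self, ext, ffi):
            # called with the Extension and the FFI objects just before
            # the C source is generated; tweak either of them here
            print("generating C code for %s" % ext.name)

    setup(
        name="example",
        setup_requires=["cffi>=1.2.0"],
        cffi_modules=["example_build.py:ffi"],
        install_requires=["cffi>=1.2.0"],
        cmdclass={'build_ext': build_ext_with_hook},
    )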
diff --git a/lib_pypy/ctypes_support.py b/lib_pypy/ctypes_support.py
--- a/lib_pypy/ctypes_support.py
+++ b/lib_pypy/ctypes_support.py
@@ -28,7 +28,7 @@
     def _where_is_errno():
         return standard_c_lib.__errno_location()
 
-elif sys.platform in ('darwin', 'freebsd7', 'freebsd8', 'freebsd9'):
+elif sys.platform == 'darwin' or sys.platform.startswith('freebsd'):
     standard_c_lib.__error.restype = ctypes.POINTER(ctypes.c_int)
     standard_c_lib.__error.argtypes = None
     def _where_is_errno():
diff --git a/pypy/config/pypyoption.py b/pypy/config/pypyoption.py
--- a/pypy/config/pypyoption.py
+++ b/pypy/config/pypyoption.py
@@ -23,14 +23,14 @@
 default_modules.update([
     "_codecs", "gc", "_weakref", "marshal", "errno", "imp", "math", "cmath",
     "_sre", "_pickle_support", "operator", "parser", "symbol", "token", "_ast",
-    "_io", "_random", "__pypy__", "_testing"
+    "_io", "_random", "__pypy__", "_testing", "time"
 ])
 
 
 # --allworkingmodules
 working_modules = default_modules.copy()
 working_modules.update([
-    "_socket", "unicodedata", "mmap", "fcntl", "_locale", "pwd", "time" ,
+    "_socket", "unicodedata", "mmap", "fcntl", "_locale", "pwd",
     "select", "zipimport", "_lsprof", "crypt", "signal", "_rawffi", "termios",
     "zlib", "bz2", "struct", "_hashlib", "_md5", "_sha", "_minimal_curses",
     "cStringIO", "thread", "itertools", "pyexpat", "_ssl", "cpyext", "array",
diff --git a/pypy/doc/cpython_differences.rst b/pypy/doc/cpython_differences.rst
--- a/pypy/doc/cpython_differences.rst
+++ b/pypy/doc/cpython_differences.rst
@@ -135,7 +135,7 @@
 Here are some more technical details.  This issue affects the precise
 time at which ``__del__`` methods are called, which
 is not reliable in PyPy (nor Jython nor IronPython).  It also means that
-weak references may stay alive for a bit longer than expected.  This
+**weak references** may stay alive for a bit longer than expected.  This
 makes "weak proxies" (as returned by ``weakref.proxy()``) somewhat less
 useful: they will appear to stay alive for a bit longer in PyPy, and
 suddenly they will really be dead, raising a ``ReferenceError`` on the
@@ -143,6 +143,24 @@
 ``ReferenceError`` at any place that uses them.  (Or, better yet, don't use
 ``weakref.proxy()`` at all; use ``weakref.ref()``.)
 
+Note a detail in the `documentation for weakref callbacks`__:
+
+    If callback is provided and not None, *and the returned weakref
+    object is still alive,* the callback will be called when the object
+    is about to be finalized.
+
+There are cases where, due to CPython's refcount semantics, a weakref
+dies immediately before or after the objects it points to (typically
+with some circular reference).  If it happens to die just after, then
+the callback will be invoked.  In a similar case in PyPy, both the
+object and the weakref will be considered as dead at the same time,
+and the callback will not be invoked.  (Issue `#2030`__)
+
+.. __: https://docs.python.org/2/library/weakref.html
+.. __: https://bitbucket.org/pypy/pypy/issue/2030/
+
+---------------------------------
+
 There are a few extra implications from the difference in the GC.  Most
 notably, if an object has a ``__del__``, the ``__del__`` is never called more
 than once in PyPy; but CPython will call the same ``__del__`` several times
@@ -321,9 +339,8 @@
 Miscellaneous
 -------------
 
-* Hash randomization (``-R``) is ignored in PyPy.  As documented in
-  http://bugs.python.org/issue14621, some of us believe it has no
-  purpose in CPython either.
+* Hash randomization (``-R``) `is ignored in PyPy`_.  In CPython
+  before 3.4 it has `little point`_.
 
 * You can't store non-string keys in type objects.  For example::
 
@@ -338,7 +355,8 @@
   for about 1400 calls.
 
 * since the implementation of dictionary is different, the exact number
-  which ``__hash__`` and ``__eq__`` are called is different. Since CPython
+  of times that ``__hash__`` and ``__eq__`` are called is different. 
+  Since CPython
   does not give any specific guarantees either, don't rely on it.
 
 * assignment to ``__class__`` is limited to the cases where it
@@ -395,3 +413,12 @@
   interactive mode. In a released version, this behaviour is suppressed, but
   setting the environment variable PYPY_IRC_TOPIC will bring it back. Note that
   downstream package providers have been known to totally disable this feature.
+
+* PyPy's readline module was rewritten from scratch: it is not GNU's
+  readline.  It should be mostly compatible, and it adds multiline
+  support (see ``multiline_input()``).  On the other hand,
+  ``parse_and_bind()`` calls are ignored (issue `#2072`_).
+
+.. _`is ignored in PyPy`: http://bugs.python.org/issue14621
+.. _`little point`: http://events.ccc.de/congress/2012/Fahrplan/events/5152.en.html
+.. _`#2072`: https://bitbucket.org/pypy/pypy/issue/2072/
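
To make the weakref-callback point above concrete, a small sketch (it follows
the documented rule quoted above; on PyPy the first callback only runs at the
next collection rather than immediately at the ``del``):

    import weakref

    class A(object):
        pass

    def cb(wr):
        print("referent finalized")

    a = A()
    wr = weakref.ref(a, cb)
    del a            # callback runs: the weakref 'wr' is still alive

    b = A()
    wr2 = weakref.ref(b, cb)
    del wr2
    del b            # no callback: the weakref was already dead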
diff --git a/pypy/doc/embedding.rst b/pypy/doc/embedding.rst
--- a/pypy/doc/embedding.rst
+++ b/pypy/doc/embedding.rst
@@ -6,15 +6,9 @@
 C. It was developed in collaboration with Roberto De Ioris from the `uwsgi`_
 project. The `PyPy uwsgi plugin`_ is a good example of using the embedding API.
 
-**NOTE**: As of 1st of December, PyPy comes with ``--shared`` by default
-on linux, linux64 and windows. We will make it the default on all platforms
-by the time of the next release.
-
-The first thing that you need is to compile PyPy yourself with the option
-``--shared``. We plan to make ``--shared`` the default in the future. Consult
-the `how to compile PyPy`_ doc for details. This will result in ``libpypy.so``
-or ``pypy.dll`` file or something similar, depending on your platform. Consult
-your platform specification for details.
+**NOTE**: You need a PyPy compiled with the option ``--shared``, i.e.
+with a ``libpypy-c.so`` or ``pypy-c.dll`` file.  This is the default in
+recent versions of PyPy.
 
 The resulting shared library exports very few functions, however they are
 enough to accomplish everything you need, provided you follow a few principles.
@@ -52,7 +46,11 @@
    source. It'll acquire the GIL.
 
    Note: this is meant to be called *only once* or a few times at most.  See
-   the `more complete example`_ below.
+   the `more complete example`_ below.  In PyPy <= 2.6.0, the globals
+   dictionary is *reused* across multiple calls, giving potentially
+   strange results (e.g. objects dying too early).  In PyPy >= 2.6.1,
+   you get a new globals dictionary for every call (but then, all globals
+   dictionaries are kept alive forever, in ``sys._pypy_execute_source``).
 
 .. function:: int pypy_execute_source_ptr(char* source, void* ptr);
 
@@ -75,10 +73,12 @@
 Note that this API is a lot more minimal than say CPython C API, so at first
 it's obvious to think that you can't do much. However, the trick is to do
 all the logic in Python and expose it via `cffi`_ callbacks. Let's assume
-we're on linux and pypy is installed in ``/opt/pypy`` with the
+we're on linux and pypy is installed in ``/opt/pypy`` (with
+subdirectories like ``lib-python`` and ``lib_pypy``), and with the
 library in ``/opt/pypy/bin/libpypy-c.so``.  (It doesn't need to be
-installed; you can also replace this path with your local checkout.)
-We write a little C program:
+installed; you can also replace these paths with a local extract of the
+installation tarballs, or with your local checkout of pypy.) We write a
+little C program:
 
 .. code-block:: c
 
@@ -92,7 +92,9 @@
         int res;
 
         rpython_startup_code();
-        res = pypy_setup_home("/opt/pypy/bin/libpypy-c.so", 1);
+        /* note: in the path /opt/pypy/x, the final x is ignored and
+           replaced with lib-python and lib_pypy. */
+        res = pypy_setup_home("/opt/pypy/x", 1);
         if (res) {
             printf("Error setting pypy home!\n");
             return 1;
@@ -179,7 +181,7 @@
         int res;
 
         rpython_startup_code();
-        res = pypy_setup_home("/opt/pypy/bin/libpypy-c.so", 1);
+        res = pypy_setup_home("/opt/pypy/x", 1);
         if (res) {
             fprintf(stderr, "Error setting pypy home!\n");
             return -1;
@@ -220,9 +222,15 @@
 Finding pypy_home
 -----------------
 
-Function pypy_setup_home takes one parameter - the path to libpypy. There's 
-currently no "clean" way (pkg-config comes to mind) how to find this path. You 
-can try the following (GNU-specific) hack (don't forget to link against *dl*):
+The function pypy_setup_home() takes as first parameter the path to a
+file from which it can deduce the location of the standard library.
+More precisely, it tries to remove final components until it finds
+``lib-python`` and ``lib_pypy``.  There is currently no "clean" way
+(pkg-config comes to mind) to find this path.  You can try the following
+(GNU-specific) hack (don't forget to link against *dl*), which assumes
+that the ``libpypy-c.so`` is inside the standard library directory.
+(This must more-or-less be the case anyway, otherwise the ``pypy``
+program itself would not run.)
 
 .. code-block:: c
 
@@ -236,7 +244,7 @@
 
     // caller should free returned pointer to avoid memleaks
     // returns NULL on error
-    char* guess_pypyhome() {
+    char* guess_pypyhome(void) {
         // glibc-only (dladdr is why we #define _GNU_SOURCE)
         Dl_info info;
         void *_rpython_startup_code = dlsym(0,"rpython_startup_code");
diff --git a/pypy/doc/faq.rst b/pypy/doc/faq.rst
--- a/pypy/doc/faq.rst
+++ b/pypy/doc/faq.rst
@@ -70,6 +70,20 @@
 .. _`use virtualenv (as documented here)`: getting-started.html#installing-using-virtualenv
 
 
+Module xyz does not work in the sandboxed PyPy?
+-----------------------------------------------
+
+You cannot import *any* extension module in a `sandboxed PyPy`_,
+sorry.  Even the built-in modules available are very limited.
+Sandboxing in PyPy is a good proof of concept, really safe IMHO, but
+it is only a proof of concept.  It seriously requires someone working
+on it.  Before this occurs, it can only be used for "pure Python"
+examples: programs that import mostly nothing (or only pure Python
+modules, recursively).
+
+.. _`sandboxed PyPy`: sandbox.html
+
+
 .. _`See below.`:
 
 Do CPython Extension modules work with PyPy?
diff --git a/pypy/doc/how-to-release.rst b/pypy/doc/how-to-release.rst
--- a/pypy/doc/how-to-release.rst
+++ b/pypy/doc/how-to-release.rst
@@ -31,15 +31,14 @@
   and add the new file to  pypy/doc/index-of-whatsnew.rst
 * go to pypy/tool/release and run
   ``force-builds.py <release branch>``
-  The following binaries should be built, however, we need more buildbots
- - JIT: windows, linux, os/x, armhf, armel
- - no JIT: windows, linux, os/x
- - sandbox: linux, os/x
+  The following JIT binaries should be built (however, we need more buildbots):
+  windows, linux-32, linux-64, osx64, armhf-raring, armhf-raspberrian, armel,
+  freebsd64
 
 * wait for builds to complete, make sure there are no failures
 * download the builds, repackage binaries. Tag the release version
   and download and repackage source from bitbucket. You may find it
-  convenient to use the ``repackage.sh`` script in pypy/tools to do this. 
+  convenient to use the ``repackage.sh`` script in pypy/tool/release to do this. 
 
   Otherwise repackage and upload source "-src.tar.bz2" to bitbucket
   and to cobra, as some packagers prefer a clearly labeled source package
diff --git a/pypy/doc/index-of-whatsnew.rst b/pypy/doc/index-of-whatsnew.rst
--- a/pypy/doc/index-of-whatsnew.rst
+++ b/pypy/doc/index-of-whatsnew.rst
@@ -7,6 +7,7 @@
 .. toctree::
 
    whatsnew-head.rst
+   whatsnew-2.6.1.rst
    whatsnew-2.6.0.rst
    whatsnew-2.5.1.rst
    whatsnew-2.5.0.rst
diff --git a/pypy/doc/sandbox.rst b/pypy/doc/sandbox.rst
--- a/pypy/doc/sandbox.rst
+++ b/pypy/doc/sandbox.rst
@@ -103,12 +103,15 @@
 Howto
 -----
 
-In pypy/goal::
+Grab a copy of the pypy repository_.  In the directory pypy/goal, run::
 
    ../../rpython/bin/rpython -O2 --sandbox targetpypystandalone.py
 
 If you don't have a regular PyPy installed, you should, because it's
-faster to translate, but you can also run ``python translate.py`` instead.
+faster to translate; but you can also run the same line with ``python``
+in front.
+
+.. _repository: https://bitbucket.org/pypy/pypy
 
 
 To run it, use the tools in the pypy/sandbox directory::
@@ -136,8 +139,6 @@
 Not all operations are supported; e.g. if you type os.readlink('...'),
 the controller crashes with an exception and the subprocess is killed.
 Other operations make the subprocess die directly with a "Fatal RPython
-error".  None of this is a security hole; it just means that if you try
-to run some random program, it risks getting killed depending on the
-Python built-in functions it tries to call.  This is a matter of the
-sandboxing layer being incomplete so far, but it should not really be
-a problem in practice.
+error".  None of this is a security hole.  More importantly, *most other
+built-in modules are not enabled.  Please read all the warnings in this
+page before complaining about this.  Contributions welcome.*
diff --git a/pypy/doc/whatsnew-2.6.1.rst b/pypy/doc/whatsnew-2.6.1.rst
new file mode 100644
--- /dev/null
+++ b/pypy/doc/whatsnew-2.6.1.rst
@@ -0,0 +1,76 @@
+========================
+What's new in PyPy 2.6.1
+========================
+
+.. this is a revision shortly after release-2.6.0
+.. startrev: 91904d5c5188
+
+.. branch: use_min_scalar
+Correctly resolve the output dtype of ufunc(array, scalar) calls.
+
+.. branch: stdlib-2.7.10
+
+Update stdlib to version 2.7.10
+
+.. branch: issue2062
+
+.. branch: disable-unroll-for-short-loops
+The JIT no longer performs loop unrolling if the loop compiles to too much code.
+
+.. branch: run-create_cffi_imports
+
+Build cffi import libraries as part of translation by monkey-patching an 
+additional task into translation
+
+.. branch: int-float-list-strategy
+
+Use a compact strategy for Python lists that mix integers and floats,
+at least if the integers fit inside 32 bits.  These lists are now
+stored as an array of floats, like lists that contain only floats; the
+difference is that integers are stored as tagged NaNs.  (This should
+have no visible effect!  After ``lst = [42, 42.5]``, the value of
+``lst[0]`` is still *not* the float ``42.0`` but the integer ``42``.)
+
+.. branch: cffi-callback-onerror
+Part of cffi 1.2.
+
+.. branch: cffi-new-allocator
+Part of cffi 1.2.
+
+.. branch: unicode-dtype
+
+Partial implementation of unicode dtype and unicode scalars.
+
+.. branch: dtypes-compatability
+
+Improve compatibility with numpy dtypes; handle offsets to create unions,
+fix str() and repr(), allow specifying itemsize, metadata and titles, add flags,
+allow subclassing dtype
+
+.. branch: indexing
+
+Refactor array indexing to support ellipses.
+
+.. branch: numpy-docstrings
+
+Allow the docstrings of built-in numpy objects to be set at run-time.
+
+.. branch: nditer-revisited
+
+Implement nditer 'buffered' flag and fix some edge cases
+
+.. branch: ufunc-reduce
+
+Allow multiple axes in ufunc.reduce()
+
+.. branch: fix-tinylang-goals
+
+Update tinylang goals to match current rpython
+
+.. branch: vmprof-review
+
+Clean up of vmprof, notably to handle correctly multiple threads
+
+.. branch: no_boehm_dl
+
+Remove extra link library from Boehm GC
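
The ``int-float-list-strategy`` entry above changes only the internal storage;
observable behaviour stays exactly as the entry describes:

    lst = [42, 42.5]     # stored internally as doubles, the int as a tagged NaN
    assert type(lst[0]) is int and lst[0] == 42       # still the integer 42
    assert type(lst[1]) is float and lst[1] == 42.5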
diff --git a/pypy/doc/whatsnew-head.rst b/pypy/doc/whatsnew-head.rst
--- a/pypy/doc/whatsnew-head.rst
+++ b/pypy/doc/whatsnew-head.rst
@@ -2,12 +2,6 @@
 What's new in PyPy 2.6+
 =======================
 
-.. this is a revision shortly after release-2.6.0
-.. startrev: 91904d5c5188
+.. this is a revision shortly after release-2.6.1
+.. startrev: 07769be4057b
 
-.. branch: use_min_scalar
-Correctly resolve the output dtype of ufunc(array, scalar) calls.
-
-.. branch: stdlib-2.7.10
-
-Update stdlib to version 2.7.10
diff --git a/pypy/goal/targetpypystandalone.py b/pypy/goal/targetpypystandalone.py
--- a/pypy/goal/targetpypystandalone.py
+++ b/pypy/goal/targetpypystandalone.py
@@ -1,6 +1,6 @@
 import py
 
-import os, sys
+import os, sys, subprocess
 
 import pypy
 from pypy.interpreter import gateway
@@ -97,13 +97,16 @@
         from pypy.module.sys.initpath import pypy_find_stdlib
         verbose = rffi.cast(lltype.Signed, verbose)
         if ll_home:
-            home = rffi.charp2str(ll_home)
+            home1 = rffi.charp2str(ll_home)
+            home = os.path.join(home1, 'x') # <- so that 'll_home' can be
+                                            # directly the root directory
         else:
-            home = pypydir
+            home = home1 = pypydir
         w_path = pypy_find_stdlib(space, home)
         if space.is_none(w_path):
             if verbose:
-                debug("Failed to find library based on pypy_find_stdlib")
+                debug("pypy_setup_home: directories 'lib-python' and 'lib_pypy'"
+                      " not found in '%s' or in any parent directory" % home1)
             return rffi.cast(rffi.INT, 1)
         space.startup()
         space.call_function(w_pathsetter, w_path)
@@ -125,13 +128,7 @@
 
     @entrypoint('main', [rffi.CCHARP], c_name='pypy_execute_source')
     def pypy_execute_source(ll_source):
-        after = rffi.aroundstate.after
-        if after: after()
-        source = rffi.charp2str(ll_source)
-        res = _pypy_execute_source(source)
-        before = rffi.aroundstate.before
-        if before: before()
-        return rffi.cast(rffi.INT, res)
+        return pypy_execute_source_ptr(ll_source, 0)
 
     @entrypoint('main', [rffi.CCHARP, lltype.Signed],
                 c_name='pypy_execute_source_ptr')
@@ -139,9 +136,7 @@
         after = rffi.aroundstate.after
         if after: after()
         source = rffi.charp2str(ll_source)
-        space.setitem(w_globals, space.wrap('c_argument'),
-                      space.wrap(ll_ptr))
-        res = _pypy_execute_source(source)
+        res = _pypy_execute_source(source, ll_ptr)
         before = rffi.aroundstate.before
         if before: before()
         return rffi.cast(rffi.INT, res)
@@ -166,15 +161,21 @@
         before = rffi.aroundstate.before
         if before: before()
 
-    w_globals = space.newdict()
-    space.setitem(w_globals, space.wrap('__builtins__'),
-                  space.builtin_modules['__builtin__'])
-
-    def _pypy_execute_source(source):
+    def _pypy_execute_source(source, c_argument):
         try:
-            compiler = space.createcompiler()
-            stmt = compiler.compile(source, 'c callback', 'exec', 0)
-            stmt.exec_code(space, w_globals, w_globals)
+            w_globals = space.newdict(module=True)
+            space.setitem(w_globals, space.wrap('__builtins__'),
+                          space.builtin_modules['__builtin__'])
+            space.setitem(w_globals, space.wrap('c_argument'),
+                          space.wrap(c_argument))
+            space.appexec([space.wrap(source), w_globals], """(src, glob):
+                import sys
+                stmt = compile(src, 'c callback', 'exec')
+                if not hasattr(sys, '_pypy_execute_source'):
+                    sys._pypy_execute_source = []
+                sys._pypy_execute_source.append(glob)
+                exec stmt in glob
+            """)
         except OperationError, e:
             debug("OperationError:")
             debug(" operror-type: " + e.w_type.getname(space))
@@ -294,8 +295,49 @@
         options = make_dict(config)
         wrapstr = 'space.wrap(%r)' % (options)
         pypy.module.sys.Module.interpleveldefs['pypy_translation_info'] = wrapstr
+        if config.objspace.usemodules._cffi_backend:
+            self.hack_for_cffi_modules(driver)
 
         return self.get_entry_point(config)
+    
+    def hack_for_cffi_modules(self, driver):
+        # HACKHACKHACK
+        # ugly hack to modify target goal from compile_c to build_cffi_imports
+        # this should probably get cleaned up and merged with driver.create_exe
+        from rpython.translator.driver import taskdef
+        import types
+
+        class Options(object):
+            pass
+
+
+        def mkexename(name):
+            if sys.platform == 'win32':
+                name = name.new(ext='exe')
+            return name
+
+        @taskdef(['compile_c'], "Create cffi bindings for modules")
+        def task_build_cffi_imports(self):
+            from pypy.tool.build_cffi_imports import create_cffi_import_libraries
+            ''' Use cffi to compile cffi interfaces to modules'''
+            exename = mkexename(driver.compute_exe_name())
+            basedir = exename
+            while not basedir.join('include').exists():
+                _basedir = basedir.dirpath()
+                if _basedir == basedir:
+                    raise ValueError('interpreter %s not inside pypy repo' %
+                                     str(exename))
+                basedir = _basedir
+            modules = self.config.objspace.usemodules.getpaths()
+            options = Options()
+            # XXX possibly adapt options using modules
+            failures = create_cffi_import_libraries(exename, options, basedir)
+            # if failures, they were already printed
+            print >> sys.stderr, str(exename), 'successfully built, but errors while building the above modules will be ignored'
+        driver.task_build_cffi_imports = types.MethodType(task_build_cffi_imports, driver)
+        driver.tasks['build_cffi_imports'] = driver.task_build_cffi_imports, ['compile_c']
+        driver.default_goal = 'build_cffi_imports'
+        # HACKHACKHACK end
 
     def jitpolicy(self, driver):
         from pypy.module.pypyjit.policy import PyPyJitPolicy
diff --git a/pypy/interpreter/app_main.py b/pypy/interpreter/app_main.py
--- a/pypy/interpreter/app_main.py
+++ b/pypy/interpreter/app_main.py
@@ -40,6 +40,11 @@
 PYPYLOG: If set to a non-empty value, enable logging.
 """
 
+try:
+    from __pypy__ import get_hidden_tb, hidden_applevel
+except ImportError:
+    get_hidden_tb = lambda: sys.exc_info()[2]
+    hidden_applevel = lambda f: f
 import sys
 
 DEBUG = False       # dump exceptions before calling the except hook
@@ -63,6 +68,7 @@
             exitcode = 1
     raise SystemExit(exitcode)
 
+ at hidden_applevel
 def run_toplevel(f, *fargs, **fkwds):
     """Calls f() and handles all OperationErrors.
     Intended use is to run the main program or one interactive statement.
@@ -87,13 +93,13 @@
 
     except SystemExit as e:
         handle_sys_exit(e)
-    except:
-        display_exception()
+    except BaseException as e:
+        display_exception(e)
         return False
     return True   # success
 
-def display_exception():
-    etype, evalue, etraceback = sys.exc_info()
+def display_exception(e):
+    etype, evalue, etraceback = type(e), e, get_hidden_tb()
     try:
         # extra debugging info in case the code below goes very wrong
         if DEBUG and hasattr(sys, 'stderr'):
@@ -119,11 +125,11 @@
         hook(etype, evalue, etraceback)
         return # done
 
-    except:
+    except BaseException as e:
         try:
             stderr = sys.stderr
             print >> stderr, 'Error calling sys.excepthook:'
-            originalexcepthook(*sys.exc_info())
+            originalexcepthook(type(e), e, e.__traceback__)
             print >> stderr
             print >> stderr, 'Original exception was:'
         except:
@@ -509,6 +515,7 @@
 
     return options
 
+ at hidden_applevel
 def run_command_line(interactive,
                      inspect,
                      run_command,
@@ -597,6 +604,7 @@
             # Put '' on sys.path
             sys.path.insert(0, '')
 
+            @hidden_applevel
             def run_it():
                 exec run_command in mainmodule.__dict__
             success = run_toplevel(run_it)
@@ -634,6 +642,7 @@
                         print >> sys.stderr, "Could not open PYTHONSTARTUP"
                         print >> sys.stderr, "IOError:", e
                     else:
+                        @hidden_applevel
                         def run_it():
                             co_python_startup = compile(startup,
                                                         python_startup,
@@ -650,6 +659,7 @@
                 inspect = True
             else:
                 # If not interactive, just read and execute stdin normally.
+                @hidden_applevel
                 def run_it():
                     co_stdin = compile(sys.stdin.read(), '<stdin>', 'exec',
                                        PyCF_ACCEPT_NULL_BYTES)
@@ -689,7 +699,7 @@
     except SystemExit as e:
         status = e.code
         if inspect_requested():
-            display_exception()
+            display_exception(e)
     else:
         status = not success
 
@@ -743,6 +753,7 @@
     # This is important for py3k
     sys.executable = executable
 
+ at hidden_applevel
 def entry_point(executable, argv):
     # note that before calling setup_bootstrap_path, we are limited because we
     # cannot import stdlib modules. In particular, we cannot use unicode
diff --git a/pypy/interpreter/baseobjspace.py b/pypy/interpreter/baseobjspace.py
--- a/pypy/interpreter/baseobjspace.py
+++ b/pypy/interpreter/baseobjspace.py
@@ -11,7 +11,7 @@
     INT_MIN, INT_MAX, UINT_MAX, USHRT_MAX
 
 from pypy.interpreter.executioncontext import (ExecutionContext, ActionFlag,
-    UserDelAction, CodeUniqueIds)
+    UserDelAction)
 from pypy.interpreter.error import OperationError, new_exception_class, oefmt
 from pypy.interpreter.argument import Arguments
 from pypy.interpreter.miscutils import ThreadLocals, make_weak_value_dictionary
@@ -200,7 +200,7 @@
             w_result = space.get_and_call_function(w_impl, self)
             if space.isinstance_w(w_result, space.w_buffer):
                 return w_result.buffer_w(space, flags)
-        raise TypeError
+        raise BufferInterfaceNotFound
 
     def readbuf_w(self, space):
         w_impl = space.lookup(self, '__buffer__')
@@ -208,7 +208,7 @@
             w_result = space.get_and_call_function(w_impl, self)
             if space.isinstance_w(w_result, space.w_buffer):
                 return w_result.readbuf_w(space)
-        raise TypeError
+        raise BufferInterfaceNotFound
 
     def writebuf_w(self, space):
         w_impl = space.lookup(self, '__buffer__')
@@ -216,7 +216,7 @@
             w_result = space.get_and_call_function(w_impl, self)
             if space.isinstance_w(w_result, space.w_buffer):
                 return w_result.writebuf_w(space)
-        raise TypeError
+        raise BufferInterfaceNotFound
 
     def charbuf_w(self, space):
         w_impl = space.lookup(self, '__buffer__')
@@ -224,7 +224,7 @@
             w_result = space.get_and_call_function(w_impl, self)
             if space.isinstance_w(w_result, space.w_buffer):
                 return w_result.charbuf_w(space)
-        raise TypeError
+        raise BufferInterfaceNotFound
 
     def str_w(self, space):
         self._typed_unwrap_error(space, "string")
@@ -354,6 +354,9 @@
 class DescrMismatch(Exception):
     pass
 
+class BufferInterfaceNotFound(Exception):
+    pass
+
 def wrappable_class_name(Class):
     try:
         return Class.typedef.name
@@ -388,7 +391,6 @@
         self.actionflag = ActionFlag()    # changed by the signal module
         self.check_signal_action = None   # changed by the signal module
         self.user_del_action = UserDelAction(self)
-        self.code_unique_ids = CodeUniqueIds()
         self._code_of_sys_exc_info = None
 
         # can be overridden to a subclass
@@ -667,16 +669,6 @@
             assert ec is not None
             return ec
 
-    def register_code_callback(self, callback):
-        cui = self.code_unique_ids
-        cui.code_callback = callback
-
-    def register_code_object(self, pycode):
-        cui = self.code_unique_ids
-        if cui.code_callback is None:
-            return
-        cui.code_callback(self, pycode)
-
     def _freeze_(self):
         return True
 
@@ -1403,7 +1395,7 @@
         # New buffer interface, returns a buffer based on flags (PyObject_GetBuffer)
         try:
             return w_obj.buffer_w(self, flags)
-        except TypeError:
+        except BufferInterfaceNotFound:
             raise oefmt(self.w_TypeError,
                         "'%T' does not have the buffer interface", w_obj)
 
@@ -1411,7 +1403,7 @@
         # Old buffer interface, returns a readonly buffer (PyObject_AsReadBuffer)
         try:
             return w_obj.readbuf_w(self)
-        except TypeError:
+        except BufferInterfaceNotFound:
             raise oefmt(self.w_TypeError,
                         "expected a readable buffer object")
 
@@ -1419,7 +1411,7 @@
         # Old buffer interface, returns a writeable buffer (PyObject_AsWriteBuffer)
         try:
             return w_obj.writebuf_w(self)
-        except TypeError:
+        except BufferInterfaceNotFound:
             raise oefmt(self.w_TypeError,
                         "expected a writeable buffer object")
 
@@ -1427,7 +1419,7 @@
         # Old buffer interface, returns a character buffer (PyObject_AsCharBuffer)
         try:
             return w_obj.charbuf_w(self)
-        except TypeError:
+        except BufferInterfaceNotFound:
             raise oefmt(self.w_TypeError,
                         "expected a character buffer object")
 
@@ -1451,11 +1443,11 @@
                 return self.str(w_obj).readbuf_w(self)
             try:
                 return w_obj.buffer_w(self, 0)
-            except TypeError:
+            except BufferInterfaceNotFound:
                 pass
             try:
                 return w_obj.readbuf_w(self)
-            except TypeError:
+            except BufferInterfaceNotFound:
                 self._getarg_error("string or buffer", w_obj)
         elif code == 's#':
             if self.isinstance_w(w_obj, self.w_str):
@@ -1464,24 +1456,23 @@
                 return self.str(w_obj).str_w(self)
             try:
                 return w_obj.readbuf_w(self).as_str()
-            except TypeError:
+            except BufferInterfaceNotFound:
                 self._getarg_error("string or read-only buffer", w_obj)
         elif code == 'w*':
             try:
-                try:
-                    return w_obj.buffer_w(self, self.BUF_WRITABLE)
-                except OperationError:
-                    self._getarg_error("read-write buffer", w_obj)
-            except TypeError:
+                return w_obj.buffer_w(self, self.BUF_WRITABLE)
+            except OperationError:
+                self._getarg_error("read-write buffer", w_obj)
+            except BufferInterfaceNotFound:
                 pass
             try:
                 return w_obj.writebuf_w(self)
-            except TypeError:
+            except BufferInterfaceNotFound:
                 self._getarg_error("read-write buffer", w_obj)
         elif code == 't#':
             try:
                 return w_obj.charbuf_w(self)
-            except TypeError:
+            except BufferInterfaceNotFound:
                 self._getarg_error("string or read-only character buffer", w_obj)
         else:
             assert False
@@ -1503,13 +1494,13 @@
                 raise
         try:
             buf = w_obj.buffer_w(self, 0)
-        except TypeError:
+        except BufferInterfaceNotFound:
             pass
         else:
             return buf.as_str()
         try:
             buf = w_obj.readbuf_w(self)
-        except TypeError:
+        except BufferInterfaceNotFound:
             self._getarg_error("string or buffer", w_obj)
         else:
             return buf.as_str()
diff --git a/pypy/interpreter/error.py b/pypy/interpreter/error.py
--- a/pypy/interpreter/error.py
+++ b/pypy/interpreter/error.py
@@ -252,7 +252,8 @@
                                w_t, w_v, w_tb],
                 """(where, objrepr, extra_line, t, v, tb):
                     import sys, traceback
-                    sys.stderr.write('From %s%s:\\n' % (where, objrepr))
+                    if where or objrepr:
+                        sys.stderr.write('From %s%s:\\n' % (where, objrepr))
                     if extra_line:
                         sys.stderr.write(extra_line)
                     traceback.print_exception(t, v, tb)
diff --git a/pypy/interpreter/executioncontext.py b/pypy/interpreter/executioncontext.py
--- a/pypy/interpreter/executioncontext.py
+++ b/pypy/interpreter/executioncontext.py
@@ -1,6 +1,7 @@
 import sys
 from pypy.interpreter.error import OperationError, get_cleared_operation_error
 from rpython.rlib.unroll import unrolling_iterable
+from rpython.rlib.objectmodel import specialize
 from rpython.rlib import jit
 
 TICK_COUNTER_STEP = 100
@@ -214,13 +215,21 @@
             self._trace(frame, 'exception', None, operationerr)
         #operationerr.print_detailed_traceback(self.space)
 
-    def sys_exc_info(self): # attn: the result is not the wrapped sys.exc_info() !!!
+    @specialize.arg(1)
+    def sys_exc_info(self, for_hidden=False):
         """Implements sys.exc_info().
-        Return an OperationError instance or None."""
+        Return an OperationError instance or None.
+
+        Ignores exceptions within hidden frames unless for_hidden=True
+        is specified.
+
+        # NOTE: the result is not the wrapped sys.exc_info() !!!
+
+        """
         frame = self.gettopframe()
         while frame:
             if frame.last_exception is not None:
-                if (not frame.hide() or
+                if ((for_hidden or not frame.hide()) or
                         frame.last_exception is
                             get_cleared_operation_error(self.space)):
                     return frame.last_exception
@@ -581,11 +590,3 @@
         # there is no list of length n: if n is large, then the GC
         # will run several times while walking the list, but it will
         # see lower and lower memory usage, with no lower bound of n.
-
-class CodeUniqueIds(object):
-    def __init__(self):
-        if sys.maxint == 2147483647:
-            self.code_unique_id = 0 # XXX this is wrong, it won't work on 32bit
-        else:
-            self.code_unique_id = 0x7000000000000000
-        self.code_callback = None
diff --git a/pypy/interpreter/generator.py b/pypy/interpreter/generator.py
--- a/pypy/interpreter/generator.py
+++ b/pypy/interpreter/generator.py
@@ -15,7 +15,10 @@
         self.running = False
 
     def descr__repr__(self, space):
-        code_name = self.pycode.co_name
+        if self.pycode is None:
+            code_name = '<finished>'
+        else:
+            code_name = self.pycode.co_name
         addrstring = self.getaddrstring(space)
         return space.wrap("<generator object %s at 0x%s>" %
                           (code_name, addrstring))
@@ -45,6 +48,8 @@
         w_framestate, w_running = args_w
         if space.is_w(w_framestate, space.w_None):
             self.frame = None
+            self.space = space
+            self.pycode = None
         else:
             frame = instantiate(space.FrameClass)   # XXX fish
             frame.descr__setstate__(space, w_framestate)
@@ -62,9 +67,10 @@
 
     def send_ex(self, w_arg, operr=None):
         pycode = self.pycode
-        if jit.we_are_jitted() and should_not_inline(pycode):
-            generatorentry_driver.jit_merge_point(gen=self, w_arg=w_arg,
-                                                  operr=operr, pycode=pycode)
+        if pycode is not None:
+            if jit.we_are_jitted() and should_not_inline(pycode):
+                generatorentry_driver.jit_merge_point(gen=self, w_arg=w_arg,
+                                                    operr=operr, pycode=pycode)
         return self._send_ex(w_arg, operr)
 
     def _send_ex(self, w_arg, operr):
@@ -158,7 +164,10 @@
         return self.pycode
 
     def descr__name__(self, space):
-        code_name = self.pycode.co_name
+        if self.pycode is None:
+            code_name = '<finished>'
+        else:
+            code_name = self.pycode.co_name
         return space.wrap(code_name)
 
     # Results can be either an RPython list of W_Root, or it can be an
diff --git a/pypy/interpreter/pycode.py b/pypy/interpreter/pycode.py
--- a/pypy/interpreter/pycode.py
+++ b/pypy/interpreter/pycode.py
@@ -85,7 +85,7 @@
         self.magic = magic
         self._signature = cpython_code_signature(self)
         self._initialize()
-        space.register_code_object(self)
+        self._init_ready()
 
     def _initialize(self):
         if self.co_cellvars:
@@ -127,14 +127,8 @@
             from pypy.objspace.std.mapdict import init_mapdict_cache
             init_mapdict_cache(self)
 
-        cui = self.space.code_unique_ids
-        self._unique_id = cui.code_unique_id
-        cui.code_unique_id += 4  # so we have two bits that we can mark stuff
-        # with
-
-    def _get_full_name(self):
-        return "py:%s:%d:%s" % (self.co_name, self.co_firstlineno,
-                                self.co_filename)
+    def _init_ready(self):
+        "This is a hook for the vmprof module, which overrides this method."
 
     def _cleanup_(self):
         if (self.magic == cpython_magic and
diff --git a/pypy/interpreter/pyopcode.py b/pypy/interpreter/pyopcode.py
--- a/pypy/interpreter/pyopcode.py
+++ b/pypy/interpreter/pyopcode.py
@@ -711,11 +711,17 @@
         w_item = self.popvalue()
         if self.space.is_w(w_stream, self.space.w_None):
             w_stream = sys_stdout(self.space)   # grumble grumble special cases
-        print_item_to(self.space, w_item, w_stream)
+        print_item_to(self.space, self._printable_object(w_item), w_stream)
 
     def PRINT_ITEM(self, oparg, next_instr):
         w_item = self.popvalue()
-        print_item(self.space, w_item)
+        print_item(self.space, self._printable_object(w_item))
+
+    def _printable_object(self, w_obj):
+        space = self.space
+        if not space.isinstance_w(w_obj, space.w_unicode):
+            w_obj = space.str(w_obj)
+        return w_obj
 
     def PRINT_NEWLINE_TO(self, oparg, next_instr):
         w_stream = self.popvalue()
@@ -1535,9 +1541,9 @@
            stream.write(" ")
 
         # give to write() an argument which is either a string or a unicode
-        # (and let it deals itself with unicode handling)
-        if not isinstance(x, unicode):
-            x = str(x)
+        # (and let it deal with unicode handling itself).  The check "is
+        # unicode" should not use isinstance() at app-level, because that
+        # could be fooled by strange objects, so it is done at interp-level.
         stream.write(x)
 
         # add a softspace unless we just printed a string which ends in a '\t'
diff --git a/pypy/interpreter/pytraceback.py b/pypy/interpreter/pytraceback.py
--- a/pypy/interpreter/pytraceback.py
+++ b/pypy/interpreter/pytraceback.py
@@ -60,7 +60,6 @@
 
 
 def check_traceback(space, w_tb, msg):
-    from pypy.interpreter.typedef import PyTraceback
     if w_tb is None or not space.isinstance_w(w_tb, space.gettypeobject(PyTraceback.typedef)):
         raise OperationError(space.w_TypeError, space.wrap(msg))
     return w_tb
diff --git a/pypy/interpreter/test/test_interpreter.py b/pypy/interpreter/test/test_interpreter.py
--- a/pypy/interpreter/test/test_interpreter.py
+++ b/pypy/interpreter/test/test_interpreter.py
@@ -299,6 +299,30 @@
         finally:
             sys.stdout = save
 
+    def test_print_strange_object(self):
+        import sys
+
+        class A(object):
+            def __getattribute__(self, name):
+                print "seeing", name
+            def __str__(self):
+                return 'A!!'
+        save = sys.stdout
+        class Out(object):
+            def __init__(self):
+                self.data = []
+            def write(self, x):
+                self.data.append((type(x), x))
+        sys.stdout = out = Out()
+        try:
+            a = A()
+            assert out.data == []
+            print a
+            assert out.data == [(str, 'A!!'),
+                                (str, '\n')]
+        finally:
+            sys.stdout = save
+
     def test_identity(self):
         def f(x): return x
         assert f(666) == 666
diff --git a/pypy/interpreter/test/test_zzpickle_and_slow.py b/pypy/interpreter/test/test_zzpickle_and_slow.py
--- a/pypy/interpreter/test/test_zzpickle_and_slow.py
+++ b/pypy/interpreter/test/test_zzpickle_and_slow.py
@@ -491,6 +491,22 @@
         assert pack.mod is result
 
 
+    def test_pickle_generator_crash(self):
+        import pickle
+
+        def f():
+            yield 0
+
+        x = f()
+        x.next()
+        try:
+            x.next()
+        except StopIteration:
+            y = pickle.loads(pickle.dumps(x))
+        assert 'finished' in y.__name__
+        assert 'finished' in repr(y)
+        assert y.gi_code is None
+
 class AppTestGeneratorCloning:
 
     def setup_class(cls):
diff --git a/pypy/module/__builtin__/app_functional.py b/pypy/module/__builtin__/app_functional.py
--- a/pypy/module/__builtin__/app_functional.py
+++ b/pypy/module/__builtin__/app_functional.py
@@ -53,6 +53,33 @@
         last = last + x
     return last
 
+
+class _Cons(object):
+    def __init__(self, prev, iter):
+        self.prev = prev
+        self.iter = iter
+
+    def fetch(self):
+        # recursive, loop-less version of the algorithm: works best for a
+        # fixed number of "collections" in the call to map(func, *collections)
+        prev = self.prev
+        if prev is None:
+            args1 = ()
+            stop = True
+        else:
+            args1, stop = prev.fetch()
+        iter = self.iter
+        if iter is None:
+            val = None
+        else:
+            try:
+                val = next(iter)
+                stop = False
+            except StopIteration:
+                self.iter = None
+                val = None
+        return args1 + (val,), stop
+
 def map(func, *collections):
     """map(function, sequence[, sequence, ...]) -> list
 
@@ -69,45 +96,30 @@
     if num_collections == 1:
         if none_func:
             return list(collections[0])
-        # Special case for the really common case of a single collection,
-        # this can be eliminated if we could unroll that loop that creates
-        # `args` based on whether or not len(collections) was constant
+        # Special case for the really common case of a single collection
         seq = collections[0]
         with _ManagedNewlistHint(operator._length_hint(seq, 0)) as result:
             for item in seq:
                 result.append(func(item))
             return result
 
-    # Gather the iterators (pair of (iter, has_finished)) and guess the
+    # Gather the iterators into _Cons objects and guess the
     # result length (the max of the input lengths)
-    iterators = []
+    c = None
     max_hint = 0
     for seq in collections:
-        iterators.append((iter(seq), False))
+        c = _Cons(c, iter(seq))
         max_hint = max(max_hint, operator._length_hint(seq, 0))
 
     with _ManagedNewlistHint(max_hint) as result:
         while True:
-            cont = False
-            args = []
-            for idx, (iterator, has_finished) in enumerate(iterators):
-                val = None
-                if not has_finished:
-                    try:
-                        val = next(iterator)
-                    except StopIteration:
-                        iterators[idx] = (None, True)
-                    else:
-                        cont = True
-                args.append(val)
-            args = tuple(args)
-            if cont:
-                if none_func:
-                    result.append(args)
-                else:
-                    result.append(func(*args))
+            args, stop = c.fetch()
+            if stop:
+                return result
+            if none_func:
+                result.append(args)
             else:
-                return result
+                result.append(func(*args))
 
 class _ManagedNewlistHint(object):
     """ Context manager returning a newlist_hint upon entry.
diff --git a/pypy/module/__builtin__/test/test_abstractinst.py b/pypy/module/__builtin__/test/test_abstractinst.py
--- a/pypy/module/__builtin__/test/test_abstractinst.py
+++ b/pypy/module/__builtin__/test/test_abstractinst.py
@@ -202,3 +202,17 @@
             __subclass__ = set([int])
         assert issubclass(int, Integer)
         assert issubclass(int, (Integer,))
+
+    def test_dont_call_instancecheck_fast_path(self):
+        called = []
+        
+        class M(type):
+            def __instancecheck__(self, obj):
+                called.append("called")
+
+        class C:
+            __metaclass__ = M
+
+        c = C()
+        assert isinstance(c, C)
+        assert not called
diff --git a/pypy/module/__builtin__/test/test_functional.py b/pypy/module/__builtin__/test/test_functional.py
--- a/pypy/module/__builtin__/test/test_functional.py
+++ b/pypy/module/__builtin__/test/test_functional.py
@@ -57,6 +57,11 @@
         b = []
         assert map(lambda x, y: x, a, b) == a
 
+    def test_map_second_item(self):
+        a = []
+        b = [1, 2, 3, 4, 5]
+        assert map(lambda x, y: y, a, b) == b
+
     def test_map_iterables(self):
         class A(object):
             def __init__(self, n):
diff --git a/pypy/module/__pypy__/__init__.py b/pypy/module/__pypy__/__init__.py
--- a/pypy/module/__pypy__/__init__.py
+++ b/pypy/module/__pypy__/__init__.py
@@ -62,6 +62,7 @@
     }
 
     interpleveldefs = {
+        'attach_gdb'                : 'interp_magic.attach_gdb',
         'internal_repr'             : 'interp_magic.internal_repr',
         'bytebuffer'                : 'bytebuffer.bytebuffer',
         'identity_dict'             : 'interp_identitydict.W_IdentityDict',
@@ -71,6 +72,8 @@
         'debug_print_once'          : 'interp_debug.debug_print_once',
         'debug_flush'               : 'interp_debug.debug_flush',
         'builtinify'                : 'interp_magic.builtinify',
+        'hidden_applevel'           : 'interp_magic.hidden_applevel',
+        'get_hidden_tb'             : 'interp_magic.get_hidden_tb',
         'lookup_special'            : 'interp_magic.lookup_special',
         'do_what_I_mean'            : 'interp_magic.do_what_I_mean',
         'validate_fd'               : 'interp_magic.validate_fd',
@@ -98,8 +101,6 @@
 
     def setup_after_space_initialization(self):
         """NOT_RPYTHON"""
-        if not self.space.config.translating:
-            self.extra_interpdef('interp_pdb', 'interp_magic.interp_pdb')
         if self.space.config.objspace.std.withmethodcachecounter:
             self.extra_interpdef('method_cache_counter',
                                  'interp_magic.method_cache_counter')
diff --git a/pypy/module/__pypy__/interp_magic.py b/pypy/module/__pypy__/interp_magic.py
--- a/pypy/module/__pypy__/interp_magic.py
+++ b/pypy/module/__pypy__/interp_magic.py
@@ -15,12 +15,10 @@
     return space.wrap('%r' % (w_object,))
 
 
-def interp_pdb(space):
-    """Run an interp-level pdb.
-    This is not available in translated versions of PyPy."""
-    assert not we_are_translated()
-    import pdb
-    pdb.set_trace()
+def attach_gdb(space):
+    """Run an interp-level gdb (or pdb when untranslated)"""
+    from rpython.rlib.debug import attach_gdb
+    attach_gdb()
 
 
 @unwrap_spec(name=str)
@@ -59,6 +57,20 @@
     bltn = BuiltinFunction(func)
     return space.wrap(bltn)
 
+def hidden_applevel(space, w_func):
+    """Decorator that hides a function's frame from app-level"""
+    from pypy.interpreter.function import Function
+    func = space.interp_w(Function, w_func)
+    func.getcode().hidden_applevel = True
+    return w_func
+
+def get_hidden_tb(space):
+    """Return the traceback of the current exception being handled by a
+    frame hidden from applevel.
+    """
+    operr = space.getexecutioncontext().sys_exc_info(for_hidden=True)
+    return space.w_None if operr is None else space.wrap(operr.get_traceback())
+
 @unwrap_spec(meth=str)
 def lookup_special(space, w_obj, meth):
     """Lookup up a special method on an object."""
diff --git a/pypy/module/__pypy__/test/test_special.py b/pypy/module/__pypy__/test/test_special.py
--- a/pypy/module/__pypy__/test/test_special.py
+++ b/pypy/module/__pypy__/test/test_special.py
@@ -27,6 +27,52 @@
         assert A.a is not A.__dict__['a']
         assert A.b is A.__dict__['b']
 
+    def test_hidden_applevel(self):
+        import __pypy__
+        import sys
+
+        @__pypy__.hidden_applevel
+        def sneak(): (lambda: 1/0)()
+        try:
+            sneak()
+        except ZeroDivisionError as e:
+            tb = sys.exc_info()[2]
+            assert tb.tb_frame == sys._getframe()
+            assert tb.tb_next.tb_frame.f_code.co_name == '<lambda>'
+        else:
+            assert False, 'Expected ZeroDivisionError'
+
+    def test_hidden_applevel_frames(self):
+        import __pypy__
+        import sys
+
+        @__pypy__.hidden_applevel
+        def test_hidden():
+            assert sys._getframe().f_code.co_name != 'test_hidden'
+            def e(): 1/0
+            try: e()
+            except ZeroDivisionError as e:
+                assert sys.exc_info() == (None, None, None)
+            else: assert False
+            return 2
+        assert test_hidden() == 2
+
+    def test_get_hidden_tb(self):
+        import __pypy__
+        import sys
+
+        @__pypy__.hidden_applevel
+        def test_hidden_with_tb():
+            def not_hidden(): 1/0
+            try: not_hidden()
+            except ZeroDivisionError as e:
+                assert sys.exc_info() == (None, None, None)
+                tb = __pypy__.get_hidden_tb()
+                assert tb.tb_frame.f_code.co_name == 'not_hidden'
+                return True
+            else: return False
+        assert test_hidden_with_tb()
+
     def test_lookup_special(self):
         from __pypy__ import lookup_special
         class X(object):
diff --git a/pypy/module/_cffi_backend/__init__.py b/pypy/module/_cffi_backend/__init__.py
--- a/pypy/module/_cffi_backend/__init__.py
+++ b/pypy/module/_cffi_backend/__init__.py
@@ -2,7 +2,7 @@
 from pypy.interpreter.mixedmodule import MixedModule
 from rpython.rlib import rdynload
 
-VERSION = "1.1.2"
+VERSION = "1.2.1"
 
 
 class Module(MixedModule):
diff --git a/pypy/module/_cffi_backend/allocator.py b/pypy/module/_cffi_backend/allocator.py
new file mode 100644
--- /dev/null
+++ b/pypy/module/_cffi_backend/allocator.py
@@ -0,0 +1,86 @@
+from pypy.interpreter.error import oefmt
+from pypy.interpreter.baseobjspace import W_Root
+from pypy.interpreter.typedef import TypeDef
+from pypy.interpreter.gateway import interp2app, unwrap_spec, WrappedDefault
+
+from rpython.rtyper.lltypesystem import lltype, rffi
+
+
+class W_Allocator(W_Root):
+    _immutable_ = True
+
+    def __init__(self, ffi, w_alloc, w_free, should_clear_after_alloc):
+        self.ffi = ffi    # may be None
+        self.w_alloc = w_alloc
+        self.w_free = w_free
+        self.should_clear_after_alloc = should_clear_after_alloc
+
+    def allocate(self, space, datasize, ctype, length=-1):
+        from pypy.module._cffi_backend import cdataobj, ctypeptr
+        if self.w_alloc is None:
+            if self.should_clear_after_alloc:
+                ptr = lltype.malloc(rffi.CCHARP.TO, datasize,
+                                    flavor='raw', zero=True)
+            else:
+                ptr = lltype.malloc(rffi.CCHARP.TO, datasize,
+                                    flavor='raw', zero=False)
+            return cdataobj.W_CDataNewStd(space, ptr, ctype, length)
+        else:
+            w_raw_cdata = space.call_function(self.w_alloc,
+                                              space.wrap(datasize))
+            if not isinstance(w_raw_cdata, cdataobj.W_CData):
+                raise oefmt(space.w_TypeError,
+                            "alloc() must return a cdata object (got %T)",
+                            w_raw_cdata)
+            if not isinstance(w_raw_cdata.ctype, ctypeptr.W_CTypePtrOrArray):
+                raise oefmt(space.w_TypeError,
+                            "alloc() must return a cdata pointer, not '%s'",
+                            w_raw_cdata.ctype.name)
+            #
+            ptr = w_raw_cdata.unsafe_escaping_ptr()
+            if not ptr:
+                raise oefmt(space.w_MemoryError, "alloc() returned NULL")
+            #
+            if self.should_clear_after_alloc:
+                rffi.c_memset(rffi.cast(rffi.VOIDP, ptr), 0,
+                              rffi.cast(rffi.SIZE_T, datasize))
+            #
+            if self.w_free is None:
+                # use this class which does not have a __del__, but still
+                # keeps alive w_raw_cdata
+                res = cdataobj.W_CDataNewNonStdNoFree(space, ptr, ctype, length)
+            else:
+                res = cdataobj.W_CDataNewNonStdFree(space, ptr, ctype, length)
+                res.w_free = self.w_free
+            res.w_raw_cdata = w_raw_cdata
+            return res
+
+    @unwrap_spec(w_init=WrappedDefault(None))
+    def descr_call(self, space, w_arg, w_init):
+        ffi = self.ffi
+        assert ffi is not None
+        w_ctype = ffi.ffi_type(w_arg, ffi.ACCEPT_STRING | ffi.ACCEPT_CTYPE)
+        return w_ctype.newp(w_init, self)
+
+
+W_Allocator.typedef = TypeDef(
+        'FFIAllocator',
+        __call__ = interp2app(W_Allocator.descr_call),
+        )
+W_Allocator.typedef.acceptable_as_base_class = False
+
+
+def new_allocator(ffi, w_alloc, w_free, should_clear_after_alloc):
+    space = ffi.space
+    if space.is_none(w_alloc):
+        w_alloc = None
+    if space.is_none(w_free):
+        w_free = None
+    if w_alloc is None and w_free is not None:
+        raise oefmt(space.w_TypeError, "cannot pass 'free' without 'alloc'")
+    alloc = W_Allocator(ffi, w_alloc, w_free, bool(should_clear_after_alloc))
+    return space.wrap(alloc)
+
+
+default_allocator = W_Allocator(None, None, None, should_clear_after_alloc=True)
+nonzero_allocator = W_Allocator(None, None, None, should_clear_after_alloc=False)
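
(At the application level, this allocator machinery is what backs cffi's
ffi.new_allocator().  A hedged usage sketch, assuming cffi >= 1.2 is installed;
the callback names below are illustrative only.)

    import cffi

    ffi = cffi.FFI()

    def my_alloc(size):
        # illustrative: hand out a garbage-collected char buffer of 'size' bytes
        return ffi.new("char[]", size)

    def my_free(ptr):
        # illustrative: nothing to release, the buffer above is owned by cffi
        pass

    alloc = ffi.new_allocator(alloc=my_alloc, free=my_free,
                              should_clear_after_alloc=False)
    p = alloc("int[10]")      # used exactly like ffi.new("int[10]")
    p[0] = 42
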
diff --git a/pypy/module/_cffi_backend/ccallback.py b/pypy/module/_cffi_backend/ccallback.py
--- a/pypy/module/_cffi_backend/ccallback.py
+++ b/pypy/module/_cffi_backend/ccallback.py
@@ -22,8 +22,9 @@
 class W_CDataCallback(W_CData):
     #_immutable_fields_ = ...
     ll_error = lltype.nullptr(rffi.CCHARP.TO)
+    w_onerror = None
 
-    def __init__(self, space, ctype, w_callable, w_error):
+    def __init__(self, space, ctype, w_callable, w_error, w_onerror):
         raw_closure = rffi.cast(rffi.CCHARP, clibffi.closureHeap.alloc())
         W_CData.__init__(self, space, raw_closure, ctype)
         #
@@ -31,6 +32,12 @@
             raise oefmt(space.w_TypeError,
                         "expected a callable object, not %T", w_callable)
         self.w_callable = w_callable
+        if not space.is_none(w_onerror):
+            if not space.is_true(space.callable(w_onerror)):
+                raise oefmt(space.w_TypeError,
+                            "expected a callable object for 'onerror', not %T",
+                            w_onerror)
+            self.w_onerror = w_onerror
         #
         fresult = self.getfunctype().ctitem
         size = fresult.size
@@ -161,6 +168,29 @@
 STDERR = 2
 
 
+@jit.dont_look_inside
+def _handle_applevel_exception(space, callback, e, ll_res, extra_line):
+    callback.write_error_return_value(ll_res)
+    if callback.w_onerror is None:
+        callback.print_error(e, extra_line)
+    else:
+        try:
+            e.normalize_exception(space)
+            w_t = e.w_type
+            w_v = e.get_w_value(space)
+            w_tb = space.wrap(e.get_traceback())
+            w_res = space.call_function(callback.w_onerror,
+                                        w_t, w_v, w_tb)
+            if not space.is_none(w_res):
+                callback.convert_result(ll_res, w_res)
+        except OperationError, e2:
+            # double exception! print a double-traceback...
+            callback.print_error(e, extra_line)    # original traceback
+            e2.write_unraisable(space, '', with_traceback=True,
+                                extra_line="\nDuring the call to 'onerror', "
+                                           "another exception occurred:\n\n")
+
+
 @jit.jit_callback("CFFI")
 def _invoke_callback(ffi_cif, ll_res, ll_args, ll_userdata):
     """ Callback specification.
@@ -178,7 +208,7 @@
         try:
             os.write(STDERR, "SystemError: invoking a callback "
                              "that was already freed\n")
-        except OSError:
+        except:
             pass
         # In this case, we don't even know how big ll_res is.  Let's assume
         # it is just a 'ffi_arg', and store 0 there.
@@ -195,9 +225,7 @@
             extra_line = "Trying to convert the result back to C:\n"
             callback.convert_result(ll_res, w_res)
         except OperationError, e:
-            # got an app-level exception
-            callback.print_error(e, extra_line)
-            callback.write_error_return_value(ll_res)
+            _handle_applevel_exception(space, callback, e, ll_res, extra_line)
         #
     except Exception, e:
         # oups! last-level attempt to recover.
@@ -205,7 +233,7 @@
             os.write(STDERR, "SystemError: callback raised ")
             os.write(STDERR, str(e))
             os.write(STDERR, "\n")
-        except OSError:
+        except:
             pass
         callback.write_error_return_value(ll_res)
     if must_leave:
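
(The new w_onerror attribute corresponds to the onerror= argument that cffi 1.2
adds to ffi.callback().  A hedged sketch of the application-level usage,
assuming cffi >= 1.2; the function names are illustrative only.)

    import cffi

    ffi = cffi.FFI()

    def log_callback_failure(exc_type, exc_value, tb):
        # illustrative: called instead of printing the traceback to stderr;
        # returning None keeps the 'error' return value written below
        print "callback raised:", exc_type.__name__, exc_value

    @ffi.callback("int(int)", error=-1, onerror=log_callback_failure)
    def cb(n):
        if n < 0:
            raise ValueError("negative input")
        return n * 2
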
diff --git a/pypy/module/_cffi_backend/cdataobj.py b/pypy/module/_cffi_backend/cdataobj.py
--- a/pypy/module/_cffi_backend/cdataobj.py
+++ b/pypy/module/_cffi_backend/cdataobj.py
@@ -363,16 +363,19 @@
     def _sizeof(self):
         return self.ctype.size
 
+    def with_gc(self, w_destructor):
+        with self as ptr:
+            return W_CDataGCP(self.space, ptr, self.ctype, self, w_destructor)
+
 
 class W_CDataMem(W_CData):
-    """This is the base class used for cdata objects that own and free
-    their memory.  Used directly by the results of cffi.cast('int', x)
-    or other primitive explicitly-casted types.  It is further subclassed
-    by W_CDataNewOwning."""
+    """This is used only by the results of cffi.cast('int', x)
+    or other primitive explicitly-casted types."""
     _attrs_ = []
 
-    def __init__(self, space, size, ctype):
-        cdata = lltype.malloc(rffi.CCHARP.TO, size, flavor='raw', zero=True)

