[pypy-commit] pypy object-dtype2: merge default into branch

mattip noreply at buildbot.pypy.org
Wed Apr 22 11:22:37 CEST 2015


Author: mattip <matti.picus at gmail.com>
Branch: object-dtype2
Changeset: r76866:751d64b6e679
Date: 2015-04-22 00:09 +0300
http://bitbucket.org/pypy/pypy/changeset/751d64b6e679/

Log:	merge default into branch

diff too long, truncating to 2000 out of 3488 lines

diff --git a/.tddium.requirements.txt b/.tddium.requirements.txt
deleted file mode 100644
--- a/.tddium.requirements.txt
+++ /dev/null
@@ -1,1 +0,0 @@
-pytest
diff --git a/lib-python/2.7/test/test_urllib2net.py b/lib-python/2.7/test/test_urllib2net.py
--- a/lib-python/2.7/test/test_urllib2net.py
+++ b/lib-python/2.7/test/test_urllib2net.py
@@ -102,11 +102,8 @@
 
     def test_ftp(self):
         urls = [
-            'ftp://ftp.kernel.org/pub/linux/kernel/README',
-            'ftp://ftp.kernel.org/pub/linux/kernel/non-existent-file',
-            #'ftp://ftp.kernel.org/pub/leenox/kernel/test',
-            'ftp://gatekeeper.research.compaq.com/pub/DEC/SRC'
-                '/research-reports/00README-Legal-Rules-Regs',
+            'ftp://ftp.debian.org/debian/README',
+            'ftp://ftp.debian.org/debian/non-existent-file',
             ]
         self._test_urls(urls, self._extra_handlers())
 
@@ -255,6 +252,7 @@
         with test_support.transient_internet(url, timeout=None):
             u = _urlopen_with_retry(url)
             self.assertIsNone(u.fp._sock.fp._sock.gettimeout())
+            u.close()
 
     def test_http_default_timeout(self):
         self.assertIsNone(socket.getdefaulttimeout())
@@ -266,6 +264,7 @@
             finally:
                 socket.setdefaulttimeout(None)
             self.assertEqual(u.fp._sock.fp._sock.gettimeout(), 60)
+            u.close()
 
     def test_http_no_timeout(self):
         self.assertIsNone(socket.getdefaulttimeout())
@@ -277,20 +276,23 @@
             finally:
                 socket.setdefaulttimeout(None)
             self.assertIsNone(u.fp._sock.fp._sock.gettimeout())
+            u.close()
 
     def test_http_timeout(self):
         url = "http://www.example.com"
         with test_support.transient_internet(url):
             u = _urlopen_with_retry(url, timeout=120)
             self.assertEqual(u.fp._sock.fp._sock.gettimeout(), 120)
+            u.close()
 
-    FTP_HOST = "ftp://ftp.mirror.nl/pub/gnu/"
+    FTP_HOST = 'ftp://ftp.debian.org/debian/'
 
     def test_ftp_basic(self):
         self.assertIsNone(socket.getdefaulttimeout())
         with test_support.transient_internet(self.FTP_HOST, timeout=None):
             u = _urlopen_with_retry(self.FTP_HOST)
             self.assertIsNone(u.fp.fp._sock.gettimeout())
+            u.close()
 
     def test_ftp_default_timeout(self):
         self.assertIsNone(socket.getdefaulttimeout())
@@ -301,6 +303,7 @@
             finally:
                 socket.setdefaulttimeout(None)
             self.assertEqual(u.fp.fp._sock.gettimeout(), 60)
+            u.close()
 
     def test_ftp_no_timeout(self):
         self.assertIsNone(socket.getdefaulttimeout(),)
@@ -311,11 +314,16 @@
             finally:
                 socket.setdefaulttimeout(None)
             self.assertIsNone(u.fp.fp._sock.gettimeout())
+            u.close()
 
     def test_ftp_timeout(self):
         with test_support.transient_internet(self.FTP_HOST):
-            u = _urlopen_with_retry(self.FTP_HOST, timeout=60)
+            try:
+                u = _urlopen_with_retry(self.FTP_HOST, timeout=60)
+            except:
+                raise
             self.assertEqual(u.fp.fp._sock.gettimeout(), 60)
+            u.close()
 
 
 def test_main():
diff --git a/lib_pypy/_sqlite3.py b/lib_pypy/_sqlite3.py
--- a/lib_pypy/_sqlite3.py
+++ b/lib_pypy/_sqlite3.py
@@ -454,6 +454,7 @@
         self.__cursors_counter = 0
         self.__statements = []
         self.__statements_counter = 0
+        self.__rawstatements = set()
         self._statement_cache = _StatementCache(self, cached_statements)
 
         self.__func_cache = {}
@@ -483,6 +484,14 @@
 
         self.__do_all_statements(Statement._finalize, True)
 
+        # depending on when this close() is called, the statements' weakrefs
+        # may be already dead, even though Statement.__del__() was not called
+        # yet.  In this case, self.__rawstatements is not empty.
+        if self.__rawstatements is not None:
+            for stmt in list(self.__rawstatements):
+                self._finalize_raw_statement(stmt)
+            self.__rawstatements = None
+
         if self._db:
             ret = _lib.sqlite3_close(self._db)
             if ret != _lib.SQLITE_OK:
@@ -562,6 +571,7 @@
         self.__cursors = [r for r in self.__cursors if r() is not None]
 
     def _remember_statement(self, statement):
+        self.__rawstatements.add(statement._statement)
         self.__statements.append(weakref.ref(statement))
         self.__statements_counter += 1
         if self.__statements_counter < 200:
@@ -569,6 +579,11 @@
         self.__statements_counter = 0
         self.__statements = [r for r in self.__statements if r() is not None]
 
+    def _finalize_raw_statement(self, _statement):
+        if self.__rawstatements is not None:
+            self.__rawstatements.remove(_statement)
+            _lib.sqlite3_finalize(_statement)
+
     def __do_all_statements(self, action, reset_cursors):
         for weakref in self.__statements:
             statement = weakref()
@@ -1199,7 +1214,6 @@
 
     def __init__(self, connection, sql):
         self.__con = connection
-        self.__con._remember_statement(self)
 
         self._in_use = False
 
@@ -1244,17 +1258,19 @@
         if ret != _lib.SQLITE_OK:
             raise self.__con._get_exception(ret)
 
+        self.__con._remember_statement(self)
+
         tail = _ffi.string(next_char[0]).decode('utf-8')
         if _check_remaining_sql(tail):
             raise Warning("You can only execute one statement at a time.")
 
     def __del__(self):
         if self._statement:
-            _lib.sqlite3_finalize(self._statement)
+            self.__con._finalize_raw_statement(self._statement)
 
     def _finalize(self):
         if self._statement:
-            _lib.sqlite3_finalize(self._statement)
+            self.__con._finalize_raw_statement(self._statement)
             self._statement = None
         self._in_use = False
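
A hedged, pure-Python sketch of the bookkeeping introduced above (class and
attribute names are illustrative, not the real _sqlite3 API): the connection
tracks raw statement handles in a set, separately from the weakrefs to the
Statement wrappers, so that close() can still finalize handles whose wrappers
were already collected but whose __del__ has not run yet::

    import weakref

    class Connection(object):
        def __init__(self):
            self._statements = []        # weakrefs to Statement wrappers
            self._rawstatements = set()  # raw handles not yet finalized

        def _remember_statement(self, stmt):
            self._rawstatements.add(stmt.handle)
            self._statements.append(weakref.ref(stmt))

        def _finalize_raw_statement(self, handle):
            if self._rawstatements is not None:
                self._rawstatements.discard(handle)
                # the real code calls sqlite3_finalize(handle) here

        def close(self):
            if self._rawstatements is not None:
                for handle in list(self._rawstatements):
                    self._finalize_raw_statement(handle)
                self._rawstatements = None

    class Statement(object):
        def __init__(self, con, handle):
            self.handle = handle
            self._con = con
            con._remember_statement(self)

        def __del__(self):
            self._con._finalize_raw_statement(self.handle)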
 
diff --git a/lib_pypy/_tkinter/tklib.py b/lib_pypy/_tkinter/tklib.py
--- a/lib_pypy/_tkinter/tklib.py
+++ b/lib_pypy/_tkinter/tklib.py
@@ -1,7 +1,7 @@
 # C bindings with libtcl and libtk.
 
 from cffi import FFI
-import sys
+import sys, os
 
 tkffi = FFI()
 
@@ -135,9 +135,12 @@
     linklibs = ['tcl', 'tk']
     libdirs = []
 else:
-    incdirs=['/usr/include/tcl']
-    linklibs=['tcl', 'tk']
-    libdirs = []
+    for _ver in ['', '8.6', '8.5', '']:
+        incdirs = ['/usr/include/tcl' + _ver]
+        linklibs = ['tcl' + _ver, 'tk' + _ver]
+        libdirs = []
+        if os.path.isdir(incdirs[0]):
+            break
 
 tklib = tkffi.verify("""
 #include <tcl.h>
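
For reference, the probe added above boils down to this standalone sketch
(assuming the Debian/Ubuntu layout ``/usr/include/tclX.Y``; the empty string
is tried first so an unversioned installation wins, and it is also the final
fallback when no candidate directory exists)::

    import os

    def probe_tcl_tk():
        for ver in ['', '8.6', '8.5', '']:
            incdirs = ['/usr/include/tcl' + ver]
            linklibs = ['tcl' + ver, 'tk' + ver]
            if os.path.isdir(incdirs[0]):
                break
        return incdirs, linklibs

    print(probe_tcl_tk())
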
diff --git a/lib_pypy/pyrepl/simple_interact.py b/lib_pypy/pyrepl/simple_interact.py
--- a/lib_pypy/pyrepl/simple_interact.py
+++ b/lib_pypy/pyrepl/simple_interact.py
@@ -33,6 +33,16 @@
         return False
     return True
 
+def _strip_final_indent(text):
+    # kill spaces and tabs at the end, but only if they follow '\n'.
+    # meant to remove the auto-indentation only (although it would of
+    # course also remove explicitly-added indentation).
+    short = text.rstrip(' \t')
+    n = len(short)
+    if n > 0 and text[n-1] == '\n':
+        return short
+    return text
+
 def run_multiline_interactive_console(mainmodule=None):
     import code
     if mainmodule is None:
@@ -41,7 +51,7 @@
 
     def more_lines(unicodetext):
         # ooh, look at the hack:
-        src = "#coding:utf-8\n"+unicodetext.encode('utf-8')
+        src = "#coding:utf-8\n"+_strip_final_indent(unicodetext).encode('utf-8')
         try:
             code = console.compile(src, '<stdin>', 'single')
         except (OverflowError, SyntaxError, ValueError):
@@ -58,7 +68,7 @@
                                             returns_unicode=True)
             except EOFError:
                 break
-            more = console.push(statement)
+            more = console.push(_strip_final_indent(statement))
             assert not more
         except KeyboardInterrupt:
             console.write("\nKeyboardInterrupt\n")
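
A standalone check of the new helper (the function body is copied from the
hunk above): trailing spaces and tabs are removed only when they follow a
newline, i.e. only the auto-indentation of an otherwise empty continuation
line is dropped::

    def _strip_final_indent(text):
        short = text.rstrip(' \t')
        n = len(short)
        if n > 0 and text[n-1] == '\n':
            return short
        return text

    assert _strip_final_indent("if x:\n    pass\n    ") == "if x:\n    pass\n"
    assert _strip_final_indent("x = 1   ") == "x = 1   "  # no newline before the spaces
    assert _strip_final_indent("") == ""
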
diff --git a/pypy/doc/build.rst b/pypy/doc/build.rst
--- a/pypy/doc/build.rst
+++ b/pypy/doc/build.rst
@@ -146,6 +146,26 @@
 :doc:`objspace proxies <objspace-proxies>` document.
 
 
+Packaging (preparing for installation)
+--------------------------------------
+
+Packaging is required if you want to install PyPy system-wide, even on
+the same machine.  The reason is that packaging prepares a number of
+extra features that cannot be generated lazily by a root-installed
+PyPy, because normal users don't have write access.  This mostly
+concerns libraries that would normally be compiled if and when they
+are first imported.
+
+::
+    
+    cd pypy/tool/release
+    ./package.py pypy-VER-PLATFORM
+
+This creates a clean and prepared hierarchy, as well as a ``.tar.bz2``
+with the same content; both are found by default in
+``/tmp/usession-YOURNAME/build/``.  You can then either move the file
+hierarchy or unpack the ``.tar.bz2`` at the correct place.
+
 
 Installation
 ------------
diff --git a/pypy/doc/getting-started-dev.rst b/pypy/doc/getting-started-dev.rst
--- a/pypy/doc/getting-started-dev.rst
+++ b/pypy/doc/getting-started-dev.rst
@@ -207,12 +207,17 @@
 large amount of options that can be used to customize pyinteractive.py).
 As an example of using PyPy from the command line, you could type::
 
-    python pyinteractive.py -c "from test import pystone; pystone.main(10)"
+    python pyinteractive.py --withmod-time -c "from test import pystone; pystone.main(10)"
 
 Alternatively, as with regular Python, you can simply give a
 script name on the command line::
 
-    python pyinteractive.py ../../lib-python/2.7/test/pystone.py 10
+    python pyinteractive.py --withmod-time ../../lib-python/2.7/test/pystone.py 10
+
+The ``--withmod-xxx`` option enables the built-in module ``xxx``.  By
+default almost none of them are enabled, because initializing them
+takes time.  If you want to enable all built-in modules anyway, you
+can use ``--allworkingmodules``.
 
 See our :doc:`configuration sections <config/index>` for details about what all the commandline
 options do.
diff --git a/pypy/doc/stm.rst b/pypy/doc/stm.rst
--- a/pypy/doc/stm.rst
+++ b/pypy/doc/stm.rst
@@ -564,6 +564,15 @@
 Miscellaneous functions
 -----------------------
 
+* First, note that the ``transaction`` module is found in the file
+  ``lib_pypy/transaction.py``.  This file can be copied around to
+  execute the same programs on CPython or on non-STM PyPy, with
+  fall-back behavior.  (One case where the behavior differs is
+  ``atomic``, which is in this fall-back case just a regular lock; so
+  ``with atomic`` only prevents other threads from entering other
+  ``with atomic`` sections, but won't prevent other threads from
+  running non-atomic code.)
+
 * ``transaction.getsegmentlimit()``: return the number of "segments" in
   this pypy-stm.  This is the limit above which more threads will not be
   able to execute on more cores.  (Right now it is limited to 4 due to
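
The fall-back behaviour mentioned in the first bullet can be pictured with a
small sketch (this is not the real lib_pypy/transaction.py, only an assumed
illustration): outside of pypy-stm, ``atomic`` degenerates to a single
re-entrant lock, so ``with atomic`` blocks exclude each other but do not stop
other threads from running ordinary, non-atomic code::

    import threading

    atomic = threading.RLock()   # stand-in for the fall-back ``atomic``

    def critical_update(shared, key, value):
        with atomic:             # excludes only other ``with atomic`` blocks
            shared[key] = value
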
diff --git a/pypy/doc/whatsnew-head.rst b/pypy/doc/whatsnew-head.rst
--- a/pypy/doc/whatsnew-head.rst
+++ b/pypy/doc/whatsnew-head.rst
@@ -3,7 +3,12 @@
 =======================
 
 .. this is a revision shortly after release-2.5.1
-.. startrev: 397b96217b85
+.. startrev: cb01edcb59414d9d93056e54ed060673d24e67c1
+
+Issue #2017: on non-Linux-x86 platforms, reduced the memory impact of
+creating a lot of greenlets/tasklets.  Particularly useful on Win32 and
+on ARM, where you used to get a MemoryError after only 2500-5000
+greenlets (the 32-bit address space is exhausted).
 
 .. branch: gc-incminimark-pinning-improve
 Object Pinning is now used in `bz2` and `rzlib` (therefore also affects
@@ -11,3 +16,10 @@
 (incminimark) it no longer needs to create a non-moving copy of it. This saves
 one `malloc` and copying the data.  Additionally a new GC environment variable
 is introduced (`PYPY_GC_MAX_PINNED`) primarily for debugging purposes.
+
+.. branch: refactor-pycall
+Make `*`-unpacking in RPython function calls completely equivalent to passing
+the tuple's elements as arguments. In other words, `f(*(a, b))` now behaves 
+exactly like `f(a, b)`.
+
+.. branch: issue2018
diff --git a/pypy/interpreter/astcompiler/assemble.py b/pypy/interpreter/astcompiler/assemble.py
--- a/pypy/interpreter/astcompiler/assemble.py
+++ b/pypy/interpreter/astcompiler/assemble.py
@@ -1,5 +1,6 @@
 """Python control flow graph generation and bytecode assembly."""
 
+import os
 from rpython.rlib import rfloat
 from rpython.rlib.objectmodel import we_are_translated
 
@@ -9,6 +10,10 @@
 from pypy.tool import stdlib_opcode as ops
 
 
+class StackDepthComputationError(Exception):
+    pass
+
+
 class Instruction(object):
     """Represents a single opcode."""
 
@@ -55,11 +60,13 @@
     reaches the end of the block, it continues to next_block.
     """
 
+    marked = False
+    have_return = False
+    auto_inserted_return = False
+
     def __init__(self):
         self.instructions = []
         self.next_block = None
-        self.marked = False
-        self.have_return = False
 
     def _post_order_see(self, stack, nextblock):
         if nextblock.marked == 0:
@@ -384,7 +391,11 @@
         # look into a block when all the previous blocks have been done.
         self._max_depth = 0
         for block in blocks:
-            self._do_stack_depth_walk(block)
+            depth = self._do_stack_depth_walk(block)
+            if block.auto_inserted_return and depth != 0:
+                os.write(2, "StackDepthComputationError in %s at %s:%s\n" % (
+                    self.compile_info.filename, self.name, self.first_lineno))
+                raise StackDepthComputationError   # fatal error
         return self._max_depth
 
     def _next_stack_depth_walk(self, nextblock, depth):
@@ -393,20 +404,21 @@
 
     def _do_stack_depth_walk(self, block):
         depth = block.initial_depth
-        done = False
         for instr in block.instructions:
             depth += _opcode_stack_effect(instr.opcode, instr.arg)
             if depth >= self._max_depth:
                 self._max_depth = depth
+            jump_op = instr.opcode
             if instr.has_jump:
                 target_depth = depth
-                jump_op = instr.opcode
                 if jump_op == ops.FOR_ITER:
                     target_depth -= 2
                 elif (jump_op == ops.SETUP_FINALLY or
                       jump_op == ops.SETUP_EXCEPT or
                       jump_op == ops.SETUP_WITH):
-                    target_depth += 3
+                    if jump_op == ops.SETUP_WITH:
+                        target_depth -= 1     # ignore the w_result just pushed
+                    target_depth += 3         # add [exc_type, exc, unroller]
                     if target_depth > self._max_depth:
                         self._max_depth = target_depth
                 elif (jump_op == ops.JUMP_IF_TRUE_OR_POP or
@@ -415,10 +427,14 @@
                 self._next_stack_depth_walk(instr.jump[0], target_depth)
                 if jump_op == ops.JUMP_ABSOLUTE or jump_op == ops.JUMP_FORWARD:
                     # Nothing more can occur.
-                    done = True
                     break
-        if block.next_block and not done:
-            self._next_stack_depth_walk(block.next_block, depth)
+            elif jump_op == ops.RETURN_VALUE or jump_op == ops.RAISE_VARARGS:
+                # Nothing more can occur.
+                break
+        else:
+            if block.next_block:
+                self._next_stack_depth_walk(block.next_block, depth)
+        return depth
 
     def _build_lnotab(self, blocks):
         """Build the line number table for tracebacks and tracing."""
@@ -471,6 +487,7 @@
             if self.add_none_to_final_return:
                 self.load_const(self.space.w_None)
             self.emit_op(ops.RETURN_VALUE)
+            self.current_block.auto_inserted_return = True
         # Set the first lineno if it is not already explicitly set.
         if self.first_lineno == -1:
             if self.first_block.instructions:
@@ -563,10 +580,10 @@
     ops.INPLACE_OR: -1,
     ops.INPLACE_XOR: -1,
 
-    ops.SLICE+0: 1,
-    ops.SLICE+1: 0,
-    ops.SLICE+2: 0,
-    ops.SLICE+3: -1,
+    ops.SLICE+0: 0,
+    ops.SLICE+1: -1,
+    ops.SLICE+2: -1,
+    ops.SLICE+3: -2,
     ops.STORE_SLICE+0: -2,
     ops.STORE_SLICE+1: -3,
     ops.STORE_SLICE+2: -3,
@@ -576,7 +593,7 @@
     ops.DELETE_SLICE+2: -2,
     ops.DELETE_SLICE+3: -3,
 
-    ops.STORE_SUBSCR: -2,
+    ops.STORE_SUBSCR: -3,
     ops.DELETE_SUBSCR: -2,
 
     ops.GET_ITER: 0,
@@ -593,7 +610,9 @@
 
     ops.WITH_CLEANUP: -1,
     ops.POP_BLOCK: 0,
-    ops.END_FINALLY: -1,
+    ops.END_FINALLY: -3,     # assume always 3: we pretend that SETUP_FINALLY
+                             # pushes 3.  In truth, it would only push 1 and
+                             # the corresponding END_FINALLY only pops 1.
     ops.SETUP_WITH: 1,
     ops.SETUP_FINALLY: 0,
     ops.SETUP_EXCEPT: 0,
@@ -604,7 +623,6 @@
     ops.YIELD_VALUE: 0,
     ops.BUILD_CLASS: -2,
     ops.BUILD_MAP: 1,
-    ops.BUILD_SET: 1,
     ops.COMPARE_OP: -1,
 
     ops.LOOKUP_METHOD: 1,
@@ -659,6 +677,9 @@
 def _compute_BUILD_LIST(arg):
     return 1 - arg
 
+def _compute_BUILD_SET(arg):
+    return 1 - arg
+
 def _compute_MAKE_CLOSURE(arg):
     return -arg - 1
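
To make the corrected stack effects concrete, here is a hedged sketch of the
per-block walk that _do_stack_depth_walk() performs (real blocks also follow
jump targets; this only shows the core loop and the invariant checked above,
namely that a block ending in the auto-inserted RETURN_VALUE must end at
depth 0)::

    def walk_block(instructions, initial_depth=0):
        # instructions: list of (opcode_name, stack_effect) pairs
        depth = initial_depth
        max_depth = initial_depth
        for name, effect in instructions:
            depth += effect
            max_depth = max(max_depth, depth)
            if name in ('RETURN_VALUE', 'RAISE_VARARGS',
                        'JUMP_ABSOLUTE', 'JUMP_FORWARD'):
                break              # nothing more can occur in this block
        return depth, max_depth

    # the body of "return a + b": two loads, one add, one return
    block = [('LOAD_FAST', 1), ('LOAD_FAST', 1),
             ('BINARY_ADD', -1), ('RETURN_VALUE', -1)]
    assert walk_block(block) == (0, 2)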
 
diff --git a/pypy/interpreter/astcompiler/test/test_compiler.py b/pypy/interpreter/astcompiler/test/test_compiler.py
--- a/pypy/interpreter/astcompiler/test/test_compiler.py
+++ b/pypy/interpreter/astcompiler/test/test_compiler.py
@@ -772,6 +772,60 @@
         code = compile_with_astcompiler(source, 'exec', self.space)
         assert code.co_stacksize == 2
 
+    def test_stackeffect_bug3(self):
+        source = """if 1:
+        try: pass
+        finally: pass
+        try: pass
+        finally: pass
+        try: pass
+        finally: pass
+        try: pass
+        finally: pass
+        try: pass
+        finally: pass
+        try: pass
+        finally: pass
+        """
+        code = compile_with_astcompiler(source, 'exec', self.space)
+        assert code.co_stacksize == 3
+
+    def test_stackeffect_bug4(self):
+        source = """if 1:
+        with a: pass
+        with a: pass
+        with a: pass
+        with a: pass
+        with a: pass
+        with a: pass
+        """
+        code = compile_with_astcompiler(source, 'exec', self.space)
+        assert code.co_stacksize == 4
+
+    def test_stackeffect_bug5(self):
+        source = """if 1:
+        a[:]; a[:]; a[:]; a[:]; a[:]; a[:]
+        a[1:]; a[1:]; a[1:]; a[1:]; a[1:]; a[1:]
+        a[:2]; a[:2]; a[:2]; a[:2]; a[:2]; a[:2]
+        a[1:2]; a[1:2]; a[1:2]; a[1:2]; a[1:2]; a[1:2]
+        """
+        code = compile_with_astcompiler(source, 'exec', self.space)
+        assert code.co_stacksize == 3
+
+    def test_stackeffect_bug6(self):
+        source = """if 1:
+        {1}; {1}; {1}; {1}; {1}; {1}; {1}
+        """
+        code = compile_with_astcompiler(source, 'exec', self.space)
+        assert code.co_stacksize == 1
+
+    def test_stackeffect_bug7(self):
+        source = '''def f():
+            for i in a:
+                return
+        '''
+        code = compile_with_astcompiler(source, 'exec', self.space)
+
     def test_lambda(self):
         yield self.st, "y = lambda x: x", "y(4)", 4
 
diff --git a/pypy/interpreter/function.py b/pypy/interpreter/function.py
--- a/pypy/interpreter/function.py
+++ b/pypy/interpreter/function.py
@@ -374,14 +374,11 @@
         return space.wrap(self.name)
 
     def fset_func_name(self, space, w_name):
-        try:
+        if space.isinstance_w(w_name, space.w_str):
             self.name = space.str_w(w_name)
-        except OperationError, e:
-            if e.match(space, space.w_TypeError):
-                raise OperationError(space.w_TypeError,
-                                     space.wrap("func_name must be set "
-                                                "to a string object"))
-            raise
+        else:
+            raise OperationError(space.w_TypeError,
+                space.wrap("__name__ must be set to a string object"))
 
     def fdel_func_doc(self, space):
         self.w_doc = space.w_None
diff --git a/pypy/interpreter/test/test_function.py b/pypy/interpreter/test/test_function.py
--- a/pypy/interpreter/test/test_function.py
+++ b/pypy/interpreter/test/test_function.py
@@ -107,6 +107,12 @@
             __name__ = "bar"
             assert f.__module__ == "foo"''' in {}
 
+    def test_set_name(self):
+        def f(): pass
+        f.__name__ = 'g'
+        assert f.func_name == 'g'
+        raises(TypeError, "f.__name__ = u'g'")
+
 
 class AppTestFunction:
     def test_simple_call(self):
diff --git a/pypy/interpreter/unicodehelper.py b/pypy/interpreter/unicodehelper.py
--- a/pypy/interpreter/unicodehelper.py
+++ b/pypy/interpreter/unicodehelper.py
@@ -24,13 +24,9 @@
         self.end = end
         self.reason = reason
 
- at specialize.memo()
-def rpy_encode_error_handler():
-    # A RPython version of the "strict" error handler.
-    def raise_unicode_exception_encode(errors, encoding, msg, u,
-                                       startingpos, endingpos):
-        raise RUnicodeEncodeError(encoding, u, startingpos, endingpos, msg)
-    return raise_unicode_exception_encode
+def raise_unicode_exception_encode(errors, encoding, msg, u,
+                                   startingpos, endingpos):
+    raise RUnicodeEncodeError(encoding, u, startingpos, endingpos, msg)
 
 # ____________________________________________________________
 
@@ -67,5 +63,5 @@
     # This is not the case with Python3.
     return runicode.unicode_encode_utf_8(
         uni, len(uni), "strict",
-        errorhandler=rpy_encode_error_handler(),
+        errorhandler=raise_unicode_exception_encode,
         allow_surrogates=True)
diff --git a/pypy/module/_collections/app_defaultdict.py b/pypy/module/_collections/app_defaultdict.py
--- a/pypy/module/_collections/app_defaultdict.py
+++ b/pypy/module/_collections/app_defaultdict.py
@@ -11,6 +11,7 @@
 
 
 class defaultdict(dict):
+    __slots__ = ['default_factory']
 
     def __init__(self, *args, **kwds):
         if len(args) > 0:
@@ -20,7 +21,7 @@
                 raise TypeError("first argument must be callable")
         else:
             default_factory = None
-        self.default_factory = default_factory
+        defaultdict.default_factory.__set__(self, default_factory)
         super(defaultdict, self).__init__(*args, **kwds)
 
     def __missing__(self, key):
@@ -33,15 +34,15 @@
             return "defaultdict(...)"
         try:
             recurse.add(id(self))
-            return "defaultdict(%s, %s)" % (repr(self.default_factory), super(defaultdict, self).__repr__())
+            return "defaultdict(%s, %s)" % (self.default_factory,
+                                            super(defaultdict, self).__repr__())
         finally:
             recurse.remove(id(self))
 
     def copy(self):
         return type(self)(self.default_factory, self)
 
-    def __copy__(self):
-        return self.copy()
+    __copy__ = copy
 
     def __reduce__(self):
         """
@@ -55,4 +56,5 @@
 
            This API is used by pickle.py and copy.py.
         """
-        return (type(self), (self.default_factory,), None, None, self.iteritems())
+        return (type(self), (self.default_factory,), None, None,
+                defaultdict.iteritems(self))
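
The ``defaultdict.default_factory.__set__(self, ...)`` line above assigns
through the slot descriptor directly, which bypasses any ``__setattr__``
defined by a subclass; that is what lets test_no_setattr in the next file
pass.  A minimal, hedged illustration with invented class names::

    class Base(object):
        __slots__ = ['default_factory']

        def __init__(self, default_factory=None):
            # same effect as "self.default_factory = default_factory",
            # but does not go through type(self).__setattr__
            Base.default_factory.__set__(self, default_factory)

    class Picky(Base):
        def __setattr__(self, name, value):
            raise AssertionError("never reached during __init__")

    p = Picky(int)
    assert p.default_factory is int
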
diff --git a/pypy/module/_collections/test/test_defaultdict.py b/pypy/module/_collections/test/test_defaultdict.py
--- a/pypy/module/_collections/test/test_defaultdict.py
+++ b/pypy/module/_collections/test/test_defaultdict.py
@@ -54,3 +54,25 @@
         assert len(d2) == 1
         assert d2[2] == 3
         assert d2[3] == 42
+
+    def test_no_dict(self):
+        import _collections
+        assert not hasattr(_collections.defaultdict(), '__dict__')
+
+    def test_no_setattr(self):
+        import _collections
+        class D(_collections.defaultdict):
+            def __setattr__(self, attr, name):
+                raise AssertionError
+        d = D(int)
+        assert d['5'] == 0
+        d['6'] += 3
+        assert d['6'] == 3
+
+    def test_default_factory(self):
+        import _collections
+        f = lambda: 42
+        d = _collections.defaultdict(f)
+        assert d.default_factory is f
+        d.default_factory = lambda: 43
+        assert d['5'] == 43
diff --git a/pypy/module/_hashlib/__init__.py b/pypy/module/_hashlib/__init__.py
--- a/pypy/module/_hashlib/__init__.py
+++ b/pypy/module/_hashlib/__init__.py
@@ -1,11 +1,10 @@
 from pypy.interpreter.mixedmodule import MixedModule
-from pypy.module._hashlib.interp_hashlib import algorithms
+from pypy.module._hashlib.interp_hashlib import algorithms, fetch_names
 
 
 class Module(MixedModule):
     interpleveldefs = {
         'new' : 'interp_hashlib.new',
-        'openssl_md_meth_names': 'interp_hashlib.get(space).w_meth_names'
         }
 
     appleveldefs = {
@@ -15,5 +14,5 @@
         interpleveldefs['openssl_' + name] = 'interp_hashlib.new_' + name
 
     def startup(self, space):
-        from rpython.rlib.ropenssl import init_digests
-        init_digests()
+        w_meth_names = fetch_names(space)
+        space.setattr(self, space.wrap('openssl_md_meth_names'), w_meth_names)
diff --git a/pypy/module/_hashlib/interp_hashlib.py b/pypy/module/_hashlib/interp_hashlib.py
--- a/pypy/module/_hashlib/interp_hashlib.py
+++ b/pypy/module/_hashlib/interp_hashlib.py
@@ -16,47 +16,40 @@
 algorithms = ('md5', 'sha1', 'sha224', 'sha256', 'sha384', 'sha512')
 
 def hash_name_mapper_callback(obj_name, userdata):
-    state = global_state[0]
-    assert state is not None
     if not obj_name:
         return
     # Ignore aliased names, they pollute the list and OpenSSL appears
     # to have its own definition of alias as the resulting list
     # still contains duplicate and alternate names for several
     # algorithms.
-    if obj_name[0].c_alias:
+    if rffi.cast(lltype.Signed, obj_name[0].c_alias):
         return
     try:
-        w_name = state.space.wrap(rffi.charp2str(obj_name[0].c_name))
-        state.space.call_method(state.w_meth_names, "add", w_name)
+        space = global_name_fetcher.space
+        w_name = space.wrap(rffi.charp2str(obj_name[0].c_name))
+        global_name_fetcher.meth_names.append(w_name)
     except OperationError, e:
-        state.w_error = e
+        global_name_fetcher.w_error = e
 
-# XXX make it threadlocal?
-global_state = [None]
+class NameFetcher:
+    def setup(self, space):
+        self.space = space
+        self.meth_names = []
+        self.w_error = None
+    def _cleanup_(self):
+        self.__dict__.clear()
+global_name_fetcher = NameFetcher()
 
-class State:
-    def __init__(self, space):
-        self.space = space
-        self.generate_method_names(space)
-
-    def generate_method_names(self, space):
-        if not we_are_translated():
-            ropenssl.init_digests()
-        self.w_error = None
-        try:
-            global_state[0] = self
-            self.w_meth_names = space.call_function(space.w_set)
-            ropenssl.OBJ_NAME_do_all(
-                ropenssl.OBJ_NAME_TYPE_MD_METH,
-                hash_name_mapper_callback, None)
-        finally:
-            global_state[0] = None
-        if self.w_error:
-            raise self.w_error
-
-def get(space):
-    return space.fromcache(State)
+def fetch_names(space):
+    global_name_fetcher.setup(space)
+    ropenssl.init_digests()
+    ropenssl.OBJ_NAME_do_all(ropenssl.OBJ_NAME_TYPE_MD_METH,
+                             hash_name_mapper_callback, None)
+    if global_name_fetcher.w_error:
+        raise global_name_fetcher.w_error
+    meth_names = global_name_fetcher.meth_names
+    global_name_fetcher.meth_names = None
+    return space.call_function(space.w_frozenset, space.newlist(meth_names))
 
 class W_Hash(W_Root):
     NULL_CTX = lltype.nullptr(ropenssl.EVP_MD_CTX.TO)
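
The prebuilt ``NameFetcher`` above exists because the enumeration callback
passed to OpenSSL cannot carry extra state of its own in RPython.  The same
pattern, sketched in plain Python with invented names (``enumerate_digests``
stands in for ``OBJ_NAME_do_all``)::

    class NameFetcher(object):
        def setup(self):
            self.meth_names = []
            self.error = None

        def callback(self, name):
            if name:
                self.meth_names.append(name)

    _fetcher = NameFetcher()

    def fetch_names(enumerate_digests):
        _fetcher.setup()
        enumerate_digests(_fetcher.callback)
        if _fetcher.error:
            raise _fetcher.error
        names, _fetcher.meth_names = _fetcher.meth_names, None
        return frozenset(names)

    # toy enumerator standing in for OpenSSL's digest listing
    assert fetch_names(lambda cb: [cb(n) for n in ('md5', 'sha1', '')]) == \
           frozenset(['md5', 'sha1'])
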
diff --git a/pypy/module/_hashlib/test/test_hashlib.py b/pypy/module/_hashlib/test/test_hashlib.py
--- a/pypy/module/_hashlib/test/test_hashlib.py
+++ b/pypy/module/_hashlib/test/test_hashlib.py
@@ -5,7 +5,7 @@
 
     def test_method_names(self):
         import _hashlib
-        assert isinstance(_hashlib.openssl_md_meth_names, set)
+        assert isinstance(_hashlib.openssl_md_meth_names, frozenset)
         assert "md5" in _hashlib.openssl_md_meth_names
 
     def test_simple(self):
diff --git a/pypy/module/_hashlib/test/test_ztranslation.py b/pypy/module/_hashlib/test/test_ztranslation.py
new file mode 100644
--- /dev/null
+++ b/pypy/module/_hashlib/test/test_ztranslation.py
@@ -0,0 +1,4 @@
+from pypy.objspace.fake.checkmodule import checkmodule
+
+def test_checkmodule():
+    checkmodule('_hashlib')
diff --git a/pypy/module/_ssl/interp_ssl.py b/pypy/module/_ssl/interp_ssl.py
--- a/pypy/module/_ssl/interp_ssl.py
+++ b/pypy/module/_ssl/interp_ssl.py
@@ -75,7 +75,9 @@
 constants["PROTOCOL_TLSv1"]  = PY_SSL_VERSION_TLS1
 if HAVE_TLSv1_2:
     constants["PROTOCOL_TLSv1_1"] = PY_SSL_VERSION_TLS1_1
+    constants["OP_NO_TLSv1_1"] = SSL_OP_NO_TLSv1_1
     constants["PROTOCOL_TLSv1_2"] = PY_SSL_VERSION_TLS1_2
+    constants["OP_NO_TLSv1_2"] = SSL_OP_NO_TLSv1_2
 
 constants["OP_ALL"] = SSL_OP_ALL &~SSL_OP_DONT_INSERT_EMPTY_FRAGMENTS
 constants["OP_NO_SSLv2"] = SSL_OP_NO_SSLv2
diff --git a/pypy/module/cpyext/dictobject.py b/pypy/module/cpyext/dictobject.py
--- a/pypy/module/cpyext/dictobject.py
+++ b/pypy/module/cpyext/dictobject.py
@@ -91,39 +91,39 @@
 @cpython_api([PyObject], lltype.Void)
 def PyDict_Clear(space, w_obj):
     """Empty an existing dictionary of all key-value pairs."""
-    space.call_method(w_obj, "clear")
+    space.call_method(space.w_dict, "clear", w_obj)
 
 @cpython_api([PyObject], PyObject)
 def PyDict_Copy(space, w_obj):
     """Return a new dictionary that contains the same key-value pairs as p.
     """
-    return space.call_method(w_obj, "copy")
+    return space.call_method(space.w_dict, "copy", w_obj)
 
 @cpython_api([PyObject, PyObject], rffi.INT_real, error=-1)
 def PyDict_Update(space, w_obj, w_other):
     """This is the same as PyDict_Merge(a, b, 1) in C, or a.update(b) in
     Python.  Return 0 on success or -1 if an exception was raised.
     """
-    space.call_method(w_obj, "update", w_other)
+    space.call_method(space.w_dict, "update", w_obj, w_other)
     return 0
 
 @cpython_api([PyObject], PyObject)
 def PyDict_Keys(space, w_obj):
     """Return a PyListObject containing all the keys from the dictionary,
     as in the dictionary method dict.keys()."""
-    return space.call_method(w_obj, "keys")
+    return space.call_method(space.w_dict, "keys", w_obj)
 
 @cpython_api([PyObject], PyObject)
 def PyDict_Values(space, w_obj):
     """Return a PyListObject containing all the values from the
     dictionary p, as in the dictionary method dict.values()."""
-    return space.call_method(w_obj, "values")
+    return space.call_method(space.w_dict, "values", w_obj)
 
 @cpython_api([PyObject], PyObject)
 def PyDict_Items(space, w_obj):
     """Return a PyListObject containing all the items from the
     dictionary, as in the dictionary method dict.items()."""
-    return space.call_method(w_obj, "items")
+    return space.call_method(space.w_dict, "items", w_obj)
 
 @cpython_api([PyObject, Py_ssize_tP, PyObjectP, PyObjectP], rffi.INT_real, error=CANNOT_FAIL)
 def PyDict_Next(space, w_dict, ppos, pkey, pvalue):
@@ -175,7 +175,7 @@
     # not complete.
 
     try:
-        w_iter = space.call_method(w_dict, "iteritems")
+        w_iter = space.call_method(space.w_dict, "iteritems", w_dict)
         pos = ppos[0]
         while pos:
             space.call_method(w_iter, "next")
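
The pattern in this file (and in listobject.py and setobject.py below) is to
call the method through the built-in type rather than looking it up on the
instance, so that app-level overrides in subclasses are not invoked from the
C API.  A hedged pure-Python analogue of the difference::

    class LoudDict(dict):
        def clear(self):                  # app-level override
            raise RuntimeError("should not be reached from the C API")

    d = LoudDict(a=1)

    # d.clear() would dispatch to the override and raise;
    # calling through the type goes straight to the built-in behaviour:
    dict.clear(d)
    assert d == {}
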
diff --git a/pypy/module/cpyext/listobject.py b/pypy/module/cpyext/listobject.py
--- a/pypy/module/cpyext/listobject.py
+++ b/pypy/module/cpyext/listobject.py
@@ -65,7 +65,7 @@
     """Insert the item item into list list in front of index index.  Return
     0 if successful; return -1 and set an exception if unsuccessful.
     Analogous to list.insert(index, item)."""
-    space.call_method(w_list, "insert", space.wrap(index), w_item)
+    space.call_method(space.w_list, "insert", w_list, space.wrap(index), w_item)
     return 0
 
 @cpython_api([PyObject], Py_ssize_t, error=CANNOT_FAIL)
@@ -98,7 +98,7 @@
     failure.  This is equivalent to list.sort()."""
     if not isinstance(w_list, W_ListObject):
         PyErr_BadInternalCall(space)
-    space.call_method(w_list, "sort")
+    space.call_method(space.w_list, "sort", w_list)
     return 0
 
 @cpython_api([PyObject], rffi.INT_real, error=-1)
@@ -107,7 +107,7 @@
     failure.  This is the equivalent of list.reverse()."""
     if not isinstance(w_list, W_ListObject):
         PyErr_BadInternalCall(space)
-    space.call_method(w_list, "reverse")
+    space.call_method(space.w_list, "reverse", w_list)
     return 0
 
 @cpython_api([PyObject, Py_ssize_t, Py_ssize_t], PyObject)
diff --git a/pypy/module/cpyext/longobject.py b/pypy/module/cpyext/longobject.py
--- a/pypy/module/cpyext/longobject.py
+++ b/pypy/module/cpyext/longobject.py
@@ -186,6 +186,17 @@
         pend[0] = rffi.ptradd(str, len(s))
     return space.call_function(space.w_long, w_str, w_base)
 
+ at cpython_api([rffi.CWCHARP, Py_ssize_t, rffi.INT_real], PyObject)
+def PyLong_FromUnicode(space, u, length, base):
+    """Convert a sequence of Unicode digits to a Python long integer value.
+    The first parameter, u, points to the first character of the Unicode
+    string, length gives the number of characters, and base is the radix
+    for the conversion.  The radix must be in the range [2, 36]; if it is
+    out of range, ValueError will be raised."""
+    w_value = space.wrap(rffi.wcharpsize2unicode(u, length))
+    w_base = space.wrap(rffi.cast(lltype.Signed, base))
+    return space.call_function(space.w_long, w_value, w_base)
+
 @cpython_api([rffi.VOIDP], PyObject)
 def PyLong_FromVoidPtr(space, p):
     """Create a Python integer or long integer from the pointer p. The pointer value
diff --git a/pypy/module/cpyext/setobject.py b/pypy/module/cpyext/setobject.py
--- a/pypy/module/cpyext/setobject.py
+++ b/pypy/module/cpyext/setobject.py
@@ -36,7 +36,7 @@
     values of brand new frozensets before they are exposed to other code."""
     if not PySet_Check(space, w_s):
         PyErr_BadInternalCall(space)
-    space.call_method(w_s, 'add', w_obj)
+    space.call_method(space.w_set, 'add', w_s, w_obj)
     return 0
 
 @cpython_api([PyObject, PyObject], rffi.INT_real, error=-1)
@@ -49,7 +49,7 @@
     instance of set or its subtype."""
     if not PySet_Check(space, w_s):
         PyErr_BadInternalCall(space)
-    space.call_method(w_s, 'discard', w_obj)
+    space.call_method(space.w_set, 'discard', w_s, w_obj)
     return 0
 
 
@@ -59,12 +59,12 @@
     object from the set.  Return NULL on failure.  Raise KeyError if the
     set is empty. Raise a SystemError if set is an not an instance of
     set or its subtype."""
-    return space.call_method(w_set, "pop")
+    return space.call_method(space.w_set, "pop", w_set)
 
 @cpython_api([PyObject], rffi.INT_real, error=-1)
 def PySet_Clear(space, w_set):
     """Empty an existing set of all elements."""
-    space.call_method(w_set, 'clear')
+    space.call_method(space.w_set, 'clear', w_set)
     return 0
 
 @cpython_api([PyObject], Py_ssize_t, error=CANNOT_FAIL)
diff --git a/pypy/module/cpyext/stubs.py b/pypy/module/cpyext/stubs.py
--- a/pypy/module/cpyext/stubs.py
+++ b/pypy/module/cpyext/stubs.py
@@ -1395,18 +1395,6 @@
     """
     raise NotImplementedError
 
- at cpython_api([rffi.CWCHARP, Py_ssize_t, rffi.INT_real], PyObject)
-def PyLong_FromUnicode(space, u, length, base):
-    """Convert a sequence of Unicode digits to a Python long integer value.  The first
-    parameter, u, points to the first character of the Unicode string, length
-    gives the number of characters, and base is the radix for the conversion.  The
-    radix must be in the range [2, 36]; if it is out of range, ValueError
-    will be raised.
-
-    This function used an int for length. This might require
-    changes in your code for properly supporting 64-bit systems."""
-    raise NotImplementedError
-
 @cpython_api([PyObject, rffi.CCHARP], rffi.INT_real, error=-1)
 def PyMapping_DelItemString(space, o, key):
     """Remove the mapping for object key from the object o. Return -1 on
diff --git a/pypy/module/cpyext/test/test_longobject.py b/pypy/module/cpyext/test/test_longobject.py
--- a/pypy/module/cpyext/test/test_longobject.py
+++ b/pypy/module/cpyext/test/test_longobject.py
@@ -180,3 +180,16 @@
         assert module.from_bytearray(False, False) == 0xBC9A
         assert module.from_bytearray(False, True) == -0x4365
 
+    def test_fromunicode(self):
+        module = self.import_extension('foo', [
+            ("from_unicode", "METH_O",
+             """
+                 Py_UNICODE* u = PyUnicode_AsUnicode(args);
+                 return Py_BuildValue("NN",
+                     PyLong_FromUnicode(u, 6, 10),
+                     PyLong_FromUnicode(u, 6, 16));
+             """),
+            ])
+        # A string with arabic digits. 'BAD' is after the 6th character.
+        assert module.from_unicode(u'  1\u0662\u0663\u0664BAD') == (1234, 4660)
+
diff --git a/pypy/module/cpyext/test/test_ztranslation.py b/pypy/module/cpyext/test/test_ztranslation.py
--- a/pypy/module/cpyext/test/test_ztranslation.py
+++ b/pypy/module/cpyext/test/test_ztranslation.py
@@ -1,4 +1,4 @@
 from pypy.objspace.fake.checkmodule import checkmodule
 
 def test_cpyext_translates():
-    checkmodule('cpyext', '_rawffi')
+    checkmodule('cpyext', '_rawffi', translate_startup=False)
diff --git a/pypy/module/pypyjit/test_pypy_c/test_buffers.py b/pypy/module/pypyjit/test_pypy_c/test_buffers.py
--- a/pypy/module/pypyjit/test_pypy_c/test_buffers.py
+++ b/pypy/module/pypyjit/test_pypy_c/test_buffers.py
@@ -42,7 +42,7 @@
         assert loop.match_by_id('unpack', """
             guard_not_invalidated(descr=...)
             p90 = newstr(4)
-            call(ConstClass(copy_raw_to_string), i55, p90, 0, 4, descr=<Callv 0 irii EF=4>)
+            call(ConstClass(copy_raw_to_string), i55, p90, 0, 4, descr=<Callv 0 irii EF=5>)
             guard_no_exception(descr=...)
             i91 = strgetitem(p90, 0)
             i92 = strgetitem(p90, 1)
diff --git a/pypy/module/pypyjit/test_pypy_c/test_call.py b/pypy/module/pypyjit/test_pypy_c/test_call.py
--- a/pypy/module/pypyjit/test_pypy_c/test_call.py
+++ b/pypy/module/pypyjit/test_pypy_c/test_call.py
@@ -355,7 +355,7 @@
             i17 = arraylen_gc(p15, descr=<ArrayS .>)
             i18 = int_lt(i17, i15)
             # a cond call to _ll_list_resize_hint_really_look_inside_iff
-            cond_call(i18, _, p8, i15, 1, descr=<Callv 0 rii EF=4>)
+            cond_call(i18, _, p8, i15, 1, descr=<Callv 0 rii EF=5>)
             guard_no_exception(descr=...)
             p17 = getfield_gc(p8, descr=<FieldP list.items .*>)
             setarrayitem_gc(p17, i13, i12, descr=<ArrayS .>)
@@ -395,7 +395,7 @@
             setarrayitem_gc(p24, 0, p26, descr=<ArrayP .>)
             setfield_gc(p22, p24, descr=<FieldP .*Arguments.inst_arguments_w .*>)
             }}}
-            p32 = call_may_force(_, p18, p22, descr=<Callr . rr EF=6>)
+            p32 = call_may_force(_, p18, p22, descr=<Callr . rr EF=7>)
             ...
         """)
 
diff --git a/pypy/module/pypyjit/test_pypy_c/test_containers.py b/pypy/module/pypyjit/test_pypy_c/test_containers.py
--- a/pypy/module/pypyjit/test_pypy_c/test_containers.py
+++ b/pypy/module/pypyjit/test_pypy_c/test_containers.py
@@ -74,7 +74,7 @@
             setfield_gc(p13, p15, descr=<FieldP dicttable.indexes .+>)
             setfield_gc(p13, ConstPtr(0), descr=<FieldP dicttable.entries .+>)
             }}}
-            i17 = call(ConstClass(ll_dict_lookup_trampoline), p13, p10, i12, 1, descr=<Calli . rrii EF=4 OS=4>)
+            i17 = call(ConstClass(ll_dict_lookup_trampoline), p13, p10, i12, 1, descr=<Calli . rrii EF=5 OS=4>)
             {{{
             setfield_gc(p13, 0, descr=<FieldS dicttable.lookup_function_no .+>)
             setfield_gc(p13, 0, descr=<FieldS dicttable.num_live_items .+>)
@@ -82,10 +82,10 @@
             }}}
             guard_no_exception(descr=...)
             p20 = new_with_vtable(ConstClass(W_IntObject))
-            call(ConstClass(_ll_dict_setitem_lookup_done_trampoline), p13, p10, p20, i12, i17, descr=<Callv 0 rrrii EF=4>)
+            call(ConstClass(_ll_dict_setitem_lookup_done_trampoline), p13, p10, p20, i12, i17, descr=<Callv 0 rrrii EF=5>)
             setfield_gc(p20, i5, descr=<FieldS .*W_IntObject.inst_intval .*>)
             guard_no_exception(descr=...)
-            i23 = call(ConstClass(ll_call_lookup_function), p13, p10, i12, 0, descr=<Calli . rrii EF=4 OS=4>)
+            i23 = call(ConstClass(ll_call_lookup_function), p13, p10, i12, 0, descr=<Calli . rrii EF=5 OS=4>)
             guard_no_exception(descr=...)
             i27 = int_lt(i23, 0)
             guard_false(i27, descr=...)
diff --git a/pypy/module/pypyjit/test_pypy_c/test_ffi.py b/pypy/module/pypyjit/test_pypy_c/test_ffi.py
--- a/pypy/module/pypyjit/test_pypy_c/test_ffi.py
+++ b/pypy/module/pypyjit/test_pypy_c/test_ffi.py
@@ -35,7 +35,7 @@
             guard_not_invalidated(descr=...)
             i17 = force_token()
             setfield_gc(p0, i17, descr=<.* .*PyFrame.vable_token .*>)
-            f21 = call_release_gil(%s, 2.000000, 3.000000, descr=<Callf 8 ff EF=6>)
+            f21 = call_release_gil(%s, 2.000000, 3.000000, descr=<Callf 8 ff EF=7>)
             guard_not_forced(descr=...)
             guard_no_exception(descr=...)
         """ % pow_addr)
@@ -202,7 +202,7 @@
         assert loop.match_by_id('cfficall', """
             p96 = force_token()
             setfield_gc(p0, p96, descr=<FieldP pypy.interpreter.pyframe.PyFrame.vable_token .>)
-            f97 = call_release_gil(91, i59, 1.0, 3, descr=<Callf 8 fi EF=6 OS=62>)
+            f97 = call_release_gil(91, i59, 1.0, 3, descr=<Callf 8 fi EF=7 OS=62>)
             guard_not_forced(descr=...)
             guard_no_exception(descr=...)
         """, ignore_ops=['guard_not_invalidated'])
@@ -266,7 +266,7 @@
         loop, = log.loops_by_id('cfficall')
         assert loop.match_by_id('cfficall', """
             ...
-            f1 = call_release_gil(..., descr=<Calli 4 ii EF=6 OS=62>)
+            f1 = call_release_gil(..., descr=<Calli 4 ii EF=7 OS=62>)
             ...
         """)
 
@@ -330,14 +330,14 @@
         guard_value(p165, ConstPtr(ptr70), descr=...)
         p166 = getfield_gc(p165, descr=<FieldP pypy.objspace.std.dictmultiobject.W_DictMultiObject.inst_strategy .+>)
         guard_value(p166, ConstPtr(ptr72), descr=...)
-        p167 = call(ConstClass(_ll_0_alloc_with_del___), descr=<Callr . EF=4>)
+        p167 = call(ConstClass(_ll_0_alloc_with_del___), descr=<Callr . EF=5>)
         guard_no_exception(descr=...)
         i112 = int_signext(i160, 2)
         setfield_gc(p167, ConstPtr(ptr85), descr=<FieldP pypy.module._cffi_backend.cdataobj.W_CData.inst_ctype .+>)
         i114 = int_ne(i160, i112)
         guard_false(i114, descr=...)
         --TICK--
-        i119 = call(ConstClass(_ll_1_raw_malloc_varsize__Signed), 6, descr=<Calli . i EF=4 OS=110>)
+        i119 = call(ConstClass(_ll_1_raw_malloc_varsize__Signed), 6, descr=<Calli . i EF=5 OS=110>)
         raw_store(i119, 0, i160, descr=<ArrayS 2>)
         raw_store(i119, 2, i160, descr=<ArrayS 2>)
         raw_store(i119, 4, i160, descr=<ArrayS 2>)
diff --git a/pypy/module/pypyjit/test_pypy_c/test_string.py b/pypy/module/pypyjit/test_pypy_c/test_string.py
--- a/pypy/module/pypyjit/test_pypy_c/test_string.py
+++ b/pypy/module/pypyjit/test_pypy_c/test_string.py
@@ -6,7 +6,7 @@
 else:
     SHIFT = 63
 
-# XXX review the <Call> descrs to replace some EF=4 with EF=3 (elidable)
+# XXX review the <Call> descrs to replace some EF=5 with EF=4 (elidable)
 
 
 class TestString(BaseTestPyPyC):
@@ -80,12 +80,12 @@
             i23 = strgetitem(p10, i19)
             p25 = newstr(1)
             strsetitem(p25, 0, i23)
-            p93 = call(ConstClass(fromstr), p25, 16, descr=<Callr . ri EF=3>)
+            p93 = call(ConstClass(fromstr), p25, 16, descr=<Callr . ri EF=4>)
             guard_no_exception(descr=...)
             i95 = getfield_gc_pure(p93, descr=<FieldS rpython.rlib.rbigint.rbigint.inst_size .*>)
             i96 = int_gt(i95, #)
             guard_false(i96, descr=...)
-            i94 = call(ConstClass(rbigint._toint_helper), p93, descr=<Calli . r EF=3>)
+            i94 = call(ConstClass(rbigint._toint_helper), p93, descr=<Calli . r EF=4>)
             guard_no_exception(descr=...)
             i95 = int_add_ovf(i6, i94)
             guard_no_overflow(descr=...)
@@ -119,21 +119,21 @@
             setfield_gc(p86, 23, descr=<FieldS stringbuilder.current_end .+>)
             setfield_gc(p86, 23, descr=<FieldS stringbuilder.total_size .+>)
             }}}
-            call(ConstClass(ll_append_res0__stringbuilderPtr_rpy_stringPtr), p86, p80, descr=<Callv 0 rr EF=4>)
+            call(ConstClass(ll_append_res0__stringbuilderPtr_rpy_stringPtr), p86, p80, descr=<Callv 0 rr EF=5>)
             guard_no_exception(descr=...)
             i89 = getfield_gc(p86, descr=<FieldS stringbuilder.current_pos .+>)
             i90 = getfield_gc(p86, descr=<FieldS stringbuilder.current_end .+>)
             i91 = int_eq(i89, i90)
-            cond_call(i91, ConstClass(ll_grow_by__stringbuilderPtr_Signed), p86, 1, descr=<Callv 0 ri EF=4>)
+            cond_call(i91, ConstClass(ll_grow_by__stringbuilderPtr_Signed), p86, 1, descr=<Callv 0 ri EF=5>)
             guard_no_exception(descr=...)
             i92 = getfield_gc(p86, descr=<FieldS stringbuilder.current_pos .+>)
             i93 = int_add(i92, 1)
             p94 = getfield_gc(p86, descr=<FieldP stringbuilder.current_buf .+>)
             strsetitem(p94, i92, 32)
             setfield_gc(p86, i93, descr=<FieldS stringbuilder.current_pos .+>)
-            call(ConstClass(ll_append_res0__stringbuilderPtr_rpy_stringPtr), p86, p80, descr=<Callv 0 rr EF=4>)
+            call(ConstClass(ll_append_res0__stringbuilderPtr_rpy_stringPtr), p86, p80, descr=<Callv 0 rr EF=5>)
             guard_no_exception(descr=...)
-            p95 = call(..., descr=<Callr . r EF=4>)     # ll_build
+            p95 = call(..., descr=<Callr . r EF=5>)     # ll_build
             guard_no_exception(descr=...)
             i96 = strlen(p95)
             i97 = int_add_ovf(i71, i96)
@@ -248,7 +248,7 @@
         i50 = int_add(i47, 1)
         setfield_gc(p15, i50, descr=<FieldS pypy.module.__builtin__.functional.W_XRangeIterator.inst_current 8>)
         guard_not_invalidated(descr=...)
-        p52 = call(ConstClass(str_decode_ascii__raise_unicode_exception_decode), ConstPtr(ptr38), 3, 1, descr=<Callr . rii EF=4>)
+        p52 = call(ConstClass(str_decode_ascii__raise_unicode_exception_decode), ConstPtr(ptr38), 3, 1, descr=<Callr . rii EF=5>)
         guard_no_exception(descr=...)
         p53 = getfield_gc_pure(p52, descr=<FieldP tuple2.item0 .>)
         guard_nonnull(p53, descr=...)
diff --git a/pypy/module/pypyjit/test_pypy_c/test_thread.py b/pypy/module/pypyjit/test_pypy_c/test_thread.py
--- a/pypy/module/pypyjit/test_pypy_c/test_thread.py
+++ b/pypy/module/pypyjit/test_pypy_c/test_thread.py
@@ -64,7 +64,7 @@
         guard_true(i56, descr=...)
         p57 = force_token()
         setfield_gc(p0, p57, descr=<FieldP pypy.interpreter.pyframe.PyFrame.vable_token 8>)
-        i58 = call_release_gil(0, _, i37, 1, descr=<Calli 4 ii EF=6>)
+        i58 = call_release_gil(0, _, i37, 1, descr=<Calli 4 ii EF=7>)
         guard_not_forced(descr=...)
         guard_no_exception(descr=...)
         i58 = int_sub(i44, 1)
diff --git a/pypy/module/pypyjit/test_pypy_c/test_weakref.py b/pypy/module/pypyjit/test_pypy_c/test_weakref.py
--- a/pypy/module/pypyjit/test_pypy_c/test_weakref.py
+++ b/pypy/module/pypyjit/test_pypy_c/test_weakref.py
@@ -35,7 +35,7 @@
         guard_nonnull_class(p66, ..., descr=...)
         p67 = force_token()
         setfield_gc(p0, p67, descr=<FieldP pypy.interpreter.pyframe.PyFrame.vable_token \d+>)
-        p68 = call_may_force(ConstClass(WeakrefLifelineWithCallbacks.make_weakref_with_callback), p66, ConstPtr(ptr50), p14, ConstPtr(ptr51), descr=<Callr \d rrrr EF=6>)
+        p68 = call_may_force(ConstClass(WeakrefLifelineWithCallbacks.make_weakref_with_callback), p66, ConstPtr(ptr50), p14, ConstPtr(ptr51), descr=<Callr \d rrrr EF=7>)
         guard_not_forced(descr=...)
         guard_no_exception(descr=...)
         guard_nonnull_class(p68, ..., descr=...)
diff --git a/pypy/module/sys/test/test_sysmodule.py b/pypy/module/sys/test/test_sysmodule.py
--- a/pypy/module/sys/test/test_sysmodule.py
+++ b/pypy/module/sys/test/test_sysmodule.py
@@ -642,7 +642,7 @@
 
         thread_id = thread.get_ident()
         def other_thread():
-            print "thread started"
+            #print "thread started"
             lock2.release()
             lock1.acquire()
         lock1 = thread.allocate_lock()
diff --git a/pypy/module/test_lib_pypy/test_sqlite3.py b/pypy/module/test_lib_pypy/test_sqlite3.py
--- a/pypy/module/test_lib_pypy/test_sqlite3.py
+++ b/pypy/module/test_lib_pypy/test_sqlite3.py
@@ -276,6 +276,30 @@
         exc = raises(ValueError, cur.execute, "select 2\0")
         assert str(exc.value) == "the query contains a null character"
 
+    def test_close_in_del_ordering(self):
+        import gc
+        class SQLiteBackend(object):
+            success = False
+            def __init__(self):
+                self.connection = _sqlite3.connect(":memory:")
+            def close(self):
+                self.connection.close()
+            def __del__(self):
+                self.close()
+                SQLiteBackend.success = True
+            def create_db_if_needed(self):
+                conn = self.connection
+                cursor = conn.cursor()
+                cursor.execute("""
+                    create table if not exists nameoftable(value text)
+                """)
+                cursor.close()
+                conn.commit()
+        SQLiteBackend().create_db_if_needed()
+        gc.collect()
+        gc.collect()
+        assert SQLiteBackend.success
+
 
 class TestSQLiteHost(BaseTestSQLite):
     def setup_class(cls):
diff --git a/pypy/objspace/descroperation.py b/pypy/objspace/descroperation.py
--- a/pypy/objspace/descroperation.py
+++ b/pypy/objspace/descroperation.py
@@ -129,7 +129,8 @@
     # This is meant to be a *mixin*.
 
     def is_data_descr(space, w_obj):
-        return space.lookup(w_obj, '__set__') is not None
+        return (space.lookup(w_obj, '__set__') is not None or
+                space.lookup(w_obj, '__delete__') is not None)
 
     def get_and_call_args(space, w_descr, w_obj, args):
         # a special case for performance and to avoid infinite recursion
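
The reason ``__delete__`` matters here: defining either ``__set__`` or
``__delete__`` makes an object a data descriptor, and data descriptors take
precedence over the instance ``__dict__``.  A small illustration of the
semantics this change aligns with (the test added further below exercises the
``del`` path itself)::

    class DeleteOnly(object):
        def __get__(self, obj, cls):
            return 42
        def __delete__(self, obj):
            obj.deleted = True

    class C(object):
        x = DeleteOnly()

    c = C()
    c.__dict__['x'] = 'shadow'
    assert c.x == 42        # the data descriptor wins over the instance dict
    del c.x
    assert c.deleted
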
diff --git a/pypy/objspace/fake/checkmodule.py b/pypy/objspace/fake/checkmodule.py
--- a/pypy/objspace/fake/checkmodule.py
+++ b/pypy/objspace/fake/checkmodule.py
@@ -2,16 +2,19 @@
 from pypy.config.pypyoption import get_pypy_config
 
 
-def checkmodule(*modnames):
+def checkmodule(*modnames, **kwds):
+    translate_startup = kwds.pop('translate_startup', True)
+    assert not kwds
     config = get_pypy_config(translating=True)
     space = FakeObjSpace(config)
     seeobj_w = []
+    modules = []
     for modname in modnames:
         mod = __import__('pypy.module.%s' % modname, None, None, ['__doc__'])
         # force computation and record what we wrap
         module = mod.Module(space, W_Root())
         module.setup_after_space_initialization()
-        module.startup(space)
+        modules.append(module)
         for name in module.loaders:
             seeobj_w.append(module._load_lazily(space, name))
         if hasattr(module, 'submodules'):
@@ -20,5 +23,11 @@
                 for name in submod.loaders:
                     seeobj_w.append(submod._load_lazily(space, name))
     #
-    space.translates(seeobj_w=seeobj_w,
+    def func():
+        for mod in modules:
+            mod.startup(space)
+    if not translate_startup:
+        func()   # call it now
+        func = None
+    space.translates(func, seeobj_w=seeobj_w,
                      **{'translation.list_comprehension_operations': True})
diff --git a/pypy/objspace/fake/objspace.py b/pypy/objspace/fake/objspace.py
--- a/pypy/objspace/fake/objspace.py
+++ b/pypy/objspace/fake/objspace.py
@@ -113,7 +113,7 @@
 
 BUILTIN_TYPES = ['int', 'str', 'float', 'long', 'tuple', 'list', 'dict',
                  'unicode', 'complex', 'slice', 'bool', 'basestring', 'object',
-                 'bytearray', 'buffer']
+                 'bytearray', 'buffer', 'set', 'frozenset']
 
 class FakeObjSpace(ObjSpace):
     def __init__(self, config=None):
diff --git a/pypy/objspace/std/unicodeobject.py b/pypy/objspace/std/unicodeobject.py
--- a/pypy/objspace/std/unicodeobject.py
+++ b/pypy/objspace/std/unicodeobject.py
@@ -439,12 +439,12 @@
             try:
                 if encoding == 'ascii':
                     u = space.unicode_w(w_object)
-                    eh = unicodehelper.rpy_encode_error_handler()
+                    eh = unicodehelper.raise_unicode_exception_encode
                     return space.wrap(unicode_encode_ascii(
                             u, len(u), None, errorhandler=eh))
                 if encoding == 'utf-8':
                     u = space.unicode_w(w_object)
-                    eh = unicodehelper.rpy_encode_error_handler()
+                    eh = unicodehelper.raise_unicode_exception_encode
                     return space.wrap(unicode_encode_utf_8(
                             u, len(u), None, errorhandler=eh,
                             allow_surrogates=True))
diff --git a/pypy/objspace/test/test_descroperation.py b/pypy/objspace/test/test_descroperation.py
--- a/pypy/objspace/test/test_descroperation.py
+++ b/pypy/objspace/test/test_descroperation.py
@@ -607,6 +607,18 @@
 
         raises(AttributeError, lambda: A().a)
 
+    def test_delete_descriptor(self):
+        class Prop(object):
+            def __get__(self, obj, cls):
+                return 42
+            def __delete__(self, obj):
+                obj.deleted = True
+        class C(object):
+            x = Prop()
+        obj = C()
+        del obj.x
+        assert obj.deleted
+
     def test_non_callable(self):
         meth = classmethod(1).__get__(1)
         raises(TypeError, meth)
diff --git a/pypy/tool/gdb_pypy.py b/pypy/tool/gdb_pypy.py
--- a/pypy/tool/gdb_pypy.py
+++ b/pypy/tool/gdb_pypy.py
@@ -5,7 +5,7 @@
 
 Or, alternatively:
 
-(gdb) python execfile('/path/to/gdb_pypy.py')
+(gdb) python exec(open('/path/to/gdb_pypy.py').read())
 """
 
 import re
@@ -55,10 +55,10 @@
 
 class RPyType(Command):
     """
-    Prints the RPython type of the expression (remember to dereference it!)
+    Prints the RPython type of the expression.
     E.g.:
 
-    (gdb) rpy_type *l_v123
+    (gdb) rpy_type l_v123
     GcStruct pypy.foo.Bar { super, inst_xxx, inst_yyy }
     """
 
@@ -73,23 +73,34 @@
 
     def invoke(self, arg, from_tty):
         # some magic code to automatically reload the python file while developing
-        from pypy.tool import gdb_pypy
         try:
-            reload(gdb_pypy)
+            from pypy.tool import gdb_pypy
+            try:
+                reload(gdb_pypy)
+            except:
+                import imp
+                imp.reload(gdb_pypy)
+            gdb_pypy.RPyType.prog2typeids = self.prog2typeids # persist the cache
+            self.__class__ = gdb_pypy.RPyType
+            result = self.do_invoke(arg, from_tty)
+            if not isinstance(result, str):
+                result = result.decode('latin-1')
+            print(result)
         except:
-            import imp
-            imp.reload(gdb_pypy)
-        gdb_pypy.RPyType.prog2typeids = self.prog2typeids # persist the cache
-        self.__class__ = gdb_pypy.RPyType
-        print (self.do_invoke(arg, from_tty).decode('latin-1'))
+            import traceback
+            traceback.print_exc()
 
     def do_invoke(self, arg, from_tty):
         try:
             offset = int(arg)
         except ValueError:
             obj = self.gdb.parse_and_eval(arg)
+            if obj.type.code == self.gdb.TYPE_CODE_PTR:
+                obj = obj.dereference()
             hdr = lookup(obj, '_gcheader')
             tid = hdr['h_tid']
+            if tid == -42:      # forwarded?
+                return 'Forwarded'
             if sys.maxsize < 2**32:
                 offset = tid & 0xFFFF     # 32bit
             else:
@@ -100,7 +111,7 @@
         if offset in typeids:
             return typeids[offset]
         else:
-            return 'Cannot find the type with offset %d' % offset
+            return 'Cannot find the type with offset 0x%x' % offset
 
     def get_typeids(self):
         try:
@@ -127,7 +138,7 @@
         try:
             self.gdb.execute('dump binary memory %s %s %s+%d' %
                              (fname, vstart, vstart, length))
-            with open(fname, 'rt') as fobj:
+            with open(fname, 'rb') as fobj:
                 data = fobj.read()
             return TypeIdsMap(zlib.decompress(data).splitlines(True), self.gdb)
         finally:
@@ -242,6 +253,8 @@
     fields
     """
 
+    recursive = False
+
     def __init__(self, val):
         self.val = val
 
@@ -249,29 +262,47 @@
     def lookup(cls, val, gdb=None):
         t = val.type
         if (is_ptr(t, gdb) and t.target().tag is not None and
-            re.match(r'pypy_list\d*', t.target().tag)):
+            re.match(r'pypy_(list|array)\d*', t.target().tag)):
             return cls(val)
         return None
 
     def to_string(self):
-        length = int(self.val['l_length'])
-        array = self.val['l_items']
-        allocated = int(array['length'])
-        items = array['items']
-        itemlist = []
-        for i in range(length):
-            item = items[i]
-            itemlist.append(str(item))
-        str_items = ', '.join(itemlist)
-        return 'r[%s] (len=%d, alloc=%d)' % (str_items, length, allocated)
+        t = self.val.type
+        if t.target().tag.startswith(r'pypy_array'):
+            if not self.val:
+                return 'r(null_array)'
+            length = int(self.val['length'])
+            items = self.val['items']
+            allocstr = ''
+        else:
+            if not self.val:
+                return 'r(null_list)'
+            length = int(self.val['l_length'])
+            array = self.val['l_items']
+            allocated = int(array['length'])
+            items = array['items']
+            allocstr = ', alloc=%d' % allocated
+        if RPyListPrinter.recursive:
+            str_items = '...'
+        else:
+            RPyListPrinter.recursive = True
+            try:
+                itemlist = []
+                for i in range(length):
+                    item = items[i]
+                    itemlist.append(str(item))    # may recurse here
+                str_items = ', '.join(itemlist)
+            finally:
+                RPyListPrinter.recursive = False
+        return 'r[%s] (len=%d%s)' % (str_items, length, allocstr)
 
 
 try:
     import gdb
     RPyType() # side effects
-    gdb.pretty_printers += [
+    gdb.pretty_printers = [
         RPyStringPrinter.lookup,
         RPyListPrinter.lookup
-        ]
+        ] + gdb.pretty_printers
 except ImportError:
     pass
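
The rewritten RPyListPrinter.to_string() protects itself against printers that re-enter themselves (str() on an item may invoke the same printer again) by toggling a class-level `recursive` flag, and the lookup functions are now prepended to gdb.pretty_printers rather than appended, presumably so they are consulted before other globally registered printers. Below is a minimal, gdb-free sketch of the same recursion-guard pattern; Node and GuardedPrinter are illustrative names, not part of the commit.

    # Sketch of the recursion-guard pattern used by RPyListPrinter.to_string().
    class Node(object):
        def __init__(self, children=()):
            self.children = list(children)

    class GuardedPrinter(object):
        recursive = False              # class-level flag shared by all instances

        def __init__(self, node):
            self.node = node

        def to_string(self):
            if GuardedPrinter.recursive:
                # already inside a nested to_string() call: stop recursing
                items = '...'
            else:
                GuardedPrinter.recursive = True
                try:
                    # str() on a child may recurse back into to_string()
                    items = ', '.join(str(GuardedPrinter(c))
                                      for c in self.node.children)
                finally:
                    GuardedPrinter.recursive = False
            return 'r[%s] (len=%d)' % (items, len(self.node.children))

        __str__ = to_string

    a = Node()
    b = Node([a])
    a.children.append(b)                    # cyclic structure
    print(GuardedPrinter(a).to_string())    # r[r[...] (len=1)] (len=1)
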
diff --git a/pypy/tool/release/package.py b/pypy/tool/release/package.py
--- a/pypy/tool/release/package.py
+++ b/pypy/tool/release/package.py
@@ -65,6 +65,12 @@
 add --without-{0} option to skip packaging binary CFFI extension.""".format(module)
             raise MissingDependenciesError(module)
 
+def pypy_runs(pypy_c, quiet=False):
+    kwds = {}
+    if quiet:
+        kwds['stderr'] = subprocess.PIPE
+    return subprocess.call([str(pypy_c), '-c', 'pass'], **kwds) == 0
+
 def create_package(basedir, options):
     retval = 0
     name = options.name
@@ -87,6 +93,8 @@
             ' Please compile pypy first, using translate.py,'
             ' or check that you gave the correct path'
             ' with --override_pypy_c' % pypy_c)
+    if not pypy_runs(pypy_c):
+        raise OSError("Running %r failed!" % (str(pypy_c),))
     if not options.no_cffi:
         try:
             create_cffi_import_libraries(pypy_c, options)
@@ -100,6 +108,15 @@
     libpypy_name = 'libpypy-c.so' if not sys.platform.startswith('darwin') else 'libpypy-c.dylib'
     libpypy_c = pypy_c.new(basename=libpypy_name)
     if libpypy_c.check():
+        # check that this libpypy_c is really needed
+        os.rename(str(libpypy_c), str(libpypy_c) + '~')
+        try:
+            if pypy_runs(pypy_c, quiet=True):
+                raise Exception("It seems that %r runs without needing %r.  "
+                                "Please check and remove the latter" %
+                                (str(pypy_c), str(libpypy_c)))
+        finally:
+            os.rename(str(libpypy_c) + '~', str(libpypy_c))
         binaries.append((libpypy_c, libpypy_name))
     #
     builddir = options.builddir
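
package.py now refuses to package a pypy-c that cannot even start: pypy_runs() simply executes `pypy-c -c pass` and checks the exit code, and when a libpypy-c shared library is present it is temporarily renamed away to confirm that the binary really depends on it. A minimal sketch of both checks follows; the paths and names are hypothetical, the real script derives them from the build directory.

    # Sketch of the sanity checks added to create_package() (paths hypothetical).
    import os
    import subprocess

    def binary_runs(path, quiet=False):
        kwds = {'stderr': subprocess.PIPE} if quiet else {}
        # exit code 0 from "-c pass" means the interpreter starts up fine
        return subprocess.call([str(path), '-c', 'pass'], **kwds) == 0

    def check_library_is_needed(binary, shared_lib):
        # Temporarily hide the shared library; if the binary still starts,
        # it was not linked against it and packaging it would be pointless.
        os.rename(shared_lib, shared_lib + '~')
        try:
            if binary_runs(binary, quiet=True):
                raise Exception("%r runs without needing %r" % (binary, shared_lib))
        finally:
            os.rename(shared_lib + '~', shared_lib)
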
diff --git a/pypy/tool/test/test_gdb_pypy.py b/pypy/tool/test/test_gdb_pypy.py
--- a/pypy/tool/test/test_gdb_pypy.py
+++ b/pypy/tool/test/test_gdb_pypy.py
@@ -204,6 +204,22 @@
     mylist.type.target().tag = None
     assert gdb_pypy.RPyListPrinter.lookup(mylist, FakeGdb) is None
 
+def test_pprint_array():
+    d = {'_gcheder': {'h_tid': 234}, 'length': 3, 'items': [20, 21, 22]}
+    mylist = PtrValue(d, type_tag='pypy_array1')
+    printer = gdb_pypy.RPyListPrinter.lookup(mylist, FakeGdb)
+    assert printer.to_string() == 'r[20, 21, 22] (len=3)'
+
+def test_pprint_null_list():
+    mylist = PtrValue({}, type_tag='pypy_list1')
+    printer = gdb_pypy.RPyListPrinter.lookup(mylist, FakeGdb)
+    assert printer.to_string() == 'r(null_list)'
+
+def test_pprint_null_array():
+    mylist = PtrValue({}, type_tag='pypy_array1')
+    printer = gdb_pypy.RPyListPrinter.lookup(mylist, FakeGdb)
+    assert printer.to_string() == 'r(null_array)'
+
 def test_typeidsmap():
     gdb = FakeGdb('', {exprmember(1): 111,
                        exprmember(2): 222,
diff --git a/rpython/annotator/annrpython.py b/rpython/annotator/annrpython.py
--- a/rpython/annotator/annrpython.py
+++ b/rpython/annotator/annrpython.py
@@ -10,7 +10,6 @@
     Variable, Constant, FunctionGraph, checkgraph)
 from rpython.translator import simplify, transform
 from rpython.annotator import model as annmodel, signature
-from rpython.annotator.argument import simple_args
 from rpython.annotator.bookkeeper import Bookkeeper
 from rpython.rtyper.normalizecalls import perform_normalizations
 
@@ -91,22 +90,14 @@
 
     def get_call_parameters(self, function, args_s, policy):
         desc = self.bookkeeper.getdesc(function)
-        args = simple_args(args_s)
-        result = []
-        def schedule(graph, inputcells):
-            result.append((graph, inputcells))
-            return annmodel.s_ImpossibleValue
-
         prevpolicy = self.policy
         self.policy = policy
         self.bookkeeper.enter(None)
         try:
-            desc.pycall(schedule, args, annmodel.s_ImpossibleValue)
+            return desc.get_call_parameters(args_s)
         finally:
             self.bookkeeper.leave()
             self.policy = prevpolicy
-        [(graph, inputcells)] = result
-        return graph, inputcells
 
     def annotate_helper(self, function, args_s, policy=None):
         if policy is None:
diff --git a/rpython/annotator/argument.py b/rpython/annotator/argument.py
--- a/rpython/annotator/argument.py
+++ b/rpython/annotator/argument.py
@@ -155,18 +155,6 @@
         keywords_w = [_kwds_w[key] for key in self.keywords]
         return ArgumentsForTranslation(args_w, dict(zip(self.keywords, keywords_w)))
 
-    @classmethod
-    def fromshape(cls, (shape_cnt, shape_keys, shape_star), data_w):
-        args_w = data_w[:shape_cnt]
-        p = end_keys = shape_cnt + len(shape_keys)
-        if shape_star:
-            w_star = data_w[p]
-            p += 1
-        else:
-            w_star = None
-        return cls(args_w, dict(zip(shape_keys, data_w[shape_cnt:end_keys])),
-                w_star)
-
 
 def rawshape(args):
     return args._rawshape()
diff --git a/rpython/annotator/description.py b/rpython/annotator/description.py
--- a/rpython/annotator/description.py
+++ b/rpython/annotator/description.py
@@ -1,13 +1,14 @@
 from __future__ import absolute_import
 import types
 from rpython.annotator.signature import (
-    enforce_signature_args, enforce_signature_return)
+    enforce_signature_args, enforce_signature_return, finish_type)
 from rpython.flowspace.model import Constant, FunctionGraph
 from rpython.flowspace.bytecode import cpython_code_signature
-from rpython.annotator.argument import rawshape, ArgErr
+from rpython.annotator.argument import rawshape, ArgErr, simple_args
 from rpython.tool.sourcetools import valid_identifier, func_with_new_name
 from rpython.tool.pairtype import extendabletype
-from rpython.annotator.model import AnnotatorError, SomeInteger, SomeString
+from rpython.annotator.model import (
+    AnnotatorError, SomeInteger, SomeString, s_ImpossibleValue)
 
 class CallFamily(object):
     """A family of Desc objects that could be called from common call sites.
@@ -75,7 +76,6 @@
         try:
             return self.attrs[attrname]
         except KeyError:
-            from rpython.annotator.model import s_ImpossibleValue
             return s_ImpossibleValue
 
     def set_s_value(self, attrname, s_value):
@@ -97,7 +97,6 @@
     # ClassAttrFamily is more precise: it is only about one attribut name.
 
     def __init__(self, desc):
-        from rpython.annotator.model import s_ImpossibleValue
         self.descs = {desc: True}
         self.read_locations = {}     # set of position_keys
         self.s_value = s_ImpossibleValue    # union of possible values
@@ -321,6 +320,24 @@
         result = unionof(result, s_previous_result)
         return result
 
+    def get_call_parameters(self, args_s):
+        args = simple_args(args_s)
+        inputcells = self.parse_arguments(args)
+        graph = self.specialize(inputcells)
+        assert isinstance(graph, FunctionGraph)
+        # if that graph has a different signature, we need to re-parse
+        # the arguments.
+        # recreate the args object because inputcells may have been changed
+        new_args = args.unmatch_signature(self.signature, inputcells)
+        inputcells = self.parse_arguments(new_args, graph)
+        signature = getattr(self.pyobj, '_signature_', None)
+        if signature:
+            s_result = finish_type(signature[1], self.bookkeeper, self.pyobj)
+            if s_result is not None:
+                self.bookkeeper.annotator.addpendingblock(
+                    graph, graph.returnblock, [s_result])
+        return graph, inputcells
+
     def bind_under(self, classdef, name):
         # XXX static methods
         return self.bookkeeper.getmethoddesc(self,
@@ -352,7 +369,6 @@
     @staticmethod
     def row_to_consider(descs, args, op):
         # see comments in CallFamily
-        from rpython.annotator.model import s_ImpossibleValue
         row = {}
         for desc in descs:
             def enlist(graph, ignore):
@@ -685,7 +701,6 @@
         # look up an attribute in the class
         cdesc = self.lookup(name)
         if cdesc is None:
-            from rpython.annotator.model import s_ImpossibleValue
             return s_ImpossibleValue
         else:
             # delegate to s_get_value to turn it into an annotation
@@ -999,7 +1014,6 @@
         try:
             value = self.read_attribute(attr)
         except AttributeError:
-            from rpython.annotator.model import s_ImpossibleValue
             return s_ImpossibleValue
         else:
             return self.bookkeeper.immutablevalue(value)
diff --git a/rpython/annotator/test/test_annrpython.py b/rpython/annotator/test/test_annrpython.py
--- a/rpython/annotator/test/test_annrpython.py
+++ b/rpython/annotator/test/test_annrpython.py
@@ -354,6 +354,25 @@
         assert isinstance(s, annmodel.SomeInteger)
         assert s.const == 3
 
+    def test_star_unpack_list(self):
+        def g():
+            pass
+        def f(l):
+            return g(*l)
+        a = self.RPythonAnnotator()
+        with py.test.raises(annmodel.AnnotatorError):
+            a.build_types(f, [[int]])
+
+    def test_star_unpack_and_keywords(self):
+        def g(a, b, c=0, d=0):
+            return a + b + c + d
+
+        def f(a, b):
+            return g(a, *(b,), d=5)
+        a = self.RPythonAnnotator()
+        s_result = a.build_types(f, [int, int])
+        assert isinstance(s_result, annmodel.SomeInteger)
+
     def test_pbc_attr_preserved_on_instance(self):
         a = self.RPythonAnnotator()
         s = a.build_types(snippet.preserve_pbc_attr_on_instance, [bool])
diff --git a/rpython/annotator/unaryop.py b/rpython/annotator/unaryop.py
--- a/rpython/annotator/unaryop.py
+++ b/rpython/annotator/unaryop.py
@@ -6,6 +6,7 @@
 
 from rpython.flowspace.operation import op
 from rpython.flowspace.model import const, Constant
+from rpython.flowspace.argument import CallSpec
 from rpython.annotator.model import (SomeObject, SomeInteger, SomeBool,
     SomeString, SomeChar, SomeList, SomeDict, SomeTuple, SomeImpossibleValue,
     SomeUnicodeCodePoint, SomeInstance, SomeBuiltin, SomeBuiltinMethod,
@@ -47,11 +48,35 @@
 
 @op.simple_call.register(SomeObject)
 def simple_call_SomeObject(annotator, func, *args):
-    return annotator.annotation(func).call(simple_args([annotator.annotation(arg) for arg in args]))
+    return annotator.annotation(func).call(
+        simple_args([annotator.annotation(arg) for arg in args]))
+
+@op.call_args.register_transform(SomeObject)
+def transform_varargs(annotator, v_func, v_shape, *data_v):
+    callspec = CallSpec.fromshape(v_shape.value, list(data_v))
+    v_vararg = callspec.w_stararg
+    if callspec.w_stararg:
+        s_vararg = annotator.annotation(callspec.w_stararg)
+        if not isinstance(s_vararg, SomeTuple):
+            raise AnnotatorError(
+                "Calls like f(..., *arg) require 'arg' to be a tuple")
+        n_items = len(s_vararg.items)
+        ops = [op.getitem(v_vararg, const(i)) for i in range(n_items)]
+        new_args = callspec.arguments_w + [hlop.result for hlop in ops]
+        if callspec.keywords:
+            newspec = CallSpec(new_args, callspec.keywords)
+            shape, data_v = newspec.flatten()
+            call_op = op.call_args(v_func, const(shape), *data_v)
+        else:
+            call_op = op.simple_call(v_func, *new_args)
+        ops.append(call_op)
+        return ops
+
 
 @op.call_args.register(SomeObject)
-def call_args(annotator, func, *args):
-    return annotator.annotation(func).call(complex_args([annotator.annotation(arg) for arg in args]))
+def call_args(annotator, func, *args_v):
+    callspec = complex_args([annotator.annotation(v_arg) for v_arg in args_v])
+    return annotator.annotation(func).call(callspec)
 
 class __extend__(SomeObject):
 
@@ -476,12 +501,18 @@
         return SomeInteger(nonneg=True)
 
     def method_strip(self, chr=None):
+        if chr is None and isinstance(self, SomeUnicodeString):
+            raise AnnotatorError("unicode.strip() with no arg is not RPython")
         return self.basestringclass(no_nul=self.no_nul)
 
     def method_lstrip(self, chr=None):
+        if chr is None and isinstance(self, SomeUnicodeString):
+            raise AnnotatorError("unicode.lstrip() with no arg is not RPython")
         return self.basestringclass(no_nul=self.no_nul)
 
     def method_rstrip(self, chr=None):
+        if chr is None and isinstance(self, SomeUnicodeString):
+            raise AnnotatorError("unicode.rstrip() with no arg is not RPython")
         return self.basestringclass(no_nul=self.no_nul)
 
     def method_join(self, s_list):
@@ -722,7 +753,7 @@
         if attr not in dct:
             continue
         obj = dct[attr]
-        if (not isinstance(obj, Constant) or 
+        if (not isinstance(obj, Constant) or
                 not isinstance(obj.value, property)):
             return
         result.append(getattr(obj.value, meth))
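
The new transform_varargs hook rewrites a call_args operation whose *-argument is a tuple of statically known length into explicit getitem operations followed by an ordinary call; this is also why `f(..., *arg)` now requires `arg` to be a tuple. A conceptual, source-level sketch of the rewrite is below; the real transform operates on flow-graph operations and CallSpec objects, not on Python source.

    # Source-level picture of what transform_varargs effectively does.
    def g(a, b, c=0, d=0):
        return a + b + c + d

    def f(a, b):
        # call with a *-argument; the star argument must be a tuple
        return g(a, *(b,), d=5)

    def f_rewritten(a, b):
        # what the annotator effectively turns the call into, because the
        # tuple's length (here 1) is known statically
        t = (b,)
        return g(a, t[0], d=5)

    assert f(2, 3) == f_rewritten(2, 3) == 2 + 3 + 0 + 5
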
diff --git a/rpython/doc/rpython.rst b/rpython/doc/rpython.rst
--- a/rpython/doc/rpython.rst
+++ b/rpython/doc/rpython.rst
@@ -59,7 +59,7 @@
 
 **exceptions**
 
-  fully supported
+  fully supported.
   see below `Exception rules`_ for restrictions on exceptions raised by built-in operations
 
 
@@ -92,6 +92,11 @@
   no variable-length tuples; use them to store or return pairs or n-tuples of
   values. Each combination of types for elements and length constitutes
   a separate and not mixable type.
+  
+  There is no general way to convert a list into a tuple, because the
+  length of the result would not be known statically.  (You can of course
+  do ``t = (lst[0], lst[1], lst[2])`` if you know that ``lst`` has got 3
+  items.)
 
 **lists**
 
@@ -136,9 +141,16 @@
 
 **functions**
 
-+ statically called functions may use defaults and a variable number of
-  arguments (which may be passed as a list instead of a tuple, so write code
-  that does not depend on it being a tuple).
++ function declarations may use defaults and ``*args``, but not
+  ``**keywords``.
+
++ function calls may be done to a known function or to a variable one,
+  or to a method.  You can call with positional and keyword arguments,
+  and you can pass a ``*args`` argument (it must be a tuple).
+
++ as explained above, tuples are not of a variable length.  If you need
+  to call a function with a dynamic number of arguments, refactor the
+  function itself to accept a single argument which is a regular list.
 
 + dynamic dispatch enforces the use of signatures that are equal for all
   possible called functions, or at least "compatible enough".  This
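
The reworked section spells out the argument-passing rules: declarations may use defaults and *args but not **keywords, a *-argument at a call site must be a tuple, and a genuinely dynamic number of arguments should be passed as a single regular list. An illustrative sketch of those rules follows; the function names are made up for the example.

    # Illustration of the documented call rules (names are invented).
    def g(a, b, c=0):
        return a + b + c

    def call_with_tuple(t):
        # fine: the *-argument is a tuple of statically known length
        return g(*t)

    def call_with_list(l):
        # rejected by the annotator:
        # "Calls like f(..., *arg) require 'arg' to be a tuple"
        return g(*l)

    def sum_list(values):
        # the documented workaround for a truly dynamic number of arguments:
        # pass them as one regular list
        total = 0
        for x in values:
            total += x
        return total

    assert call_with_tuple((1, 2, 3)) == sum_list([1, 2, 3]) == 6
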
diff --git a/rpython/flowspace/argument.py b/rpython/flowspace/argument.py
--- a/rpython/flowspace/argument.py
+++ b/rpython/flowspace/argument.py
@@ -111,3 +111,16 @@
             return self.arguments_w
         else:
             return self.arguments_w + [const(x) for x in self.w_stararg.value]
+
+    @classmethod
+    def fromshape(cls, (shape_cnt, shape_keys, shape_star), data_w):
+        args_w = data_w[:shape_cnt]
+        p = end_keys = shape_cnt + len(shape_keys)
+        if shape_star:
+            w_star = data_w[p]
+            p += 1
+        else:
+            w_star = None
+        return cls(args_w, dict(zip(shape_keys, data_w[shape_cnt:end_keys])),
+                w_star)
+
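
CallSpec.fromshape() (moved here from rpython/annotator/argument.py) decodes the flattened representation of a call: a shape tuple (positional count, keyword names, star flag) together with a flat list of values. A small worked example of that encoding, using dummy string values in place of the flow-space variables the real data_w would hold:

    # How a call shape pairs up with its flat value list, e.g. for
    #   f(x, y, key=z, *rest)
    shape_cnt, shape_keys, shape_star = 2, ('key',), True
    data_w = ['x', 'y', 'z', 'rest']   # positionals, keyword values, *-arg

    args_w = data_w[:shape_cnt]
    end_keys = shape_cnt + len(shape_keys)
    keywords = dict(zip(shape_keys, data_w[shape_cnt:end_keys]))
    w_star = data_w[end_keys] if shape_star else None

    assert args_w == ['x', 'y']
    assert keywords == {'key': 'z'}
    assert w_star == 'rest'
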
diff --git a/rpython/jit/codewriter/call.py b/rpython/jit/codewriter/call.py
--- a/rpython/jit/codewriter/call.py
+++ b/rpython/jit/codewriter/call.py
@@ -31,6 +31,8 @@
             self.rtyper = cpu.rtyper
             translator = self.rtyper.annotator.translator
             self.raise_analyzer = RaiseAnalyzer(translator)
+            self.raise_analyzer_ignore_memoryerror = RaiseAnalyzer(translator)
+            self.raise_analyzer_ignore_memoryerror.do_ignore_memory_error()
             self.readwrite_analyzer = ReadWriteAnalyzer(translator)
             self.virtualizable_analyzer = VirtualizableAnalyzer(translator)
             self.quasiimmut_analyzer = QuasiImmutAnalyzer(translator)
@@ -141,7 +143,7 @@
     def grab_initial_jitcodes(self):
         for jd in self.jitdrivers_sd:
             jd.mainjitcode = self.get_jitcode(jd.portal_graph)
-            jd.mainjitcode.is_portal = True
+            jd.mainjitcode.jitdriver_sd = jd
 
     def enum_pending_graphs(self):
         while self.unfinished_graphs:
@@ -260,11 +262,14 @@
             elif loopinvariant:
                 extraeffect = EffectInfo.EF_LOOPINVARIANT
             elif elidable:
-                if self._canraise(op):
+                cr = self._canraise(op)
+                if cr == "mem":
+                    extraeffect = EffectInfo.EF_ELIDABLE_OR_MEMORYERROR
+                elif cr:
                     extraeffect = EffectInfo.EF_ELIDABLE_CAN_RAISE
                 else:
                     extraeffect = EffectInfo.EF_ELIDABLE_CANNOT_RAISE
-            elif self._canraise(op):
+            elif self._canraise(op):   # True or "mem"
                 extraeffect = EffectInfo.EF_CAN_RAISE
             else:
                 extraeffect = EffectInfo.EF_CANNOT_RAISE
@@ -278,6 +283,7 @@
                 " effects): EF=%s" % (op, extraeffect))
         if elidable:
             if extraeffect not in (EffectInfo.EF_ELIDABLE_CANNOT_RAISE,
+                                   EffectInfo.EF_ELIDABLE_OR_MEMORYERROR,
                                    EffectInfo.EF_ELIDABLE_CAN_RAISE):
                 raise Exception(
                 "in operation %r: this calls an _elidable_function_,"
@@ -301,10 +307,17 @@
                                     effectinfo)
 
     def _canraise(self, op):
+        """Returns True, False, or "mem" to mean 'only MemoryError'."""
         if op.opname == 'pseudo_call_cannot_raise':
             return False
         try:
-            return self.raise_analyzer.can_raise(op)
+            if self.raise_analyzer.can_raise(op):
+                if self.raise_analyzer_ignore_memoryerror.can_raise(op):
+                    return True
+                else:
+                    return "mem"
+            else:
+                return False
         except lltype.DelayedPointer:
             return True  # if we need to look into the delayed ptr that is
                          # the portal, then it's certainly going to raise
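
_canraise() now distinguishes three outcomes: False, True, or "mem" for calls whose only possible exception is MemoryError, so that elidable calls in the last category get the new EF_ELIDABLE_OR_MEMORYERROR effect instead of the fully pessimistic EF_ELIDABLE_CAN_RAISE. A toy sketch of that mapping is below; the EF_* values mirror effectinfo.py, and the real code also covers the non-elidable branches.

    # Toy mapping from the tri-state _canraise() result to the elidable effects.
    EF_ELIDABLE_CANNOT_RAISE   = 0
    EF_ELIDABLE_OR_MEMORYERROR = 3
    EF_ELIDABLE_CAN_RAISE      = 4

    def choose_elidable_effect(canraise):
        """canraise is True, False, or "mem" (meaning: only MemoryError)."""
        if canraise == "mem":
            return EF_ELIDABLE_OR_MEMORYERROR
        elif canraise:
            return EF_ELIDABLE_CAN_RAISE
        else:
            return EF_ELIDABLE_CANNOT_RAISE

    assert choose_elidable_effect(False) == EF_ELIDABLE_CANNOT_RAISE
    assert choose_elidable_effect("mem") == EF_ELIDABLE_OR_MEMORYERROR
    assert choose_elidable_effect(True) == EF_ELIDABLE_CAN_RAISE
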
diff --git a/rpython/jit/codewriter/effectinfo.py b/rpython/jit/codewriter/effectinfo.py
--- a/rpython/jit/codewriter/effectinfo.py
+++ b/rpython/jit/codewriter/effectinfo.py
@@ -11,10 +11,11 @@
     EF_ELIDABLE_CANNOT_RAISE           = 0 #elidable function (and cannot raise)
     EF_LOOPINVARIANT                   = 1 #special: call it only once per loop
     EF_CANNOT_RAISE                    = 2 #a function which cannot raise
-    EF_ELIDABLE_CAN_RAISE              = 3 #elidable function (but can raise)
-    EF_CAN_RAISE                       = 4 #normal function (can raise)
-    EF_FORCES_VIRTUAL_OR_VIRTUALIZABLE = 5 #can raise and force virtualizables
-    EF_RANDOM_EFFECTS                  = 6 #can do whatever
+    EF_ELIDABLE_OR_MEMORYERROR         = 3 #elidable, can only raise MemoryError
+    EF_ELIDABLE_CAN_RAISE              = 4 #elidable function (but can raise)
+    EF_CAN_RAISE                       = 5 #normal function (can raise)
+    EF_FORCES_VIRTUAL_OR_VIRTUALIZABLE = 6 #can raise and force virtualizables
+    EF_RANDOM_EFFECTS                  = 7 #can do whatever
 
     # the 'oopspecindex' field is one of the following values:
     OS_NONE                     = 0    # normal case, no oopspec
@@ -126,20 +127,27 @@
         if extraeffect == EffectInfo.EF_RANDOM_EFFECTS:
             assert readonly_descrs_fields is None
             assert readonly_descrs_arrays is None
+            assert readonly_descrs_interiorfields is None
             assert write_descrs_fields is None
             assert write_descrs_arrays is None
+            assert write_descrs_interiorfields is None
         else:
             assert readonly_descrs_fields is not None
             assert readonly_descrs_arrays is not None
+            assert readonly_descrs_interiorfields is not None
             assert write_descrs_fields is not None
             assert write_descrs_arrays is not None
+            assert write_descrs_interiorfields is not None
         result = object.__new__(cls)
         result.readonly_descrs_fields = readonly_descrs_fields
         result.readonly_descrs_arrays = readonly_descrs_arrays
         result.readonly_descrs_interiorfields = readonly_descrs_interiorfields
         if extraeffect == EffectInfo.EF_LOOPINVARIANT or \
            extraeffect == EffectInfo.EF_ELIDABLE_CANNOT_RAISE or \
+           extraeffect == EffectInfo.EF_ELIDABLE_OR_MEMORYERROR or \

