[pypy-commit] pypy python-numpy: merge default into branch

mattip noreply at buildbot.pypy.org
Fri Feb 8 09:50:32 CET 2013


Author: Matti Picus <matti.picus at gmail.com>
Branch: python-numpy
Changeset: r60960:dbdfabb6c600
Date: 2013-02-08 10:34 +0200
http://bitbucket.org/pypy/pypy/changeset/dbdfabb6c600/

Log:	merge default into branch

diff too long, truncating to 2000 out of 775068 lines

diff --git a/.hgignore b/.hgignore
--- a/.hgignore
+++ b/.hgignore
@@ -85,3 +85,4 @@
 ^compiled
 ^.git/
 ^release/
+^rpython/_cache$
diff --git a/dotviewer/test/test_translator.py b/dotviewer/test/test_translator.py
--- a/dotviewer/test/test_translator.py
+++ b/dotviewer/test/test_translator.py
@@ -21,7 +21,7 @@
 
 
 def test_annotated():
-    from pypy.translator.interactive import Translation
+    from rpython.translator.interactive import Translation
     t = Translation(is_prime)
     t.annotate([int])
     t.viewcg()
diff --git a/lib-python/2.7/collections.py b/lib-python/2.7/collections.py
--- a/lib-python/2.7/collections.py
+++ b/lib-python/2.7/collections.py
@@ -6,11 +6,12 @@
 __all__ += _abcoll.__all__
 
 from _collections import deque, defaultdict
-from operator import itemgetter as _itemgetter
+from operator import itemgetter as _itemgetter, eq as _eq
 from keyword import iskeyword as _iskeyword
 import sys as _sys
 import heapq as _heapq
 from itertools import repeat as _repeat, chain as _chain, starmap as _starmap
+from itertools import imap as _imap
 
 try:
     from thread import get_ident as _get_ident
@@ -50,49 +51,45 @@
             self.__map = {}
         self.__update(*args, **kwds)
 
-    def __setitem__(self, key, value, PREV=0, NEXT=1, dict_setitem=dict.__setitem__):
+    def __setitem__(self, key, value, dict_setitem=dict.__setitem__):
         'od.__setitem__(i, y) <==> od[i]=y'
         # Setting a new item creates a new link at the end of the linked list,
         # and the inherited dictionary is updated with the new key/value pair.
         if key not in self:
             root = self.__root
-            last = root[PREV]
-            last[NEXT] = root[PREV] = self.__map[key] = [last, root, key]
-        dict_setitem(self, key, value)
+            last = root[0]
+            last[1] = root[0] = self.__map[key] = [last, root, key]
+        return dict_setitem(self, key, value)
 
-    def __delitem__(self, key, PREV=0, NEXT=1, dict_delitem=dict.__delitem__):
+    def __delitem__(self, key, dict_delitem=dict.__delitem__):
         'od.__delitem__(y) <==> del od[y]'
         # Deleting an existing item uses self.__map to find the link which gets
         # removed by updating the links in the predecessor and successor nodes.
         dict_delitem(self, key)
         link_prev, link_next, key = self.__map.pop(key)
-        link_prev[NEXT] = link_next
-        link_next[PREV] = link_prev
+        link_prev[1] = link_next                        # update link_prev[NEXT]
+        link_next[0] = link_prev                        # update link_next[PREV]
 
     def __iter__(self):
         'od.__iter__() <==> iter(od)'
         # Traverse the linked list in order.
-        NEXT, KEY = 1, 2
         root = self.__root
-        curr = root[NEXT]
+        curr = root[1]                                  # start at the first node
         while curr is not root:
-            yield curr[KEY]
-            curr = curr[NEXT]
+            yield curr[2]                               # yield the curr[KEY]
+            curr = curr[1]                              # move to next node
 
     def __reversed__(self):
         'od.__reversed__() <==> reversed(od)'
         # Traverse the linked list in reverse order.
-        PREV, KEY = 0, 2
         root = self.__root
-        curr = root[PREV]
+        curr = root[0]                                  # start at the last node
         while curr is not root:
-            yield curr[KEY]
-            curr = curr[PREV]
+            yield curr[2]                               # yield the curr[KEY]
+            curr = curr[0]                              # move to previous node
 
     def clear(self):
         'od.clear() -> None.  Remove all items from od.'
-        for node in self.__map.itervalues():
-            del node[:]
         root = self.__root
         root[:] = [root, root, None]
         self.__map.clear()
@@ -208,7 +205,7 @@
 
         '''
         if isinstance(other, OrderedDict):
-            return len(self)==len(other) and self.items() == other.items()
+            return dict.__eq__(self, other) and all(_imap(_eq, self, other))
         return dict.__eq__(self, other)
 
     def __ne__(self, other):
@@ -298,7 +295,7 @@
         _fields = %(field_names)r \n
         def __new__(_cls, %(argtxt)s):
             'Create new instance of %(typename)s(%(argtxt)s)'
-            return _tuple.__new__(_cls, (%(argtxt)s)) \n
+            return tuple.__new__(_cls, (%(argtxt)s)) \n
         @classmethod
         def _make(cls, iterable, new=tuple.__new__, len=len):
             'Make a new %(typename)s object from a sequence or iterable'
@@ -323,14 +320,14 @@
             'Return self as a plain tuple.  Used by copy and pickle.'
             return tuple(self) \n\n''' % locals()
     for i, name in enumerate(field_names):
-        template += "        %s = _property(_itemgetter(%d), doc='Alias for field number %d')\n" % (name, i, i)
+        template += "        %s = property(lambda self: self[%d], doc='Alias for field number %d')\n" % (name, i, i)
     if verbose:
         print template
 
     # Execute the template string in a temporary namespace and
     # support tracing utilities by setting a value for frame.f_globals['__name__']
-    namespace = dict(_itemgetter=_itemgetter, __name__='namedtuple_%s' % typename,
-                     OrderedDict=OrderedDict, _property=property, _tuple=tuple)
+    namespace = {'__name__': 'namedtuple_%s' % typename,
+                 'OrderedDict': OrderedDict}
     try:
         exec template in namespace
     except SyntaxError, e:
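
The collections.py hunk above keeps the same sentinel-based doubly linked list but replaces the PREV/NEXT default-argument trick with literal indexes 0 (PREV), 1 (NEXT) and 2 (KEY). A minimal standalone sketch of that node layout, with illustrative names that are not part of the patch:

    root = []
    root[:] = [root, root, None]          # sentinel node: [PREV, NEXT, KEY]
    node_map = {}

    def append_key(key):
        last = root[0]                    # node currently just before the sentinel
        node_map[key] = [last, root, key] # new node: PREV=last, NEXT=root, KEY=key
        last[1] = root[0] = node_map[key] # splice it in as the new last node

    def iter_keys():
        curr = root[1]                    # root[1] is NEXT: the first real node
        while curr is not root:
            yield curr[2]                 # curr[2] is the stored KEY
            curr = curr[1]                # follow NEXT

    for k in 'abc':
        append_key(k)
    assert list(iter_keys()) == ['a', 'b', 'c']
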
diff --git a/lib-python/2.7/inspect.py b/lib-python/2.7/inspect.py
--- a/lib-python/2.7/inspect.py
+++ b/lib-python/2.7/inspect.py
@@ -960,7 +960,7 @@
                 raise TypeError('%s() takes exactly 0 arguments '
                                 '(%d given)' % (f_name, num_total))
         else:
-            raise TypeError('%s() takes no argument (%d given)' %
+            raise TypeError('%s() takes no arguments (%d given)' %
                             (f_name, num_total))
     for arg in args:
         if isinstance(arg, str) and arg in named:
diff --git a/lib-python/2.7/site.py b/lib-python/2.7/site.py
--- a/lib-python/2.7/site.py
+++ b/lib-python/2.7/site.py
@@ -75,6 +75,7 @@
 USER_SITE = None
 USER_BASE = None
 
+
 def makepath(*paths):
     dir = os.path.join(*paths)
     try:
diff --git a/lib-python/2.7/sysconfig.py b/lib-python/2.7/sysconfig.py
--- a/lib-python/2.7/sysconfig.py
+++ b/lib-python/2.7/sysconfig.py
@@ -27,10 +27,10 @@
         'data'   : '{base}',
         },
     'pypy': {
-        'stdlib': '{base}/lib-python',
-        'platstdlib': '{base}/lib-python',
-        'purelib': '{base}/lib-python',
-        'platlib': '{base}/lib-python',
+        'stdlib': '{base}/lib-python/{py_version_short}',
+        'platstdlib': '{base}/lib-python/{py_version_short}',
+        'purelib': '{base}/lib-python/{py_version_short}',
+        'platlib': '{base}/lib-python/{py_version_short}',
         'include': '{base}/include',
         'platinclude': '{base}/include',
         'scripts': '{base}/bin',
diff --git a/lib-python/2.7/test/test_capi.py b/lib-python/2.7/test/test_capi.py
--- a/lib-python/2.7/test/test_capi.py
+++ b/lib-python/2.7/test/test_capi.py
@@ -13,6 +13,17 @@
     threading = None
 import _testcapi
 
+skips = []
+if test_support.check_impl_detail(pypy=True):
+    skips += [
+            'test_broken_memoryview',
+            'test_capsule',
+            'test_lazy_hash_inheritance',
+            'test_widechar',
+            'TestThreadState',
+            'TestPendingCalls',
+            ]
+
 @unittest.skipUnless(threading, 'Threading required for this test.')
 class TestPendingCalls(unittest.TestCase):
 
@@ -99,7 +110,7 @@
 def test_main():
 
     for name in dir(_testcapi):
-        if name.startswith('test_'):
+        if name.startswith('test_') and name not in skips:
             test = getattr(_testcapi, name)
             if test_support.verbose:
                 print "internal", name
@@ -126,7 +137,7 @@
             raise test_support.TestFailed, \
                   "Couldn't find main thread correctly in the list"
 
-    if threading:
+    if threading and 'TestThreadState' not in skips:
         import thread
         import time
         TestThreadState()
@@ -134,7 +145,8 @@
         t.start()
         t.join()
 
-    test_support.run_unittest(TestPendingCalls)
+    if 'TestPendingCalls' not in skips:
+        test_support.run_unittest(TestPendingCalls)
 
 if __name__ == "__main__":
     test_main()
diff --git a/lib-python/2.7/test/test_iterlen.py b/lib-python/2.7/test/test_iterlen.py
--- a/lib-python/2.7/test/test_iterlen.py
+++ b/lib-python/2.7/test/test_iterlen.py
@@ -94,7 +94,11 @@
 
     def test_no_len_for_infinite_repeat(self):
         # The repeat() object can also be infinite
-        self.assertRaises(TypeError, len, repeat(None))
+        if test_support.check_impl_detail(pypy=True):
+            # 3.4 (PEP 424) behavior
+            self.assertEqual(len(repeat(None)), NotImplemented)
+        else:
+            self.assertRaises(TypeError, len, repeat(None))
 
 class TestXrange(TestInvariantWithoutMutations):
 
@@ -230,6 +234,7 @@
         self.assertRaises(RuntimeError, b.extend, BadLen())
         self.assertRaises(RuntimeError, b.extend, BadLengthHint())
 
+    @test_support.impl_detail("PEP 424 disallows None results", pypy=False)
     def test_invalid_hint(self):
         # Make sure an invalid result doesn't muck-up the works
         self.assertEqual(list(NoneLengthHint()), list(range(10)))
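
The two test_iterlen.py changes above track PEP 424: on PyPy the test now expects len(repeat(None)) to surface the advisory hint machinery (NotImplemented) instead of raising. A rough, self-contained illustration of the __length_hint__ protocol itself; the class name is made up:

    class Hinted(object):
        def __init__(self, n):
            self.n = n
            self.hint_calls = 0
        def __iter__(self):
            return iter(range(self.n))
        def __length_hint__(self):        # advisory size estimate (PEP 424)
            self.hint_calls += 1
            return self.n

    h = Hinted(5)
    print(list(h))           # [0, 1, 2, 3, 4]
    print(h.hint_calls)      # usually 1: list() may pre-size using the hint
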
diff --git a/lib-python/2.7/test/test_itertools.py b/lib-python/2.7/test/test_itertools.py
--- a/lib-python/2.7/test/test_itertools.py
+++ b/lib-python/2.7/test/test_itertools.py
@@ -533,11 +533,11 @@
         self.assertEqual(list(izip()), zip())
         self.assertRaises(TypeError, izip, 3)
         self.assertRaises(TypeError, izip, range(3), 3)
-
         self.assertEqual([tuple(list(pair)) for pair in izip('abc', 'def')],
                          zip('abc', 'def'))
         self.assertEqual([pair for pair in izip('abc', 'def')],
                          zip('abc', 'def'))
+
     @test_support.impl_detail("tuple reuse is specific to CPython")
     def test_izip_tuple_reuse(self):
         ids = map(id, izip('abc', 'def'))
@@ -588,6 +588,7 @@
                          zip('abc', 'def'))
         self.assertEqual([pair for pair in izip_longest('abc', 'def')],
                          zip('abc', 'def'))
+
     @test_support.impl_detail("tuple reuse is specific to CPython")
     def test_izip_longest_tuple_reuse(self):
         ids = map(id, izip_longest('abc', 'def'))
diff --git a/lib-python/2.7/test/test_modulefinder.py b/lib-python/2.7/test/test_modulefinder.py
--- a/lib-python/2.7/test/test_modulefinder.py
+++ b/lib-python/2.7/test/test_modulefinder.py
@@ -16,7 +16,7 @@
 # library.
 
 TEST_DIR = tempfile.mkdtemp()
-TEST_PATH = [TEST_DIR, os.path.dirname(__future__.__file__)]
+TEST_PATH = [TEST_DIR, os.path.dirname(tempfile.__file__)]
 
 # Each test description is a list of 5 items:
 #
diff --git a/lib-python/2.7/test/test_multiprocessing.py b/lib-python/2.7/test/test_multiprocessing.py
--- a/lib-python/2.7/test/test_multiprocessing.py
+++ b/lib-python/2.7/test/test_multiprocessing.py
@@ -1343,7 +1343,7 @@
 
         # Because we are using xmlrpclib for serialization instead of
         # pickle this will cause a serialization error.
-        self.assertRaises(Exception, queue.put, time.sleep)
+        self.assertRaises(Exception, queue.put, object)
 
         # Make queue finalizer run before the server is stopped
         del queue
@@ -1800,9 +1800,9 @@
         if not gc.isenabled():
             gc.enable()
             self.addCleanup(gc.disable)
-        thresholds = gc.get_threshold()
-        self.addCleanup(gc.set_threshold, *thresholds)
-        gc.set_threshold(10)
+        #thresholds = gc.get_threshold()
+        #self.addCleanup(gc.set_threshold, *thresholds)
+        #gc.set_threshold(10)
 
         # perform numerous block allocations, with cyclic references to make
         # sure objects are collected asynchronously by the gc
@@ -1865,6 +1865,7 @@
     def test_synchronize(self):
         self.test_sharedctypes(lock=True)
 
+    @unittest.skipUnless(test_support.check_impl_detail(pypy=False), "pypy ctypes differences")
     def test_copy(self):
         foo = _Foo(2, 5.0)
         bar = copy(foo)
diff --git a/lib-python/2.7/test/test_support.py b/lib-python/2.7/test/test_support.py
--- a/lib-python/2.7/test/test_support.py
+++ b/lib-python/2.7/test/test_support.py
@@ -1085,7 +1085,6 @@
     else:
         runner = BasicTestRunner()
 
-
     result = runner.run(suite)
     if not result.wasSuccessful():
         if len(result.errors) == 1 and not result.failures:
diff --git a/lib-python/2.7/test/test_zipfile.py b/lib-python/2.7/test/test_zipfile.py
--- a/lib-python/2.7/test/test_zipfile.py
+++ b/lib-python/2.7/test/test_zipfile.py
@@ -1213,6 +1213,17 @@
             self.assertEqual(data1, '1'*FIXEDTEST_SIZE)
             self.assertEqual(data2, '2'*FIXEDTEST_SIZE)
 
+    def test_many_opens(self):
+        # Verify that read() and open() promptly close the file descriptor,
+        # and don't rely on the garbage collector to free resources.
+        with zipfile.ZipFile(TESTFN2, mode="r") as zipf:
+            for x in range(100):
+                zipf.read('ones')
+                with zipf.open('ones') as zopen1:
+                    pass
+        for x in range(10):
+            self.assertLess(open('/dev/null').fileno(), 100)
+
     def tearDown(self):
         unlink(TESTFN2)
 
diff --git a/lib-python/2.7/zipfile.py b/lib-python/2.7/zipfile.py
--- a/lib-python/2.7/zipfile.py
+++ b/lib-python/2.7/zipfile.py
@@ -475,9 +475,11 @@
     # Search for universal newlines or line chunks.
     PATTERN = re.compile(r'^(?P<chunk>[^\r\n]+)|(?P<newline>\n|\r\n?)')
 
-    def __init__(self, fileobj, mode, zipinfo, decrypter=None):
+    def __init__(self, fileobj, mode, zipinfo, decrypter=None,
+                 close_fileobj=False):
         self._fileobj = fileobj
         self._decrypter = decrypter
+        self._close_fileobj = close_fileobj
 
         self._compress_type = zipinfo.compress_type
         self._compress_size = zipinfo.compress_size
@@ -649,6 +651,12 @@
         self._offset += len(data)
         return data
 
+    def close(self):
+        try:
+            if self._close_fileobj:
+                self._fileobj.close()
+        finally:
+            super(ZipExtFile, self).close()
 
 
 class ZipFile:
@@ -866,7 +874,8 @@
 
     def read(self, name, pwd=None):
         """Return file bytes (as a string) for name."""
-        return self.open(name, "r", pwd).read()
+        with self.open(name, "r", pwd) as fp:
+            return fp.read()
 
     def open(self, name, mode="r", pwd=None):
         """Return file-like object for 'name'."""
@@ -889,8 +898,12 @@
             zinfo = name
         else:
             # Get info object for name
-            zinfo = self.getinfo(name)
-
+            try:
+                zinfo = self.getinfo(name)
+            except KeyError:
+                if not self._filePassed:
+                    zef_file.close()
+                raise
         zef_file.seek(zinfo.header_offset, 0)
 
         # Skip the file header:
@@ -904,6 +917,8 @@
             zef_file.read(fheader[_FH_EXTRA_FIELD_LENGTH])
 
         if fname != zinfo.orig_filename:
+            if not self._filePassed:
+                zef_file.close()
             raise BadZipfile, \
                       'File name in directory "%s" and header "%s" differ.' % (
                           zinfo.orig_filename, fname)
@@ -915,6 +930,8 @@
             if not pwd:
                 pwd = self.pwd
             if not pwd:
+                if not self._filePassed:
+                    zef_file.close()
                 raise RuntimeError, "File %s is encrypted, " \
                       "password required for extraction" % name
 
@@ -933,9 +950,12 @@
                 # compare against the CRC otherwise
                 check_byte = (zinfo.CRC >> 24) & 0xff
             if ord(h[11]) != check_byte:
+                if not self._filePassed:
+                    zef_file.close()
                 raise RuntimeError("Bad password for file", name)
 
-        return  ZipExtFile(zef_file, mode, zinfo, zd)
+        return ZipExtFile(zef_file, mode, zinfo, zd,
+                          close_fileobj=not self._filePassed)
 
     def extract(self, member, path=None, pwd=None):
         """Extract a member from the archive to the current working directory,
@@ -993,7 +1013,7 @@
             return targetpath
 
         source = self.open(member, pwd=pwd)
-        target = file(targetpath, "wb")
+        target = open(targetpath, "wb")
         shutil.copyfileobj(source, target)
         source.close()
         target.close()
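
The zipfile.py changes above give ZipExtFile optional ownership of the underlying file object: close_fileobj is set exactly when ZipFile.open() opened the handle itself (not self._filePassed), so read() can use a with-block and descriptors are released promptly, which is what test_many_opens above exercises. A minimal sketch of that ownership pattern, with an illustrative wrapper name:

    import io

    class OwningReader(io.BufferedIOBase):
        """Close the wrapped file object only if this wrapper owns it."""
        def __init__(self, fileobj, close_fileobj=False):
            self._fileobj = fileobj
            self._close_fileobj = close_fileobj

        def read(self, n=-1):
            return self._fileobj.read(n)

        def close(self):
            try:
                if self._close_fileobj:
                    self._fileobj.close()     # release the descriptor we own
            finally:
                super(OwningReader, self).close()

    with OwningReader(io.BytesIO(b'data'), close_fileobj=True) as f:
        assert f.read() == b'data'
    # with close_fileobj=False, a caller-supplied file object stays open
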
diff --git a/lib-python/conftest.py b/lib-python/conftest.py
--- a/lib-python/conftest.py
+++ b/lib-python/conftest.py
@@ -17,8 +17,8 @@
 from pypy.conftest import option as pypy_option 
 
 from pypy.tool.pytest import appsupport 
-from pypy.tool.pytest.confpath import pypydir, testdir, testresultdir
-from pypy.config.parse import parse_info
+from pypy.tool.pytest.confpath import pypydir, rpythondir, testdir, testresultdir
+from rpython.config.parse import parse_info
 
 pytest_plugins = "resultlog",
 rsyncdirs = ['.', '../pypy/']
@@ -40,7 +40,7 @@
                     dest="unittest_filter",  help="Similar to -k, XXX")
 
 def gettimeout(timeout): 
-    from test import pystone
+    from rpython.translator.test import rpystone
     if timeout.endswith('mp'): 
         megapystone = float(timeout[:-2])
         t, stone = pystone.Proc0(10000)
@@ -61,7 +61,7 @@
                                  usemodules = '',
                                  skip=None): 
         self.basename = basename 
-        self._usemodules = usemodules.split() + ['signal']
+        self._usemodules = usemodules.split() + ['signal', 'rctime', 'itertools', '_socket']
         self._compiler = compiler 
         self.core = core
         self.skip = skip
@@ -93,63 +93,57 @@
                 m.test_main()
         ''' % locals())
 
-if sys.platform == 'win32':
-    skip_win32 = "Not supported on Windows"
-    only_win32 = False
-else:
-    skip_win32 = False
-    only_win32 = "Only on Windows"
-
 testmap = [
     RegrTest('test___all__.py', core=True),
     RegrTest('test___future__.py', core=True),
-    RegrTest('test__locale.py', skip=skip_win32),
+    RegrTest('test__locale.py', usemodules='_locale'),
     RegrTest('test_abc.py'),
     RegrTest('test_abstract_numbers.py'),
-    RegrTest('test_aepack.py', skip=True),
+    RegrTest('test_aepack.py'),
     RegrTest('test_aifc.py'),
-    RegrTest('test_argparse.py'),
-    RegrTest('test_al.py', skip=True),
-    RegrTest('test_ast.py', core=True),
-    RegrTest('test_anydbm.py'),
-    RegrTest('test_applesingle.py', skip=True),
-    RegrTest('test_array.py', core=True, usemodules='struct array'),
+    RegrTest('test_al.py'),
+    RegrTest('test_anydbm.py', usemodules='struct'),
+    RegrTest('test_applesingle.py'),
+    RegrTest('test_argparse.py', usemodules='binascii'),
+    RegrTest('test_array.py', core=True, usemodules='struct array binascii'),
     RegrTest('test_ascii_formatd.py'),
-    RegrTest('test_asynchat.py', usemodules='thread'),
-    RegrTest('test_asyncore.py'),
+    RegrTest('test_ast.py', core=True, usemodules='struct'),
+    RegrTest('test_asynchat.py', usemodules='select fcntl'),
+    RegrTest('test_asyncore.py', usemodules='select fcntl'),
     RegrTest('test_atexit.py', core=True),
-    RegrTest('test_audioop.py', skip=True),
+    RegrTest('test_audioop.py', skip="unsupported extension module"),
     RegrTest('test_augassign.py', core=True),
-    RegrTest('test_base64.py'),
+    RegrTest('test_base64.py', usemodules='struct'),
     RegrTest('test_bastion.py'),
+    RegrTest('test_bigaddrspace.py'),
+    RegrTest('test_bigmem.py'),
     RegrTest('test_binascii.py', usemodules='binascii'),
-
     RegrTest('test_binhex.py'),
-
     RegrTest('test_binop.py', core=True),
     RegrTest('test_bisect.py', core=True, usemodules='_bisect'),
     RegrTest('test_bool.py', core=True),
-    RegrTest('test_bsddb.py', skip="unsupported extension module"),
-    RegrTest('test_bsddb185.py', skip="unsupported extension module"),
-    RegrTest('test_bsddb3.py', skip="unsupported extension module"),
+    RegrTest('test_bsddb.py'),
+    RegrTest('test_bsddb185.py'),
+    RegrTest('test_bsddb3.py'),
     RegrTest('test_buffer.py'),
     RegrTest('test_bufio.py', core=True),
-    RegrTest('test_builtin.py', core=True),
-    RegrTest('test_bytes.py'),
+    RegrTest('test_builtin.py', core=True, usemodules='binascii'),
+    RegrTest('test_bytes.py', usemodules='struct binascii'),
     RegrTest('test_bz2.py', usemodules='bz2'),
     RegrTest('test_calendar.py'),
     RegrTest('test_call.py', core=True),
-    RegrTest('test_capi.py', skip="not applicable"),
-    RegrTest('test_cd.py', skip=True),
+    RegrTest('test_capi.py'),
+    RegrTest('test_cd.py'),
     RegrTest('test_cfgparser.py'),
-
     RegrTest('test_cgi.py'),
     RegrTest('test_charmapcodec.py', core=True),
-    RegrTest('test_cl.py', skip=True),
+    RegrTest('test_cl.py'),
     RegrTest('test_class.py', core=True),
     RegrTest('test_cmath.py', core=True),
     RegrTest('test_cmd.py'),
+    RegrTest('test_cmd_line.py'),
     RegrTest('test_cmd_line_script.py'),
+    RegrTest('test_code.py', core=True),
     RegrTest('test_codeccallbacks.py', core=True),
     RegrTest('test_codecencodings_cn.py', usemodules='_multibytecodec'),
     RegrTest('test_codecencodings_hk.py', usemodules='_multibytecodec'),
@@ -157,7 +151,6 @@
     RegrTest('test_codecencodings_jp.py', usemodules='_multibytecodec'),
     RegrTest('test_codecencodings_kr.py', usemodules='_multibytecodec'),
     RegrTest('test_codecencodings_tw.py', usemodules='_multibytecodec'),
-
     RegrTest('test_codecmaps_cn.py', usemodules='_multibytecodec'),
     RegrTest('test_codecmaps_hk.py', usemodules='_multibytecodec'),
     RegrTest('test_codecmaps_jp.py', usemodules='_multibytecodec'),
@@ -165,8 +158,9 @@
     RegrTest('test_codecmaps_tw.py', usemodules='_multibytecodec'),
     RegrTest('test_codecs.py', core=True, usemodules='_multibytecodec'),
     RegrTest('test_codeop.py', core=True),
+    RegrTest('test_coding.py', core=True),
     RegrTest('test_coercion.py', core=True),
-    RegrTest('test_collections.py'),
+    RegrTest('test_collections.py', usemodules='binascii struct'),
     RegrTest('test_colorsys.py'),
     RegrTest('test_commands.py'),
     RegrTest('test_compare.py', core=True),
@@ -174,22 +168,24 @@
     RegrTest('test_compileall.py'),
     RegrTest('test_compiler.py', core=False, skip="slowly deprecating compiler"),
     RegrTest('test_complex.py', core=True),
-
+    RegrTest('test_complex_args.py'),
     RegrTest('test_contains.py', core=True),
+    RegrTest('test_contextlib.py', usemodules="thread"),
     RegrTest('test_cookie.py'),
     RegrTest('test_cookielib.py'),
     RegrTest('test_copy.py', core=True),
     RegrTest('test_copy_reg.py', core=True),
     RegrTest('test_cpickle.py', core=True),
-    RegrTest('test_cprofile.py'), 
-    RegrTest('test_crypt.py', usemodules='crypt', skip=skip_win32),
+    RegrTest('test_cprofile.py'),
+    RegrTest('test_crypt.py', usemodules='crypt'),
     RegrTest('test_csv.py', usemodules='_csv'),
-
-    RegrTest('test_curses.py', skip="unsupported extension module"),
-    RegrTest('test_datetime.py'),
+    RegrTest('test_ctypes.py', usemodules="_rawffi thread"),
+    RegrTest('test_curses.py'),
+    RegrTest('test_datetime.py', usemodules='binascii struct'),
     RegrTest('test_dbm.py'),
     RegrTest('test_decimal.py'),
     RegrTest('test_decorators.py', core=True),
+    RegrTest('test_defaultdict.py', usemodules='_collections'),
     RegrTest('test_deque.py', core=True, usemodules='_collections'),
     RegrTest('test_descr.py', core=True, usemodules='_weakref'),
     RegrTest('test_descrtut.py', core=True),
@@ -200,7 +196,7 @@
     RegrTest('test_dircache.py', core=True),
     RegrTest('test_dis.py'),
     RegrTest('test_distutils.py', skip=True),
-    RegrTest('test_dl.py', skip=True),
+    RegrTest('test_dl.py'),
     RegrTest('test_doctest.py', usemodules="thread"),
     RegrTest('test_doctest2.py'),
     RegrTest('test_docxmlrpc.py'),
@@ -208,20 +204,22 @@
     RegrTest('test_dummy_thread.py', core=True),
     RegrTest('test_dummy_threading.py', core=True),
     RegrTest('test_email.py'),
-
     RegrTest('test_email_codecs.py'),
+    RegrTest('test_email_renamed.py'),
     RegrTest('test_enumerate.py', core=True),
     RegrTest('test_eof.py', core=True),
     RegrTest('test_epoll.py'),
     RegrTest('test_errno.py', usemodules="errno"),
+    RegrTest('test_exception_variations.py'),
     RegrTest('test_exceptions.py', core=True),
     RegrTest('test_extcall.py', core=True),
-    RegrTest('test_fcntl.py', usemodules='fcntl', skip=skip_win32),
+    RegrTest('test_fcntl.py', usemodules='fcntl'),
     RegrTest('test_file.py', usemodules="posix", core=True),
     RegrTest('test_file2k.py', usemodules="posix", core=True),
     RegrTest('test_filecmp.py', core=True),
     RegrTest('test_fileinput.py', core=True),
     RegrTest('test_fileio.py'),
+    RegrTest('test_float.py', core=True),
     RegrTest('test_fnmatch.py', core=True),
     RegrTest('test_fork1.py', usemodules="thread"),
     RegrTest('test_format.py', core=True),
@@ -230,6 +228,7 @@
     RegrTest('test_frozen.py', skip="unsupported extension module"),
     RegrTest('test_ftplib.py'),
     RegrTest('test_funcattrs.py', core=True),
+    RegrTest('test_functools.py'),
     RegrTest('test_future.py', core=True),
     RegrTest('test_future1.py', core=True),
     RegrTest('test_future2.py', core=True),
@@ -239,41 +238,37 @@
     RegrTest('test_future_builtins.py'),
     RegrTest('test_gc.py', usemodules='_weakref', skip="implementation detail"),
     RegrTest('test_gdb.py', skip="not applicable"),
-    RegrTest('test_gdbm.py', skip="unsupported extension module"),
+    RegrTest('test_gdbm.py'),
     RegrTest('test_generators.py', core=True, usemodules='thread _weakref'),
     RegrTest('test_genericpath.py'),
     RegrTest('test_genexps.py', core=True, usemodules='_weakref'),
-    RegrTest('test_getargs.py', skip="unsupported extension module"),
-    RegrTest('test_getargs2.py', skip="unsupported extension module"),
-
+    RegrTest('test_getargs.py'),
+    RegrTest('test_getargs2.py', usemodules='binascii', skip=True),
     RegrTest('test_getopt.py', core=True),
     RegrTest('test_gettext.py'),
-
-    RegrTest('test_gl.py', skip=True),
+    RegrTest('test_gl.py'),
     RegrTest('test_glob.py', core=True),
     RegrTest('test_global.py', core=True),
     RegrTest('test_grammar.py', core=True),
-    RegrTest('test_grp.py', skip=skip_win32),
-
-    RegrTest('test_gzip.py'),
+    RegrTest('test_grp.py'),
+    RegrTest('test_gzip.py', usemodules='zlib'),
     RegrTest('test_hash.py', core=True),
     RegrTest('test_hashlib.py', core=True),
-    
     RegrTest('test_heapq.py', core=True),
     RegrTest('test_hmac.py'),
     RegrTest('test_hotshot.py', skip="unsupported extension module"),
-
     RegrTest('test_htmllib.py'),
     RegrTest('test_htmlparser.py'),
     RegrTest('test_httplib.py'),
     RegrTest('test_httpservers.py'),
-    RegrTest('test_imageop.py', skip="unsupported extension module"),
+    RegrTest('test_imageop.py'),
     RegrTest('test_imaplib.py'),
-    RegrTest('test_imgfile.py', skip="unsupported extension module"),
+    RegrTest('test_imgfile.py'),
     RegrTest('test_imp.py', core=True, usemodules='thread'),
     RegrTest('test_import.py', core=True),
     RegrTest('test_importhooks.py', core=True),
     RegrTest('test_importlib.py'),
+    RegrTest('test_index.py'),
     RegrTest('test_inspect.py'),
     RegrTest('test_int.py', core=True),
     RegrTest('test_int_literal.py', core=True),
@@ -281,7 +276,7 @@
     RegrTest('test_ioctl.py'),
     RegrTest('test_isinstance.py', core=True),
     RegrTest('test_iter.py', core=True),
-    RegrTest('test_iterlen.py', skip="undocumented internal API behavior __length_hint__"),
+    RegrTest('test_iterlen.py', core=True, usemodules="_collections itertools"),
     RegrTest('test_itertools.py', core=True, usemodules="itertools struct"),
     RegrTest('test_json.py'),
     RegrTest('test_kqueue.py'),
@@ -296,7 +291,7 @@
     RegrTest('test_long_future.py', core=True),
     RegrTest('test_longexp.py', core=True),
     RegrTest('test_macos.py'),
-    RegrTest('test_macostools.py', skip=True),
+    RegrTest('test_macostools.py'),
     RegrTest('test_macpath.py'),
     RegrTest('test_mailbox.py'),
     RegrTest('test_marshal.py', core=True),
@@ -307,30 +302,29 @@
     RegrTest('test_mhlib.py'),
     RegrTest('test_mimetools.py'),
     RegrTest('test_mimetypes.py'),
-    RegrTest('test_MimeWriter.py', core=False),
+    RegrTest('test_MimeWriter.py', core=False, usemodules='binascii'),
     RegrTest('test_minidom.py'),
     RegrTest('test_mmap.py', usemodules="mmap"),
     RegrTest('test_module.py', core=True),
     RegrTest('test_modulefinder.py'),
-    RegrTest('test_msilib.py', skip=only_win32),
+    RegrTest('test_msilib.py'),
     RegrTest('test_multibytecodec.py', usemodules='_multibytecodec'),
-    RegrTest('test_multibytecodec_support.py', skip="not a test"),
     RegrTest('test_multifile.py'),
-    RegrTest('test_multiprocessing.py', skip="FIXME leaves subprocesses"),
+    RegrTest('test_multiprocessing.py'),
     RegrTest('test_mutants.py', core="possibly"),
     RegrTest('test_mutex.py'),
     RegrTest('test_netrc.py'),
     RegrTest('test_new.py', core=True),
-    RegrTest('test_nis.py', skip="unsupported extension module"),
+    RegrTest('test_nis.py'),
     RegrTest('test_normalization.py'),
     RegrTest('test_ntpath.py'),
+    RegrTest('test_old_mailbox.py'),
     RegrTest('test_opcodes.py', core=True),
     RegrTest('test_openpty.py'),
     RegrTest('test_operator.py', core=True),
     RegrTest('test_optparse.py'),
-
     RegrTest('test_os.py', core=True),
-    RegrTest('test_ossaudiodev.py', skip="unsupported extension module"),
+    RegrTest('test_ossaudiodev.py'),
     RegrTest('test_parser.py', skip="slowly deprecating compiler"),
     RegrTest('test_pdb.py'),
     RegrTest('test_peepholer.py'),
@@ -338,14 +332,16 @@
     RegrTest('test_pep263.py'),
     RegrTest('test_pep277.py'),
     RegrTest('test_pep292.py'),
+    RegrTest('test_pep352.py'),
     RegrTest('test_pickle.py', core=True),
     RegrTest('test_pickletools.py', core=False),
     RegrTest('test_pipes.py'),
     RegrTest('test_pkg.py', core=True),
     RegrTest('test_pkgimport.py', core=True),
     RegrTest('test_pkgutil.py'),
-    RegrTest('test_plistlib.py', skip="unsupported module"),
-    RegrTest('test_poll.py', skip=skip_win32),
+    RegrTest('test_platform.py'),
+    RegrTest('test_plistlib.py'),
+    RegrTest('test_poll.py'),
     RegrTest('test_popen.py'),
     RegrTest('test_popen2.py'),
     RegrTest('test_poplib.py'),
@@ -357,8 +353,8 @@
     RegrTest('test_profile.py'),
     RegrTest('test_property.py', core=True),
     RegrTest('test_pstats.py'),
-    RegrTest('test_pty.py', skip="unsupported extension module"),
-    RegrTest('test_pwd.py', usemodules="pwd", skip=skip_win32),
+    RegrTest('test_pty.py', usemodules='fcntl termios select'),
+    RegrTest('test_pwd.py', usemodules="pwd"),
     RegrTest('test_py3kwarn.py'),
     RegrTest('test_py_compile.py'),
     RegrTest('test_pyclbr.py'),
@@ -370,15 +366,15 @@
     RegrTest('test_re.py', core=True),
     RegrTest('test_readline.py'),
     RegrTest('test_repr.py', core=True),
-    RegrTest('test_resource.py', skip=skip_win32),
+    RegrTest('test_resource.py'),
     RegrTest('test_rfc822.py'),
     RegrTest('test_richcmp.py', core=True),
     RegrTest('test_rlcompleter.py'),
-
     RegrTest('test_robotparser.py'),
+    RegrTest('test_runpy.py'),
     RegrTest('test_sax.py'),
     RegrTest('test_scope.py', core=True),
-    RegrTest('test_scriptpackages.py', skip="unsupported extension module"),
+    RegrTest('test_scriptpackages.py'),
     RegrTest('test_select.py'),
     RegrTest('test_set.py', core=True),
     RegrTest('test_sets.py'),
@@ -389,64 +385,59 @@
     RegrTest('test_shlex.py'),
     RegrTest('test_shutil.py'),
     RegrTest('test_signal.py'),
-    RegrTest('test_SimpleHTTPServer.py'),
+    RegrTest('test_SimpleHTTPServer.py', usemodules='binascii'),
     RegrTest('test_site.py', core=False),
     RegrTest('test_slice.py', core=True),
     RegrTest('test_smtplib.py'),
     RegrTest('test_smtpnet.py'),
     RegrTest('test_socket.py', usemodules='thread _weakref'),
-
     RegrTest('test_socketserver.py', usemodules='thread'),
-
     RegrTest('test_softspace.py', core=True),
     RegrTest('test_sort.py', core=True),
+    RegrTest('test_sqlite.py', usemodules="thread _rawffi zlib"),
     RegrTest('test_ssl.py', usemodules='_ssl _socket select'),
+    RegrTest('test_startfile.py'),
     RegrTest('test_str.py', core=True),
-
     RegrTest('test_strftime.py'),
     RegrTest('test_string.py', core=True),
-    RegrTest('test_StringIO.py', core=True, usemodules='cStringIO'),
+    RegrTest('test_StringIO.py', core=True, usemodules='cStringIO array'),
     RegrTest('test_stringprep.py'),
     RegrTest('test_strop.py', skip="deprecated"),
-
     RegrTest('test_strptime.py'),
     RegrTest('test_strtod.py'),
     RegrTest('test_struct.py', usemodules='struct'),
     RegrTest('test_structmembers.py', skip="CPython specific"),
     RegrTest('test_structseq.py'),
     RegrTest('test_subprocess.py', usemodules='signal'),
-    RegrTest('test_sunaudiodev.py', skip=True),
+    RegrTest('test_sunaudiodev.py'),
     RegrTest('test_sundry.py'),
     RegrTest('test_symtable.py', skip="implementation detail"),
     RegrTest('test_syntax.py', core=True),
     RegrTest('test_sys.py', core=True, usemodules='struct'),
+    RegrTest('test_sys_setprofile.py', core=True),
     RegrTest('test_sys_settrace.py', core=True),
-    RegrTest('test_sys_setprofile.py', core=True),
     RegrTest('test_sysconfig.py'),
-    RegrTest('test_tcl.py', skip="unsupported extension module"),
     RegrTest('test_tarfile.py'),
+    RegrTest('test_tcl.py'),
     RegrTest('test_telnetlib.py'),
     RegrTest('test_tempfile.py'),
-
     RegrTest('test_textwrap.py'),
     RegrTest('test_thread.py', usemodules="thread", core=True),
     RegrTest('test_threaded_import.py', usemodules="thread", core=True),
     RegrTest('test_threadedtempfile.py', 
              usemodules="thread", core=False),
-
     RegrTest('test_threading.py', usemodules="thread", core=True),
     RegrTest('test_threading_local.py', usemodules="thread", core=True),
     RegrTest('test_threadsignals.py', usemodules="thread"),
-
     RegrTest('test_time.py', core=True),
     RegrTest('test_timeout.py'),
     RegrTest('test_tk.py'),
-    RegrTest('test_ttk_guionly.py'),
-    RegrTest('test_ttk_textonly.py'),
     RegrTest('test_tokenize.py'),
     RegrTest('test_trace.py'),
     RegrTest('test_traceback.py', core=True),
     RegrTest('test_transformer.py', core=True),
+    RegrTest('test_ttk_guionly.py'),
+    RegrTest('test_ttk_textonly.py'),
     RegrTest('test_tuple.py', core=True),
     RegrTest('test_typechecks.py'),
     RegrTest('test_types.py', core=True),
@@ -462,6 +453,7 @@
     RegrTest('test_unpack.py', core=True),
     RegrTest('test_urllib.py'),
     RegrTest('test_urllib2.py'),
+    RegrTest('test_urllib2_localnet.py', usemodules="thread"),
     RegrTest('test_urllib2net.py'),
     RegrTest('test_urllibnet.py'),
     RegrTest('test_urlparse.py'),
@@ -469,61 +461,38 @@
     RegrTest('test_userlist.py', core=True),
     RegrTest('test_userstring.py', core=True),
     RegrTest('test_uu.py'),
-
-    RegrTest('test_warnings.py', core=True),
-    RegrTest('test_wave.py', skip="unsupported extension module"),
-    RegrTest('test_weakref.py', core=True, usemodules='_weakref'),
-    RegrTest('test_weakset.py'),
-
-    RegrTest('test_whichdb.py'),
-    RegrTest('test_winreg.py', skip=only_win32),
-    RegrTest('test_winsound.py', skip="unsupported extension module"),
-    RegrTest('test_xmllib.py'),
-    RegrTest('test_xmlrpc.py'),
-
-    RegrTest('test_xpickle.py'),
-    RegrTest('test_xrange.py', core=True),
-    RegrTest('test_zipfile.py'),
-    RegrTest('test_zipimport.py', usemodules='zlib zipimport'),
-    RegrTest('test_zipimport_support.py', usemodules='zlib zipimport'),
-    RegrTest('test_zlib.py', usemodules='zlib'),
-
-    RegrTest('test_bigaddrspace.py'),
-    RegrTest('test_bigmem.py'),
-    RegrTest('test_cmd_line.py'),
-    RegrTest('test_code.py'),
-    RegrTest('test_coding.py'),
-    RegrTest('test_complex_args.py'),
-    RegrTest('test_contextlib.py', usemodules="thread"),
-    RegrTest('test_ctypes.py', usemodules="_rawffi thread"),
-    RegrTest('test_defaultdict.py', usemodules='_collections'),
-    RegrTest('test_email_renamed.py'),
-    RegrTest('test_exception_variations.py'),
-    RegrTest('test_float.py'),
-    RegrTest('test_functools.py'),
-    RegrTest('test_index.py'),
-    RegrTest('test_old_mailbox.py'),
-    RegrTest('test_pep352.py'),
-    RegrTest('test_platform.py'),
-    RegrTest('test_runpy.py'),
-    RegrTest('test_sqlite.py', usemodules="thread _rawffi zlib"),
-    RegrTest('test_startfile.py', skip="bogus test"),
-    RegrTest('test_structmembers.py', skip="depends on _testcapi"),
-    RegrTest('test_urllib2_localnet.py', usemodules="thread"),
     RegrTest('test_uuid.py'),
     RegrTest('test_wait3.py', usemodules="thread"),
     RegrTest('test_wait4.py', usemodules="thread"),
+    RegrTest('test_warnings.py', core=True),
+    RegrTest('test_wave.py'),
+    RegrTest('test_weakref.py', core=True, usemodules='_weakref'),
+    RegrTest('test_weakset.py'),
+    RegrTest('test_whichdb.py'),
+    RegrTest('test_winreg.py'),
+    RegrTest('test_winsound.py'),
     RegrTest('test_with.py'),
     RegrTest('test_wsgiref.py'),
     RegrTest('test_xdrlib.py'),
     RegrTest('test_xml_etree.py'),
     RegrTest('test_xml_etree_c.py'),
+    RegrTest('test_xmllib.py'),
+    RegrTest('test_xmlrpc.py'),
+    RegrTest('test_xpickle.py'),
+    RegrTest('test_xrange.py', core=True),
+    RegrTest('test_zipfile.py'),
     RegrTest('test_zipfile64.py'),
+    RegrTest('test_zipimport.py', usemodules='zlib zipimport'),
+    RegrTest('test_zipimport_support.py', usemodules='zlib zipimport'),
+    RegrTest('test_zlib.py', usemodules='zlib'),
 ]
 
 def check_testmap_complete():
     listed_names = dict.fromkeys([regrtest.basename for regrtest in testmap])
-    listed_names['test_support.py'] = True     # ignore this
+    assert len(listed_names) == len(testmap)
+    # names to ignore
+    listed_names['test_support.py'] = True
+    listed_names['test_multibytecodec_support.py'] = True
     missing = []
     for path in testdir.listdir(fil='test_*.py'):
         name = path.basename
@@ -578,13 +547,13 @@
     def getinvocation(self, regrtest): 
         fspath = regrtest.getfspath() 
         python = sys.executable 
-        pypy_script = pypydir.join('bin', 'py.py')
+        pypy_script = pypydir.join('bin', 'pyinteractive.py')
         alarm_script = pypydir.join('tool', 'alarm.py')
         if sys.platform == 'win32':
             watchdog_name = 'watchdog_nt.py'
         else:
             watchdog_name = 'watchdog.py'
-        watchdog_script = pypydir.join('tool', watchdog_name)
+        watchdog_script = rpythondir.join('tool', watchdog_name)
 
         regr_script = pypydir.join('tool', 'pytest', 
                                    'run-script', 'regrverbose.py')
diff --git a/lib_pypy/_ctypes_test.py b/lib_pypy/_ctypes_test.py
--- a/lib_pypy/_ctypes_test.py
+++ b/lib_pypy/_ctypes_test.py
@@ -1,6 +1,5 @@
 import os, sys
 import tempfile
-import gc
 
 def compile_shared():
     """Compile '_ctypes_test.c' into an extension module, and import it
@@ -19,7 +18,7 @@
     if sys.platform == 'win32':
         ccflags = ['-D_CRT_SECURE_NO_WARNINGS']
     else:
-        ccflags = ['-fPIC']
+        ccflags = ['-fPIC', '-Wimplicit-function-declaration']
     res = compiler.compile([os.path.join(thisdir, '_ctypes_test.c')],
                            include_dirs=[include_dir],
                            extra_preargs=ccflags)
@@ -35,10 +34,10 @@
             library = os.path.join(thisdir, '..', 'include', 'python27')
         if not os.path.exists(library + '.lib'):
             # For a local translation
-            library = os.path.join(thisdir, '..', 'pypy', 'translator',
-                    'goal', 'libpypy-c')
+            library = os.path.join(thisdir, '..', 'pypy', 'goal', 'libpypy-c')
         libraries = [library, 'oleaut32']
-        extra_ldargs = ['/MANIFEST'] # needed for VC10
+        extra_ldargs = ['/MANIFEST',  # needed for VC10
+                        '/EXPORT:init_ctypes_test']
     else:
         libraries = []
         extra_ldargs = []
diff --git a/lib_pypy/_testcapi.py b/lib_pypy/_testcapi.py
--- a/lib_pypy/_testcapi.py
+++ b/lib_pypy/_testcapi.py
@@ -34,8 +34,7 @@
             library = os.path.join(thisdir, '..', 'include', 'python27')
         if not os.path.exists(library + '.lib'):
             # For a local translation
-            library = os.path.join(thisdir, '..', 'pypy', 'translator',
-                    'goal', 'libpypy-c')
+            library = os.path.join(thisdir, '..', 'pypy', 'goal', 'libpypy-c')
         libraries = [library, 'oleaut32']
         extra_ldargs = ['/MANIFEST',  # needed for VC10
                         '/EXPORT:init_testcapi']
diff --git a/lib_pypy/cPickle.py b/lib_pypy/cPickle.py
--- a/lib_pypy/cPickle.py
+++ b/lib_pypy/cPickle.py
@@ -1,5 +1,5 @@
 #
-# One-liner implementation of cPickle
+# Reimplementation of cPickle, mostly as a copy of pickle.py
 #
 
 from pickle import Pickler, dump, dumps, PickleError, PicklingError, UnpicklingError, _EmptyClass
@@ -131,6 +131,13 @@
 
 # Unpickling machinery
 
+class _Stack(list):
+    def pop(self, index=-1):
+        try:
+            return list.pop(self, index)
+        except IndexError:
+            raise UnpicklingError("unpickling stack underflow")
+
 class Unpickler(object):
 
     def __init__(self, file):
@@ -155,7 +162,7 @@
         Return the reconstituted object hierarchy specified in the file.
         """
         self.mark = object() # any new unique object
-        self.stack = []
+        self.stack = _Stack()
         self.append = self.stack.append
         try:
             key = ord(self.read(1))
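
The cPickle.py change above swaps the plain list used as the unpickling stack for a _Stack subclass, so that an underflow surfaces as UnpicklingError rather than a bare IndexError (see also the new lib_pypy/pypy_test/test_cPickle.py below). The behaviour in isolation:

    class UnpicklingError(Exception):      # stand-in for pickle.UnpicklingError
        pass

    class _Stack(list):
        def pop(self, index=-1):
            try:
                return list.pop(self, index)
            except IndexError:
                raise UnpicklingError("unpickling stack underflow")

    try:
        _Stack().pop()                     # popping an empty stack
    except UnpicklingError as e:
        print(e)                           # unpickling stack underflow
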
diff --git a/lib_pypy/ctypes_config_cache/autopath.py b/lib_pypy/ctypes_config_cache/autopath.py
deleted file mode 100644
--- a/lib_pypy/ctypes_config_cache/autopath.py
+++ /dev/null
@@ -1,131 +0,0 @@
-"""
-self cloning, automatic path configuration 
-
-copy this into any subdirectory of pypy from which scripts need 
-to be run, typically all of the test subdirs. 
-The idea is that any such script simply issues
-
-    import autopath
-
-and this will make sure that the parent directory containing "pypy"
-is in sys.path. 
-
-If you modify the master "autopath.py" version (in pypy/tool/autopath.py) 
-you can directly run it which will copy itself on all autopath.py files
-it finds under the pypy root directory. 
-
-This module always provides these attributes:
-
-    pypydir    pypy root directory path 
-    this_dir   directory where this autopath.py resides 
-
-"""
-
-def __dirinfo(part):
-    """ return (partdir, this_dir) and insert parent of partdir
-    into sys.path.  If the parent directories don't have the part
-    an EnvironmentError is raised."""
-
-    import sys, os
-    try:
-        head = this_dir = os.path.realpath(os.path.dirname(__file__))
-    except NameError:
-        head = this_dir = os.path.realpath(os.path.dirname(sys.argv[0]))
-
-    error = None
-    while head:
-        partdir = head
-        head, tail = os.path.split(head)
-        if tail == part:
-            checkfile = os.path.join(partdir, os.pardir, 'pypy', '__init__.py')
-            if not os.path.exists(checkfile):
-                error = "Cannot find %r" % (os.path.normpath(checkfile),)
-            break
-    else:
-        error = "Cannot find the parent directory %r of the path %r" % (
-            partdir, this_dir)
-    if not error:
-        # check for bogus end-of-line style (e.g. files checked out on
-        # Windows and moved to Unix)
-        f = open(__file__.replace('.pyc', '.py'), 'r')
-        data = f.read()
-        f.close()
-        if data.endswith('\r\n') or data.endswith('\r'):
-            error = ("Bad end-of-line style in the .py files. Typically "
-                     "caused by a zip file or a checkout done on Windows and "
-                     "moved to Unix or vice-versa.")
-    if error:
-        raise EnvironmentError("Invalid source tree - bogus checkout! " +
-                               error)
-    
-    pypy_root = os.path.join(head, '')
-    try:
-        sys.path.remove(head)
-    except ValueError:
-        pass
-    sys.path.insert(0, head)
-
-    munged = {}
-    for name, mod in sys.modules.items():
-        if '.' in name:
-            continue
-        fn = getattr(mod, '__file__', None)
-        if not isinstance(fn, str):
-            continue
-        newname = os.path.splitext(os.path.basename(fn))[0]
-        if not newname.startswith(part + '.'):
-            continue
-        path = os.path.join(os.path.dirname(os.path.realpath(fn)), '')
-        if path.startswith(pypy_root) and newname != part:
-            modpaths = os.path.normpath(path[len(pypy_root):]).split(os.sep)
-            if newname != '__init__':
-                modpaths.append(newname)
-            modpath = '.'.join(modpaths)
-            if modpath not in sys.modules:
-                munged[modpath] = mod
-
-    for name, mod in munged.iteritems():
-        if name not in sys.modules:
-            sys.modules[name] = mod
-        if '.' in name:
-            prename = name[:name.rfind('.')]
-            postname = name[len(prename)+1:]
-            if prename not in sys.modules:
-                __import__(prename)
-                if not hasattr(sys.modules[prename], postname):
-                    setattr(sys.modules[prename], postname, mod)
-
-    return partdir, this_dir
-
-def __clone():
-    """ clone master version of autopath.py into all subdirs """
-    from os.path import join, walk
-    if not this_dir.endswith(join('pypy','tool')):
-        raise EnvironmentError("can only clone master version "
-                               "'%s'" % join(pypydir, 'tool',_myname))
-
-
-    def sync_walker(arg, dirname, fnames):
-        if _myname in fnames:
-            fn = join(dirname, _myname)
-            f = open(fn, 'rwb+')
-            try:
-                if f.read() == arg:
-                    print "checkok", fn
-                else:
-                    print "syncing", fn
-                    f = open(fn, 'w')
-                    f.write(arg)
-            finally:
-                f.close()
-    s = open(join(pypydir, 'tool', _myname), 'rb').read()
-    walk(pypydir, sync_walker, s)
-
-_myname = 'autopath.py'
-
-# set guaranteed attributes
-
-pypydir, this_dir = __dirinfo('pypy')
-
-if __name__ == '__main__':
-    __clone()
diff --git a/lib_pypy/ctypes_config_cache/dumpcache.py b/lib_pypy/ctypes_config_cache/dumpcache.py
--- a/lib_pypy/ctypes_config_cache/dumpcache.py
+++ b/lib_pypy/ctypes_config_cache/dumpcache.py
@@ -1,6 +1,6 @@
 import os
 from ctypes_configure import dumpcache
-from pypy.jit.backend import detect_cpu
+from rpython.jit.backend import detect_cpu
 
 def dumpcache2(basename, config):
     model = detect_cpu.autodetect_main_model_and_size()
@@ -14,7 +14,7 @@
 try:
     from __pypy__ import cpumodel
 except ImportError:
-    from pypy.jit.backend import detect_cpu
+    from rpython.jit.backend import detect_cpu
     cpumodel = detect_cpu.autodetect_main_model_and_size()
 # XXX relative import, should be removed together with
 # XXX the relative imports done e.g. by lib_pypy/pypy_test/test_hashlib
diff --git a/lib_pypy/ctypes_config_cache/rebuild.py b/lib_pypy/ctypes_config_cache/rebuild.py
--- a/lib_pypy/ctypes_config_cache/rebuild.py
+++ b/lib_pypy/ctypes_config_cache/rebuild.py
@@ -1,21 +1,15 @@
 #! /usr/bin/env python
 # Run this script to rebuild all caches from the *.ctc.py files.
 
-# hack: we cannot directly import autopath, as we are outside the pypy
-# package.  However, we pretend to be inside pypy/tool and manually run it, to
-# get the correct path
-import os.path
-this_dir = os.path.dirname(__file__)
-autopath_py = os.path.join(this_dir, '../../pypy/tool/autopath.py')
-autopath_py = os.path.abspath(autopath_py)
-execfile(autopath_py, dict(__name__='autopath', __file__=autopath_py))
+import os, sys
 
-import os, sys
+sys.path.insert(0, os.path.realpath(os.path.join(os.path.dirname(__file__), '..', '..')))
+
 import py
 
 _dirpath = os.path.dirname(__file__) or os.curdir
 
-from pypy.tool.ansi_print import ansi_log
+from rpython.tool.ansi_print import ansi_log
 log = py.log.Producer("ctypes_config_cache")
 py.log.setconsumer("ctypes_config_cache", ansi_log)
 
@@ -31,7 +25,7 @@
         sys.path[:] = path
 
 def try_rebuild():
-    from pypy.jit.backend import detect_cpu
+    from rpython.jit.backend import detect_cpu
     model = detect_cpu.autodetect_main_model_and_size()
     # remove the files '_*_model_.py'
     left = {}
diff --git a/lib_pypy/ctypes_config_cache/test/test_cache.py b/lib_pypy/ctypes_config_cache/test/test_cache.py
--- a/lib_pypy/ctypes_config_cache/test/test_cache.py
+++ b/lib_pypy/ctypes_config_cache/test/test_cache.py
@@ -1,6 +1,6 @@
 import py
 import sys, os
-from pypy.tool.udir import udir
+from rpython.tool.udir import udir
 
 dirpath = py.path.local(__file__).dirpath().dirpath()
 
diff --git a/lib_pypy/ctypes_support.py b/lib_pypy/ctypes_support.py
--- a/lib_pypy/ctypes_support.py
+++ b/lib_pypy/ctypes_support.py
@@ -19,16 +19,19 @@
 
 if sys.platform == 'win32':
     standard_c_lib._errno.restype = ctypes.POINTER(ctypes.c_int)
+    standard_c_lib._errno.argtypes = None
     def _where_is_errno():
         return standard_c_lib._errno()
     
 elif sys.platform in ('linux2', 'freebsd6'):
     standard_c_lib.__errno_location.restype = ctypes.POINTER(ctypes.c_int)
+    standard_c_lib.__errno_location.argtypes = None
     def _where_is_errno():
         return standard_c_lib.__errno_location()
 
 elif sys.platform in ('darwin', 'freebsd7', 'freebsd8', 'freebsd9'):
     standard_c_lib.__error.restype = ctypes.POINTER(ctypes.c_int)
+    standard_c_lib.__error.argtypes = None
     def _where_is_errno():
         return standard_c_lib.__error()
 
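
The ctypes_support.py change above explicitly resets argtypes to None (the ctypes default, meaning no declared argument conversion) on the per-platform errno-location functions, whose restype is a pointer to C int. A rough POSIX-only sketch of that errno lookup; the glibc symbol name is an assumption (Darwin uses __error, as the hunk shows):

    import ctypes
    import ctypes.util

    libc = ctypes.CDLL(ctypes.util.find_library("c") or None)
    errno_location = getattr(libc, "__errno_location")    # glibc-specific name
    errno_location.restype = ctypes.POINTER(ctypes.c_int)
    errno_location.argtypes = None                        # mirrors the change above

    def current_errno():
        return errno_location()[0]                        # dereference the int*

    print(current_errno())
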
diff --git a/lib_pypy/greenlet.py b/lib_pypy/greenlet.py
--- a/lib_pypy/greenlet.py
+++ b/lib_pypy/greenlet.py
@@ -1,5 +1,6 @@
 import _continuation, sys
 
+__version__ = "0.4.0"
 
 # ____________________________________________________________
 # Exceptions
@@ -57,7 +58,8 @@
     def __switch(target, methodname, *args):
         current = getcurrent()
         #
-        while not target:
+        while not (target.__main or _continulet.is_pending(target)):
+            # inlined __nonzero__ ^^^ in case it's overridden
             if not target.__started:
                 if methodname == 'switch':
                     greenlet_func = _greenlet_start
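
The greenlet.py change above avoids "while not target:" because, as its comment notes, a subclass can override __nonzero__; the loop therefore checks the underlying state directly. A small illustration of why truthiness tests on user-subclassable objects can mislead (names are made up):

    class Task(object):
        started = False
        def __nonzero__(self):            # Python 2 truthiness hook
            return self.started
        __bool__ = __nonzero__            # same hook on Python 3

    class LyingTask(Task):
        def __nonzero__(self):
            return True                   # overrides the base-class meaning
        __bool__ = __nonzero__

    t = LyingTask()
    print(bool(t))        # True
    print(t.started)      # False -- so check the attribute, not bool(t)
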
diff --git a/lib_pypy/pypy_test/test_cPickle.py b/lib_pypy/pypy_test/test_cPickle.py
new file mode 100644
--- /dev/null
+++ b/lib_pypy/pypy_test/test_cPickle.py
@@ -0,0 +1,7 @@
+from __future__ import absolute_import
+import py
+
+from lib_pypy import cPickle
+
+def test_stack_underflow():
+    py.test.raises(cPickle.UnpicklingError, cPickle.loads, "a string")
diff --git a/lib_pypy/pypy_test/test_dbm_extra.py b/lib_pypy/pypy_test/test_dbm_extra.py
--- a/lib_pypy/pypy_test/test_dbm_extra.py
+++ b/lib_pypy/pypy_test/test_dbm_extra.py
@@ -1,6 +1,6 @@
 from __future__ import absolute_import
 import py
-from pypy.tool.udir import udir
+from rpython.tool.udir import udir
 try:
     from lib_pypy import dbm
 except ImportError, e:
diff --git a/lib_pypy/pypy_test/test_marshal_extra.py b/lib_pypy/pypy_test/test_marshal_extra.py
--- a/lib_pypy/pypy_test/test_marshal_extra.py
+++ b/lib_pypy/pypy_test/test_marshal_extra.py
@@ -4,7 +4,7 @@
 import marshal as cpy_marshal
 from lib_pypy import _marshal as marshal
 
-from pypy.tool.udir import udir 
+from rpython.tool.udir import udir 
 
 hello = "he"
 hello += "llo"
diff --git a/lib_pypy/pyrepl/unix_console.py b/lib_pypy/pyrepl/unix_console.py
--- a/lib_pypy/pyrepl/unix_console.py
+++ b/lib_pypy/pyrepl/unix_console.py
@@ -398,6 +398,7 @@
 
         if hasattr(self, 'old_sigwinch'):
             signal.signal(signal.SIGWINCH, self.old_sigwinch)
+            del self.old_sigwinch
 
     def __sigwinch(self, signum, frame):
         self.height, self.width = self.getheightwidth()
diff --git a/py/_code/source.py b/py/_code/source.py
--- a/py/_code/source.py
+++ b/py/_code/source.py
@@ -141,6 +141,8 @@
                 trysource = self[start:end]
                 if trysource.isparseable():
                     return start, end
+                if end == start + 100:   # XXX otherwise, it takes forever
+                    break                # XXX
         if end is None:
             raise IndexError("no valid source range around line %d " % (lineno,))
         return start, end
diff --git a/pypy/annotation/annrpython.py b/pypy/annotation/annrpython.py
deleted file mode 100644
--- a/pypy/annotation/annrpython.py
+++ /dev/null
@@ -1,676 +0,0 @@
-from __future__ import absolute_import
-
-import types
-from pypy.tool.ansi_print import ansi_log
-from pypy.tool.pairtype import pair
-from pypy.tool.error import (format_blocked_annotation_error,
-                             AnnotatorError, gather_error, ErrorWrapper)
-from pypy.objspace.flow.model import (Variable, Constant, FunctionGraph,
-                                      c_last_exception, checkgraph)
-from pypy.translator import simplify, transform
-from pypy.annotation import model as annmodel, signature, unaryop, binaryop
-from pypy.annotation.bookkeeper import Bookkeeper
-import py
-log = py.log.Producer("annrpython")
-py.log.setconsumer("annrpython", ansi_log)
-
-
-FAIL = object()
-
-class RPythonAnnotator(object):
-    """Block annotator for RPython.
-    See description in doc/translation.txt."""
-
-    def __init__(self, translator=None, policy=None, bookkeeper=None):
-        import pypy.rpython.ootypesystem.ooregistry # has side effects
-        import pypy.rpython.extfuncregistry # has side effects
-        import pypy.rlib.nonconst # has side effects
-
-        if translator is None:
-            # interface for tests
-            from pypy.translator.translator import TranslationContext
-            translator = TranslationContext()
-            translator.annotator = self
-        self.translator = translator
-        self.pendingblocks = {}  # map {block: graph-containing-it}
-        self.bindings = {}       # map Variables to SomeValues
-        self.annotated = {}      # set of blocks already seen
-        self.added_blocks = None # see processblock() below
-        self.links_followed = {} # set of links that have ever been followed
-        self.notify = {}        # {block: {positions-to-reflow-from-when-done}}
-        self.fixed_graphs = {}  # set of graphs not to annotate again
-        self.blocked_blocks = {} # set of {blocked_block: (graph, index)}
-        # --- the following information is recorded for debugging ---
-        self.blocked_graphs = {} # set of graphs that have blocked blocks
-        # --- end of debugging information ---
-        self.frozen = False
-        if policy is None:
-            from pypy.annotation.policy import AnnotatorPolicy
-            self.policy = AnnotatorPolicy()
-        else:
-            self.policy = policy
-        if bookkeeper is None:
-            bookkeeper = Bookkeeper(self)
-        self.bookkeeper = bookkeeper
-
-    def __getstate__(self):
-        attrs = """translator pendingblocks bindings annotated links_followed
-        notify bookkeeper frozen policy added_blocks""".split()
-        ret = self.__dict__.copy()
-        for key, value in ret.items():
-            if key not in attrs:
-                assert type(value) is dict, (
-                    "%r is not dict. please update %s.__getstate__" %
-                    (key, self.__class__.__name__))
-                ret[key] = {}
-        return ret
-
-    #___ convenience high-level interface __________________
-
-    def build_types(self, function, input_arg_types, complete_now=True,
-                    main_entry_point=False):
-        """Recursively build annotations about the specific entry point."""
-        assert isinstance(function, types.FunctionType), "fix that!"
-
-        from pypy.annotation.policy import AnnotatorPolicy
-        policy = AnnotatorPolicy()
-        # make input arguments and set their type
-        args_s = [self.typeannotation(t) for t in input_arg_types]
-
-        # XXX hack
-        annmodel.TLS.check_str_without_nul = (
-            self.translator.config.translation.check_str_without_nul)
-
-        flowgraph, inputcells = self.get_call_parameters(function, args_s, policy)
-        if not isinstance(flowgraph, FunctionGraph):
-            assert isinstance(flowgraph, annmodel.SomeObject)
-            return flowgraph
-
-        if main_entry_point:
-            self.translator.entry_point_graph = flowgraph
-        return self.build_graph_types(flowgraph, inputcells, complete_now=complete_now)
-
-    def get_call_parameters(self, function, args_s, policy):
-        desc = self.bookkeeper.getdesc(function)
-        args = self.bookkeeper.build_args("simple_call", args_s[:])
-        result = []
-        def schedule(graph, inputcells):
-            result.append((graph, inputcells))
-            return annmodel.s_ImpossibleValue
-
-        prevpolicy = self.policy
-        self.policy = policy
-        self.bookkeeper.enter(None)
-        try:
-            desc.pycall(schedule, args, annmodel.s_ImpossibleValue)
-        finally:
-            self.bookkeeper.leave()
-            self.policy = prevpolicy
-        [(graph, inputcells)] = result
-        return graph, inputcells
-
-    def annotate_helper(self, function, args_s, policy=None):
-        if policy is None:
-            from pypy.annotation.policy import AnnotatorPolicy
-            policy = AnnotatorPolicy()
-        graph, inputcells = self.get_call_parameters(function, args_s, policy)
-        self.build_graph_types(graph, inputcells, complete_now=False)
-        self.complete_helpers(policy)
-        return graph
-
-    def complete_helpers(self, policy):
-        saved = self.policy, self.added_blocks
-        self.policy = policy
-        try:
-            self.added_blocks = {}
-            self.complete()
-            # invoke annotation simplifications for the new blocks
-            self.simplify(block_subset=self.added_blocks)
-        finally:
-            self.policy, self.added_blocks = saved
-
-    def build_graph_types(self, flowgraph, inputcells, complete_now=True):
-        checkgraph(flowgraph)
-
-        nbarg = len(flowgraph.getargs())
-        if len(inputcells) != nbarg: 
-            raise TypeError("%s expects %d args, got %d" %(       
-                            flowgraph, nbarg, len(inputcells)))
-        
-        # register the entry point
-        self.addpendinggraph(flowgraph, inputcells)
-        # recursively proceed until no more pending block is left
-        if complete_now:
-            self.complete()
-        return self.binding(flowgraph.getreturnvar(), None)
-
-    def gettype(self, variable):
-        """Return the known type of a control flow graph variable,
-        defaulting to 'object'."""
-        if isinstance(variable, Constant):
-            return type(variable.value)
-        elif isinstance(variable, Variable):
-            cell = self.bindings.get(variable)
-            if cell:
-                return cell.knowntype
-            else:
-                return object
-        else:
-            raise TypeError, ("Variable or Constant instance expected, "
-                              "got %r" % (variable,))
-
-    def getuserclassdefinitions(self):
-        """Return a list of ClassDefs."""
-        return self.bookkeeper.classdefs
-
-    #___ medium-level interface ____________________________
-
-    def addpendinggraph(self, flowgraph, inputcells):
-        self.addpendingblock(flowgraph, flowgraph.startblock, inputcells)
-
-    def addpendingblock(self, graph, block, cells):
-        """Register an entry point into block with the given input cells."""
-        if graph in self.fixed_graphs:
-            # special case for annotating/rtyping in several phases: calling
-            # a graph that has already been rtyped.  Safety-check the new
-            # annotations that are passed in, and don't annotate the old
-            # graph -- it's already low-level operations!
-            for a, s_newarg in zip(graph.getargs(), cells):
-                s_oldarg = self.binding(a)
-                assert annmodel.unionof(s_oldarg, s_newarg) == s_oldarg
-        else:
-            assert not self.frozen
-            for a in cells:
-                assert isinstance(a, annmodel.SomeObject)
-            if block not in self.annotated:
-                self.bindinputargs(graph, block, cells)
-            else:
-                self.mergeinputargs(graph, block, cells)
-            if not self.annotated[block]:
-                self.pendingblocks[block] = graph
-
-    def complete(self):
-        """Process pending blocks until none is left."""
-        while True:
-            while self.pendingblocks:
-                block, graph = self.pendingblocks.popitem()
-                self.processblock(graph, block)
-            self.policy.no_more_blocks_to_annotate(self)
-            if not self.pendingblocks:
-                break   # finished
-        # make sure that the return variables of all graphs are annotated
-        if self.added_blocks is not None:
-            newgraphs = [self.annotated[block] for block in self.added_blocks]
-            newgraphs = dict.fromkeys(newgraphs)
-            got_blocked_blocks = False in newgraphs
-        else:
-            newgraphs = self.translator.graphs  #all of them
-            got_blocked_blocks = False in self.annotated.values()
-        if got_blocked_blocks:
-            for graph in self.blocked_graphs.values():
-                self.blocked_graphs[graph] = True
-
-            blocked_blocks = [block for block, done in self.annotated.items()
-                                    if done is False]
-            assert len(blocked_blocks) == len(self.blocked_blocks)
-
-            text = format_blocked_annotation_error(self, self.blocked_blocks)
-            #raise SystemExit()
-            raise AnnotatorError(text)
-        for graph in newgraphs:
-            v = graph.getreturnvar()
-            if v not in self.bindings:
-                self.setbinding(v, annmodel.s_ImpossibleValue)
-        # policy-dependent computation
-        self.bookkeeper.compute_at_fixpoint()
-
-    def binding(self, arg, default=FAIL):
-        "Gives the SomeValue corresponding to the given Variable or Constant."
-        if isinstance(arg, Variable):
-            try:
-                return self.bindings[arg]
-            except KeyError:
-                if default is not FAIL:
-                    return default
-                else:
-                    raise
-        elif isinstance(arg, Constant):
-            #if arg.value is undefined_value:   # undefined local variables
-            #    return annmodel.s_ImpossibleValue
-            return self.bookkeeper.immutableconstant(arg)
-        else:
-            raise TypeError, 'Variable or Constant expected, got %r' % (arg,)
-
-    def typeannotation(self, t):
-        return signature.annotation(t, self.bookkeeper)
-
-    def setbinding(self, arg, s_value):
-        if arg in self.bindings:
-            assert s_value.contains(self.bindings[arg])
-        self.bindings[arg] = s_value
-
-    def transfer_binding(self, v_target, v_source):
-        assert v_source in self.bindings
-        self.bindings[v_target] = self.bindings[v_source]
-
-    def warning(self, msg, pos=None):
-        if pos is None:
-            try:
-                pos = self.bookkeeper.position_key
-            except AttributeError:
-                pos = '?'
-        if pos != '?':
-            pos = self.whereami(pos)
- 
-        log.WARNING("%s/ %s" % (pos, msg))
-
-
-    #___ interface for annotator.bookkeeper _______
-
-    def recursivecall(self, graph, whence, inputcells):
-        if isinstance(whence, tuple):
-            parent_graph, parent_block, parent_index = whence
-            tag = parent_block, parent_index
-            self.translator.update_call_graph(parent_graph, graph, tag)
-        # self.notify[graph.returnblock] is a dictionary of call
-        # points to this func which triggers a reflow whenever the
-        # return block of this graph has been analysed.
-        callpositions = self.notify.setdefault(graph.returnblock, {})
-        if whence is not None:
-            if callable(whence):
-                def callback():
-                    whence(self, graph)
-            else:
-                callback = whence
-            callpositions[callback] = True
-
-        # generalize the function's input arguments
-        self.addpendingblock(graph, graph.startblock, inputcells)
-
-        # get the (current) return value
-        v = graph.getreturnvar()
-        try:
-            return self.bindings[v]
-        except KeyError: 
-            # the function didn't reach any return statement so far.
-            # (some functions actually never do, they always raise exceptions)
-            return annmodel.s_ImpossibleValue
-
-    def reflowfromposition(self, position_key):
-        graph, block, index = position_key
-        self.reflowpendingblock(graph, block)
-
-
-    #___ simplification (should be moved elsewhere?) _______
-
-    def simplify(self, block_subset=None, extra_passes=None):
-        # Generic simplifications
-        transform.transform_graph(self, block_subset=block_subset,
-                                  extra_passes=extra_passes)
-        if block_subset is None:
-            graphs = self.translator.graphs
-        else:
-            graphs = {}
-            for block in block_subset:
-                graph = self.annotated.get(block)
-                if graph:
-                    graphs[graph] = True
-        for graph in graphs:
-            simplify.eliminate_empty_blocks(graph)
-
-
-    #___ flowing annotations in blocks _____________________
-
-    def processblock(self, graph, block):
-        # Important: this is not called recursively.
-        # self.flowin() can only issue calls to self.addpendingblock().
-        # The analysis of a block can be in three states:
-        #  * block not in self.annotated:
-        #      never seen the block.
-        #  * self.annotated[block] == False:
-        #      the input variables of the block are in self.bindings but we
-        #      still have to consider all the operations in the block.
-        #  * self.annotated[block] == graph-containing-block:
-        #      analysis done (at least until we find we must generalize the
-        #      input variables).
-
-        #print '* processblock', block, cells
-        self.annotated[block] = graph
-        if block in self.blocked_blocks:
-            del self.blocked_blocks[block]
-        try:
-            self.flowin(graph, block)
-        except BlockedInference, e:
-            self.annotated[block] = False   # failed, hopefully temporarily
-            self.blocked_blocks[block] = (graph, e.opindex)
-        except Exception, e:
-            # hack for debug tools only
-            if not hasattr(e, '__annotator_block'):
-                setattr(e, '__annotator_block', block)
-            raise
-
-        # The dict 'added_blocks' is used by rpython.annlowlevel to
-        # detect which are the new blocks that annotating an additional
-        # small helper creates.
-        if self.added_blocks is not None:
-            self.added_blocks[block] = True
-
-    def reflowpendingblock(self, graph, block):
-        assert not self.frozen
-        assert graph not in self.fixed_graphs
-        self.pendingblocks[block] = graph
-        assert block in self.annotated
-        self.annotated[block] = False  # must re-flow
-        self.blocked_blocks[block] = (graph, None)
-
-    def bindinputargs(self, graph, block, inputcells):
-        # Create the initial bindings for the input args of a block.
-        assert len(block.inputargs) == len(inputcells)
-        for a, cell in zip(block.inputargs, inputcells):
-            self.setbinding(a, cell)
-        self.annotated[block] = False  # must flowin.
-        self.blocked_blocks[block] = (graph, None)
-
-    def mergeinputargs(self, graph, block, inputcells):
-        # Merge the new 'cells' with each of the block's existing input
-        # variables.
-        oldcells = [self.binding(a) for a in block.inputargs]
-        try:
-            unions = [annmodel.unionof(c1,c2) for c1, c2 in zip(oldcells,inputcells)]
-        except annmodel.UnionError, e:
-            e.args = e.args + (
-                ErrorWrapper(gather_error(self, graph, block, None)),)
-            raise
-        # if the merged cells changed, we must redo the analysis
-        if unions != oldcells:
-            self.bindinputargs(graph, block, unions)
-
-    def whereami(self, position_key):
-        graph, block, i = position_key
-        blk = ""
-        if block:
-            at = block.at()
-            if at:
-                blk = " block"+at
-        opid=""
-        if i is not None:
-            opid = " op=%d" % i
-        return repr(graph) + blk + opid
-
-    def flowin(self, graph, block):
-        #print 'Flowing', block, [self.binding(a) for a in block.inputargs]
-        try:
-            for i in range(len(block.operations)):
-                try:
-                    self.bookkeeper.enter((graph, block, i))
-                    self.consider_op(block, i)
-                finally:
-                    self.bookkeeper.leave()
-
-        except BlockedInference, e:
-            if (e.op is block.operations[-1] and
-                block.exitswitch == c_last_exception):
-                # this is the case where the last operation of the block will
-                # always raise an exception which is immediately caught by
-                # an exception handler.  We then only follow the exceptional
-                # branches.
-                exits = [link for link in block.exits
-                              if link.exitcase is not None]
-
-            elif e.op.opname in ('simple_call', 'call_args', 'next'):
-                # XXX warning, keep the name of the call operations in sync
-                # with the flow object space.  These are the operations for
-                # which it is fine to always raise an exception.  We then
-                # swallow the BlockedInference and that's it.
-                # About 'next': see test_annotate_iter_empty_container().
-                return
-
-            else:
-                # other cases are problematic (but will hopefully be solved
-                # later by reflowing).  Throw the BlockedInference up to
-                # processblock().
-                raise
-
-        except annmodel.HarmlesslyBlocked:
-            return
-
-        else:
-            # dead code removal: don't follow all exits if the exitswitch
-            # is known
-            exits = block.exits
-            if isinstance(block.exitswitch, Variable):
-                s_exitswitch = self.bindings[block.exitswitch]
-                if s_exitswitch.is_constant():
-                    exits = [link for link in exits
-                                  if link.exitcase == s_exitswitch.const]
-
-        # mapping (exitcase, variable) -> s_annotation
-        # that can be attached to booleans, exitswitches
-        knowntypedata = getattr(self.bindings.get(block.exitswitch),
-                                "knowntypedata", {})
-
-        # filter out those exceptions which cannot
-        # occur for this specific, typed operation.
-        if block.exitswitch == c_last_exception:
-            op = block.operations[-1]
-            if op.opname in binaryop.BINARY_OPERATIONS:
-                arg1 = self.binding(op.args[0])
-                arg2 = self.binding(op.args[1])
-                binop = getattr(pair(arg1, arg2), op.opname, None)
-                can_only_throw = annmodel.read_can_only_throw(binop, arg1, arg2)
-            elif op.opname in unaryop.UNARY_OPERATIONS:
-                arg1 = self.binding(op.args[0])
-                opname = op.opname
-                if opname == 'contains': opname = 'op_contains'
-                unop = getattr(arg1, opname, None)
-                can_only_throw = annmodel.read_can_only_throw(unop, arg1)
-            else:
-                can_only_throw = None
-
-            if can_only_throw is not None:
-                candidates = can_only_throw
-                candidate_exits = exits
-                exits = []
-                for link in candidate_exits:
-                    case = link.exitcase
-                    if case is None:
-                        exits.append(link)
-                        continue
-                    covered = [c for c in candidates if issubclass(c, case)]
-                    if covered:
-                        exits.append(link)
-                        candidates = [c for c in candidates if c not in covered]
-
-        for link in exits:
-            in_except_block = False
-
-            last_exception_var = link.last_exception # may be None for non-exception link
-            last_exc_value_var = link.last_exc_value # may be None for non-exception link
-
-            if isinstance(link.exitcase, (types.ClassType, type)) \
-                   and issubclass(link.exitcase, py.builtin.BaseException):
-                assert last_exception_var and last_exc_value_var
-                last_exc_value_object = self.bookkeeper.valueoftype(link.exitcase)
-                last_exception_object = annmodel.SomeType()
-                if isinstance(last_exception_var, Constant):
-                    last_exception_object.const = last_exception_var.value
-                last_exception_object.is_type_of = [last_exc_value_var]
-
-                if isinstance(last_exception_var, Variable):
-                    self.setbinding(last_exception_var, last_exception_object)
-                if isinstance(last_exc_value_var, Variable):
-                    self.setbinding(last_exc_value_var, last_exc_value_object)
-
-                last_exception_object = annmodel.SomeType()
-                if isinstance(last_exception_var, Constant):
-                    last_exception_object.const = last_exception_var.value
-                #if link.exitcase is Exception:
-                #    last_exc_value_object = annmodel.SomeObject()
-                #else:
-                last_exc_value_vars = []
-                in_except_block = True
-
-            ignore_link = False
-            cells = []
-            renaming = {}
-            for a,v in zip(link.args,link.target.inputargs):
-                renaming.setdefault(a, []).append(v)
-            for a,v in zip(link.args,link.target.inputargs):
-                if a == last_exception_var:
-                    assert in_except_block
-                    cells.append(last_exception_object)
-                elif a == last_exc_value_var:
-                    assert in_except_block
-                    cells.append(last_exc_value_object)
-                    last_exc_value_vars.append(v)
-                else:
-                    cell = self.binding(a)
-                    if (link.exitcase, a) in knowntypedata:
-                        knownvarvalue = knowntypedata[(link.exitcase, a)]
-                        cell = pair(cell, knownvarvalue).improve()
-                        # ignore links that try to pass impossible values
-                        if cell == annmodel.s_ImpossibleValue:
-                            ignore_link = True
-
-                    if hasattr(cell,'is_type_of'):
-                        renamed_is_type_of = []
-                        for v in cell.is_type_of:
-                            new_vs = renaming.get(v,[])
-                            renamed_is_type_of += new_vs
-                        assert cell.knowntype is type
-                        newcell = annmodel.SomeType()
-                        if cell.is_constant():
-                            newcell.const = cell.const
-                        cell = newcell
-                        cell.is_type_of = renamed_is_type_of
-
-                    if hasattr(cell, 'knowntypedata'):
-                        renamed_knowntypedata = {}
-                        for (value, v), s in cell.knowntypedata.items():
-                            new_vs = renaming.get(v, [])
-                            for new_v in new_vs:
-                                renamed_knowntypedata[value, new_v] = s
-                        assert isinstance(cell, annmodel.SomeBool)
-                        newcell = annmodel.SomeBool()
-                        if cell.is_constant():
-                            newcell.const = cell.const
-                        cell = newcell
-                        cell.set_knowntypedata(renamed_knowntypedata)
-
-                    cells.append(cell)
-
-            if ignore_link:
-                continue
-
-            if in_except_block:
-                last_exception_object.is_type_of = last_exc_value_vars
-
-            self.links_followed[link] = True
-            self.addpendingblock(graph, link.target, cells)
-
-        if block in self.notify:
-            # reflow from certain positions when this block is done
-            for callback in self.notify[block]:
-                if isinstance(callback, tuple):
-                    self.reflowfromposition(callback) # callback is a position
-                else:
-                    callback()
-
-
-    #___ creating the annotations based on operations ______
-
-    def consider_op(self, block, opindex):
-        op = block.operations[opindex]
-        argcells = [self.binding(a) for a in op.args]
-        consider_meth = getattr(self,'consider_op_'+op.opname,
-                                None)
-        if not consider_meth:
-            raise Exception,"unknown op: %r" % op
-
-        # let's be careful about avoiding propagated SomeImpossibleValues
-        # to enter an op; the latter can result in violations of the
-        # more general results invariant: e.g. if SomeImpossibleValue enters is_
-        #  is_(SomeImpossibleValue, None) -> SomeBool
-        #  is_(SomeInstance(not None), None) -> SomeBool(const=False) ...
-        # boom -- in the assert of setbinding()
-        for arg in argcells:
-            if isinstance(arg, annmodel.SomeImpossibleValue):
-                raise BlockedInference(self, op, opindex)
-        try:
-            resultcell = consider_meth(*argcells)
-        except Exception, e:
-            graph = self.bookkeeper.position_key[0]
-            e.args = e.args + (
-                ErrorWrapper(gather_error(self, graph, block, opindex)),)
-            raise
-        if resultcell is None:
-            resultcell = self.noreturnvalue(op)
-        elif resultcell == annmodel.s_ImpossibleValue:
-            raise BlockedInference(self, op, opindex) # the operation cannot succeed
-        assert isinstance(resultcell, annmodel.SomeObject)
-        assert isinstance(op.result, Variable)
-        self.setbinding(op.result, resultcell)  # bind resultcell to op.result
-
-    def noreturnvalue(self, op):
-        return annmodel.s_ImpossibleValue  # no return value (hook method)
-
-    # XXX "contains" clash with SomeObject method
-    def consider_op_contains(self, seq, elem):
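
The file deleted above (presumably relocated as part of the pypy-to-rpython package split that the rest of this merge carries out) is organised around a worklist fixed point: complete() keeps popping pending blocks, processblock()/flowin() propagate annotations to successor blocks, and any block whose input annotations had to be generalized is re-queued until nothing is pending. A minimal, generic sketch of that pattern, with illustrative names only and not the real RPythonAnnotator API:

    def analyse(entry_block, initial_facts, transfer, merge):
        # Generic dataflow worklist: re-process a block whenever the facts
        # flowing into one of its successors grow.
        facts = {entry_block: initial_facts}     # per-block annotations
        pending = set([entry_block])             # blocks that must (re-)flow
        while pending:
            block = pending.pop()
            for successor, out_facts in transfer(block, facts[block]):
                merged = merge(facts.get(successor), out_facts)
                if merged != facts.get(successor):
                    facts[successor] = merged    # generalized: must re-flow
                    pending.add(successor)
        return facts

    if __name__ == '__main__':
        # Toy graph: 'entry' flows into 'loop', which flows back into itself
        # until its set of facts stops growing.
        graph = {'entry': ['loop'], 'loop': ['loop']}

        def transfer(block, block_facts):
            # Each block passes its facts, plus its own name, downstream.
            return [(succ, block_facts | set([block])) for succ in graph[block]]

        def merge(old, new):
            return new if old is None else old | new

        print(analyse('entry', frozenset(), transfer, merge))
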

