[pypy-commit] pypy py3.5-fstring-pep498: hg merge py3.5

arigo pypy.commits at gmail.com
Tue Jan 24 09:54:15 EST 2017


Author: Armin Rigo <arigo at tunes.org>
Branch: py3.5-fstring-pep498
Changeset: r89742:3d484dcb6db5
Date: 2017-01-24 15:51 +0100
http://bitbucket.org/pypy/pypy/changeset/3d484dcb6db5/

Log:	hg merge py3.5

diff too long, truncating to 2000 out of 3413 lines

diff --git a/lib-python/3/test/test_asyncio/test_events.py b/lib-python/3/test/test_asyncio/test_events.py
--- a/lib-python/3/test/test_asyncio/test_events.py
+++ b/lib-python/3/test/test_asyncio/test_events.py
@@ -825,9 +825,15 @@
         server = self.loop.run_until_complete(f)
         self.assertEqual(len(server.sockets), 1)
         sock = server.sockets[0]
-        self.assertFalse(
-            sock.getsockopt(
-                socket.SOL_SOCKET, socket.SO_REUSEPORT))
+        try:
+            self.assertFalse(
+                sock.getsockopt(
+                    socket.SOL_SOCKET, socket.SO_REUSEPORT))
+        except OSError:
+            raise unittest.SkipTest(
+                "Python's socket module was compiled using modern headers "
+                "thus defining SO_REUSEPORT but this process is running "
+                "under an older kernel that does not support SO_REUSEPORT.")
         server.close()
 
         test_utils.run_briefly(self.loop)
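
The hunk above turns a hard test failure into a skip for the case where Python's
socket module was compiled against headers that define SO_REUSEPORT but the
running kernel rejects the option.  The same probe can be done stand-alone
roughly as follows (a sketch only; reuseport_supported is an illustrative name,
not part of the patch):

    import socket

    def reuseport_supported():
        # SO_REUSEPORT may be defined by the build headers yet still be
        # rejected at runtime by an older kernel (getsockopt raises OSError).
        if not hasattr(socket, "SO_REUSEPORT"):
            return False
        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        try:
            s.getsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT)
            return True
        except OSError:
            return False
        finally:
            s.close()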
diff --git a/lib_pypy/cffi.egg-info/PKG-INFO b/lib_pypy/cffi.egg-info/PKG-INFO
--- a/lib_pypy/cffi.egg-info/PKG-INFO
+++ b/lib_pypy/cffi.egg-info/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 1.1
 Name: cffi
-Version: 1.9.2
+Version: 1.10.0
 Summary: Foreign Function Interface for Python calling C code.
 Home-page: http://cffi.readthedocs.org
 Author: Armin Rigo, Maciej Fijalkowski
diff --git a/lib_pypy/cffi/__init__.py b/lib_pypy/cffi/__init__.py
--- a/lib_pypy/cffi/__init__.py
+++ b/lib_pypy/cffi/__init__.py
@@ -1,11 +1,11 @@
 __all__ = ['FFI', 'VerificationError', 'VerificationMissing', 'CDefError',
            'FFIError']
 
-from .api import FFI, CDefError, FFIError
-from .ffiplatform import VerificationError, VerificationMissing
+from .api import FFI
+from .error import CDefError, FFIError, VerificationError, VerificationMissing
 
-__version__ = "1.9.2"
-__version_info__ = (1, 9, 2)
+__version__ = "1.10.0"
+__version_info__ = (1, 10, 0)
 
 # The verifier module file names are based on the CRC32 of a string that
 # contains the following version number.  It may be older than __version__
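
The bump to 1.10.0 above is mirrored in both __version__ and __version_info__.
A quick interactive sanity check that the pure-Python package and the
_cffi_backend module agree could look like this (the values shown assume an
installation that already contains this merge):

    >>> import cffi, _cffi_backend
    >>> cffi.__version__
    '1.10.0'
    >>> _cffi_backend.__version__ == cffi.__version__
    True
    >>> cffi.__version_info__ >= (1, 10, 0)
    True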
diff --git a/lib_pypy/cffi/_embedding.h b/lib_pypy/cffi/_embedding.h
--- a/lib_pypy/cffi/_embedding.h
+++ b/lib_pypy/cffi/_embedding.h
@@ -233,7 +233,7 @@
         f = PySys_GetObject((char *)"stderr");
         if (f != NULL && f != Py_None) {
             PyFile_WriteString("\nFrom: " _CFFI_MODULE_NAME
-                               "\ncompiled with cffi version: 1.9.2"
+                               "\ncompiled with cffi version: 1.10.0"
                                "\n_cffi_backend module: ", f);
             modules = PyImport_GetModuleDict();
             mod = PyDict_GetItemString(modules, "_cffi_backend");
diff --git a/lib_pypy/cffi/api.py b/lib_pypy/cffi/api.py
--- a/lib_pypy/cffi/api.py
+++ b/lib_pypy/cffi/api.py
@@ -1,5 +1,7 @@
 import sys, types
 from .lock import allocate_lock
+from .error import CDefError
+from . import model
 
 try:
     callable
@@ -15,17 +17,6 @@
     basestring = str
 
 
-class FFIError(Exception):
-    pass
-
-class CDefError(Exception):
-    def __str__(self):
-        try:
-            line = 'line %d: ' % (self.args[1].coord.line,)
-        except (AttributeError, TypeError, IndexError):
-            line = ''
-        return '%s%s' % (line, self.args[0])
-
 
 class FFI(object):
     r'''
@@ -49,18 +40,27 @@
         """Create an FFI instance.  The 'backend' argument is used to
         select a non-default backend, mostly for tests.
         """
-        from . import cparser, model
         if backend is None:
             # You need PyPy (>= 2.0 beta), or a CPython (>= 2.6) with
             # _cffi_backend.so compiled.
             import _cffi_backend as backend
             from . import __version__
-            assert backend.__version__ == __version__, \
-               "version mismatch, %s != %s" % (backend.__version__, __version__)
+            if backend.__version__ != __version__:
+                # bad version!  Try to be as explicit as possible.
+                if hasattr(backend, '__file__'):
+                    # CPython
+                    raise Exception("Version mismatch: this is the 'cffi' package version %s, located in %r.  When we import the top-level '_cffi_backend' extension module, we get version %s, located in %r.  The two versions should be equal; check your installation." % (
+                        __version__, __file__,
+                        backend.__version__, backend.__file__))
+                else:
+                    # PyPy
+                    raise Exception("Version mismatch: this is the 'cffi' package version %s, located in %r.  This interpreter comes with a built-in '_cffi_backend' module, which is version %s.  The two versions should be equal; check your installation." % (
+                        __version__, __file__, backend.__version__))
             # (If you insist you can also try to pass the option
             # 'backend=backend_ctypes.CTypesBackend()', but don't
             # rely on it!  It's probably not going to work well.)
 
+        from . import cparser
         self._backend = backend
         self._lock = allocate_lock()
         self._parser = cparser.Parser()
@@ -212,7 +212,7 @@
 
     def offsetof(self, cdecl, *fields_or_indexes):
         """Return the offset of the named field inside the given
-        structure or array, which must be given as a C type name.  
+        structure or array, which must be given as a C type name.
         You can give several field names in case of nested structures.
         You can also give numeric values which correspond to array
         items, in case of an array type.
@@ -300,7 +300,7 @@
         return self._backend.string(cdata, maxlen)
 
     def unpack(self, cdata, length):
-        """Unpack an array of C data of the given length, 
+        """Unpack an array of C data of the given length,
         returning a Python string/unicode/list.
 
         If 'cdata' is a pointer to 'char', returns a byte string.
@@ -452,7 +452,6 @@
         return self._backend.getwinerror(code)
 
     def _pointer_to(self, ctype):
-        from . import model
         with self._lock:
             return model.pointer_cache(self, ctype)
 
@@ -764,7 +763,6 @@
         return backend.load_library(path, flags)
 
 def _make_ffi_library(ffi, libname, flags):
-    import os
     backend = ffi._backend
     backendlib = _load_backend_lib(backend, libname, flags)
     #
@@ -802,7 +800,6 @@
         if accessors_version[0] is ffi._cdef_version:
             return
         #
-        from . import model
         for key, (tp, _) in ffi._parser._declarations.items():
             if not isinstance(tp, model.EnumType):
                 tag, name = key.split(' ', 1)
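
The new error message in FFI.__init__ above distinguishes CPython, where
_cffi_backend is a separate compiled extension and therefore has a __file__,
from PyPy, where the backend ships with the interpreter.  A small diagnostic
along the same lines (illustrative only, not part of the patch):

    import _cffi_backend

    if hasattr(_cffi_backend, '__file__'):
        # CPython: the backend is an extension module installed on disk
        print("separate extension:", _cffi_backend.__file__)
    else:
        # PyPy: the backend is built into the interpreter
        print("built-in backend, version", _cffi_backend.__version__)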
diff --git a/lib_pypy/cffi/cffi_opcode.py b/lib_pypy/cffi/cffi_opcode.py
--- a/lib_pypy/cffi/cffi_opcode.py
+++ b/lib_pypy/cffi/cffi_opcode.py
@@ -1,3 +1,4 @@
+from .error import VerificationError
 
 class CffiOp(object):
     def __init__(self, op, arg):
@@ -19,7 +20,6 @@
                                     % (self.arg,))
             return format_four_bytes(value)
         if isinstance(self.arg, str):
-            from .ffiplatform import VerificationError
             raise VerificationError("cannot emit to Python: %r" % (self.arg,))
         return format_four_bytes((self.arg << 8) | self.op)
 
diff --git a/lib_pypy/cffi/commontypes.py b/lib_pypy/cffi/commontypes.py
--- a/lib_pypy/cffi/commontypes.py
+++ b/lib_pypy/cffi/commontypes.py
@@ -1,5 +1,6 @@
 import sys
-from . import api, model
+from . import model
+from .error import FFIError
 
 
 COMMON_TYPES = {}
@@ -31,11 +32,11 @@
         elif cdecl in model.PrimitiveType.ALL_PRIMITIVE_TYPES:
             result, quals = model.PrimitiveType(cdecl), 0
         elif cdecl == 'set-unicode-needed':
-            raise api.FFIError("The Windows type %r is only available after "
-                               "you call ffi.set_unicode()" % (commontype,))
+            raise FFIError("The Windows type %r is only available after "
+                           "you call ffi.set_unicode()" % (commontype,))
         else:
             if commontype == cdecl:
-                raise api.FFIError(
+                raise FFIError(
                     "Unsupported type: %r.  Please look at "
         "http://cffi.readthedocs.io/en/latest/cdef.html#ffi-cdef-limitations "
                     "and file an issue if you think this type should really "
diff --git a/lib_pypy/cffi/cparser.py b/lib_pypy/cffi/cparser.py
--- a/lib_pypy/cffi/cparser.py
+++ b/lib_pypy/cffi/cparser.py
@@ -1,5 +1,6 @@
-from . import api, model
+from . import model
 from .commontypes import COMMON_TYPES, resolve_common_type
+from .error import FFIError, CDefError
 try:
     from . import _pycparser as pycparser
 except ImportError:
@@ -113,7 +114,7 @@
             # grouping variant
             closing = csource.find('}', endpos)
             if closing < 0:
-                raise api.CDefError("'extern \"Python\" {': no '}' found")
+                raise CDefError("'extern \"Python\" {': no '}' found")
             if csource.find('{', endpos + 1, closing) >= 0:
                 raise NotImplementedError("cannot use { } inside a block "
                                           "'extern \"Python\" { ... }'")
@@ -123,7 +124,7 @@
             # non-grouping variant
             semicolon = csource.find(';', endpos)
             if semicolon < 0:
-                raise api.CDefError("'extern \"Python\": no ';' found")
+                raise CDefError("'extern \"Python\": no ';' found")
             parts.append(csource[endpos:semicolon+1])
             csource = csource[semicolon+1:]
         parts.append(' void __cffi_extern_python_stop;')
@@ -288,7 +289,7 @@
             msg = 'cannot parse "%s"\n%s' % (line.strip(), msg)
         else:
             msg = 'parse error\n%s' % (msg,)
-        raise api.CDefError(msg)
+        raise CDefError(msg)
 
     def parse(self, csource, override=False, packed=False, dllexport=False):
         prev_options = self._options
@@ -318,8 +319,8 @@
                     self._parse_decl(decl)
                 elif isinstance(decl, pycparser.c_ast.Typedef):
                     if not decl.name:
-                        raise api.CDefError("typedef does not declare any name",
-                                            decl)
+                        raise CDefError("typedef does not declare any name",
+                                        decl)
                     quals = 0
                     if (isinstance(decl.type.type, pycparser.c_ast.IdentifierType)
                             and decl.type.type.names[-1] == '__dotdotdot__'):
@@ -337,8 +338,8 @@
                 elif decl.__class__.__name__ == 'Pragma':
                     pass    # skip pragma, only in pycparser 2.15
                 else:
-                    raise api.CDefError("unrecognized construct", decl)
-        except api.FFIError as e:
+                    raise CDefError("unrecognized construct", decl)
+        except FFIError as e:
             msg = self._convert_pycparser_error(e, csource)
             if msg:
                 e.args = (e.args[0] + "\n    *** Err: %s" % msg,)
@@ -348,7 +349,7 @@
         if key in self._int_constants:
             if self._int_constants[key] == val:
                 return     # ignore identical double declarations
-            raise api.FFIError(
+            raise FFIError(
                 "multiple declarations of constant: %s" % (key,))
         self._int_constants[key] = val
 
@@ -375,7 +376,7 @@
             elif value == '...':
                 self._declare('macro ' + key, value)
             else:
-                raise api.CDefError(
+                raise CDefError(
                     'only supports one of the following syntax:\n'
                     '  #define %s ...     (literally dot-dot-dot)\n'
                     '  #define %s NUMBER  (with NUMBER an integer'
@@ -410,8 +411,8 @@
             elif isinstance(node, pycparser.c_ast.Enum):
                 self._get_struct_union_enum_type('enum', node)
             elif not decl.name:
-                raise api.CDefError("construct does not declare any variable",
-                                    decl)
+                raise CDefError("construct does not declare any variable",
+                                decl)
             #
             if decl.name:
                 tp, quals = self._get_type_and_quals(node,
@@ -438,7 +439,7 @@
                     self._inside_extern_python = decl.name
                 else:
                     if self._inside_extern_python !='__cffi_extern_python_stop':
-                        raise api.CDefError(
+                        raise CDefError(
                             "cannot declare constants or "
                             "variables with 'extern \"Python\"'")
                     if (quals & model.Q_CONST) and not tp.is_array_type:
@@ -454,7 +455,7 @@
         assert not macros
         exprnode = ast.ext[-1].type.args.params[0]
         if isinstance(exprnode, pycparser.c_ast.ID):
-            raise api.CDefError("unknown identifier '%s'" % (exprnode.name,))
+            raise CDefError("unknown identifier '%s'" % (exprnode.name,))
         return self._get_type_and_quals(exprnode.type)
 
     def _declare(self, name, obj, included=False, quals=0):
@@ -463,7 +464,7 @@
             if prevobj is obj and prevquals == quals:
                 return
             if not self._options.get('override'):
-                raise api.FFIError(
+                raise FFIError(
                     "multiple declarations of %s (for interactive usage, "
                     "try cdef(xx, override=True))" % (name,))
         assert '__dotdotdot__' not in name.split()
@@ -551,7 +552,7 @@
                 if ident == 'void':
                     return model.void_type, quals
                 if ident == '__dotdotdot__':
-                    raise api.FFIError(':%d: bad usage of "..."' %
+                    raise FFIError(':%d: bad usage of "..."' %
                             typenode.coord.line)
                 tp0, quals0 = resolve_common_type(self, ident)
                 return tp0, (quals | quals0)
@@ -583,14 +584,14 @@
             return self._get_struct_union_enum_type('union', typenode, name,
                                                     nested=True), 0
         #
-        raise api.FFIError(":%d: bad or unsupported type declaration" %
+        raise FFIError(":%d: bad or unsupported type declaration" %
                 typenode.coord.line)
 
     def _parse_function_type(self, typenode, funcname=None):
         params = list(getattr(typenode.args, 'params', []))
         for i, arg in enumerate(params):
             if not hasattr(arg, 'type'):
-                raise api.CDefError("%s arg %d: unknown type '%s'"
+                raise CDefError("%s arg %d: unknown type '%s'"
                     " (if you meant to use the old C syntax of giving"
                     " untyped arguments, it is not supported)"
                     % (funcname or 'in expression', i + 1,
@@ -604,7 +605,7 @@
         if ellipsis:
             params.pop()
             if not params:
-                raise api.CDefError(
+                raise CDefError(
                     "%s: a function with only '(...)' as argument"
                     " is not correct C" % (funcname or 'in expression'))
         args = [self._as_func_arg(*self._get_type_and_quals(argdeclnode.type))
@@ -705,7 +706,7 @@
             return tp
         #
         if tp.fldnames is not None:
-            raise api.CDefError("duplicate declaration of struct %s" % name)
+            raise CDefError("duplicate declaration of struct %s" % name)
         fldnames = []
         fldtypes = []
         fldbitsize = []
@@ -749,7 +750,7 @@
 
     def _make_partial(self, tp, nested):
         if not isinstance(tp, model.StructOrUnion):
-            raise api.CDefError("%s cannot be partial" % (tp,))
+            raise CDefError("%s cannot be partial" % (tp,))
         if not tp.has_c_name() and not nested:
             raise NotImplementedError("%s is partial but has no C name" %(tp,))
         tp.partial = True
@@ -769,7 +770,7 @@
                     len(s) == 3 or (len(s) == 4 and s[1] == "\\")):
                 return ord(s[-2])
             else:
-                raise api.CDefError("invalid constant %r" % (s,))
+                raise CDefError("invalid constant %r" % (s,))
         #
         if (isinstance(exprnode, pycparser.c_ast.UnaryOp) and
                 exprnode.op == '+'):
@@ -788,12 +789,12 @@
             if partial_length_ok:
                 self._partial_length = True
                 return '...'
-            raise api.FFIError(":%d: unsupported '[...]' here, cannot derive "
-                               "the actual array length in this context"
-                               % exprnode.coord.line)
+            raise FFIError(":%d: unsupported '[...]' here, cannot derive "
+                           "the actual array length in this context"
+                           % exprnode.coord.line)
         #
-        raise api.FFIError(":%d: unsupported expression: expected a "
-                           "simple numeric constant" % exprnode.coord.line)
+        raise FFIError(":%d: unsupported expression: expected a "
+                       "simple numeric constant" % exprnode.coord.line)
 
     def _build_enum_type(self, explicit_name, decls):
         if decls is not None:
@@ -843,8 +844,8 @@
             for t in typenames[:-1]:
                 if t not in ['int', 'short', 'long', 'signed',
                              'unsigned', 'char']:
-                    raise api.FFIError(':%d: bad usage of "..."' %
-                                       decl.coord.line)
+                    raise FFIError(':%d: bad usage of "..."' %
+                                   decl.coord.line)
             result = model.UnknownIntegerType(decl.name)
 
         if self._uses_new_feature is None:
diff --git a/lib_pypy/cffi/error.py b/lib_pypy/cffi/error.py
new file mode 100644
--- /dev/null
+++ b/lib_pypy/cffi/error.py
@@ -0,0 +1,20 @@
+
+class FFIError(Exception):
+    pass
+
+class CDefError(Exception):
+    def __str__(self):
+        try:
+            line = 'line %d: ' % (self.args[1].coord.line,)
+        except (AttributeError, TypeError, IndexError):
+            line = ''
+        return '%s%s' % (line, self.args[0])
+
+class VerificationError(Exception):
+    """ An error raised when verification fails
+    """
+
+class VerificationMissing(Exception):
+    """ An error raised when incomplete structures are passed into
+    cdef, but no verification has been done
+    """
diff --git a/lib_pypy/cffi/ffiplatform.py b/lib_pypy/cffi/ffiplatform.py
--- a/lib_pypy/cffi/ffiplatform.py
+++ b/lib_pypy/cffi/ffiplatform.py
@@ -1,14 +1,5 @@
 import sys, os
-
-
-class VerificationError(Exception):
-    """ An error raised when verification fails
-    """
-
-class VerificationMissing(Exception):
-    """ An error raised when incomplete structures are passed into
-    cdef, but no verification has been done
-    """
+from .error import VerificationError
 
 
 LIST_OF_FILE_NAMES = ['sources', 'include_dirs', 'library_dirs',
diff --git a/lib_pypy/cffi/model.py b/lib_pypy/cffi/model.py
--- a/lib_pypy/cffi/model.py
+++ b/lib_pypy/cffi/model.py
@@ -1,8 +1,8 @@
-import types, sys
+import types
 import weakref
 
 from .lock import allocate_lock
-
+from .error import CDefError, VerificationError, VerificationMissing
 
 # type qualifiers
 Q_CONST    = 0x01
@@ -39,7 +39,6 @@
         replace_with = qualify(quals, replace_with)
         result = result.replace('&', replace_with)
         if '$' in result:
-            from .ffiplatform import VerificationError
             raise VerificationError(
                 "cannot generate '%s' in %s: unknown type name"
                 % (self._get_c_name(), context))
@@ -223,9 +222,8 @@
     is_raw_function = True
 
     def build_backend_type(self, ffi, finishlist):
-        from . import api
-        raise api.CDefError("cannot render the type %r: it is a function "
-                            "type, not a pointer-to-function type" % (self,))
+        raise CDefError("cannot render the type %r: it is a function "
+                        "type, not a pointer-to-function type" % (self,))
 
     def as_function_pointer(self):
         return FunctionPtrType(self.args, self.result, self.ellipsis, self.abi)
@@ -307,9 +305,8 @@
 
     def build_backend_type(self, ffi, finishlist):
         if self.length == '...':
-            from . import api
-            raise api.CDefError("cannot render the type %r: unknown length" %
-                                (self,))
+            raise CDefError("cannot render the type %r: unknown length" %
+                            (self,))
         self.item.get_cached_btype(ffi, finishlist)   # force the item BType
         BPtrItem = PointerType(self.item).get_cached_btype(ffi, finishlist)
         return global_cache(self, ffi, 'new_array_type', BPtrItem, self.length)
@@ -455,13 +452,11 @@
         self.completed = 2
 
     def _verification_error(self, msg):
-        from .ffiplatform import VerificationError
         raise VerificationError(msg)
 
     def check_not_partial(self):
         if self.partial and self.fixedlayout is None:
-            from . import ffiplatform
-            raise ffiplatform.VerificationMissing(self._get_c_name())
+            raise VerificationMissing(self._get_c_name())
 
     def build_backend_type(self, ffi, finishlist):
         self.check_not_partial()
@@ -499,8 +494,7 @@
 
     def check_not_partial(self):
         if self.partial and not self.partial_resolved:
-            from . import ffiplatform
-            raise ffiplatform.VerificationMissing(self._get_c_name())
+            raise VerificationMissing(self._get_c_name())
 
     def build_backend_type(self, ffi, finishlist):
         self.check_not_partial()
@@ -514,7 +508,6 @@
         if self.baseinttype is not None:
             return self.baseinttype.get_cached_btype(ffi, finishlist)
         #
-        from . import api
         if self.enumvalues:
             smallest_value = min(self.enumvalues)
             largest_value = max(self.enumvalues)
@@ -549,8 +542,8 @@
         if (smallest_value >= ((-1) << (8*size2-1)) and
             largest_value < (1 << (8*size2-sign))):
             return btype2
-        raise api.CDefError("%s values don't all fit into either 'long' "
-                            "or 'unsigned long'" % self._get_c_name())
+        raise CDefError("%s values don't all fit into either 'long' "
+                        "or 'unsigned long'" % self._get_c_name())
 
 def unknown_type(name, structname=None):
     if structname is None:
diff --git a/lib_pypy/cffi/recompiler.py b/lib_pypy/cffi/recompiler.py
--- a/lib_pypy/cffi/recompiler.py
+++ b/lib_pypy/cffi/recompiler.py
@@ -1,5 +1,6 @@
 import os, sys, io
 from . import ffiplatform, model
+from .error import VerificationError
 from .cffi_opcode import *
 
 VERSION = "0x2601"
@@ -211,7 +212,7 @@
                 method = getattr(self, '_generate_cpy_%s_%s' % (kind,
                                                                 step_name))
             except AttributeError:
-                raise ffiplatform.VerificationError(
+                raise VerificationError(
                     "not implemented in recompile(): %r" % name)
             try:
                 self._current_quals = quals
@@ -354,12 +355,12 @@
                     included_module_name, included_source = (
                         ffi_to_include._assigned_source[:2])
                 except AttributeError:
-                    raise ffiplatform.VerificationError(
+                    raise VerificationError(
                         "ffi object %r includes %r, but the latter has not "
                         "been prepared with set_source()" % (
                             self.ffi, ffi_to_include,))
                 if included_source is None:
-                    raise ffiplatform.VerificationError(
+                    raise VerificationError(
                         "not implemented yet: ffi.include() of a Python-based "
                         "ffi inside a C-based ffi")
                 prnt('  "%s",' % (included_module_name,))
@@ -391,6 +392,10 @@
         prnt()
         #
         # the init function
+        prnt('#ifdef __GNUC__')
+        prnt('#  pragma GCC visibility push(default)  /* for -fvisibility= */')
+        prnt('#endif')
+        prnt()
         prnt('#ifdef PYPY_VERSION')
         prnt('PyMODINIT_FUNC')
         prnt('_cffi_pypyinit_%s(const void *p[])' % (base_module_name,))
@@ -429,6 +434,10 @@
             self.module_name, version))
         prnt('}')
         prnt('#endif')
+        prnt()
+        prnt('#ifdef __GNUC__')
+        prnt('#  pragma GCC visibility pop')
+        prnt('#endif')
 
     def _to_py(self, x):
         if isinstance(x, str):
@@ -456,12 +465,12 @@
                 included_module_name, included_source = (
                     ffi_to_include._assigned_source[:2])
             except AttributeError:
-                raise ffiplatform.VerificationError(
+                raise VerificationError(
                     "ffi object %r includes %r, but the latter has not "
                     "been prepared with set_source()" % (
                         self.ffi, ffi_to_include,))
             if included_source is not None:
-                raise ffiplatform.VerificationError(
+                raise VerificationError(
                     "not implemented yet: ffi.include() of a C-based "
                     "ffi inside a Python-based ffi")
             prnt('from %s import ffi as _ffi%d' % (included_module_name, i))
@@ -831,7 +840,7 @@
                 prnt('  { %s = &p->%s; (void)tmp; }' % (
                     ftype.get_c_name('*tmp', 'field %r'%fname, quals=fqual),
                     fname))
-            except ffiplatform.VerificationError as e:
+            except VerificationError as e:
                 prnt('  /* %s */' % str(e))   # cannot verify it, ignore
         prnt('}')
         prnt('struct _cffi_align_%s { char x; %s y; };' % (approxname, cname))
@@ -994,7 +1003,7 @@
     def _generate_cpy_const(self, is_int, name, tp=None, category='const',
                             check_value=None):
         if (category, name) in self._seen_constants:
-            raise ffiplatform.VerificationError(
+            raise VerificationError(
                 "duplicate declaration of %s '%s'" % (category, name))
         self._seen_constants.add((category, name))
         #
@@ -1093,7 +1102,7 @@
     def _generate_cpy_macro_ctx(self, tp, name):
         if tp == '...':
             if self.target_is_python:
-                raise ffiplatform.VerificationError(
+                raise VerificationError(
                     "cannot use the syntax '...' in '#define %s ...' when "
                     "using the ABI mode" % (name,))
             check_value = None
@@ -1226,7 +1235,7 @@
 
     def _generate_cpy_extern_python_ctx(self, tp, name):
         if self.target_is_python:
-            raise ffiplatform.VerificationError(
+            raise VerificationError(
                 "cannot use 'extern \"Python\"' in the ABI mode")
         if tp.ellipsis:
             raise NotImplementedError("a vararg function is extern \"Python\"")
@@ -1307,7 +1316,7 @@
         if tp.length is None:
             self.cffi_types[index] = CffiOp(OP_OPEN_ARRAY, item_index)
         elif tp.length == '...':
-            raise ffiplatform.VerificationError(
+            raise VerificationError(
                 "type %s badly placed: the '...' array length can only be "
                 "used on global arrays or on fields of structures" % (
                     str(tp).replace('/*...*/', '...'),))
diff --git a/lib_pypy/cffi/setuptools_ext.py b/lib_pypy/cffi/setuptools_ext.py
--- a/lib_pypy/cffi/setuptools_ext.py
+++ b/lib_pypy/cffi/setuptools_ext.py
@@ -79,9 +79,10 @@
     CPython itself should ignore the flag in a debugging version
     (by not listing .abi3.so in the extensions it supports), but
     it doesn't so far, creating troubles.  That's why we check
-    for "not sys.flags.debug". (http://bugs.python.org/issue28401)
+    for "not hasattr(sys, 'gettotalrefcount')" (the 2.7 compatible equivalent
+    of 'd' not in sys.abiflags). (http://bugs.python.org/issue28401)
     """
-    if 'py_limited_api' not in kwds and not sys.flags.debug:
+    if 'py_limited_api' not in kwds and not hasattr(sys, 'gettotalrefcount'):
         import setuptools
         try:
             setuptools_major_version = int(setuptools.__version__.partition('.')[0])
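
The docstring change above spells out why the check switched from
sys.flags.debug to hasattr(sys, 'gettotalrefcount'): sys.gettotalrefcount only
exists in CPython builds configured with --with-pydebug, which makes it a
2.7-compatible stand-in for testing 'd' in sys.abiflags.  A quick way to
confirm that equivalence on a given interpreter (sketch):

    import sys

    is_debug_build = hasattr(sys, "gettotalrefcount")
    # On Python 3 the two tests should agree; sys.abiflags is absent on 2.7.
    if hasattr(sys, "abiflags"):
        assert is_debug_build == ("d" in sys.abiflags)
    print("debug build:", is_debug_build)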
diff --git a/lib_pypy/cffi/vengine_cpy.py b/lib_pypy/cffi/vengine_cpy.py
--- a/lib_pypy/cffi/vengine_cpy.py
+++ b/lib_pypy/cffi/vengine_cpy.py
@@ -2,7 +2,8 @@
 # DEPRECATED: implementation for ffi.verify()
 #
 import sys, imp
-from . import model, ffiplatform
+from . import model
+from .error import VerificationError
 
 
 class VCPythonEngine(object):
@@ -155,7 +156,7 @@
                                           self.verifier.modulefilename)
             except ImportError as e:
                 error = "importing %r: %s" % (self.verifier.modulefilename, e)
-                raise ffiplatform.VerificationError(error)
+                raise VerificationError(error)
             finally:
                 if hasattr(sys, "setdlopenflags"):
                     sys.setdlopenflags(previous_flags)
@@ -185,7 +186,7 @@
             def __dir__(self):
                 return FFILibrary._cffi_dir + list(self.__dict__)
         library = FFILibrary()
-        if module._cffi_setup(lst, ffiplatform.VerificationError, library):
+        if module._cffi_setup(lst, VerificationError, library):
             import warnings
             warnings.warn("reimporting %r might overwrite older definitions"
                           % (self.verifier.get_module_name()))
@@ -212,7 +213,7 @@
                 method = getattr(self, '_generate_cpy_%s_%s' % (kind,
                                                                 step_name))
             except AttributeError:
-                raise ffiplatform.VerificationError(
+                raise VerificationError(
                     "not implemented in verify(): %r" % name)
             try:
                 method(tp, realname)
@@ -485,7 +486,7 @@
                     prnt('  { %s = &p->%s; (void)tmp; }' % (
                         ftype.get_c_name('*tmp', 'field %r'%fname, quals=fqual),
                         fname))
-                except ffiplatform.VerificationError as e:
+                except VerificationError as e:
                     prnt('  /* %s */' % str(e))   # cannot verify it, ignore
         prnt('}')
         prnt('static PyObject *')
@@ -550,7 +551,7 @@
             # check that the layout sizes and offsets match the real ones
             def check(realvalue, expectedvalue, msg):
                 if realvalue != expectedvalue:
-                    raise ffiplatform.VerificationError(
+                    raise VerificationError(
                         "%s (we have %d, but C compiler says %d)"
                         % (msg, expectedvalue, realvalue))
             ffi = self.ffi
@@ -771,7 +772,7 @@
                 BItemType = self.ffi._get_cached_btype(tp.item)
                 length, rest = divmod(size, self.ffi.sizeof(BItemType))
                 if rest != 0:
-                    raise ffiplatform.VerificationError(
+                    raise VerificationError(
                         "bad size: %r does not seem to be an array of %s" %
                         (name, tp.item))
                 tp = tp.resolve_length(length)
diff --git a/lib_pypy/cffi/vengine_gen.py b/lib_pypy/cffi/vengine_gen.py
--- a/lib_pypy/cffi/vengine_gen.py
+++ b/lib_pypy/cffi/vengine_gen.py
@@ -4,7 +4,8 @@
 import sys, os
 import types
 
-from . import model, ffiplatform
+from . import model
+from .error import VerificationError
 
 
 class VGenericEngine(object):
@@ -102,7 +103,7 @@
                 method = getattr(self, '_generate_gen_%s_%s' % (kind,
                                                                 step_name))
             except AttributeError:
-                raise ffiplatform.VerificationError(
+                raise VerificationError(
                     "not implemented in verify(): %r" % name)
             try:
                 method(tp, realname)
@@ -281,7 +282,7 @@
                     prnt('  { %s = &p->%s; (void)tmp; }' % (
                         ftype.get_c_name('*tmp', 'field %r'%fname, quals=fqual),
                         fname))
-                except ffiplatform.VerificationError as e:
+                except VerificationError as e:
                     prnt('  /* %s */' % str(e))   # cannot verify it, ignore
         prnt('}')
         self.export_symbols.append(layoutfuncname)
@@ -344,7 +345,7 @@
             # check that the layout sizes and offsets match the real ones
             def check(realvalue, expectedvalue, msg):
                 if realvalue != expectedvalue:
-                    raise ffiplatform.VerificationError(
+                    raise VerificationError(
                         "%s (we have %d, but C compiler says %d)"
                         % (msg, expectedvalue, realvalue))
             ffi = self.ffi
@@ -498,7 +499,7 @@
             error = self.ffi.string(p)
             if sys.version_info >= (3,):
                 error = str(error, 'utf-8')
-            raise ffiplatform.VerificationError(error)
+            raise VerificationError(error)
 
     def _enum_funcname(self, prefix, name):
         # "$enum_$1" => "___D_enum____D_1"
@@ -591,7 +592,7 @@
                 BItemType = self.ffi._get_cached_btype(tp.item)
                 length, rest = divmod(size, self.ffi.sizeof(BItemType))
                 if rest != 0:
-                    raise ffiplatform.VerificationError(
+                    raise VerificationError(
                         "bad size: %r does not seem to be an array of %s" %
                         (name, tp.item))
                 tp = tp.resolve_length(length)
diff --git a/lib_pypy/cffi/verifier.py b/lib_pypy/cffi/verifier.py
--- a/lib_pypy/cffi/verifier.py
+++ b/lib_pypy/cffi/verifier.py
@@ -4,6 +4,7 @@
 import sys, os, binascii, shutil, io
 from . import __version_verifier_modules__
 from . import ffiplatform
+from .error import VerificationError
 
 if sys.version_info >= (3, 3):
     import importlib.machinery
@@ -42,7 +43,7 @@
                  ext_package=None, tag='', force_generic_engine=False,
                  source_extension='.c', flags=None, relative_to=None, **kwds):
         if ffi._parser._uses_new_feature:
-            raise ffiplatform.VerificationError(
+            raise VerificationError(
                 "feature not supported with ffi.verify(), but only "
                 "with ffi.set_source(): %s" % (ffi._parser._uses_new_feature,))
         self.ffi = ffi
@@ -83,7 +84,7 @@
         which can be tweaked beforehand."""
         with self.ffi._lock:
             if self._has_source and file is None:
-                raise ffiplatform.VerificationError(
+                raise VerificationError(
                     "source code already written")
             self._write_source(file)
 
@@ -92,7 +93,7 @@
         This produces a dynamic link library in 'self.modulefilename'."""
         with self.ffi._lock:
             if self._has_module:
-                raise ffiplatform.VerificationError("module already compiled")
+                raise VerificationError("module already compiled")
             if not self._has_source:
                 self._write_source()
             self._compile_module()
diff --git a/pypy/interpreter/astcompiler/ast.py b/pypy/interpreter/astcompiler/ast.py
--- a/pypy/interpreter/astcompiler/ast.py
+++ b/pypy/interpreter/astcompiler/ast.py
@@ -31,6 +31,7 @@
 
 class AST(object):
     __metaclass__ = extendabletype
+    _attrs_ = ['lineno', 'col_offset']
 
     def walkabout(self, visitor):
         raise AssertionError("walkabout() implementation not provided")
@@ -138,7 +139,7 @@
         self.w_AST = space.gettypeobject(W_AST.typedef)
         for (name, base, fields, attributes) in self.AST_TYPES:
             self.make_new_type(space, name, base, fields, attributes)
-        
+
     def make_new_type(self, space, name, base, fields, attributes):
         w_base = getattr(self, 'w_%s' % base)
         w_dict = space.newdict()
@@ -150,7 +151,7 @@
             space.setitem_str(w_dict, "_attributes",
                               space.newtuple([space.wrap(a) for a in attributes]))
         w_type = space.call_function(
-            space.w_type, 
+            space.w_type,
             space.wrap(name), space.newtuple([w_base]), w_dict)
         setattr(self, 'w_%s' % name, w_type)
 
@@ -184,7 +185,9 @@
 
     def mutate_over(self, visitor):
         if self.body:
-            visitor._mutate_sequence(self.body)
+            for i in range(len(self.body)):
+                if self.body[i] is not None:
+                    self.body[i] = self.body[i].mutate_over(visitor)
         return visitor.visit_Module(self)
 
     def to_object(self, space):
@@ -217,7 +220,9 @@
 
     def mutate_over(self, visitor):
         if self.body:
-            visitor._mutate_sequence(self.body)
+            for i in range(len(self.body)):
+                if self.body[i] is not None:
+                    self.body[i] = self.body[i].mutate_over(visitor)
         return visitor.visit_Interactive(self)
 
     def to_object(self, space):
@@ -279,7 +284,9 @@
 
     def mutate_over(self, visitor):
         if self.body:
-            visitor._mutate_sequence(self.body)
+            for i in range(len(self.body)):
+                if self.body[i] is not None:
+                    self.body[i] = self.body[i].mutate_over(visitor)
         return visitor.visit_Suite(self)
 
     def to_object(self, space):
@@ -380,9 +387,13 @@
     def mutate_over(self, visitor):
         self.args = self.args.mutate_over(visitor)
         if self.body:
-            visitor._mutate_sequence(self.body)
+            for i in range(len(self.body)):
+                if self.body[i] is not None:
+                    self.body[i] = self.body[i].mutate_over(visitor)
         if self.decorator_list:
-            visitor._mutate_sequence(self.decorator_list)
+            for i in range(len(self.decorator_list)):
+                if self.decorator_list[i] is not None:
+                    self.decorator_list[i] = self.decorator_list[i].mutate_over(visitor)
         if self.returns:
             self.returns = self.returns.mutate_over(visitor)
         return visitor.visit_FunctionDef(self)
@@ -456,9 +467,13 @@
     def mutate_over(self, visitor):
         self.args = self.args.mutate_over(visitor)
         if self.body:
-            visitor._mutate_sequence(self.body)
+            for i in range(len(self.body)):
+                if self.body[i] is not None:
+                    self.body[i] = self.body[i].mutate_over(visitor)
         if self.decorator_list:
-            visitor._mutate_sequence(self.decorator_list)
+            for i in range(len(self.decorator_list)):
+                if self.decorator_list[i] is not None:
+                    self.decorator_list[i] = self.decorator_list[i].mutate_over(visitor)
         if self.returns:
             self.returns = self.returns.mutate_over(visitor)
         return visitor.visit_AsyncFunctionDef(self)
@@ -531,13 +546,21 @@
 
     def mutate_over(self, visitor):
         if self.bases:
-            visitor._mutate_sequence(self.bases)
+            for i in range(len(self.bases)):
+                if self.bases[i] is not None:
+                    self.bases[i] = self.bases[i].mutate_over(visitor)
         if self.keywords:
-            visitor._mutate_sequence(self.keywords)
+            for i in range(len(self.keywords)):
+                if self.keywords[i] is not None:
+                    self.keywords[i] = self.keywords[i].mutate_over(visitor)
         if self.body:
-            visitor._mutate_sequence(self.body)
+            for i in range(len(self.body)):
+                if self.body[i] is not None:
+                    self.body[i] = self.body[i].mutate_over(visitor)
         if self.decorator_list:
-            visitor._mutate_sequence(self.decorator_list)
+            for i in range(len(self.decorator_list)):
+                if self.decorator_list[i] is not None:
+                    self.decorator_list[i] = self.decorator_list[i].mutate_over(visitor)
         return visitor.visit_ClassDef(self)
 
     def to_object(self, space):
@@ -649,7 +672,9 @@
 
     def mutate_over(self, visitor):
         if self.targets:
-            visitor._mutate_sequence(self.targets)
+            for i in range(len(self.targets)):
+                if self.targets[i] is not None:
+                    self.targets[i] = self.targets[i].mutate_over(visitor)
         return visitor.visit_Delete(self)
 
     def to_object(self, space):
@@ -692,7 +717,9 @@
 
     def mutate_over(self, visitor):
         if self.targets:
-            visitor._mutate_sequence(self.targets)
+            for i in range(len(self.targets)):
+                if self.targets[i] is not None:
+                    self.targets[i] = self.targets[i].mutate_over(visitor)
         self.value = self.value.mutate_over(visitor)
         return visitor.visit_Assign(self)
 
@@ -799,9 +826,13 @@
         self.target = self.target.mutate_over(visitor)
         self.iter = self.iter.mutate_over(visitor)
         if self.body:
-            visitor._mutate_sequence(self.body)
+            for i in range(len(self.body)):
+                if self.body[i] is not None:
+                    self.body[i] = self.body[i].mutate_over(visitor)
         if self.orelse:
-            visitor._mutate_sequence(self.orelse)
+            for i in range(len(self.orelse)):
+                if self.orelse[i] is not None:
+                    self.orelse[i] = self.orelse[i].mutate_over(visitor)
         return visitor.visit_For(self)
 
     def to_object(self, space):
@@ -869,9 +900,13 @@
         self.target = self.target.mutate_over(visitor)
         self.iter = self.iter.mutate_over(visitor)
         if self.body:
-            visitor._mutate_sequence(self.body)
+            for i in range(len(self.body)):
+                if self.body[i] is not None:
+                    self.body[i] = self.body[i].mutate_over(visitor)
         if self.orelse:
-            visitor._mutate_sequence(self.orelse)
+            for i in range(len(self.orelse)):
+                if self.orelse[i] is not None:
+                    self.orelse[i] = self.orelse[i].mutate_over(visitor)
         return visitor.visit_AsyncFor(self)
 
     def to_object(self, space):
@@ -937,9 +972,13 @@
     def mutate_over(self, visitor):
         self.test = self.test.mutate_over(visitor)
         if self.body:
-            visitor._mutate_sequence(self.body)
+            for i in range(len(self.body)):
+                if self.body[i] is not None:
+                    self.body[i] = self.body[i].mutate_over(visitor)
         if self.orelse:
-            visitor._mutate_sequence(self.orelse)
+            for i in range(len(self.orelse)):
+                if self.orelse[i] is not None:
+                    self.orelse[i] = self.orelse[i].mutate_over(visitor)
         return visitor.visit_While(self)
 
     def to_object(self, space):
@@ -999,9 +1038,13 @@
     def mutate_over(self, visitor):
         self.test = self.test.mutate_over(visitor)
         if self.body:
-            visitor._mutate_sequence(self.body)
+            for i in range(len(self.body)):
+                if self.body[i] is not None:
+                    self.body[i] = self.body[i].mutate_over(visitor)
         if self.orelse:
-            visitor._mutate_sequence(self.orelse)
+            for i in range(len(self.orelse)):
+                if self.orelse[i] is not None:
+                    self.orelse[i] = self.orelse[i].mutate_over(visitor)
         return visitor.visit_If(self)
 
     def to_object(self, space):
@@ -1059,9 +1102,13 @@
 
     def mutate_over(self, visitor):
         if self.items:
-            visitor._mutate_sequence(self.items)
+            for i in range(len(self.items)):
+                if self.items[i] is not None:
+                    self.items[i] = self.items[i].mutate_over(visitor)
         if self.body:
-            visitor._mutate_sequence(self.body)
+            for i in range(len(self.body)):
+                if self.body[i] is not None:
+                    self.body[i] = self.body[i].mutate_over(visitor)
         return visitor.visit_With(self)
 
     def to_object(self, space):
@@ -1113,9 +1160,13 @@
 
     def mutate_over(self, visitor):
         if self.items:
-            visitor._mutate_sequence(self.items)
+            for i in range(len(self.items)):
+                if self.items[i] is not None:
+                    self.items[i] = self.items[i].mutate_over(visitor)
         if self.body:
-            visitor._mutate_sequence(self.body)
+            for i in range(len(self.body)):
+                if self.body[i] is not None:
+                    self.body[i] = self.body[i].mutate_over(visitor)
         return visitor.visit_AsyncWith(self)
 
     def to_object(self, space):
@@ -1213,13 +1264,21 @@
 
     def mutate_over(self, visitor):
         if self.body:
-            visitor._mutate_sequence(self.body)
+            for i in range(len(self.body)):
+                if self.body[i] is not None:
+                    self.body[i] = self.body[i].mutate_over(visitor)
         if self.handlers:
-            visitor._mutate_sequence(self.handlers)
+            for i in range(len(self.handlers)):
+                if self.handlers[i] is not None:
+                    self.handlers[i] = self.handlers[i].mutate_over(visitor)
         if self.orelse:
-            visitor._mutate_sequence(self.orelse)
+            for i in range(len(self.orelse)):
+                if self.orelse[i] is not None:
+                    self.orelse[i] = self.orelse[i].mutate_over(visitor)
         if self.finalbody:
-            visitor._mutate_sequence(self.finalbody)
+            for i in range(len(self.finalbody)):
+                if self.finalbody[i] is not None:
+                    self.finalbody[i] = self.finalbody[i].mutate_over(visitor)
         return visitor.visit_Try(self)
 
     def to_object(self, space):
@@ -1333,7 +1392,9 @@
 
     def mutate_over(self, visitor):
         if self.names:
-            visitor._mutate_sequence(self.names)
+            for i in range(len(self.names)):
+                if self.names[i] is not None:
+                    self.names[i] = self.names[i].mutate_over(visitor)
         return visitor.visit_Import(self)
 
     def to_object(self, space):
@@ -1377,7 +1438,9 @@
 
     def mutate_over(self, visitor):
         if self.names:
-            visitor._mutate_sequence(self.names)
+            for i in range(len(self.names)):
+                if self.names[i] is not None:
+                    self.names[i] = self.names[i].mutate_over(visitor)
         return visitor.visit_ImportFrom(self)
 
     def to_object(self, space):
@@ -1710,7 +1773,9 @@
 
     def mutate_over(self, visitor):
         if self.values:
-            visitor._mutate_sequence(self.values)
+            for i in range(len(self.values)):
+                if self.values[i] is not None:
+                    self.values[i] = self.values[i].mutate_over(visitor)
         return visitor.visit_BoolOp(self)
 
     def to_object(self, space):
@@ -1957,9 +2022,13 @@
 
     def mutate_over(self, visitor):
         if self.keys:
-            visitor._mutate_sequence(self.keys)
+            for i in range(len(self.keys)):
+                if self.keys[i] is not None:
+                    self.keys[i] = self.keys[i].mutate_over(visitor)
         if self.values:
-            visitor._mutate_sequence(self.values)
+            for i in range(len(self.values)):
+                if self.values[i] is not None:
+                    self.values[i] = self.values[i].mutate_over(visitor)
         return visitor.visit_Dict(self)
 
     def to_object(self, space):
@@ -2010,7 +2079,9 @@
 
     def mutate_over(self, visitor):
         if self.elts:
-            visitor._mutate_sequence(self.elts)
+            for i in range(len(self.elts)):
+                if self.elts[i] is not None:
+                    self.elts[i] = self.elts[i].mutate_over(visitor)
         return visitor.visit_Set(self)
 
     def to_object(self, space):
@@ -2054,7 +2125,9 @@
     def mutate_over(self, visitor):
         self.elt = self.elt.mutate_over(visitor)
         if self.generators:
-            visitor._mutate_sequence(self.generators)
+            for i in range(len(self.generators)):
+                if self.generators[i] is not None:
+                    self.generators[i] = self.generators[i].mutate_over(visitor)
         return visitor.visit_ListComp(self)
 
     def to_object(self, space):
@@ -2104,7 +2177,9 @@
     def mutate_over(self, visitor):
         self.elt = self.elt.mutate_over(visitor)
         if self.generators:
-            visitor._mutate_sequence(self.generators)
+            for i in range(len(self.generators)):
+                if self.generators[i] is not None:
+                    self.generators[i] = self.generators[i].mutate_over(visitor)
         return visitor.visit_SetComp(self)
 
     def to_object(self, space):
@@ -2156,7 +2231,9 @@
         self.key = self.key.mutate_over(visitor)
         self.value = self.value.mutate_over(visitor)
         if self.generators:
-            visitor._mutate_sequence(self.generators)
+            for i in range(len(self.generators)):
+                if self.generators[i] is not None:
+                    self.generators[i] = self.generators[i].mutate_over(visitor)
         return visitor.visit_DictComp(self)
 
     def to_object(self, space):
@@ -2212,7 +2289,9 @@
     def mutate_over(self, visitor):
         self.elt = self.elt.mutate_over(visitor)
         if self.generators:
-            visitor._mutate_sequence(self.generators)
+            for i in range(len(self.generators)):
+                if self.generators[i] is not None:
+                    self.generators[i] = self.generators[i].mutate_over(visitor)
         return visitor.visit_GeneratorExp(self)
 
     def to_object(self, space):
@@ -2376,7 +2455,9 @@
     def mutate_over(self, visitor):
         self.left = self.left.mutate_over(visitor)
         if self.comparators:
-            visitor._mutate_sequence(self.comparators)
+            for i in range(len(self.comparators)):
+                if self.comparators[i] is not None:
+                    self.comparators[i] = self.comparators[i].mutate_over(visitor)
         return visitor.visit_Compare(self)
 
     def to_object(self, space):
@@ -2436,9 +2517,13 @@
     def mutate_over(self, visitor):
         self.func = self.func.mutate_over(visitor)
         if self.args:
-            visitor._mutate_sequence(self.args)
+            for i in range(len(self.args)):
+                if self.args[i] is not None:
+                    self.args[i] = self.args[i].mutate_over(visitor)
         if self.keywords:
-            visitor._mutate_sequence(self.keywords)
+            for i in range(len(self.keywords)):
+                if self.keywords[i] is not None:
+                    self.keywords[i] = self.keywords[i].mutate_over(visitor)
         return visitor.visit_Call(self)
 
     def to_object(self, space):
@@ -2689,8 +2774,8 @@
 
 class NameConstant(expr):
 
-    def __init__(self, single, lineno, col_offset):
-        self.single = single
+    def __init__(self, value, lineno, col_offset):
+        self.value = value
         expr.__init__(self, lineno, col_offset)
 
     def walkabout(self, visitor):
@@ -2701,8 +2786,8 @@
 
     def to_object(self, space):
         w_node = space.call_function(get(space).w_NameConstant)
-        w_single = self.single  # singleton
-        space.setattr(w_node, space.wrap('single'), w_single)
+        w_value = self.value  # singleton
+        space.setattr(w_node, space.wrap('value'), w_value)
         w_lineno = space.wrap(self.lineno)  # int
         space.setattr(w_node, space.wrap('lineno'), w_lineno)
         w_col_offset = space.wrap(self.col_offset)  # int
@@ -2711,17 +2796,17 @@
 
     @staticmethod
     def from_object(space, w_node):
-        w_single = get_field(space, w_node, 'single', False)
+        w_value = get_field(space, w_node, 'value', False)
         w_lineno = get_field(space, w_node, 'lineno', False)
         w_col_offset = get_field(space, w_node, 'col_offset', False)
-        _single = w_single
-        if _single is None:
-            raise_required_value(space, w_node, 'single')
+        _value = w_value
+        if _value is None:
+            raise_required_value(space, w_node, 'value')
         _lineno = space.int_w(w_lineno)
         _col_offset = space.int_w(w_col_offset)
-        return NameConstant(_single, _lineno, _col_offset)
-
-State.ast_type('NameConstant', 'expr', ['single'])
+        return NameConstant(_value, _lineno, _col_offset)
+
+State.ast_type('NameConstant', 'expr', ['value'])
 
 
 class Ellipsis(expr):
@@ -2960,7 +3045,9 @@
 
     def mutate_over(self, visitor):
         if self.elts:
-            visitor._mutate_sequence(self.elts)
+            for i in range(len(self.elts)):
+                if self.elts[i] is not None:
+                    self.elts[i] = self.elts[i].mutate_over(visitor)
         return visitor.visit_List(self)
 
     def to_object(self, space):
@@ -3009,7 +3096,9 @@
 
     def mutate_over(self, visitor):
         if self.elts:
-            visitor._mutate_sequence(self.elts)
+            for i in range(len(self.elts)):
+                if self.elts[i] is not None:
+                    self.elts[i] = self.elts[i].mutate_over(visitor)
         return visitor.visit_Tuple(self)
 
     def to_object(self, space):
@@ -3215,7 +3304,9 @@
 
     def mutate_over(self, visitor):
         if self.dims:
-            visitor._mutate_sequence(self.dims)
+            for i in range(len(self.dims)):
+                if self.dims[i] is not None:
+                    self.dims[i] = self.dims[i].mutate_over(visitor)
         return visitor.visit_ExtSlice(self)
 
     def to_object(self, space):
@@ -3583,7 +3674,9 @@
         self.target = self.target.mutate_over(visitor)
         self.iter = self.iter.mutate_over(visitor)
         if self.ifs:
-            visitor._mutate_sequence(self.ifs)
+            for i in range(len(self.ifs)):
+                if self.ifs[i] is not None:
+                    self.ifs[i] = self.ifs[i].mutate_over(visitor)
         return visitor.visit_comprehension(self)
 
     def walkabout(self, visitor):
@@ -3651,7 +3744,9 @@
         if self.type:
             self.type = self.type.mutate_over(visitor)
         if self.body:
-            visitor._mutate_sequence(self.body)
+            for i in range(len(self.body)):
+                if self.body[i] is not None:
+                    self.body[i] = self.body[i].mutate_over(visitor)
         return visitor.visit_ExceptHandler(self)
 
     def to_object(self, space):
@@ -3702,17 +3797,25 @@
 
     def mutate_over(self, visitor):
         if self.args:
-            visitor._mutate_sequence(self.args)
+            for i in range(len(self.args)):
+                if self.args[i] is not None:
+                    self.args[i] = self.args[i].mutate_over(visitor)
         if self.vararg:
             self.vararg = self.vararg.mutate_over(visitor)
         if self.kwonlyargs:
-            visitor._mutate_sequence(self.kwonlyargs)
+            for i in range(len(self.kwonlyargs)):
+                if self.kwonlyargs[i] is not None:
+                    self.kwonlyargs[i] = self.kwonlyargs[i].mutate_over(visitor)
         if self.kw_defaults:
-            visitor._mutate_sequence(self.kw_defaults)
+            for i in range(len(self.kw_defaults)):
+                if self.kw_defaults[i] is not None:
+                    self.kw_defaults[i] = self.kw_defaults[i].mutate_over(visitor)
         if self.kwarg:
             self.kwarg = self.kwarg.mutate_over(visitor)
         if self.defaults:
-            visitor._mutate_sequence(self.defaults)
+            for i in range(len(self.defaults)):
+                if self.defaults[i] is not None:
+                    self.defaults[i] = self.defaults[i].mutate_over(visitor)
         return visitor.visit_arguments(self)
 
     def walkabout(self, visitor):
@@ -3923,11 +4026,6 @@
     def default_visitor(self, node):
         raise NodeVisitorNotImplemented
 
-    def _mutate_sequence(self, seq):
-        for i in range(len(seq)):
-            if seq[i] is not None:
-                seq[i] = seq[i].mutate_over(self)
-
     def visit_Module(self, node):
         return self.default_visitor(node)
     def visit_Interactive(self, node):
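
As an illustrative sketch (not part of the changeset): the hunks above repeatedly
inline the removed visitor._mutate_sequence() helper into each generated
mutate_over() method.  A minimal standalone sketch of the resulting shape, using
made-up stand-in classes (Node, ListNode, Visitor):

    class Node(object):
        def mutate_over(self, visitor):
            return self

    class ListNode(Node):
        def __init__(self, elts):
            self.elts = elts

        def mutate_over(self, visitor):
            # Inlined replacement for the old generic _mutate_sequence():
            # rewrite each element in place, skipping None holes.
            if self.elts:
                for i in range(len(self.elts)):
                    if self.elts[i] is not None:
                        self.elts[i] = self.elts[i].mutate_over(visitor)
            return visitor.visit_List(self)

    class Visitor(object):
        def visit_List(self, node):
            return node

Presumably the per-class loop lets the RPython annotator keep a precise element
type for each sequence field, instead of the one generic signature that the
shared helper forced on every AST list.
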
diff --git a/pypy/interpreter/astcompiler/codegen.py b/pypy/interpreter/astcompiler/codegen.py
--- a/pypy/interpreter/astcompiler/codegen.py
+++ b/pypy/interpreter/astcompiler/codegen.py
@@ -8,6 +8,7 @@
 # please.
 import struct
 
+from rpython.rlib.objectmodel import specialize
 from pypy.interpreter.astcompiler import ast, assemble, symtable, consts, misc
 from pypy.interpreter.astcompiler import optimize # For side effects
 from pypy.interpreter.pyparser.error import SyntaxError
@@ -337,6 +338,7 @@
         for i, default in enumerate(args.kw_defaults):
             if default:
                 kwonly = args.kwonlyargs[i]
+                assert isinstance(kwonly, ast.arg)
                 mangled = self.scope.mangle(kwonly.arg).decode('utf-8')
                 self.load_const(self.space.wrap(mangled))
                 default.walkabout(self)
@@ -351,16 +353,20 @@
     def _visit_arg_annotations(self, args, names):
         if args:
             for arg in args:
+                assert isinstance(arg, ast.arg)
                 self._visit_arg_annotation(arg.arg, arg.annotation, names)
 
+    @specialize.argtype(1)
     def _visit_annotations(self, func, args, returns):
         space = self.space
         names = []
         self._visit_arg_annotations(args.args, names)
-        if args.vararg:
-            self._visit_arg_annotation(args.vararg.arg, args.vararg.annotation,
+        vararg = args.vararg
+        if vararg:
+            self._visit_arg_annotation(vararg.arg, vararg.annotation,
                                        names)
         self._visit_arg_annotations(args.kwonlyargs, names)
+        kwarg = args.kwarg
         if args.kwarg:
             self._visit_arg_annotation(args.kwarg.arg, args.kwarg.annotation,
                                        names)
@@ -375,6 +381,7 @@
             l += 1
         return l
 
+    @specialize.arg(2)
     def _visit_function(self, func, function_code_generator):
         self.update_position(func.lineno, True)
         # Load decorators first, but apply them after the function is created.
@@ -923,10 +930,12 @@
         self.update_position(wih.lineno, True)
         self.handle_withitem(wih, 0, is_async=False)
 
+    @specialize.argtype(1)
     def handle_withitem(self, wih, pos, is_async):
         body_block = self.new_block()
         cleanup = self.new_block()
         witem = wih.items[pos]
+        assert isinstance(witem, ast.withitem)
         witem.context_expr.walkabout(self)
         if not is_async:
             self.emit_jump(ops.SETUP_WITH, cleanup)
@@ -1237,7 +1246,7 @@
 
     def visit_NameConstant(self, node):
         self.update_position(node.lineno)
-        self.load_const(node.single)
+        self.load_const(node.value)
 
     def visit_keyword(self, keyword):
         if keyword.arg is not None:
@@ -1288,6 +1297,7 @@
         nseen = 0 # the number of keyword arguments on the stack following
         if keywords is not None:
             for kw in keywords:
+                assert isinstance(kw, ast.keyword)
                 if kw.arg is None:
                     # A keyword argument unpacking.
                     if nseen:
@@ -1345,6 +1355,7 @@
                     return False
         if call.keywords is not None:
             for kw in call.keywords:
+                assert isinstance(kw, ast.keyword)
                 if kw.arg is None:
                     return False
         return True
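
As an aside, the specialize decorators imported above come from RPython's
rpython.rlib.objectmodel.  A minimal sketch of how they are meant to be read
(the function names below are invented for illustration; the decorators only
take effect under the RPython translator and merely tag the function when run
on plain CPython):

    from rpython.rlib.objectmodel import specialize

    @specialize.argtype(1)
    def handle_node(codegen, node):
        # One specialized copy per static type inferred for `node`
        # (argument index 1, counting `codegen` as 0).
        return node

    @specialize.arg(1)
    def make_generator(codegen, generator_class):
        # Specialization on the *value* of a constant argument: each
        # distinct class passed at a call site gets its own copy.
        return generator_class

This mirrors how @specialize.argtype(1) is applied to _visit_annotations and
handle_withitem, and @specialize.arg(2) to _visit_function, in the hunk above.
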
diff --git a/pypy/interpreter/astcompiler/fstring.py b/pypy/interpreter/astcompiler/fstring.py
--- a/pypy/interpreter/astcompiler/fstring.py
+++ b/pypy/interpreter/astcompiler/fstring.py
@@ -343,11 +343,16 @@
                 parse_f_string(astbuilder, joined_pieces, w_next, atom_node)
 
     except error.OperationError as e:
-        if not (e.match(space, space.w_UnicodeError) or
-                e.match(space, space.w_ValueError)):
+        if e.match(space, space.w_UnicodeError):
+            kind = 'unicode error'
+        elif e.match(space, space.w_ValueError):
+            kind = 'value error'
+        else:
             raise
         # Unicode/ValueError in literal: turn into SyntaxError
-        raise astbuilder.error(e.errorstr(space), atom_node)
+        e.normalize_exception(space)
+        errmsg = space.str_w(space.str(e.get_w_value(space)))
+        raise astbuilder.error('(%s) %s' % (kind, errmsg), atom_node)
 
     if len(joined_pieces) == 1:   # <= the common path
         return joined_pieces[0]   # ast.Str, Bytes or FormattedValue
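
For reference, a quick plain-CPython illustration (independent of the changeset)
of the message shape this error path aims for, matching the new test added to
test_astbuilder.py further down:

    try:
        b"\\x".decode("unicode_escape")
    except UnicodeDecodeError as e:
        print("(%s) %s" % ("unicode error", e))
        # (unicode error) 'unicodeescape' codec can't decode bytes in
        # position 0-1: truncated \xXX escape
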
diff --git a/pypy/interpreter/astcompiler/optimize.py b/pypy/interpreter/astcompiler/optimize.py
--- a/pypy/interpreter/astcompiler/optimize.py
+++ b/pypy/interpreter/astcompiler/optimize.py
@@ -6,6 +6,7 @@
 from pypy.interpreter.error import OperationError
 from rpython.rlib.unroll import unrolling_iterable
 from rpython.rlib.runicode import MAXUNICODE
+from rpython.rlib.objectmodel import specialize
 
 
 def optimize_ast(space, tree, compile_info):
@@ -70,7 +71,7 @@
 class __extend__(ast.NameConstant):
 
     def as_constant(self):
-        return self.single
+        return self.value
 
 class __extend__(ast.Index):
     def as_constant(self):
@@ -177,6 +178,7 @@
         self.space = space
         self.compile_info = compile_info
 
+    @specialize.argtype(1)
     def default_visitor(self, node):
         return node
 
diff --git a/pypy/interpreter/astcompiler/symtable.py b/pypy/interpreter/astcompiler/symtable.py
--- a/pypy/interpreter/astcompiler/symtable.py
+++ b/pypy/interpreter/astcompiler/symtable.py
@@ -89,12 +89,12 @@
         """Called when a yield is found."""
         raise SyntaxError("'yield' outside function", yield_node.lineno,
                           yield_node.col_offset)
-    
+
     def note_yieldFrom(self, yieldFrom_node):
         """Called when a yield from is found."""
         raise SyntaxError("'yield' outside function", yieldFrom_node.lineno,
                           yieldFrom_node.col_offset)
-    
+
     def note_await(self, await_node):
         """Called when await is found."""
         raise SyntaxError("'await' outside function", await_node.lineno,
@@ -260,12 +260,12 @@
         self.is_generator = True
         if self._in_try_body_depth > 0:
             self.has_yield_inside_try = True
-    
+
     def note_yieldFrom(self, yield_node):
         self.is_generator = True
         if self._in_try_body_depth > 0:
             self.has_yield_inside_try = True
-            
+
     def note_await(self, await_node):
         if self.name == '<genexpr>':
             msg = "'await' expressions in comprehensions are not supported"
@@ -315,7 +315,7 @@
     def note_yieldFrom(self, yield_node):
         raise SyntaxError("'yield from' inside async function", yield_node.lineno,
                           yield_node.col_offset)
-        
+
     def note_await(self, await_node):
         # Compatibility with CPython 3.5: set the CO_GENERATOR flag in
         # addition to the CO_COROUTINE flag if the function uses the
@@ -414,7 +414,7 @@
         func.args.walkabout(self)
         self.visit_sequence(func.body)
         self.pop_scope()
-    
+
     def visit_AsyncFunctionDef(self, func):
         self.note_symbol(func.name, SYM_ASSIGNED)
         # Function defaults and decorators happen in the outer scope.
@@ -429,7 +429,7 @@
         func.args.walkabout(self)
         self.visit_sequence(func.body)
         self.pop_scope()
-    
+
     def visit_Await(self, aw):
         self.scope.note_await(aw)
         ast.GenericASTVisitor.visit_Await(self, aw)
@@ -572,7 +572,7 @@
         witem.context_expr.walkabout(self)
         if witem.optional_vars:
             witem.optional_vars.walkabout(self)
-    
+
     def visit_AsyncWith(self, aw):
         self.scope.new_temporary_name()
         self.visit_sequence(aw.items)
@@ -595,8 +595,9 @@
             scope.note_keywords_arg(arguments.kwarg)
 
     def _handle_params(self, params, is_toplevel):
-        for i in range(len(params)):
-            arg = params[i].arg
+        for param in params:
+            assert isinstance(param, ast.arg)
+            arg = param.arg
             self.note_symbol(arg, SYM_PARAM)
 
     def _visit_annotations(self, func):
@@ -611,6 +612,7 @@
 
     def _visit_arg_annotations(self, args):
         for arg in args:
+            assert isinstance(arg, ast.arg)
             if arg.annotation:
                 arg.annotation.walkabout(self)
 
diff --git a/pypy/interpreter/astcompiler/test/test_astbuilder.py b/pypy/interpreter/astcompiler/test/test_astbuilder.py
--- a/pypy/interpreter/astcompiler/test/test_astbuilder.py
+++ b/pypy/interpreter/astcompiler/test/test_astbuilder.py
@@ -1398,3 +1398,9 @@
         assert len(asyncwith.body) == 1
         assert isinstance(asyncwith.body[0], ast.Expr)
         assert isinstance(asyncwith.body[0].value, ast.Num)
+
+    def test_decode_error_in_string_literal(self):
+        input = "u'\\x'"
+        exc = py.test.raises(SyntaxError, self.get_ast, input).value
+        assert exc.msg == ("(unicode error) 'unicodeescape' codec can't decode"
+                           " bytes in position 0-1: truncated \\xXX escape")
diff --git a/pypy/interpreter/astcompiler/tools/Python.asdl b/pypy/interpreter/astcompiler/tools/Python.asdl
--- a/pypy/interpreter/astcompiler/tools/Python.asdl
+++ b/pypy/interpreter/astcompiler/tools/Python.asdl
@@ -73,8 +73,7 @@
          | FormattedValue(expr value, int? conversion, expr? format_spec)
          | JoinedStr(expr* values)
          | Bytes(bytes s)
-         -- PyPy mod. first argument name must not be value
-         | NameConstant(singleton single)
+         | NameConstant(singleton value)
          | Ellipsis
 
          -- the following expression can appear in assignment context
diff --git a/pypy/interpreter/astcompiler/tools/asdl.py b/pypy/interpreter/astcompiler/tools/asdl.py
--- a/pypy/interpreter/astcompiler/tools/asdl.py
+++ b/pypy/interpreter/astcompiler/tools/asdl.py
@@ -33,7 +33,8 @@
 # See the EBNF at the top of the file to understand the logical connection
 # between the various node types.
 
-builtin_types = {'identifier', 'string', 'bytes', 'int', 'object', 'singleton'}
+builtin_types = {'identifier', 'string', 'bytes', 'int', 'bool', 'object',
+                 'singleton'}
 
 class AST:
     def __repr__(self):
diff --git a/pypy/interpreter/astcompiler/tools/asdl_py.py b/pypy/interpreter/astcompiler/tools/asdl_py.py
--- a/pypy/interpreter/astcompiler/tools/asdl_py.py
+++ b/pypy/interpreter/astcompiler/tools/asdl_py.py
@@ -145,7 +145,7 @@
             if allow_none:
                 wrapper += " if %s is not None else space.w_None" % (value,)
             return wrapper
-        
+
     def get_value_extractor(self, field, value):
         if field.type in self.data.simple_types:
             return "%s.from_object(space, %s)" % (field.type, value)
@@ -207,7 +207,7 @@
                 lines.append("if _%s is None:" % (field.name,))
                 lines.append("    raise_required_value(space, w_node, '%s')"
                              % (field.name,))
-            
+
         return lines
 
     def make_converters(self, fields, name, extras=None):
@@ -245,7 +245,7 @@
             if extras:
                 base_args = ", ".join(str(field.name) for field in extras)
                 self.emit("%s.__init__(self, %s)" % (base, base_args), 2)
-    
+
     def make_mutate_over(self, cons, name):
         self.emit("def mutate_over(self, visitor):", 1)
         for field in cons.fields:
@@ -257,12 +257,19 @@
                 else:
                     level = 2
                 if field.seq:
-                    sub = (field.name,)
-                    self.emit("visitor._mutate_sequence(self.%s)" % sub, level)
+                    sub = field.name
+                    self.emit("for i in range(len(self.{})):".format(sub),
+                        level)
+                    self.emit("if self.{}[i] is not None:".format(sub),
+                        level + 1)
+                    self.emit(
+                        "self.{0}[i] = self.{0}[i].mutate_over(visitor)".format(sub),
+                        level + 2)
                 else:
-                    sub = (field.name, field.name)
-                    self.emit("self.%s = self.%s.mutate_over(visitor)" % sub,
-                              level)
+                    sub = field.name
+                    self.emit(
+                        "self.{0} = self.{0}.mutate_over(visitor)".format(sub),
+                        level)
         self.emit("return visitor.visit_%s(self)" % (name,), 2)
         self.emit("")
 
@@ -276,7 +283,7 @@
         self.emit("")
         self.make_mutate_over(cons, cons.name)
         self.make_converters(cons.fields, cons.name, extra_attributes)
-        self.emit("State.ast_type(%r, '%s', %s)" % 
+        self.emit("State.ast_type(%r, '%s', %s)" %
                   (cons.name, base, [f.name for f in cons.fields]))
         self.emit("")
 
@@ -305,11 +312,6 @@
         self.emit("def default_visitor(self, node):", 1)
         self.emit("raise NodeVisitorNotImplemented", 2)
         self.emit("")
-        self.emit("def _mutate_sequence(self, seq):", 1)
-        self.emit("for i in range(len(seq)):", 2)
-        self.emit("if seq[i] is not None:", 3)
-        self.emit("seq[i] = seq[i].mutate_over(self)", 4)
-        self.emit("")
         super(ASTVisitorVisitor, self).visitModule(mod)
         self.emit("")
 
@@ -357,7 +359,7 @@
         self.emit("")
 
     def visitField(self, field):
-        if (field.type not in asdl.builtin_types and 
+        if (field.type not in asdl.builtin_types and
             field.type not in self.data.simple_types):
             level = 2
             template = "node.%s.walkabout(self)"
@@ -451,6 +453,7 @@
 
 class AST(object):
     __metaclass__ = extendabletype
+    _attrs_ = ['lineno', 'col_offset']
 
     def walkabout(self, visitor):
         raise AssertionError("walkabout() implementation not provided")
@@ -558,7 +561,7 @@
         self.w_AST = space.gettypeobject(W_AST.typedef)
         for (name, base, fields, attributes) in self.AST_TYPES:
             self.make_new_type(space, name, base, fields, attributes)
-        
+
     def make_new_type(self, space, name, base, fields, attributes):
         w_base = getattr(self, 'w_%s' % base)
         w_dict = space.newdict()
@@ -570,7 +573,7 @@
             space.setitem_str(w_dict, "_attributes",
                               space.newtuple([space.wrap(a) for a in attributes]))
         w_type = space.call_function(
-            space.w_type, 
+            space.w_type,
             space.wrap(name), space.newtuple([w_base]), w_dict)
         setattr(self, 'w_%s' % name, w_type)
 
diff --git a/pypy/interpreter/astcompiler/tools/spark.py b/pypy/interpreter/astcompiler/tools/spark.py
deleted file mode 100644
--- a/pypy/interpreter/astcompiler/tools/spark.py
+++ /dev/null
@@ -1,839 +0,0 @@
-#  Copyright (c) 1998-2002 John Aycock
-#
-#  Permission is hereby granted, free of charge, to any person obtaining
-#  a copy of this software and associated documentation files (the
-#  "Software"), to deal in the Software without restriction, including
-#  without limitation the rights to use, copy, modify, merge, publish,
-#  distribute, sublicense, and/or sell copies of the Software, and to
-#  permit persons to whom the Software is furnished to do so, subject to
-#  the following conditions:
-#
-#  The above copyright notice and this permission notice shall be
-#  included in all copies or substantial portions of the Software.
-#
-#  THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-#  EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
-#  MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
-#  IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
-#  CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
-#  TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
-#  SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-
-__version__ = 'SPARK-0.7 (pre-alpha-5)'
-
-import re
-import string
-
-def _namelist(instance):
-    namelist, namedict, classlist = [], {}, [instance.__class__]
-    for c in classlist:
-        for b in c.__bases__:
-            classlist.append(b)
-        for name in c.__dict__.keys():
-            if not namedict.has_key(name):
-                namelist.append(name)
-                namedict[name] = 1
-    return namelist
-
-class GenericScanner:
-    def __init__(self, flags=0):
-        pattern = self.reflect()
-        self.re = re.compile(pattern, re.VERBOSE|flags)
-
-        self.index2func = {}
-        for name, number in self.re.groupindex.items():
-            self.index2func[number-1] = getattr(self, 't_' + name)
-
-    def makeRE(self, name):
-        doc = getattr(self, name).__doc__
-        rv = '(?P<%s>%s)' % (name[2:], doc)
-        return rv
-
-    def reflect(self):
-        rv = []
-        for name in _namelist(self):
-            if name[:2] == 't_' and name != 't_default':
-                rv.append(self.makeRE(name))
-
-        rv.append(self.makeRE('t_default'))
-        return string.join(rv, '|')
-
-    def error(self, s, pos):
-        print "Lexical error at position %s" % pos
-        raise SystemExit
-
-    def tokenize(self, s):
-        pos = 0
-        n = len(s)
-        while pos < n:
-            m = self.re.match(s, pos)
-            if m is None:
-                self.error(s, pos)
-
-            groups = m.groups()
-            for i in range(len(groups)):
-                if groups[i] and self.index2func.has_key(i):
-                    self.index2func[i](groups[i])
-            pos = m.end()
-
-    def t_default(self, s):
-        r'( . | \n )+'
-        print "Specification error: unmatched input"
-        raise SystemExit
-
-#
-#  Extracted from GenericParser and made global so that [un]picking works.
-#
-class _State:
-    def __init__(self, stateno, items):
-        self.T, self.complete, self.items = [], [], items
-        self.stateno = stateno
-
-class GenericParser:
-    #
-    #  An Earley parser, as per J. Earley, "An Efficient Context-Free
-    #  Parsing Algorithm", CACM 13(2), pp. 94-102.  Also J. C. Earley,
-    #  "An Efficient Context-Free Parsing Algorithm", Ph.D. thesis,
-    #  Carnegie-Mellon University, August 1968.  New formulation of
-    #  the parser according to J. Aycock, "Practical Earley Parsing
-    #  and the SPARK Toolkit", Ph.D. thesis, University of Victoria,
-    #  2001, and J. Aycock and R. N. Horspool, "Practical Earley
-    #  Parsing", unpublished paper, 2001.
-    #
-
-    def __init__(self, start):
-        self.rules = {}
-        self.rule2func = {}
-        self.rule2name = {}
-        self.collectRules()
-        self.augment(start)
-        self.ruleschanged = 1
-
-    _NULLABLE = '\e_'
-    _START = 'START'
-    _BOF = '|-'
-
-    #
-    #  When pickling, take the time to generate the full state machine;
-    #  some information is then extraneous, too.  Unfortunately we
-    #  can't save the rule2func map.
-    #
-    def __getstate__(self):
-        if self.ruleschanged:
-            #
-            #  XXX - duplicated from parse()
-            #
-            self.computeNull()
-            self.newrules = {}
-            self.new2old = {}
-            self.makeNewRules()
-            self.ruleschanged = 0
-            self.edges, self.cores = {}, {}
-            self.states = { 0: self.makeState0() }
-            self.makeState(0, self._BOF)
-        #
-        #  XXX - should find a better way to do this..
-        #
-        changes = 1
-        while changes:
-            changes = 0
-            for k, v in self.edges.items():
-                if v is None:
-                    state, sym = k
-                    if self.states.has_key(state):
-                        self.goto(state, sym)
-                        changes = 1
-        rv = self.__dict__.copy()
-        for s in self.states.values():
-            del s.items
-        del rv['rule2func']
-        del rv['nullable']
-        del rv['cores']
-        return rv
-
-    def __setstate__(self, D):
-        self.rules = {}
-        self.rule2func = {}
-        self.rule2name = {}
-        self.collectRules()
-        start = D['rules'][self._START][0][1][1]        # Blech.
-        self.augment(start)
-        D['rule2func'] = self.rule2func
-        D['makeSet'] = self.makeSet_fast
-        self.__dict__ = D
-
-    #
-    #  A hook for GenericASTBuilder and GenericASTMatcher.  Mess
-    #  thee not with this; nor shall thee toucheth the _preprocess
-    #  argument to addRule.
-    #
-    def preprocess(self, rule, func):       return rule, func
-
-    def addRule(self, doc, func, _preprocess=1):
-        fn = func
-        rules = string.split(doc)
-
-        index = []
-        for i in range(len(rules)):
-            if rules[i] == '::=':
-                index.append(i-1)
-        index.append(len(rules))
-
-        for i in range(len(index)-1):
-            lhs = rules[index[i]]
-            rhs = rules[index[i]+2:index[i+1]]
-            rule = (lhs, tuple(rhs))
-
-            if _preprocess:
-                rule, fn = self.preprocess(rule, func)
-
-            if self.rules.has_key(lhs):
-                self.rules[lhs].append(rule)
-            else:
-                self.rules[lhs] = [ rule ]
-            self.rule2func[rule] = fn
-            self.rule2name[rule] = func.__name__[2:]
-        self.ruleschanged = 1
-
-    def collectRules(self):
-        for name in _namelist(self):
-            if name[:2] == 'p_':
-                func = getattr(self, name)
-                doc = func.__doc__
-                self.addRule(doc, func)
-
-    def augment(self, start):
-        rule = '%s ::= %s %s' % (self._START, self._BOF, start)
-        self.addRule(rule, lambda args: args[1], 0)
-
-    def computeNull(self):
-        self.nullable = {}
-        tbd = []
-
-        for rulelist in self.rules.values():
-            lhs = rulelist[0][0]
-            self.nullable[lhs] = 0
-            for rule in rulelist:
-                rhs = rule[1]
-                if len(rhs) == 0:
-                    self.nullable[lhs] = 1
-                    continue
-                #
-                #  We only need to consider rules which
-                #  consist entirely of nonterminal symbols.
-                #  This should be a savings on typical
-                #  grammars.
-                #
-                for sym in rhs:
-                    if not self.rules.has_key(sym):
-                        break
-                else:
-                    tbd.append(rule)
-        changes = 1
-        while changes:
-            changes = 0

