[pypy-commit] pypy stmgc-c8: Merge with stmgc-c7

Raemi noreply at buildbot.pypy.org
Wed Feb 25 11:53:23 CET 2015


Author: Remi Meier <remi.meier at gmail.com>
Branch: stmgc-c8
Changeset: r76118:f9544d0d6739
Date: 2015-02-25 10:23 +0100
http://bitbucket.org/pypy/pypy/changeset/f9544d0d6739/

Log:	Merge with stmgc-c7

diff too long, truncating to 2000 out of 40925 lines

diff --git a/LICENSE b/LICENSE
--- a/LICENSE
+++ b/LICENSE
@@ -28,7 +28,7 @@
     DEALINGS IN THE SOFTWARE.
 
 
-PyPy Copyright holders 2003-2014
+PyPy Copyright holders 2003-2015
 ----------------------------------- 
 
 Except when otherwise stated (look for LICENSE files or information at
@@ -42,19 +42,19 @@
   Amaury Forgeot d'Arc
   Samuele Pedroni
   Alex Gaynor
+  Brian Kearns
+  Matti Picus
+  Philip Jenvey
   Michael Hudson
   David Schneider
-  Matti Picus
-  Brian Kearns
-  Philip Jenvey
   Holger Krekel
   Christian Tismer
   Hakan Ardo
   Benjamin Peterson
   Manuel Jacob
+  Ronan Lamy
   Anders Chrigstrom
   Eric van Riet Paap
-  Ronan Lamy
   Wim Lavrijsen
   Richard Emslie
   Alexander Schremmer
@@ -68,9 +68,9 @@
   Camillo Bruni
   Laura Creighton
   Toon Verwaest
+  Romain Guillebert
   Leonardo Santagada
   Seo Sanghyeon
-  Romain Guillebert
   Justin Peel
   Ronny Pfannschmidt
   David Edelsohn
@@ -91,15 +91,16 @@
   Michal Bendowski
   Jan de Mooij
   stian
+  Tyler Wade
   Michael Foord
   Stephan Diehl
-  Tyler Wade
   Stefan Schwarzer
   Valentino Volonghi
   Tomek Meka
   Patrick Maupin
   Bob Ippolito
   Bruno Gola
+  David Malcolm
   Jean-Paul Calderone
   Timo Paulssen
   Squeaky
@@ -108,18 +109,19 @@
   Marius Gedminas
   Martin Matusiak
   Konstantin Lopuhin
+  Wenzhu Man
   John Witulski
-  Wenzhu Man
+  Laurence Tratt
+  Ivan Sichmann Freitas
   Greg Price
   Dario Bertini
   Mark Pearse
   Simon Cross
-  Ivan Sichmann Freitas
   Andreas Stührk
+  Stefano Rivera
   Jean-Philippe St. Pierre
   Guido van Rossum
   Pavel Vinogradov
-  Stefano Rivera
   Paweł Piotr Przeradowski
   Paul deGrandis
   Ilya Osadchiy
@@ -129,7 +131,6 @@
   tav
   Taavi Burns
   Georg Brandl
-  Laurence Tratt
   Bert Freudenberg
   Stian Andreassen
   Wanja Saatkamp
@@ -141,13 +142,12 @@
   Jeremy Thurgood
   Rami Chowdhury
   Tobias Pape
-  David Malcolm
   Eugene Oden
   Henry Mason
   Vasily Kuznetsov
   Preston Timmons
+  David Ripton
   Jeff Terrace
-  David Ripton
   Dusty Phillips
   Lukas Renggli
   Guenter Jantzen
@@ -166,13 +166,16 @@
   Gintautas Miliauskas
   Michael Twomey
   Lucian Branescu Mihaila
+  Yichao Yu
   Gabriel Lavoie
   Olivier Dormond
   Jared Grubb
   Karl Bartel
+  Wouter van Heyst
   Brian Dorsey
   Victor Stinner
   Andrews Medina
+  anatoly techtonik
   Stuart Williams
   Jasper Schulz
   Christian Hudon
@@ -182,12 +185,11 @@
   Michael Cheng
   Justas Sadzevicius
   Gasper Zejn
-  anatoly techtonik
   Neil Shepperd
+  Stanislaw Halik
   Mikael Schönenberg
   Elmo M?ntynen
   Jonathan David Riehl
-  Stanislaw Halik
   Anders Qvist
   Corbin Simpson
   Chirag Jadwani
@@ -196,10 +198,13 @@
   Vincent Legoll
   Alan McIntyre
   Alexander Sedov
+  Attila Gobi
   Christopher Pope
   Christian Tismer 
   Marc Abramowitz
   Dan Stromberg
+  Arjun Naik
+  Valentina Mukhamedzhanova
   Stefano Parmesan
   Alexis Daboville
   Jens-Uwe Mager
@@ -213,8 +218,6 @@
   Sylvain Thenault
   Nathan Taylor
   Vladimir Kryachko
-  Arjun Naik
-  Attila Gobi
   Jacek Generowicz
   Alejandro J. Cura
   Jacob Oscarson
@@ -222,22 +225,23 @@
   Ryan Gonzalez
   Ian Foote
   Kristjan Valur Jonsson
+  David Lievens
   Neil Blakey-Milner
   Lutz Paelike
   Lucio Torre
   Lars Wassermann
-  Valentina Mukhamedzhanova
   Henrik Vendelbo
   Dan Buch
   Miguel de Val Borro
   Artur Lisiecki
   Sergey Kishchenko
-  Yichao Yu
   Ignas Mikalajunas
   Christoph Gerum
   Martin Blais
   Lene Wagner
   Tomo Cocoa
+  Toni Mattis
+  Lucas Stadler
   roberto at goyle
   Yury V. Zaytsev
   Anna Katrina Dominguez
@@ -265,23 +269,30 @@
   Stephan Busemann
   Rafał Gałczyński
   Christian Muirhead
+  Berker Peksag
   James Lan
   shoma hosaka
-  Daniel Neuh?user
-  Matthew Miller
+  Daniel Neuhäuser
+  Ben Mather
+  halgari
+  Boglarka Vezer
+  Chris Pressey
   Buck Golemon
   Konrad Delong
   Dinu Gherman
   Chris Lambacher
   coolbutuseless at gmail.com
+  Jim Baker
   Rodrigo Araújo
-  Jim Baker
+  Nikolaos-Digenis Karagiannis
   James Robert
   Armin Ronacher
   Brett Cannon
+  Donald Stufft
   yrttyr
   aliceinwire
   OlivierBlanvillain
+  Dan Sanders
   Zooko Wilcox-O Hearn
   Tomer Chachamu
   Christopher Groskopf
@@ -295,6 +306,7 @@
   Markus Unterwaditzer
   Even Wiik Thomassen
   jbs
+  squeaky
   soareschen
   Kurt Griffiths
   Mike Bayer
@@ -306,6 +318,7 @@
   Anna Ravencroft
   Dan Crosta
   Julien Phalip
+  Roman Podoliaka
   Dan Loewenherz
 
   Heinrich-Heine University, Germany 
diff --git a/TODO b/TODO
--- a/TODO
+++ b/TODO
@@ -1,3 +1,10 @@
+------------------------------------------------------------
+
+maybe statically optimize away some stm_become_inevitable(); there
+are some loops that call it repeatedly (maybe not relevant
+for performance)
+
+------------------------------------------------------------
 
 1b664888133d (March 15, 2014): the overhead of a non-JIT STM, when
 compared with a non-JIT plain PyPy, is measured to be 54% in a
@@ -146,6 +153,12 @@
 stm_read(p125)
 cond_call_gc_wb_array(p125...)    # don't need the stm_read maybe?
 
+------------------------------------------------------------
+
+we should fake the stm_location inside jit/metainterp, so that it
+is reported correctly even if we're (1) tracing, (2) blackholing,
+or (3) in ResumeDataDirectReader.change_stm_location
+
 
 
 ===============================================================================
diff --git a/lib-python/2.7/collections.py b/lib-python/2.7/collections.py
--- a/lib-python/2.7/collections.py
+++ b/lib-python/2.7/collections.py
@@ -17,6 +17,10 @@
 except ImportError:
     assert '__pypy__' not in _sys.builtin_module_names
     newdict = lambda _ : {}
+try:
+    from __pypy__ import reversed_dict
+except ImportError:
+    reversed_dict = lambda d: reversed(d.keys())
 
 try:
     from thread import get_ident as _get_ident
@@ -29,142 +33,35 @@
 ################################################################################
 
 class OrderedDict(dict):
-    'Dictionary that remembers insertion order'
-    # An inherited dict maps keys to values.
-    # The inherited dict provides __getitem__, __len__, __contains__, and get.
-    # The remaining methods are order-aware.
-    # Big-O running times for all methods are the same as regular dictionaries.
+    '''Dictionary that remembers insertion order.
 
-    # The internal self.__map dict maps keys to links in a doubly linked list.
-    # The circular doubly linked list starts and ends with a sentinel element.
-    # The sentinel element never gets deleted (this simplifies the algorithm).
-    # Each link is stored as a list of length three:  [PREV, NEXT, KEY].
+    In PyPy all dicts are ordered anyway.  This is mostly useful as a
+    placeholder to mean "this dict must be ordered even on CPython".
 
-    def __init__(self, *args, **kwds):
-        '''Initialize an ordered dictionary.  The signature is the same as
-        regular dictionaries, but keyword arguments are not recommended because
-        their insertion order is arbitrary.
-
-        '''
-        if len(args) > 1:
-            raise TypeError('expected at most 1 arguments, got %d' % len(args))
-        try:
-            self.__root
-        except AttributeError:
-            self.__root = root = []                     # sentinel node
-            root[:] = [root, root, None]
-            self.__map = {}
-        self.__update(*args, **kwds)
-
-    def __setitem__(self, key, value, dict_setitem=dict.__setitem__):
-        'od.__setitem__(i, y) <==> od[i]=y'
-        # Setting a new item creates a new link at the end of the linked list,
-        # and the inherited dictionary is updated with the new key/value pair.
-        if key not in self:
-            root = self.__root
-            last = root[0]
-            last[1] = root[0] = self.__map[key] = [last, root, key]
-        return dict_setitem(self, key, value)
-
-    def __delitem__(self, key, dict_delitem=dict.__delitem__):
-        'od.__delitem__(y) <==> del od[y]'
-        # Deleting an existing item uses self.__map to find the link which gets
-        # removed by updating the links in the predecessor and successor nodes.
-        dict_delitem(self, key)
-        link_prev, link_next, _ = self.__map.pop(key)
-        link_prev[1] = link_next                        # update link_prev[NEXT]
-        link_next[0] = link_prev                        # update link_next[PREV]
-
-    def __iter__(self):
-        'od.__iter__() <==> iter(od)'
-        # Traverse the linked list in order.
-        root = self.__root
-        curr = root[1]                                  # start at the first node
-        while curr is not root:
-            yield curr[2]                               # yield the curr[KEY]
-            curr = curr[1]                              # move to next node
+    Known difference: iterating over an OrderedDict which is being
+    concurrently modified raises RuntimeError in PyPy.  In CPython
+    instead we get some behavior that appears reasonable in some
+    cases but is nonsensical in other cases.  This is officially
+    forbidden by the CPython docs, so we forbid it explicitly for now.
+    '''
 
     def __reversed__(self):
-        'od.__reversed__() <==> reversed(od)'
-        # Traverse the linked list in reverse order.
-        root = self.__root
-        curr = root[0]                                  # start at the last node
-        while curr is not root:
-            yield curr[2]                               # yield the curr[KEY]
-            curr = curr[0]                              # move to previous node
-
-    def clear(self):
-        'od.clear() -> None.  Remove all items from od.'
-        root = self.__root
-        root[:] = [root, root, None]
-        self.__map.clear()
-        dict.clear(self)
-
-    # -- the following methods do not depend on the internal structure --
-
-    def keys(self):
-        'od.keys() -> list of keys in od'
-        return list(self)
-
-    def values(self):
-        'od.values() -> list of values in od'
-        return [self[key] for key in self]
-
-    def items(self):
-        'od.items() -> list of (key, value) pairs in od'
-        return [(key, self[key]) for key in self]
-
-    def iterkeys(self):
-        'od.iterkeys() -> an iterator over the keys in od'
-        return iter(self)
-
-    def itervalues(self):
-        'od.itervalues -> an iterator over the values in od'
-        for k in self:
-            yield self[k]
-
-    def iteritems(self):
-        'od.iteritems -> an iterator over the (key, value) pairs in od'
-        for k in self:
-            yield (k, self[k])
-
-    update = MutableMapping.update
-
-    __update = update # let subclasses override update without breaking __init__
-
-    __marker = object()
-
-    def pop(self, key, default=__marker):
-        '''od.pop(k[,d]) -> v, remove specified key and return the corresponding
-        value.  If key is not found, d is returned if given, otherwise KeyError
-        is raised.
-
-        '''
-        if key in self:
-            result = self[key]
-            del self[key]
-            return result
-        if default is self.__marker:
-            raise KeyError(key)
-        return default
-
-    def setdefault(self, key, default=None):
-        'od.setdefault(k[,d]) -> od.get(k,d), also set od[k]=d if k not in od'
-        if key in self:
-            return self[key]
-        self[key] = default
-        return default
+        return reversed_dict(self)
 
     def popitem(self, last=True):
         '''od.popitem() -> (k, v), return and remove a (key, value) pair.
         Pairs are returned in LIFO order if last is true or FIFO order if false.
 
         '''
-        if not self:
-            raise KeyError('dictionary is empty')
-        key = next(reversed(self) if last else iter(self))
-        value = self.pop(key)
-        return key, value
+        if last:
+            return dict.popitem(self)
+        else:
+            it = dict.__iter__(self)
+            try:
+                k = it.next()
+            except StopIteration:
+                raise KeyError('dictionary is empty')
+            return (k, self.pop(k))
 
     def __repr__(self, _repr_running={}):
         'od.__repr__() <==> repr(od)'
@@ -183,8 +80,6 @@
         'Return state information for pickling'
         items = [[k, self[k]] for k in self]
         inst_dict = vars(self).copy()
-        for k in vars(OrderedDict()):
-            inst_dict.pop(k, None)
         if inst_dict:
             return (self.__class__, (items,), inst_dict)
         return self.__class__, (items,)
@@ -193,17 +88,6 @@
         'od.copy() -> a shallow copy of od'
         return self.__class__(self)
 
-    @classmethod
-    def fromkeys(cls, iterable, value=None):
-        '''OD.fromkeys(S[, v]) -> New ordered dictionary with keys from S.
-        If not specified, the value defaults to None.
-
-        '''
-        self = cls()
-        for key in iterable:
-            self[key] = value
-        return self
-
     def __eq__(self, other):
         '''od.__eq__(y) <==> od==y.  Comparison to another OD is order-sensitive
         while comparison to a regular mapping is order-insensitive.
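
For illustration, a minimal sketch of the behaviour the slimmed-down
OrderedDict relies on; it assumes PyPy's ordered built-in dict and
__pypy__.reversed_dict, and the key/value pairs are made up:

    from collections import OrderedDict

    od = OrderedDict([('a', 1), ('b', 2), ('c', 3)])
    assert list(reversed(od)) == ['c', 'b', 'a']   # __reversed__ -> reversed_dict
    assert od.popitem() == ('c', 3)                # last=True: dict.popitem(self)
    assert od.popitem(last=False) == ('a', 1)      # last=False: first key via dict.__iter__
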
diff --git a/lib-python/2.7/ctypes/test/test_frombuffer.py b/lib-python/2.7/ctypes/test/test_frombuffer.py
--- a/lib-python/2.7/ctypes/test/test_frombuffer.py
+++ b/lib-python/2.7/ctypes/test/test_frombuffer.py
@@ -2,7 +2,6 @@
 import array
 import gc
 import unittest
-from ctypes.test import xfail
 
 class X(Structure):
     _fields_ = [("c_int", c_int)]
@@ -11,7 +10,6 @@
         self._init_called = True
 
 class Test(unittest.TestCase):
-    @xfail
     def test_fom_buffer(self):
         a = array.array("i", range(16))
         x = (c_int * 16).from_buffer(a)
@@ -34,10 +32,9 @@
         del a; gc.collect(); gc.collect(); gc.collect()
         self.assertEqual(x[:], expected)
 
-        self.assertRaises(TypeError,
+        self.assertRaises((TypeError, ValueError),
                           (c_char * 16).from_buffer, "a" * 16)
 
-    @xfail
     def test_fom_buffer_with_offset(self):
         a = array.array("i", range(16))
         x = (c_int * 15).from_buffer(a, sizeof(c_int))
@@ -46,7 +43,6 @@
         self.assertRaises(ValueError, lambda: (c_int * 16).from_buffer(a, sizeof(c_int)))
         self.assertRaises(ValueError, lambda: (c_int * 1).from_buffer(a, 16 * sizeof(c_int)))
 
-    @xfail
     def test_from_buffer_copy(self):
         a = array.array("i", range(16))
         x = (c_int * 16).from_buffer_copy(a)
@@ -71,7 +67,6 @@
         x = (c_char * 16).from_buffer_copy("a" * 16)
         self.assertEqual(x[:], "a" * 16)
 
-    @xfail
     def test_fom_buffer_copy_with_offset(self):
         a = array.array("i", range(16))
         x = (c_int * 15).from_buffer_copy(a, sizeof(c_int))
diff --git a/lib-python/2.7/distutils/unixccompiler.py b/lib-python/2.7/distutils/unixccompiler.py
--- a/lib-python/2.7/distutils/unixccompiler.py
+++ b/lib-python/2.7/distutils/unixccompiler.py
@@ -58,7 +58,7 @@
     executables = {'preprocessor' : None,
                    'compiler'     : ["cc"],
                    'compiler_so'  : ["cc"],
-                   'compiler_cxx' : ["cc"],
+                   'compiler_cxx' : ["c++"],  # pypy: changed, 'cc' is bogus
                    'linker_so'    : ["cc", "-shared"],
                    'linker_exe'   : ["cc"],
                    'archiver'     : ["ar", "-cr"],
diff --git a/lib-python/2.7/platform.py b/lib-python/2.7/platform.py
--- a/lib-python/2.7/platform.py
+++ b/lib-python/2.7/platform.py
@@ -1382,7 +1382,7 @@
 _pypy_sys_version_parser = re.compile(
     r'([\w.+]+)\s*'
     '\(#?([^,]+),\s*([\w ]+),\s*([\w :]+)\)\s*'
-    '\[PyPy [^\]]+\]?')
+    '\[PyPy[^\]]+\]?')   # this also covers 'PyPy-STM x.y'
 
 _sys_version_cache = {}
 
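For illustration, a quick check of the widened pattern; the version strings
below are hypothetical but follow the "x.y.z (rev, date, time)\n[PyPy ...]"
layout this parser expects:

    import re

    _pypy_sys_version_parser = re.compile(
        r'([\w.+]+)\s*'
        '\(#?([^,]+),\s*([\w ]+),\s*([\w :]+)\)\s*'
        '\[PyPy[^\]]+\]?')

    plain = '2.7.8 (f9544d0d6739, Feb 25 2015, 10:23:14)\n[PyPy 2.5.0 with GCC 4.8.2]'
    stm = '2.7.8 (f9544d0d6739, Feb 25 2015, 10:23:14)\n[PyPy-STM 2.5.0 with GCC 4.8.2]'
    assert _pypy_sys_version_parser.match(plain)
    assert _pypy_sys_version_parser.match(stm)   # old '\[PyPy [^\]]+' required a space here
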
diff --git a/lib-python/2.7/sqlite3/test/dbapi.py b/lib-python/2.7/sqlite3/test/dbapi.py
--- a/lib-python/2.7/sqlite3/test/dbapi.py
+++ b/lib-python/2.7/sqlite3/test/dbapi.py
@@ -478,6 +478,29 @@
         except TypeError:
             pass
 
+    def CheckCurDescription(self):
+        self.cu.execute("select * from test")
+
+        actual = self.cu.description
+        expected = [
+            ('id', None, None, None, None, None, None),
+            ('name', None, None, None, None, None, None),
+            ('income', None, None, None, None, None, None),
+        ]
+        self.assertEqual(expected, actual)
+
+    def CheckCurDescriptionVoidStatement(self):
+        self.cu.execute("insert into test(name) values (?)", ("foo",))
+        self.assertIsNone(self.cu.description)
+
+    def CheckCurDescriptionWithoutStatement(self):
+        cu = self.cx.cursor()
+        try:
+            self.assertIsNone(cu.description)
+        finally:
+            cu.close()
+
+
 @unittest.skipUnless(threading, 'This test requires threading.')
 class ThreadTests(unittest.TestCase):
     def setUp(self):
diff --git a/lib-python/2.7/subprocess.py b/lib-python/2.7/subprocess.py
--- a/lib-python/2.7/subprocess.py
+++ b/lib-python/2.7/subprocess.py
@@ -1589,7 +1589,7 @@
                   'copyfile' in caller.f_globals):
         dest_dir = sys.pypy_resolvedirof(target_executable)
         src_dir = sys.pypy_resolvedirof(sys.executable)
-        for libname in ['libpypy-c.so']:
+        for libname in ['libpypy-c.so', 'libpypy-c.dylib']:
             dest_library = os.path.join(dest_dir, libname)
             src_library = os.path.join(src_dir, libname)
             if os.path.exists(src_library):
diff --git a/lib-python/2.7/test/test_collections.py b/lib-python/2.7/test/test_collections.py
--- a/lib-python/2.7/test/test_collections.py
+++ b/lib-python/2.7/test/test_collections.py
@@ -578,7 +578,12 @@
             def __repr__(self):
                 return "MySet(%s)" % repr(list(self))
         s = MySet([5,43,2,1])
-        self.assertEqual(s.pop(), 1)
+        # changed from CPython 2.7: it was "s.pop() == 1" but I see
+        # nothing that guarantees a particular order here.  In the
+        # 'all_ordered_dicts' branch of PyPy (or with OrderedDict
+        # instead of sets), it consistently returns 5, but this test
+        # should not rely on this or any other order.
+        self.assert_(s.pop() in [5,43,2,1])
 
     def test_issue8750(self):
         empty = WithSet()
@@ -1010,8 +1015,9 @@
                                           c=3, e=5).items()), pairs)                # mixed input
 
         # make sure no positional args conflict with possible kwdargs
-        self.assertEqual(inspect.getargspec(OrderedDict.__dict__['__init__']).args,
-                         ['self'])
+        if '__init__' in OrderedDict.__dict__:   # absent in PyPy
+            self.assertEqual(inspect.getargspec(OrderedDict.__dict__['__init__']).args,
+                             ['self'])
 
         # Make sure that direct calls to __init__ do not clear previous contents
         d = OrderedDict([('a', 1), ('b', 2), ('c', 3), ('d', 44), ('e', 55)])
@@ -1108,6 +1114,16 @@
             od.popitem()
         self.assertEqual(len(od), 0)
 
+    def test_popitem_first(self):
+        pairs = [('c', 1), ('b', 2), ('a', 3), ('d', 4), ('e', 5), ('f', 6)]
+        shuffle(pairs)
+        od = OrderedDict(pairs)
+        while pairs:
+            self.assertEqual(od.popitem(last=False), pairs.pop(0))
+        with self.assertRaises(KeyError):
+            od.popitem(last=False)
+        self.assertEqual(len(od), 0)
+
     def test_pop(self):
         pairs = [('c', 1), ('b', 2), ('a', 3), ('d', 4), ('e', 5), ('f', 6)]
         shuffle(pairs)
@@ -1179,7 +1195,11 @@
         od = OrderedDict(pairs)
         # yaml.dump(od) -->
         # '!!python/object/apply:__main__.OrderedDict\n- - [a, 1]\n  - [b, 2]\n'
-        self.assertTrue(all(type(pair)==list for pair in od.__reduce__()[1]))
+
+        # PyPy bug fix: added [0] at the end of this line, because the
+        # test is really about the 2-tuples that need to be 2-lists
+        # inside the list of 6 of them
+        self.assertTrue(all(type(pair)==list for pair in od.__reduce__()[1][0]))
 
     def test_reduce_not_too_fat(self):
         # do not save instance dictionary if not needed
@@ -1189,6 +1209,16 @@
         od.x = 10
         self.assertEqual(len(od.__reduce__()), 3)
 
+    def test_reduce_exact_output(self):
+        # PyPy: test that __reduce__() produces the exact same answer as
+        # CPython does, even though in the 'all_ordered_dicts' branch we
+        # have to emulate it.
+        pairs = [['c', 1], ['b', 2], ['d', 4]]
+        od = OrderedDict(pairs)
+        self.assertEqual(od.__reduce__(), (OrderedDict, (pairs,)))
+        od.x = 10
+        self.assertEqual(od.__reduce__(), (OrderedDict, (pairs,), {'x': 10}))
+
     def test_repr(self):
         od = OrderedDict([('c', 1), ('b', 2), ('a', 3), ('d', 4), ('e', 5), ('f', 6)])
         self.assertEqual(repr(od),
diff --git a/lib-python/2.7/test/test_xml_etree.py b/lib-python/2.7/test/test_xml_etree.py
--- a/lib-python/2.7/test/test_xml_etree.py
+++ b/lib-python/2.7/test/test_xml_etree.py
@@ -225,9 +225,9 @@
     >>> element.remove(subelement)
     >>> serialize(element) # 5
     '<tag key="value" />'
-    >>> element.remove(subelement)
+    >>> element.remove(subelement)    # doctest: +ELLIPSIS
     Traceback (most recent call last):
-    ValueError: list.remove(x): x not in list
+    ValueError: list.remove(...
     >>> serialize(element) # 6
     '<tag key="value" />'
     >>> element[0:0] = [subelement, subelement, subelement]
diff --git a/lib-python/stdlib-upgrade.txt b/lib-python/stdlib-upgrade.txt
--- a/lib-python/stdlib-upgrade.txt
+++ b/lib-python/stdlib-upgrade.txt
@@ -7,7 +7,7 @@
 
 1. check out the branch vendor/stdlib
 2. upgrade the files there
-3. update stdlib-versions.txt with the output of hg -id from the cpython repo
+3. update stdlib-version.txt with the output of hg -id from the cpython repo
 4. commit
 5. update to default/py3k
 6. create a integration branch for the new stdlib
diff --git a/lib_pypy/_ctypes/basics.py b/lib_pypy/_ctypes/basics.py
--- a/lib_pypy/_ctypes/basics.py
+++ b/lib_pypy/_ctypes/basics.py
@@ -83,6 +83,37 @@
     def in_dll(self, dll, name):
         return self.from_address(dll._handle.getaddressindll(name))
 
+    def from_buffer(self, obj, offset=0):
+        size = self._sizeofinstances()
+        buf = buffer(obj, offset, size)
+        if len(buf) < size:
+            raise ValueError(
+                "Buffer size too small (%d instead of at least %d bytes)"
+                % (len(buf) + offset, size + offset))
+        raw_addr = buf._pypy_raw_address()
+        result = self.from_address(raw_addr)
+        result._ensure_objects()['ffffffff'] = obj
+        return result
+
+    def from_buffer_copy(self, obj, offset=0):
+        size = self._sizeofinstances()
+        buf = buffer(obj, offset, size)
+        if len(buf) < size:
+            raise ValueError(
+                "Buffer size too small (%d instead of at least %d bytes)"
+                % (len(buf) + offset, size + offset))
+        result = self()
+        dest = result._buffer.buffer
+        try:
+            raw_addr = buf._pypy_raw_address()
+        except ValueError:
+            _rawffi.rawstring2charp(dest, buf)
+        else:
+            from ctypes import memmove
+            memmove(dest, raw_addr, size)
+        return result
+
+
 class CArgObject(object):
     """ simple wrapper around buffer, just for the case of freeing
     it afterwards
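
For illustration, a short sketch of what the newly implemented from_buffer()
and from_buffer_copy() are meant to do; this mirrors the un-xfail'ed ctypes
tests above, while the exact keep-alive details are PyPy-specific:

    import array
    from ctypes import c_int, sizeof

    a = array.array("i", range(16))

    shared = (c_int * 16).from_buffer(a)       # shares memory, keeps 'a' alive
    a[0] = 123
    assert shared[0] == 123

    snap = (c_int * 15).from_buffer_copy(a, sizeof(c_int))   # copy from offset 4
    a[1] = -1
    assert snap[0] == 1                        # copy was taken before the change
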
diff --git a/lib_pypy/_ctypes/structure.py b/lib_pypy/_ctypes/structure.py
--- a/lib_pypy/_ctypes/structure.py
+++ b/lib_pypy/_ctypes/structure.py
@@ -1,3 +1,4 @@
+import sys
 import _rawffi
 from _ctypes.basics import _CData, _CDataMeta, keepalive_key,\
      store_reference, ensure_objects, CArgObject
@@ -178,6 +179,8 @@
         instance = StructOrUnion.__new__(self)
         if isinstance(address, _rawffi.StructureInstance):
             address = address.buffer
+        # fix the address: turn it into as unsigned, in case it is negative
+        address = address & (sys.maxint * 2 + 1)
         instance.__dict__['_buffer'] = self._ffistruct.fromaddress(address)
         return instance
 
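A small worked example of the masking added above (hypothetical value,
64-bit assumed): sys.maxint * 2 + 1 is the all-ones machine word, so the
& reinterprets a negative address as its unsigned equivalent:

    import sys

    address = -0x7000DEAD                  # e.g. a negative value from a C API
    mask = sys.maxint * 2 + 1              # 0xFFFFFFFFFFFFFFFF on 64-bit
    assert address & mask == 0xFFFFFFFF8FFF2153
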
diff --git a/lib_pypy/_functools.py b/lib_pypy/_functools.py
--- a/lib_pypy/_functools.py
+++ b/lib_pypy/_functools.py
@@ -9,7 +9,10 @@
     of the given arguments and keywords.
     """
 
-    def __init__(self, func, *args, **keywords):
+    def __init__(self, *args, **keywords):
+        if not args:
+            raise TypeError('__init__() takes at least 2 arguments (1 given)')
+        func, args = args[0], args[1:]
         if not callable(func):
             raise TypeError("the first argument must be callable")
         self._func = func
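
For illustration, a small sketch of the behaviour this signature change
enables (matching CPython's C implementation of functools.partial): a keyword
argument literally named 'func' is forwarded to the wrapped callable instead
of colliding with partial's own first parameter:

    from functools import partial

    p = partial(dict, func=42)          # TypeError with the old __init__ signature
    assert p(other=1) == {'func': 42, 'other': 1}

    try:
        partial()                       # still rejected: a callable is required
    except TypeError:
        pass
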
diff --git a/lib_pypy/_sqlite3.py b/lib_pypy/_sqlite3.py
--- a/lib_pypy/_sqlite3.py
+++ b/lib_pypy/_sqlite3.py
@@ -1175,8 +1175,9 @@
         try:
             return self.__description
         except AttributeError:
-            self.__description = self.__statement._get_description()
-            return self.__description
+            if self.__statement:
+                self.__description = self.__statement._get_description()
+                return self.__description
     description = property(__get_description)
 
     def __get_lastrowid(self):
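
For illustration, the cursor behaviour that the new dbapi tests above
exercise, written against the public sqlite3 API (the table layout is the
one used by those tests):

    import sqlite3

    con = sqlite3.connect(":memory:")
    cur = con.cursor()
    assert cur.description is None        # no statement executed yet

    cur.execute("create table test (id integer, name text, income number)")
    cur.execute("insert into test(name) values (?)", ("foo",))
    assert cur.description is None        # void statements leave it None

    cur.execute("select * from test")
    assert [col[0] for col in cur.description] == ['id', 'name', 'income']
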
diff --git a/lib_pypy/cffi/__init__.py b/lib_pypy/cffi/__init__.py
--- a/lib_pypy/cffi/__init__.py
+++ b/lib_pypy/cffi/__init__.py
@@ -6,3 +6,8 @@
 
 __version__ = "0.8.6"
 __version_info__ = (0, 8, 6)
+
+# The verifier module file names are based on the CRC32 of a string that
+# contains the following version number.  It may be older than __version__
+# if nothing is clearly incompatible.
+__version_verifier_modules__ = "0.8.6"
diff --git a/lib_pypy/cffi/api.py b/lib_pypy/cffi/api.py
--- a/lib_pypy/cffi/api.py
+++ b/lib_pypy/cffi/api.py
@@ -69,6 +69,7 @@
         self._function_caches = []
         self._libraries = []
         self._cdefsources = []
+        self._windows_unicode = None
         if hasattr(backend, 'set_ffi'):
             backend.set_ffi(self)
         for name in backend.__dict__:
@@ -77,6 +78,7 @@
         #
         with self._lock:
             self.BVoidP = self._get_cached_btype(model.voidp_type)
+            self.BCharA = self._get_cached_btype(model.char_array_type)
         if isinstance(backend, types.ModuleType):
             # _cffi_backend: attach these constants to the class
             if not hasattr(FFI, 'NULL'):
@@ -189,13 +191,16 @@
             cdecl = self._typeof(cdecl)
         return self._backend.alignof(cdecl)
 
-    def offsetof(self, cdecl, fieldname):
+    def offsetof(self, cdecl, *fields_or_indexes):
         """Return the offset of the named field inside the given
-        structure, which must be given as a C type name.
+        structure or array, which must be given as a C type name.  
+        You can give several field names in case of nested structures.
+        You can also give numeric values which correspond to array
+        items, in case of an array type.
         """
         if isinstance(cdecl, basestring):
             cdecl = self._typeof(cdecl)
-        return self._backend.typeoffsetof(cdecl, fieldname)[1]
+        return self._typeoffsetof(cdecl, *fields_or_indexes)[1]
 
     def new(self, cdecl, init=None):
         """Allocate an instance according to the specified C type and
@@ -264,6 +269,16 @@
         """
         return self._backend.buffer(cdata, size)
 
+    def from_buffer(self, python_buffer):
+        """Return a <cdata 'char[]'> that points to the data of the
+        given Python object, which must support the buffer interface.
+        Note that this is not meant to be used on the built-in types str,
+        unicode, or bytearray (you can build 'char[]' arrays explicitly)
+        but only on objects containing large quantities of raw data
+        in some other format, like 'array.array' or numpy arrays.
+        """
+        return self._backend.from_buffer(self.BCharA, python_buffer)
+
     def callback(self, cdecl, python_callable=None, error=None):
         """Return a callback object or a decorator making such a
         callback object.  'cdecl' must name a C function pointer type.
@@ -335,9 +350,23 @@
         which requires binary compatibility in the signatures.
         """
         from .verifier import Verifier, _caller_dir_pycache
+        #
+        # If set_unicode(True) was called, insert the UNICODE and
+        # _UNICODE macro declarations
+        if self._windows_unicode:
+            self._apply_windows_unicode(kwargs)
+        #
+        # Set the tmpdir here, and not in Verifier.__init__: it picks
+        # up the caller's directory, which we want to be the caller of
+        # ffi.verify(), as opposed to the caller of Verifier().
         tmpdir = tmpdir or _caller_dir_pycache()
+        #
+        # Make a Verifier() and use it to load the library.
         self.verifier = Verifier(self, source, tmpdir, **kwargs)
         lib = self.verifier.load_library()
+        #
+        # Save the loaded library for keep-alive purposes, even
+        # if the caller doesn't keep it alive itself (it should).
         self._libraries.append(lib)
         return lib
 
@@ -356,15 +385,29 @@
         with self._lock:
             return model.pointer_cache(self, ctype)
 
-    def addressof(self, cdata, field=None):
+    def addressof(self, cdata, *fields_or_indexes):
         """Return the address of a <cdata 'struct-or-union'>.
-        If 'field' is specified, return the address of this field.
+        If 'fields_or_indexes' are given, returns the address of that
+        field or array item in the structure or array, recursively in
+        case of nested structures.
         """
         ctype = self._backend.typeof(cdata)
-        ctype, offset = self._backend.typeoffsetof(ctype, field)
+        if fields_or_indexes:
+            ctype, offset = self._typeoffsetof(ctype, *fields_or_indexes)
+        else:
+            if ctype.kind == "pointer":
+                raise TypeError("addressof(pointer)")
+            offset = 0
         ctypeptr = self._pointer_to(ctype)
         return self._backend.rawaddressof(ctypeptr, cdata, offset)
 
+    def _typeoffsetof(self, ctype, field_or_index, *fields_or_indexes):
+        ctype, offset = self._backend.typeoffsetof(ctype, field_or_index)
+        for field1 in fields_or_indexes:
+            ctype, offset1 = self._backend.typeoffsetof(ctype, field1, 1)
+            offset += offset1
+        return ctype, offset
+
     def include(self, ffi_to_include):
         """Includes the typedefs, structs, unions and enums defined
         in another FFI instance.  Usage is similar to a #include in C,
@@ -387,6 +430,44 @@
     def from_handle(self, x):
         return self._backend.from_handle(x)
 
+    def set_unicode(self, enabled_flag):
+        """Windows: if 'enabled_flag' is True, enable the UNICODE and
+        _UNICODE defines in C, and declare the types like TCHAR and LPTCSTR
+        to be (pointers to) wchar_t.  If 'enabled_flag' is False,
+        declare these types to be (pointers to) plain 8-bit characters.
+        This is mostly for backward compatibility; you usually want True.
+        """
+        if self._windows_unicode is not None:
+            raise ValueError("set_unicode() can only be called once")
+        enabled_flag = bool(enabled_flag)
+        if enabled_flag:
+            self.cdef("typedef wchar_t TBYTE;"
+                      "typedef wchar_t TCHAR;"
+                      "typedef const wchar_t *LPCTSTR;"
+                      "typedef const wchar_t *PCTSTR;"
+                      "typedef wchar_t *LPTSTR;"
+                      "typedef wchar_t *PTSTR;"
+                      "typedef TBYTE *PTBYTE;"
+                      "typedef TCHAR *PTCHAR;")
+        else:
+            self.cdef("typedef char TBYTE;"
+                      "typedef char TCHAR;"
+                      "typedef const char *LPCTSTR;"
+                      "typedef const char *PCTSTR;"
+                      "typedef char *LPTSTR;"
+                      "typedef char *PTSTR;"
+                      "typedef TBYTE *PTBYTE;"
+                      "typedef TCHAR *PTCHAR;")
+        self._windows_unicode = enabled_flag
+
+    def _apply_windows_unicode(self, kwds):
+        defmacros = kwds.get('define_macros', ())
+        if not isinstance(defmacros, (list, tuple)):
+            raise TypeError("'define_macros' must be a list or tuple")
+        defmacros = list(defmacros) + [('UNICODE', '1'),
+                                       ('_UNICODE', '1')]
+        kwds['define_macros'] = defmacros
+
 
 def _load_backend_lib(backend, name, flags):
     if name is None:
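
For illustration, a minimal sketch of the extended offsetof()/addressof()
accepting several field names or numeric array indexes; the struct names
below are made up:

    import cffi

    ffi = cffi.FFI()
    ffi.cdef("struct point { int x, y; }; struct rect { struct point a, b; };")

    # nested fields are resolved one step at a time by _typeoffsetof()
    assert ffi.offsetof("struct rect", "b", "y") == (
        ffi.offsetof("struct rect", "b") + ffi.offsetof("struct point", "y"))

    # numeric values index array items
    assert ffi.offsetof("int[10]", 3) == 3 * ffi.sizeof("int")

    r = ffi.new("struct rect *")
    pb = ffi.addressof(r[0], "b")         # <cdata 'struct point *'> into r
    pb.y = 7
    assert r.b.y == 7
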
diff --git a/lib_pypy/cffi/backend_ctypes.py b/lib_pypy/cffi/backend_ctypes.py
--- a/lib_pypy/cffi/backend_ctypes.py
+++ b/lib_pypy/cffi/backend_ctypes.py
@@ -169,6 +169,7 @@
 class CTypesGenericPtr(CTypesData):
     __slots__ = ['_address', '_as_ctype_ptr']
     _automatic_casts = False
+    kind = "pointer"
 
     @classmethod
     def _newp(cls, init):
@@ -370,10 +371,12 @@
                                 (CTypesPrimitive, type(source).__name__))
             return source
         #
+        kind1 = kind
         class CTypesPrimitive(CTypesGenericPrimitive):
             __slots__ = ['_value']
             _ctype = ctype
             _reftypename = '%s &' % name
+            kind = kind1
 
             def __init__(self, value):
                 self._value = value
@@ -703,12 +706,13 @@
         class struct_or_union(base_ctypes_class):
             pass
         struct_or_union.__name__ = '%s_%s' % (kind, name)
+        kind1 = kind
         #
         class CTypesStructOrUnion(CTypesBaseStructOrUnion):
             __slots__ = ['_blob']
             _ctype = struct_or_union
             _reftypename = '%s &' % (name,)
-            _kind = kind
+            _kind = kind = kind1
         #
         CTypesStructOrUnion._fix_class()
         return CTypesStructOrUnion
@@ -994,27 +998,42 @@
     def getcname(self, BType, replace_with):
         return BType._get_c_name(replace_with)
 
-    def typeoffsetof(self, BType, fieldname):
-        if fieldname is not None and issubclass(BType, CTypesGenericPtr):
-            BType = BType._BItem
-        if not issubclass(BType, CTypesBaseStructOrUnion):
-            raise TypeError("expected a struct or union ctype")
-        if fieldname is None:
-            return (BType, 0)
-        else:
+    def typeoffsetof(self, BType, fieldname, num=0):
+        if isinstance(fieldname, str):
+            if num == 0 and issubclass(BType, CTypesGenericPtr):
+                BType = BType._BItem
+            if not issubclass(BType, CTypesBaseStructOrUnion):
+                raise TypeError("expected a struct or union ctype")
             BField = BType._bfield_types[fieldname]
             if BField is Ellipsis:
                 raise TypeError("not supported for bitfields")
             return (BField, BType._offsetof(fieldname))
+        elif isinstance(fieldname, (int, long)):
+            if issubclass(BType, CTypesGenericArray):
+                BType = BType._CTPtr
+            if not issubclass(BType, CTypesGenericPtr):
+                raise TypeError("expected an array or ptr ctype")
+            BItem = BType._BItem
+            offset = BItem._get_size() * fieldname
+            if offset > sys.maxsize:
+                raise OverflowError
+            return (BItem, offset)
+        else:
+            raise TypeError(type(fieldname))
 
-    def rawaddressof(self, BTypePtr, cdata, offset):
+    def rawaddressof(self, BTypePtr, cdata, offset=None):
         if isinstance(cdata, CTypesBaseStructOrUnion):
             ptr = ctypes.pointer(type(cdata)._to_ctypes(cdata))
         elif isinstance(cdata, CTypesGenericPtr):
+            if offset is None or not issubclass(type(cdata)._BItem,
+                                                CTypesBaseStructOrUnion):
+                raise TypeError("unexpected cdata type")
+            ptr = type(cdata)._to_ctypes(cdata)
+        elif isinstance(cdata, CTypesGenericArray):
             ptr = type(cdata)._to_ctypes(cdata)
         else:
             raise TypeError("expected a <cdata 'struct-or-union'>")
-        if offset != 0:
+        if offset:
             ptr = ctypes.cast(
                 ctypes.c_void_p(
                     ctypes.cast(ptr, ctypes.c_void_p).value + offset),
diff --git a/lib_pypy/cffi/commontypes.py b/lib_pypy/cffi/commontypes.py
--- a/lib_pypy/cffi/commontypes.py
+++ b/lib_pypy/cffi/commontypes.py
@@ -29,6 +29,9 @@
                 result = model.PointerType(resolve_common_type(result[:-2]))
         elif result in model.PrimitiveType.ALL_PRIMITIVE_TYPES:
             result = model.PrimitiveType(result)
+        elif result == 'set-unicode-needed':
+            raise api.FFIError("The Windows type %r is only available after "
+                               "you call ffi.set_unicode()" % (commontype,))
         else:
             if commontype == result:
                 raise api.FFIError("Unsupported type: %r.  Please file a bug "
@@ -86,8 +89,6 @@
         "ULONGLONG": "unsigned long long",
         "WCHAR": "wchar_t",
         "SHORT": "short",
-        "TBYTE": "WCHAR",
-        "TCHAR": "WCHAR",
         "UCHAR": "unsigned char",
         "UINT": "unsigned int",
         "UINT8": "unsigned char",
@@ -157,14 +158,12 @@
 
         "LPCVOID": model.const_voidp_type,
         "LPCWSTR": "const WCHAR *",
-        "LPCTSTR": "LPCWSTR",
         "LPDWORD": "DWORD *",
         "LPHANDLE": "HANDLE *",
         "LPINT": "int *",
         "LPLONG": "long *",
         "LPSTR": "CHAR *",
         "LPWSTR": "WCHAR *",
-        "LPTSTR": "LPWSTR",
         "LPVOID": model.voidp_type,
         "LPWORD": "WORD *",
         "LRESULT": "LONG_PTR",
@@ -173,7 +172,6 @@
         "PBYTE": "BYTE *",
         "PCHAR": "CHAR *",
         "PCSTR": "const CHAR *",
-        "PCTSTR": "LPCWSTR",
         "PCWSTR": "const WCHAR *",
         "PDWORD": "DWORD *",
         "PDWORDLONG": "DWORDLONG *",
@@ -200,9 +198,6 @@
         "PSIZE_T": "SIZE_T *",
         "PSSIZE_T": "SSIZE_T *",
         "PSTR": "CHAR *",
-        "PTBYTE": "TBYTE *",
-        "PTCHAR": "TCHAR *",
-        "PTSTR": "LPWSTR",
         "PUCHAR": "UCHAR *",
         "PUHALF_PTR": "UHALF_PTR *",
         "PUINT": "UINT *",
@@ -240,6 +235,15 @@
         "USN": "LONGLONG",
         "VOID": model.void_type,
         "WPARAM": "UINT_PTR",
+
+        "TBYTE": "set-unicode-needed",
+        "TCHAR": "set-unicode-needed",
+        "LPCTSTR": "set-unicode-needed",
+        "PCTSTR": "set-unicode-needed",
+        "LPTSTR": "set-unicode-needed",
+        "PTSTR": "set-unicode-needed",
+        "PTBYTE": "set-unicode-needed",
+        "PTCHAR": "set-unicode-needed",
         })
     return result
 
diff --git a/lib_pypy/cffi/cparser.py b/lib_pypy/cffi/cparser.py
--- a/lib_pypy/cffi/cparser.py
+++ b/lib_pypy/cffi/cparser.py
@@ -1,4 +1,3 @@
-
 from . import api, model
 from .commontypes import COMMON_TYPES, resolve_common_type
 try:
@@ -209,6 +208,8 @@
 
     def _add_constants(self, key, val):
         if key in self._int_constants:
+            if self._int_constants[key] == val:
+                return     # ignore identical double declarations
             raise api.FFIError(
                 "multiple declarations of constant: %s" % (key,))
         self._int_constants[key] = val
@@ -228,12 +229,18 @@
 
                 pyvalue = int(int_str, 0)
                 self._add_constants(key, pyvalue)
+                self._declare('macro ' + key, pyvalue)
             elif value == '...':
                 self._declare('macro ' + key, value)
             else:
-                raise api.CDefError('only supports the syntax "#define '
-                                    '%s ..." (literally) or "#define '
-                                    '%s 0x1FF" for now' % (key, key))
+                raise api.CDefError(
+                    'only supports one of the following syntax:\n'
+                    '  #define %s ...     (literally dot-dot-dot)\n'
+                    '  #define %s NUMBER  (with NUMBER an integer'
+                                    ' constant, decimal/hex/octal)\n'
+                    'got:\n'
+                    '  #define %s %s'
+                    % (key, key, key, value))
 
     def _parse_decl(self, decl):
         node = decl.type
@@ -460,6 +467,8 @@
             elif kind == 'union':
                 tp = model.UnionType(explicit_name, None, None, None)
             elif kind == 'enum':
+                if explicit_name == '__dotdotdot__':
+                    raise CDefError("Enums cannot be declared with ...")
                 tp = self._build_enum_type(explicit_name, type.values)
             else:
                 raise AssertionError("kind = %r" % (kind,))
@@ -532,9 +541,24 @@
 
     def _parse_constant(self, exprnode, partial_length_ok=False):
         # for now, limited to expressions that are an immediate number
-        # or negative number
+        # or positive/negative number
         if isinstance(exprnode, pycparser.c_ast.Constant):
-            return int(exprnode.value, 0)
+            s = exprnode.value
+            if s.startswith('0'):
+                if s.startswith('0x') or s.startswith('0X'):
+                    return int(s, 16)
+                return int(s, 8)
+            elif '1' <= s[0] <= '9':
+                return int(s, 10)
+            elif s[0] == "'" and s[-1] == "'" and (
+                    len(s) == 3 or (len(s) == 4 and s[1] == "\\")):
+                return ord(s[-2])
+            else:
+                raise api.CDefError("invalid constant %r" % (s,))
+        #
+        if (isinstance(exprnode, pycparser.c_ast.UnaryOp) and
+                exprnode.op == '+'):
+            return self._parse_constant(exprnode.expr)
         #
         if (isinstance(exprnode, pycparser.c_ast.UnaryOp) and
                 exprnode.op == '-'):
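
For illustration, a sketch of the literal forms _parse_constant() now accepts
inside cdef() declarations (enum values, array lengths); character constants
and a leading '+' are the new cases, and the enum/struct names are made up:

    import cffi

    ffi = cffi.FFI()
    ffi.cdef("""
        enum modes { M_HEX = 0x10, M_OCT = 010, M_CHR = 'x', M_POS = +5, M_NEG = -3 };
        struct buf { char data[0x20]; };
    """)
    assert ffi.sizeof("struct buf") == 0x20
    assert ffi.string(ffi.cast("enum modes", ord('x'))) == 'M_CHR'
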
diff --git a/lib_pypy/cffi/ffiplatform.py b/lib_pypy/cffi/ffiplatform.py
--- a/lib_pypy/cffi/ffiplatform.py
+++ b/lib_pypy/cffi/ffiplatform.py
@@ -11,6 +11,9 @@
     """
 
 
+LIST_OF_FILE_NAMES = ['sources', 'include_dirs', 'library_dirs',
+                      'extra_objects', 'depends']
+
 def get_extension(srcfilename, modname, sources=(), **kwds):
     from distutils.core import Extension
     allsources = [srcfilename]
diff --git a/lib_pypy/cffi/model.py b/lib_pypy/cffi/model.py
--- a/lib_pypy/cffi/model.py
+++ b/lib_pypy/cffi/model.py
@@ -235,6 +235,8 @@
         BPtrItem = PointerType(self.item).get_cached_btype(ffi, finishlist)
         return global_cache(self, ffi, 'new_array_type', BPtrItem, self.length)
 
+char_array_type = ArrayType(PrimitiveType('char'), None)
+
 
 class StructOrUnionOrEnum(BaseTypeByIdentity):
     _attrs_ = ('name',)
@@ -478,7 +480,7 @@
     try:
         res = getattr(ffi._backend, funcname)(*args)
     except NotImplementedError as e:
-        raise NotImplementedError("%r: %s" % (srctype, e))
+        raise NotImplementedError("%s: %r: %s" % (funcname, srctype, e))
     # note that setdefault() on WeakValueDictionary is not atomic
     # and contains a rare bug (http://bugs.python.org/issue19542);
     # we have to use a lock and do it ourselves
diff --git a/lib_pypy/cffi/vengine_cpy.py b/lib_pypy/cffi/vengine_cpy.py
--- a/lib_pypy/cffi/vengine_cpy.py
+++ b/lib_pypy/cffi/vengine_cpy.py
@@ -65,7 +65,7 @@
         # The following two 'chained_list_constants' items contains
         # the head of these two chained lists, as a string that gives the
         # call to do, if any.
-        self._chained_list_constants = ['0', '0']
+        self._chained_list_constants = ['((void)lib,0)', '((void)lib,0)']
         #
         prnt = self._prnt
         # first paste some standard set of lines that are mostly '#define'
@@ -138,15 +138,22 @@
         prnt()
         prnt('#endif')
 
-    def load_library(self):
+    def load_library(self, flags=None):
         # XXX review all usages of 'self' here!
         # import it as a new extension module
+        if hasattr(sys, "getdlopenflags"):
+            previous_flags = sys.getdlopenflags()
         try:
+            if hasattr(sys, "setdlopenflags") and flags is not None:
+                sys.setdlopenflags(flags)
             module = imp.load_dynamic(self.verifier.get_module_name(),
                                       self.verifier.modulefilename)
         except ImportError as e:
             error = "importing %r: %s" % (self.verifier.modulefilename, e)
             raise ffiplatform.VerificationError(error)
+        finally:
+            if hasattr(sys, "setdlopenflags"):
+                sys.setdlopenflags(previous_flags)
         #
         # call loading_cpy_struct() to get the struct layout inferred by
         # the C compiler
@@ -228,7 +235,8 @@
                 converter = '_cffi_to_c_int'
                 extraarg = ', %s' % tp.name
             else:
-                converter = '_cffi_to_c_%s' % (tp.name.replace(' ', '_'),)
+                converter = '(%s)_cffi_to_c_%s' % (tp.get_c_name(''),
+                                                   tp.name.replace(' ', '_'))
             errvalue = '-1'
         #
         elif isinstance(tp, model.PointerType):
@@ -267,8 +275,8 @@
         self._prnt('  if (datasize != 0) {')
         self._prnt('    if (datasize < 0)')
         self._prnt('      %s;' % errcode)
-        self._prnt('    %s = alloca(datasize);' % (tovar,))
-        self._prnt('    memset((void *)%s, 0, datasize);' % (tovar,))
+        self._prnt('    %s = alloca((size_t)datasize);' % (tovar,))
+        self._prnt('    memset((void *)%s, 0, (size_t)datasize);' % (tovar,))
         self._prnt('    if (_cffi_convert_array_from_object('
                    '(char *)%s, _cffi_type(%d), %s) < 0)' % (
             tovar, self._gettypenum(tp), fromvar))
@@ -336,7 +344,7 @@
         prnt = self._prnt
         numargs = len(tp.args)
         if numargs == 0:
-            argname = 'no_arg'
+            argname = 'noarg'
         elif numargs == 1:
             argname = 'arg0'
         else:
@@ -386,6 +394,9 @@
         prnt('  Py_END_ALLOW_THREADS')
         prnt()
         #
+        prnt('  (void)self; /* unused */')
+        if numargs == 0:
+            prnt('  (void)noarg; /* unused */')
         if result_code:
             prnt('  return %s;' %
                  self._convert_expr_from_c(tp.result, 'result', 'result type'))
@@ -452,6 +463,7 @@
         prnt('static void %s(%s *p)' % (checkfuncname, cname))
         prnt('{')
         prnt('  /* only to generate compile-time warnings or errors */')
+        prnt('  (void)p;')
         for fname, ftype, fbitsize in tp.enumfields():
             if (isinstance(ftype, model.PrimitiveType)
                 and ftype.is_integer_type()) or fbitsize >= 0:
@@ -482,6 +494,8 @@
                 prnt('    sizeof(((%s *)0)->%s),' % (cname, fname))
         prnt('    -1')
         prnt('  };')
+        prnt('  (void)self; /* unused */')
+        prnt('  (void)noarg; /* unused */')
         prnt('  return _cffi_get_struct_layout(nums);')
         prnt('  /* the next line is not executed, but compiled */')
         prnt('  %s(0);' % (checkfuncname,))
@@ -578,7 +592,8 @@
     # constants, likely declared with '#define'
 
     def _generate_cpy_const(self, is_int, name, tp=None, category='const',
-                            vartp=None, delayed=True, size_too=False):
+                            vartp=None, delayed=True, size_too=False,
+                            check_value=None):
         prnt = self._prnt
         funcname = '_cffi_%s_%s' % (category, name)
         prnt('static int %s(PyObject *lib)' % funcname)
@@ -590,6 +605,9 @@
         else:
             assert category == 'const'
         #
+        if check_value is not None:
+            self._check_int_constant_value(name, check_value)
+        #
         if not is_int:
             if category == 'var':
                 realexpr = '&' + name
@@ -637,6 +655,27 @@
     # ----------
     # enums
 
+    def _check_int_constant_value(self, name, value, err_prefix=''):
+        prnt = self._prnt
+        if value <= 0:
+            prnt('  if ((%s) > 0 || (long)(%s) != %dL) {' % (
+                name, name, value))
+        else:
+            prnt('  if ((%s) <= 0 || (unsigned long)(%s) != %dUL) {' % (
+                name, name, value))
+        prnt('    char buf[64];')
+        prnt('    if ((%s) <= 0)' % name)
+        prnt('        snprintf(buf, 63, "%%ld", (long)(%s));' % name)
+        prnt('    else')
+        prnt('        snprintf(buf, 63, "%%lu", (unsigned long)(%s));' %
+             name)
+        prnt('    PyErr_Format(_cffi_VerificationError,')
+        prnt('                 "%s%s has the real value %s, not %s",')
+        prnt('                 "%s", "%s", buf, "%d");' % (
+            err_prefix, name, value))
+        prnt('    return -1;')
+        prnt('  }')
+
     def _enum_funcname(self, prefix, name):
         # "$enum_$1" => "___D_enum____D_1"
         name = name.replace('$', '___D_')
@@ -653,25 +692,8 @@
         prnt('static int %s(PyObject *lib)' % funcname)
         prnt('{')
         for enumerator, enumvalue in zip(tp.enumerators, tp.enumvalues):
-            if enumvalue < 0:
-                prnt('  if ((%s) >= 0 || (long)(%s) != %dL) {' % (
-                    enumerator, enumerator, enumvalue))
-            else:
-                prnt('  if ((%s) < 0 || (unsigned long)(%s) != %dUL) {' % (
-                    enumerator, enumerator, enumvalue))
-            prnt('    char buf[64];')
-            prnt('    if ((%s) < 0)' % enumerator)
-            prnt('        snprintf(buf, 63, "%%ld", (long)(%s));' % enumerator)
-            prnt('    else')
-            prnt('        snprintf(buf, 63, "%%lu", (unsigned long)(%s));' %
-                 enumerator)
-            prnt('    PyErr_Format(_cffi_VerificationError,')
-            prnt('                 "enum %s: %s has the real value %s, '
-                 'not %s",')
-            prnt('                 "%s", "%s", buf, "%d");' % (
-                name, enumerator, enumvalue))
-            prnt('    return -1;')
-            prnt('  }')
+            self._check_int_constant_value(enumerator, enumvalue,
+                                           "enum %s: " % name)
         prnt('  return %s;' % self._chained_list_constants[True])
         self._chained_list_constants[True] = funcname + '(lib)'
         prnt('}')
@@ -695,8 +717,11 @@
     # macros: for now only for integers
 
     def _generate_cpy_macro_decl(self, tp, name):
-        assert tp == '...'
-        self._generate_cpy_const(True, name)
+        if tp == '...':
+            check_value = None
+        else:
+            check_value = tp     # an integer
+        self._generate_cpy_const(True, name, check_value=check_value)
 
     _generate_cpy_macro_collecttype = _generate_nothing
     _generate_cpy_macro_method = _generate_nothing
@@ -783,6 +808,24 @@
    typedef unsigned __int16 uint16_t;
    typedef unsigned __int32 uint32_t;
    typedef unsigned __int64 uint64_t;
+   typedef __int8 int_least8_t;
+   typedef __int16 int_least16_t;
+   typedef __int32 int_least32_t;
+   typedef __int64 int_least64_t;
+   typedef unsigned __int8 uint_least8_t;
+   typedef unsigned __int16 uint_least16_t;
+   typedef unsigned __int32 uint_least32_t;
+   typedef unsigned __int64 uint_least64_t;
+   typedef __int8 int_fast8_t;
+   typedef __int16 int_fast16_t;
+   typedef __int32 int_fast32_t;
+   typedef __int64 int_fast64_t;
+   typedef unsigned __int8 uint_fast8_t;
+   typedef unsigned __int16 uint_fast16_t;
+   typedef unsigned __int32 uint_fast32_t;
+   typedef unsigned __int64 uint_fast64_t;
+   typedef __int64 intmax_t;
+   typedef unsigned __int64 uintmax_t;
 # else
 #  include <stdint.h>
 # endif
@@ -828,12 +871,15 @@
             PyLong_FromLongLong((long long)(x)))
 
 #define _cffi_from_c_int(x, type)                                        \
-    (((type)-1) > 0 ?   /* unsigned */                                   \
-        (sizeof(type) < sizeof(long) ? PyInt_FromLong(x) :               \
-         sizeof(type) == sizeof(long) ? PyLong_FromUnsignedLong(x) :     \
-                                        PyLong_FromUnsignedLongLong(x))  \
-      : (sizeof(type) <= sizeof(long) ? PyInt_FromLong(x) :              \
-                                        PyLong_FromLongLong(x)))
+    (((type)-1) > 0 ? /* unsigned */                                     \
+        (sizeof(type) < sizeof(long) ?                                   \
+            PyInt_FromLong((long)x) :                                    \
+         sizeof(type) == sizeof(long) ?                                  \
+            PyLong_FromUnsignedLong((unsigned long)x) :                  \
+            PyLong_FromUnsignedLongLong((unsigned long long)x)) :        \
+        (sizeof(type) <= sizeof(long) ?                                  \
+            PyInt_FromLong((long)x) :                                    \
+            PyLong_FromLongLong((long long)x)))
 
 #define _cffi_to_c_int(o, type)                                          \
     (sizeof(type) == 1 ? (((type)-1) > 0 ? (type)_cffi_to_c_u8(o)        \
@@ -844,7 +890,7 @@
                                          : (type)_cffi_to_c_i32(o)) :    \
      sizeof(type) == 8 ? (((type)-1) > 0 ? (type)_cffi_to_c_u64(o)       \
                                          : (type)_cffi_to_c_i64(o)) :    \
-     (Py_FatalError("unsupported size for type " #type), 0))
+     (Py_FatalError("unsupported size for type " #type), (type)0))
 
 #define _cffi_to_c_i8                                                    \
                  ((int(*)(PyObject *))_cffi_exports[1])
@@ -907,6 +953,7 @@
 {
     PyObject *library;
     int was_alive = (_cffi_types != NULL);
+    (void)self; /* unused */
     if (!PyArg_ParseTuple(args, "OOO", &_cffi_types, &_cffi_VerificationError,
                                        &library))
         return NULL;
diff --git a/lib_pypy/cffi/vengine_gen.py b/lib_pypy/cffi/vengine_gen.py
--- a/lib_pypy/cffi/vengine_gen.py
+++ b/lib_pypy/cffi/vengine_gen.py
@@ -58,12 +58,12 @@
             modname = self.verifier.get_module_name()
             prnt("void %s%s(void) { }\n" % (prefix, modname))
 
-    def load_library(self):
+    def load_library(self, flags=0):
         # import it with the CFFI backend
         backend = self.ffi._backend
         # needs to make a path that contains '/', on Posix
         filename = os.path.join(os.curdir, self.verifier.modulefilename)
-        module = backend.load_library(filename)
+        module = backend.load_library(filename, flags)
         #
         # call loading_gen_struct() to get the struct layout inferred by
         # the C compiler
@@ -235,6 +235,7 @@
         prnt('static void %s(%s *p)' % (checkfuncname, cname))
         prnt('{')
         prnt('  /* only to generate compile-time warnings or errors */')
+        prnt('  (void)p;')
         for fname, ftype, fbitsize in tp.enumfields():
             if (isinstance(ftype, model.PrimitiveType)
                 and ftype.is_integer_type()) or fbitsize >= 0:
@@ -354,11 +355,20 @@
     # ----------
     # constants, likely declared with '#define'
 
-    def _generate_gen_const(self, is_int, name, tp=None, category='const'):
+    def _generate_gen_const(self, is_int, name, tp=None, category='const',
+                            check_value=None):
         prnt = self._prnt
         funcname = '_cffi_%s_%s' % (category, name)
         self.export_symbols.append(funcname)
-        if is_int:
+        if check_value is not None:
+            assert is_int
+            assert category == 'const'
+            prnt('int %s(char *out_error)' % funcname)
+            prnt('{')
+            self._check_int_constant_value(name, check_value)
+            prnt('  return 0;')
+            prnt('}')
+        elif is_int:
             assert category == 'const'
             prnt('int %s(long long *out_value)' % funcname)
             prnt('{')
@@ -367,6 +377,7 @@
             prnt('}')
         else:
             assert tp is not None
+            assert check_value is None
             prnt(tp.get_c_name(' %s(void)' % funcname, name),)
             prnt('{')
             if category == 'var':
@@ -383,9 +394,13 @@
 
     _loading_gen_constant = _loaded_noop
 
-    def _load_constant(self, is_int, tp, name, module):
+    def _load_constant(self, is_int, tp, name, module, check_value=None):
         funcname = '_cffi_const_%s' % name
-        if is_int:
+        if check_value is not None:
+            assert is_int
+            self._load_known_int_constant(module, funcname)
+            value = check_value
+        elif is_int:
             BType = self.ffi._typeof_locked("long long*")[0]
             BFunc = self.ffi._typeof_locked("int(*)(long long*)")[0]
             function = module.load_function(BFunc, funcname)
@@ -396,6 +411,7 @@
                 BLongLong = self.ffi._typeof_locked("long long")[0]
                 value += (1 << (8*self.ffi.sizeof(BLongLong)))
         else:
+            assert check_value is None
             BFunc = self.ffi._typeof_locked(tp.get_c_name('(*)(void)', name))[0]
             function = module.load_function(BFunc, funcname)
             value = function()
@@ -410,6 +426,36 @@
     # ----------
     # enums
 
+    def _check_int_constant_value(self, name, value):
+        prnt = self._prnt
+        if value <= 0:
+            prnt('  if ((%s) > 0 || (long)(%s) != %dL) {' % (
+                name, name, value))
+        else:
+            prnt('  if ((%s) <= 0 || (unsigned long)(%s) != %dUL) {' % (
+                name, name, value))
+        prnt('    char buf[64];')
+        prnt('    if ((%s) <= 0)' % name)
+        prnt('        sprintf(buf, "%%ld", (long)(%s));' % name)
+        prnt('    else')
+        prnt('        sprintf(buf, "%%lu", (unsigned long)(%s));' %
+             name)
+        prnt('    sprintf(out_error, "%s has the real value %s, not %s",')
+        prnt('            "%s", buf, "%d");' % (name[:100], value))
+        prnt('    return -1;')
+        prnt('  }')
+
+    def _load_known_int_constant(self, module, funcname):
+        BType = self.ffi._typeof_locked("char[]")[0]
+        BFunc = self.ffi._typeof_locked("int(*)(char*)")[0]
+        function = module.load_function(BFunc, funcname)
+        p = self.ffi.new(BType, 256)
+        if function(p) < 0:
+            error = self.ffi.string(p)
+            if sys.version_info >= (3,):
+                error = str(error, 'utf-8')
+            raise ffiplatform.VerificationError(error)
+
     def _enum_funcname(self, prefix, name):
         # "$enum_$1" => "___D_enum____D_1"
         name = name.replace('$', '___D_')
@@ -427,24 +473,7 @@
         prnt('int %s(char *out_error)' % funcname)
         prnt('{')
         for enumerator, enumvalue in zip(tp.enumerators, tp.enumvalues):
-            if enumvalue < 0:
-                prnt('  if ((%s) >= 0 || (long)(%s) != %dL) {' % (
-                    enumerator, enumerator, enumvalue))
-            else:
-                prnt('  if ((%s) < 0 || (unsigned long)(%s) != %dUL) {' % (
-                    enumerator, enumerator, enumvalue))
-            prnt('    char buf[64];')
-            prnt('    if ((%s) < 0)' % enumerator)
-            prnt('        sprintf(buf, "%%ld", (long)(%s));' % enumerator)
-            prnt('    else')
-            prnt('        sprintf(buf, "%%lu", (unsigned long)(%s));' %
-                 enumerator)
-            prnt('    sprintf(out_error,'
-                             ' "%s has the real value %s, not %s",')
-            prnt('            "%s", buf, "%d");' % (
-                enumerator[:100], enumvalue))
-            prnt('    return -1;')
-            prnt('  }')
+            self._check_int_constant_value(enumerator, enumvalue)
         prnt('  return 0;')
         prnt('}')
         prnt()
@@ -456,16 +485,8 @@
             tp.enumvalues = tuple(enumvalues)
             tp.partial_resolved = True
         else:
-            BType = self.ffi._typeof_locked("char[]")[0]
-            BFunc = self.ffi._typeof_locked("int(*)(char*)")[0]
             funcname = self._enum_funcname(prefix, name)
-            function = module.load_function(BFunc, funcname)
-            p = self.ffi.new(BType, 256)
-            if function(p) < 0:
-                error = self.ffi.string(p)
-                if sys.version_info >= (3,):
-                    error = str(error, 'utf-8')
-                raise ffiplatform.VerificationError(error)
+            self._load_known_int_constant(module, funcname)
 
     def _loaded_gen_enum(self, tp, name, module, library):
         for enumerator, enumvalue in zip(tp.enumerators, tp.enumvalues):
@@ -476,13 +497,21 @@
     # macros: for now only for integers
 
     def _generate_gen_macro_decl(self, tp, name):
-        assert tp == '...'
-        self._generate_gen_const(True, name)
+        if tp == '...':
+            check_value = None
+        else:
+            check_value = tp     # an integer
+        self._generate_gen_const(True, name, check_value=check_value)
 
     _loading_gen_macro = _loaded_noop
 
     def _loaded_gen_macro(self, tp, name, module, library):
-        value = self._load_constant(True, tp, name, module)
+        if tp == '...':
+            check_value = None
+        else:
+            check_value = tp     # an integer
+        value = self._load_constant(True, tp, name, module,
+                                    check_value=check_value)
         setattr(library, name, value)
         type(library)._cffi_dir.append(name)
 
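
With check_value, an integer macro that is given an explicit value in the
cdef is checked against the real C value when the module is built and
loaded, instead of being accepted blindly; a mismatch raises the
VerificationError exposed at the cffi top level, with the message produced
by _check_int_constant_value() above.  A hedged sketch of the resulting
behaviour (names and values are illustrative):

    import cffi

    ffi = cffi.FFI()
    ffi.cdef("#define BLOCK_SIZE 512")          # integer macro with a known value
    lib = ffi.verify("#define BLOCK_SIZE 512")  # matches: loads normally

    ffi2 = cffi.FFI()
    ffi2.cdef("#define BLOCK_SIZE 512")
    try:
        ffi2.verify("#define BLOCK_SIZE 4096")  # real value differs
    except cffi.VerificationError as e:
        print(e)   # e.g. "BLOCK_SIZE has the real value 4096, not 512"
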
@@ -565,6 +594,24 @@
    typedef unsigned __int16 uint16_t;
    typedef unsigned __int32 uint32_t;
    typedef unsigned __int64 uint64_t;
+   typedef __int8 int_least8_t;
+   typedef __int16 int_least16_t;
+   typedef __int32 int_least32_t;
+   typedef __int64 int_least64_t;
+   typedef unsigned __int8 uint_least8_t;
+   typedef unsigned __int16 uint_least16_t;
+   typedef unsigned __int32 uint_least32_t;
+   typedef unsigned __int64 uint_least64_t;
+   typedef __int8 int_fast8_t;
+   typedef __int16 int_fast16_t;
+   typedef __int32 int_fast32_t;
+   typedef __int64 int_fast64_t;
+   typedef unsigned __int8 uint_fast8_t;
+   typedef unsigned __int16 uint_fast16_t;
+   typedef unsigned __int32 uint_fast32_t;
+   typedef unsigned __int64 uint_fast64_t;
+   typedef __int64 intmax_t;
+   typedef unsigned __int64 uintmax_t;
 # else
 #  include <stdint.h>
 # endif
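
These typedefs mirror what <stdint.h> would provide, so cdefs that use the
int_leastN_t / int_fastN_t / intmax_t families also compile with older MSVC
versions that lack the header.  A small hedged example; 'add_small' is a
hypothetical function, not part of this changeset:

    import cffi

    ffi = cffi.FFI()
    ffi.cdef("int_least32_t add_small(int_fast16_t a, int_fast16_t b);")
    lib = ffi.verify("""
        #include <stdint.h>   /* or the MSVC typedef block above */
        int_least32_t add_small(int_fast16_t a, int_fast16_t b) {
            return (int_least32_t)a + (int_least32_t)b;
        }
    """)
    assert lib.add_small(2, 3) == 5
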
diff --git a/lib_pypy/cffi/verifier.py b/lib_pypy/cffi/verifier.py
--- a/lib_pypy/cffi/verifier.py
+++ b/lib_pypy/cffi/verifier.py
@@ -1,12 +1,23 @@
-import sys, os, binascii, imp, shutil
-from . import __version__
+import sys, os, binascii, shutil
+from . import __version_verifier_modules__
 from . import ffiplatform
 
+if sys.version_info >= (3, 3):
+    import importlib.machinery
+    def _extension_suffixes():
+        return importlib.machinery.EXTENSION_SUFFIXES[:]
+else:
+    import imp
+    def _extension_suffixes():
+        return [suffix for suffix, _, type in imp.get_suffixes()
+                if type == imp.C_EXTENSION]
+
 
 class Verifier(object):
 
     def __init__(self, ffi, preamble, tmpdir=None, modulename=None,
-                 ext_package=None, tag='', force_generic_engine=False, **kwds):
+                 ext_package=None, tag='', force_generic_engine=False,
+                 source_extension='.c', flags=None, relative_to=None, **kwds):
         self.ffi = ffi
         self.preamble = preamble
         if not modulename:
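
Verifier now accepts three extra keywords: source_extension (suffix of the
generated source file), flags (forwarded to load_library(), see below) and
relative_to (used by make_relative_to() to re-root file-name keywords).  A
hedged sketch of constructing a Verifier directly with them; the preamble
and values are illustrative only:

    import cffi
    from cffi.verifier import Verifier

    ffi = cffi.FFI()
    ffi.cdef("int answer(void);")
    v = Verifier(ffi, "int answer(void) { return 42; }",
                 source_extension='.c',    # '.cpp' would emit a C++ source file
                 flags=None,               # e.g. ffi.RTLD_GLOBAL for load_library()
                 relative_to=__file__)     # anchor 'sources', 'include_dirs', ...
    lib = v.load_library()
    assert lib.answer() == 42
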
@@ -14,14 +25,15 @@
         vengine_class = _locate_engine_class(ffi, force_generic_engine)
         self._vengine = vengine_class(self)
         self._vengine.patch_extension_kwds(kwds)
-        self.kwds = kwds
+        self.flags = flags
+        self.kwds = self.make_relative_to(kwds, relative_to)
         #
         if modulename:
             if tag:
                 raise TypeError("can't specify both 'modulename' and 'tag'")
         else:
-            key = '\x00'.join([sys.version[:3], __version__, preamble,
-                               flattened_kwds] +
+            key = '\x00'.join([sys.version[:3], __version_verifier_modules__,
+                               preamble, flattened_kwds] +
                               ffi._cdefsources)
             if sys.version_info >= (3,):
                 key = key.encode('utf-8')
@@ -33,7 +45,7 @@
                                               k1, k2)
         suffix = _get_so_suffixes()[0]
         self.tmpdir = tmpdir or _caller_dir_pycache()
-        self.sourcefilename = os.path.join(self.tmpdir, modulename + '.c')
+        self.sourcefilename = os.path.join(self.tmpdir, modulename + source_extension)
         self.modulefilename = os.path.join(self.tmpdir, modulename + suffix)
         self.ext_package = ext_package
         self._has_source = False
@@ -97,6 +109,20 @@
     def generates_python_module(self):
         return self._vengine._gen_python_module
 
+    def make_relative_to(self, kwds, relative_to):
+        if relative_to and os.path.dirname(relative_to):
+            dirname = os.path.dirname(relative_to)
+            kwds = kwds.copy()
+            for key in ffiplatform.LIST_OF_FILE_NAMES:
+                if key in kwds:
+                    lst = kwds[key]
+                    if not isinstance(lst, (list, tuple)):
+                        raise TypeError("keyword '%s' should be a list or tuple"
+                                        % (key,))
+                    lst = [os.path.join(dirname, fn) for fn in lst]
+                    kwds[key] = lst
+        return kwds
+
     # ----------
 
     def _locate_module(self):
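
make_relative_to() only touches the keywords that name files or directories
(the exact set lives in ffiplatform.LIST_OF_FILE_NAMES); each entry is
re-rooted next to the relative_to path.  A hedged, POSIX-path illustration
of the effect, assuming 'sources' is in that list:

    import os

    relative_to = '/path/to/pkg/__init__.py'                 # hypothetical
    kwds = {'sources': ['backend.c'], 'libraries': ['m']}

    dirname = os.path.dirname(relative_to)
    kwds['sources'] = [os.path.join(dirname, fn) for fn in kwds['sources']]

    assert kwds['sources'] == ['/path/to/pkg/backend.c']     # re-rooted
    assert kwds['libraries'] == ['m']                        # not a file keyword
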
@@ -148,7 +174,10 @@
 
     def _load_library(self):
         assert self._has_module
-        return self._vengine.load_library()
+        if self.flags is not None:
+            return self._vengine.load_library(self.flags)
+        else:
+            return self._vengine.load_library()
 
 # ____________________________________________________________
 
@@ -181,6 +210,9 @@
 def _caller_dir_pycache():
     if _TMPDIR:
         return _TMPDIR
+    result = os.environ.get('CFFI_TMPDIR')
+    if result:
+        return result
     filename = sys._getframe(2).f_code.co_filename
     return os.path.abspath(os.path.join(os.path.dirname(filename),
                            '__pycache__'))
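
The cache directory for the generated sources and compiled modules can now
be redirected with the CFFI_TMPDIR environment variable, which is consulted
before falling back to a __pycache__ directory next to the caller.  For
example (the path is illustrative):

    import os
    os.environ['CFFI_TMPDIR'] = '/tmp/cffi-build'   # hypothetical build directory

    import cffi
    ffi = cffi.FFI()
    ffi.cdef("int answer(void);")
    lib = ffi.verify("int answer(void) { return 42; }")
    # the generated .c file and the compiled extension land in /tmp/cffi-build
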
@@ -222,11 +254,7 @@
             pass
 
 def _get_so_suffixes():
-    suffixes = []
-    for suffix, mode, type in imp.get_suffixes():
-        if type == imp.C_EXTENSION:
-            suffixes.append(suffix)
-
+    suffixes = _extension_suffixes()
     if not suffixes:
         # bah, no C_EXTENSION available.  Occurs on pypy without cpyext
         if sys.platform == 'win32':
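
The first suffix returned here decides the compiled module's file name
(modulename + suffix in Verifier.__init__ above).  A hedged sketch of what
the lookup yields on common setups:

    import sys

    if sys.version_info >= (3, 3):
        from importlib.machinery import EXTENSION_SUFFIXES as suffixes
    else:
        import imp
        suffixes = [s for s, _, t in imp.get_suffixes() if t == imp.C_EXTENSION]

    # e.g. ['.so'] on CPython 2/Linux, ['.pyd'] on Windows CPython 2,
    # ['.cpython-34m.so', '.abi3.so', '.so'] on CPython 3.4/Linux
    print(suffixes[0] if suffixes else ('.pyd' if sys.platform == 'win32' else '.so'))
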
diff --git a/lib_pypy/greenlet.egg-info b/lib_pypy/greenlet.egg-info
--- a/lib_pypy/greenlet.egg-info
+++ b/lib_pypy/greenlet.egg-info
@@ -1,6 +1,6 @@
 Metadata-Version: 1.0
 Name: greenlet
-Version: 0.4.0
+Version: 0.4.5
 Summary: Lightweight in-process concurrent programming
 Home-page: https://github.com/python-greenlet/greenlet
 Author: Ralf Schmitt (for CPython), PyPy team
diff --git a/lib_pypy/greenlet.py b/lib_pypy/greenlet.py
--- a/lib_pypy/greenlet.py
+++ b/lib_pypy/greenlet.py
@@ -1,7 +1,7 @@
 import sys
 import _continuation
 
-__version__ = "0.4.0"
+__version__ = "0.4.5"
 
 # ____________________________________________________________
 # Exceptions
diff --git a/lib_pypy/pypy_test/test_transaction.py b/lib_pypy/pypy_test/test_transaction.py
--- a/lib_pypy/pypy_test/test_transaction.py
+++ b/lib_pypy/pypy_test/test_transaction.py
@@ -8,9 +8,10 @@
 def test_simple_random_order():
     for x in range(N):
         lst = []
-        with transaction.TransactionQueue():
-            for i in range(10):
-                transaction.add(lst.append, i)
+        tq = transaction.TransactionQueue()
+        for i in range(10):
+            tq.add(lst.append, i)
+        tq.run()
         if VERBOSE:
             print lst
         assert sorted(lst) == range(10), lst
@@ -22,9 +23,10 @@
             lst.append(i)
             i += 1
             if i < 10:
-                transaction.add(do_stuff, i)
-        with transaction.TransactionQueue():
-            transaction.add(do_stuff, 0)
+                tq.add(do_stuff, i)
+        tq = transaction.TransactionQueue()
+        tq.add(do_stuff, 0)
+        tq.run()
         if VERBOSE:
             print lst
         assert lst == range(10), lst
@@ -36,10 +38,11 @@
             lsts[i].append(j)
             j += 1
             if j < 10:
-                transaction.add(do_stuff, i, j)
-        with transaction.TransactionQueue():
-            for i in range(5):
-                transaction.add(do_stuff, i, 0)
+                tq.add(do_stuff, i, j)
+        tq = transaction.TransactionQueue()
+        for i in range(5):
+            tq.add(do_stuff, i, 0)
+        tq.run()
         if VERBOSE:
             print lsts
         assert lsts == (range(10),) * 5, lsts
@@ -53,14 +56,15 @@
             lsts[i].append(j)
             j += 1
             if j < 5:
-                transaction.add(do_stuff, i, j)
+                tq.add(do_stuff, i, j)
             else:
                 lsts[i].append('foo')
                 raise FooError
+        tq = transaction.TransactionQueue()
+        for i in range(10):
+            tq.add(do_stuff, i, 0)
         try:
-            with transaction.TransactionQueue():
-                for i in range(10):
-                    transaction.add(do_stuff, i, 0)
+            tq.run()
         except FooError:
             pass
         else:
@@ -78,19 +82,74 @@
 
 
 def test_number_of_transactions_reported():
-    py.test.skip("not reimplemented")
-    with transaction.TransactionQueue():
-        transaction.add(lambda: None)
-    assert transaction.number_of_transactions_in_last_run() == 1
+    tq = transaction.TransactionQueue()
+    tq.add(lambda: None)
+    tq.add(lambda: None)
+    tq.run()
+    assert tq.number_of_transactions_executed() == 2
+
+    tq.run()
+    assert tq.number_of_transactions_executed() == 2
+
+    tq.add(lambda: None)
+    tq.run()
+    assert tq.number_of_transactions_executed() == 3
+
+    tq.add(lambda: some_name_that_is_not_defined)
+    try:
+        tq.run()
+    except NameError:
+        pass
+    else:
+        raise AssertionError("should have raised NameError")
+    assert tq.number_of_transactions_executed() == 4
+
+    tq.add(tq.number_of_transactions_executed)
+    try:
+        tq.run()
+    except transaction.TransactionError:
+        pass
+    else:
+        raise AssertionError("should have raised TransactionError")
 
     def add_transactions(l):
         if l:
             for x in range(l[0]):
-                transaction.add(add_transactions, l[1:])
+                tq.add(add_transactions, l[1:])
 
-    with transaction.TransactionQueue():
-        transaction.add(add_transactions, [10, 10, 10])
-    assert transaction.number_of_transactions_in_last_run() == 1111
+    tq = transaction.TransactionQueue()
+    tq.add(add_transactions, [10, 10, 10])
+    tq.run()
+    assert tq.number_of_transactions_executed() == 1111
+
+def test_unexecuted_transactions_after_exception():
+    class FooError(Exception):
+        pass
+    class BarError(Exception):
+        pass
+    def raiseme(exc):
+        raise exc
+    seen = []
+    tq = transaction.TransactionQueue()
+    tq.add(raiseme, FooError)
+    tq.add(raiseme, BarError)
+    tq.add(seen.append, 42)
+    tq.add(seen.append, 42)
+    try:
+        tq.run()
+    except (FooError, BarError), e:
+        seen_exc = e.__class__
+    else:
+        raise AssertionError("should have raised FooError or BarError")
+    try:
+        tq.run()
+    except (FooError, BarError), e:
+        assert e.__class__ != seen_exc
+    else:
+        raise AssertionError("unexecuted transactions have disappeared")
+    for i in range(2):
+        tq.run()
+        assert seen == [42, 42]
 
 
 def test_stmidset():
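
The tests above reflect the new transaction API: a TransactionQueue is
created explicitly, work is queued with tq.add(), executed with tq.run(),
and counted with tq.number_of_transactions_executed(), replacing the old
module-level transaction.add() inside a "with TransactionQueue():" block.
A minimal hedged sketch (lib_pypy/transaction.py, i.e. run on PyPy; without
pypy-stm the transactions are expected to run sequentially):

    import transaction          # lib_pypy/transaction.py on PyPy

    results = []
    tq = transaction.TransactionQueue()
    for i in range(10):
        tq.add(results.append, i)    # queue results.append(i) as one transaction
    tq.run()                         # execute them, possibly in parallel

    assert sorted(results) == range(10)
    print(tq.number_of_transactions_executed())   # 10
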
diff --git a/lib_pypy/readline.py b/lib_pypy/readline.py
--- a/lib_pypy/readline.py
+++ b/lib_pypy/readline.py
@@ -6,4 +6,11 @@
 are only stubs at the moment.
 """
 
-from pyrepl.readline import *
+try:
+    from pyrepl.readline import *
+except ImportError:
+    import sys
+    if sys.platform == 'win32':
+        raise ImportError("the 'readline' module is not available on Windows"
+                          " (on either PyPy or CPython)")
+    raise
diff --git a/lib_pypy/transaction.py b/lib_pypy/transaction.py
--- a/lib_pypy/transaction.py
+++ b/lib_pypy/transaction.py
@@ -108,106 +108,103 @@
     pass
 
 
-def add(f, *args, **kwds):
-    """Register a new transaction that will be done by 'f(*args, **kwds)'.
-    Must be called within the transaction in the "with TransactionQueue()"
-    block, or within a transaction started by this one, directly or
-    indirectly.
-    """
-    _thread_local.pending.append((f, args, kwds))
+class TransactionQueue(object):
+    """A queue of pending transactions.
 
-

