[pypy-commit] pypy cpyext-jit: hg merge default
antocuni
pypy.commits at gmail.com
Fri Sep 29 13:48:36 EDT 2017
Author: Antonio Cuni <anto.cuni at gmail.com>
Branch: cpyext-jit
Changeset: r92513:820724c1b021
Date: 2017-09-29 19:48 +0200
http://bitbucket.org/pypy/pypy/changeset/820724c1b021/
Log: hg merge default
diff too long, truncating to 2000 out of 20222 lines
diff --git a/.hgignore b/.hgignore
--- a/.hgignore
+++ b/.hgignore
@@ -25,16 +25,17 @@
^pypy/module/cpyext/test/.+\.manifest$
^pypy/module/test_lib_pypy/ctypes_tests/.+\.o$
^pypy/module/test_lib_pypy/ctypes_tests/_ctypes_test\.o$
-^pypy/module/cppyy/src/.+\.o$
-^pypy/module/cppyy/bench/.+\.so$
-^pypy/module/cppyy/bench/.+\.root$
-^pypy/module/cppyy/bench/.+\.d$
-^pypy/module/cppyy/src/.+\.errors$
-^pypy/module/cppyy/test/.+_rflx\.cpp$
-^pypy/module/cppyy/test/.+\.so$
-^pypy/module/cppyy/test/.+\.rootmap$
-^pypy/module/cppyy/test/.+\.exe$
-^pypy/module/cppyy/test/.+_cint.h$
+^pypy/module/_cppyy/src/.+\.o$
+^pypy/module/_cppyy/bench/.+\.so$
+^pypy/module/_cppyy/bench/.+\.root$
+^pypy/module/_cppyy/bench/.+\.d$
+^pypy/module/_cppyy/src/.+\.errors$
+^pypy/module/_cppyy/test/.+_rflx\.cpp$
+^pypy/module/_cppyy/test/.+\.so$
+^pypy/module/_cppyy/test/.+\.rootmap$
+^pypy/module/_cppyy/test/.+\.exe$
+^pypy/module/_cppyy/test/.+_cint.h$
+^pypy/module/_cppyy/.+/*\.pcm$
^pypy/module/test_lib_pypy/cffi_tests/__pycache__.+$
^pypy/doc/.+\.html$
^pypy/doc/config/.+\.rst$
@@ -88,6 +89,3 @@
^release/
^rpython/_cache$
-pypy/module/cppyy/.+/*\.pcm
-
-
diff --git a/LICENSE b/LICENSE
--- a/LICENSE
+++ b/LICENSE
@@ -60,8 +60,8 @@
Wim Lavrijsen
Eric van Riet Paap
Richard Emslie
+ Remi Meier
Alexander Schremmer
- Remi Meier
Dan Villiom Podlaski Christiansen
Lukas Diekmann
Sven Hager
@@ -102,6 +102,7 @@
Michael Foord
Stephan Diehl
Stefano Rivera
+ Jean-Paul Calderone
Stefan Schwarzer
Tomek Meka
Valentino Volonghi
@@ -110,14 +111,13 @@
Bob Ippolito
Bruno Gola
David Malcolm
- Jean-Paul Calderone
Squeaky
Edd Barrett
Timo Paulssen
Marius Gedminas
+ Nicolas Truessel
Alexandre Fayolle
Simon Burton
- Nicolas Truessel
Martin Matusiak
Laurence Tratt
Wenzhu Man
@@ -156,6 +156,7 @@
Stefan H. Muller
Tim Felgentreff
Eugene Oden
+ Dodan Mihai
Jeff Terrace
Henry Mason
Vasily Kuznetsov
@@ -182,11 +183,13 @@
Rocco Moretti
Gintautas Miliauskas
Lucian Branescu Mihaila
+ Mariano Anaya
anatoly techtonik
- Dodan Mihai
Karl Bartel
+ Stefan Beyer
Gabriel Lavoie
Jared Grubb
+ Alecsandru Patrascu
Olivier Dormond
Wouter van Heyst
Sebastian Pawluś
@@ -194,6 +197,7 @@
Victor Stinner
Andrews Medina
Aaron Iles
+ p_zieschang at yahoo.de
Toby Watson
Daniel Patrick
Stuart Williams
@@ -204,6 +208,7 @@
Michael Cheng
Mikael Schönenberg
Stanislaw Halik
+ Mihnea Saracin
Berkin Ilbeyi
Gasper Zejn
Faye Zhao
@@ -214,14 +219,12 @@
Jonathan David Riehl
Beatrice During
Alex Perry
- p_zieschang at yahoo.de
Robert Zaremba
Alan McIntyre
Alexander Sedov
Vaibhav Sood
Reuben Cummings
Attila Gobi
- Alecsandru Patrascu
Christopher Pope
Tristan Arthur
Christian Tismer
@@ -243,7 +246,6 @@
Jacek Generowicz
Sylvain Thenault
Jakub Stasiak
- Stefan Beyer
Andrew Dalke
Alejandro J. Cura
Vladimir Kryachko
@@ -275,6 +277,7 @@
Christoph Gerum
Miguel de Val Borro
Artur Lisiecki
+ afteryu
Toni Mattis
Laurens Van Houtven
Bobby Impollonia
@@ -305,6 +308,7 @@
Anna Katrina Dominguez
Kim Jin Su
Amber Brown
+ Anthony Sottile
Nate Bragg
Ben Darnell
Juan Francisco Cantero Hurtado
@@ -325,12 +329,14 @@
Mike Bayer
Rodrigo Araújo
Daniil Yarancev
+ Min RK
OlivierBlanvillain
Jonas Pfannschmidt
Zearin
Andrey Churin
Dan Crosta
reubano at gmail.com
+ Stanisław Halik
Julien Phalip
Roman Podoliaka
Eli Stevens
diff --git a/Makefile b/Makefile
--- a/Makefile
+++ b/Makefile
@@ -10,7 +10,7 @@
RUNINTERP = $(PYPY_EXECUTABLE)
endif
-.PHONY: cffi_imports
+.PHONY: pypy-c cffi_imports
pypy-c:
@echo
@@ -32,7 +32,7 @@
@echo "===================================================================="
@echo
@sleep 5
- $(RUNINTERP) rpython/bin/rpython -Ojit pypy/goal/targetpypystandalone.py
+ cd pypy/goal && $(RUNINTERP) ../../rpython/bin/rpython -Ojit targetpypystandalone.py
# Note: the -jN option, or MAKEFLAGS=-jN, are not usable. They are
# replaced with an opaque --jobserver option by the time this Makefile
@@ -40,4 +40,4 @@
# http://lists.gnu.org/archive/html/help-make/2010-08/msg00106.html
cffi_imports: pypy-c
- PYTHONPATH=. ./pypy-c pypy/tool/build_cffi_imports.py || /bin/true
+ PYTHONPATH=. pypy/goal/pypy-c pypy/tool/build_cffi_imports.py || /bin/true
diff --git a/lib-python/2.7/ctypes/__init__.py b/lib-python/2.7/ctypes/__init__.py
--- a/lib-python/2.7/ctypes/__init__.py
+++ b/lib-python/2.7/ctypes/__init__.py
@@ -361,17 +361,20 @@
if handle is None:
if flags & _FUNCFLAG_CDECL:
- self._handle = _ffi.CDLL(name, mode)
+ pypy_dll = _ffi.CDLL(name, mode)
else:
- self._handle = _ffi.WinDLL(name, mode)
- else:
- self._handle = handle
+ pypy_dll = _ffi.WinDLL(name, mode)
+ self.__pypy_dll__ = pypy_dll
+ handle = int(pypy_dll)
+ if _sys.maxint > 2 ** 32:
+ handle = int(handle) # long -> int
+ self._handle = handle
def __repr__(self):
- return "<%s '%s', handle %r at 0x%x>" % (
- self.__class__.__name__, self._name, self._handle,
- id(self) & (_sys.maxint * 2 + 1))
-
+ return "<%s '%s', handle %x at %x>" % \
+ (self.__class__.__name__, self._name,
+ (self._handle & (_sys.maxint*2 + 1)),
+ id(self) & (_sys.maxint*2 + 1))
def __getattr__(self, name):
if name.startswith('__') and name.endswith('__'):
diff --git a/lib-python/2.7/ctypes/test/test_byteswap.py b/lib-python/2.7/ctypes/test/test_byteswap.py
--- a/lib-python/2.7/ctypes/test/test_byteswap.py
+++ b/lib-python/2.7/ctypes/test/test_byteswap.py
@@ -23,7 +23,6 @@
setattr(bits, "i%s" % i, 1)
dump(bits)
- @xfail
def test_endian_short(self):
if sys.byteorder == "little":
self.assertIs(c_short.__ctype_le__, c_short)
@@ -51,7 +50,6 @@
self.assertEqual(bin(s), "3412")
self.assertEqual(s.value, 0x1234)
- @xfail
def test_endian_int(self):
if sys.byteorder == "little":
self.assertIs(c_int.__ctype_le__, c_int)
@@ -80,7 +78,6 @@
self.assertEqual(bin(s), "78563412")
self.assertEqual(s.value, 0x12345678)
- @xfail
def test_endian_longlong(self):
if sys.byteorder == "little":
self.assertIs(c_longlong.__ctype_le__, c_longlong)
@@ -109,7 +106,6 @@
self.assertEqual(bin(s), "EFCDAB9078563412")
self.assertEqual(s.value, 0x1234567890ABCDEF)
- @xfail
def test_endian_float(self):
if sys.byteorder == "little":
self.assertIs(c_float.__ctype_le__, c_float)
@@ -128,7 +124,6 @@
self.assertAlmostEqual(s.value, math.pi, 6)
self.assertEqual(bin(struct.pack(">f", math.pi)), bin(s))
- @xfail
def test_endian_double(self):
if sys.byteorder == "little":
self.assertIs(c_double.__ctype_le__, c_double)
@@ -156,7 +151,6 @@
self.assertIs(c_char.__ctype_le__, c_char)
self.assertIs(c_char.__ctype_be__, c_char)
- @xfail
def test_struct_fields_1(self):
if sys.byteorder == "little":
base = BigEndianStructure
@@ -192,7 +186,6 @@
pass
self.assertRaises(TypeError, setattr, T, "_fields_", [("x", typ)])
- @xfail
def test_struct_struct(self):
# nested structures with different byteorders
@@ -221,7 +214,6 @@
self.assertEqual(s.point.x, 1)
self.assertEqual(s.point.y, 2)
- @xfail
def test_struct_fields_2(self):
# standard packing in struct uses no alignment.
# So, we have to align using pad bytes.
@@ -245,7 +237,6 @@
s2 = struct.pack(fmt, 0x12, 0x1234, 0x12345678, 3.14)
self.assertEqual(bin(s1), bin(s2))
- @xfail
def test_unaligned_nonnative_struct_fields(self):
if sys.byteorder == "little":
base = BigEndianStructure
diff --git a/lib-python/2.7/ctypes/test/test_unaligned_structures.py b/lib-python/2.7/ctypes/test/test_unaligned_structures.py
--- a/lib-python/2.7/ctypes/test/test_unaligned_structures.py
+++ b/lib-python/2.7/ctypes/test/test_unaligned_structures.py
@@ -37,10 +37,7 @@
for typ in byteswapped_structures:
## print >> sys.stderr, typ.value
self.assertEqual(typ.value.offset, 1)
- try:
- o = typ()
- except NotImplementedError as e:
- self.skipTest(str(e)) # for PyPy
+ o = typ()
o.value = 4
self.assertEqual(o.value, 4)
diff --git a/lib-python/2.7/distutils/sysconfig_pypy.py b/lib-python/2.7/distutils/sysconfig_pypy.py
--- a/lib-python/2.7/distutils/sysconfig_pypy.py
+++ b/lib-python/2.7/distutils/sysconfig_pypy.py
@@ -218,6 +218,10 @@
compiler.shared_lib_extension = so_ext
+def get_config_h_filename():
+ """Returns the path of pyconfig.h."""
+ inc_dir = get_python_inc(plat_specific=1)
+ return os.path.join(inc_dir, 'pyconfig.h')
from sysconfig_cpython import (
parse_makefile, _variable_rx, expand_makefile_vars)
diff --git a/lib-python/2.7/distutils/unixccompiler.py b/lib-python/2.7/distutils/unixccompiler.py
--- a/lib-python/2.7/distutils/unixccompiler.py
+++ b/lib-python/2.7/distutils/unixccompiler.py
@@ -226,7 +226,19 @@
return "-L" + dir
def _is_gcc(self, compiler_name):
- return "gcc" in compiler_name or "g++" in compiler_name
+ # XXX PyPy workaround, look at the big comment below for more
+ # context. On CPython, the hack below works fine because
+ # `compiler_name` contains the name of the actual compiler which was
+ # used at compile time (e.g. 'x86_64-linux-gnu-gcc' on my machine).
+ # PyPy hardcodes it to 'cc', so the hack doesn't work, and the end
+ # result is that we pass the wrong option to the compiler.
+ #
+ # The workaround is to *always* pretend to be GCC if we are on Linux:
+ # this should cover the vast majority of real systems, including the
+ # ones which use clang (which understands the '-Wl,-rpath' syntax as
+ # well)
+ return (sys.platform == "linux2" or
+ "gcc" in compiler_name or "g++" in compiler_name)
def runtime_library_dir_option(self, dir):
# XXX Hackish, at the very least. See Python bug #445902:
diff --git a/lib-python/2.7/inspect.py b/lib-python/2.7/inspect.py
--- a/lib-python/2.7/inspect.py
+++ b/lib-python/2.7/inspect.py
@@ -203,7 +203,7 @@
f_locals local namespace seen by this frame
f_restricted 0 or 1 if frame is in restricted execution mode
f_trace tracing function for this frame, or None"""
- return isinstance(object, types.FrameType)
+ return isinstance(object, (types.FrameType, types.FakeFrameType))
def iscode(object):
"""Return true if the object is a code object.
diff --git a/lib-python/2.7/multiprocessing/heap.py b/lib-python/2.7/multiprocessing/heap.py
--- a/lib-python/2.7/multiprocessing/heap.py
+++ b/lib-python/2.7/multiprocessing/heap.py
@@ -62,7 +62,7 @@
self.size = size
self.name = 'pym-%d-%d' % (os.getpid(), Arena._counter.next())
self.buffer = mmap.mmap(-1, self.size, tagname=self.name)
- assert win32.GetLastError() == 0, 'tagname already in use'
+ #assert win32.GetLastError() == 0, 'tagname already in use'
self._state = (self.size, self.name)
def __getstate__(self):
@@ -72,7 +72,7 @@
def __setstate__(self, state):
self.size, self.name = self._state = state
self.buffer = mmap.mmap(-1, self.size, tagname=self.name)
- assert win32.GetLastError() == win32.ERROR_ALREADY_EXISTS
+ #assert win32.GetLastError() == win32.ERROR_ALREADY_EXISTS
else:
diff --git a/lib-python/2.7/string.py b/lib-python/2.7/string.py
--- a/lib-python/2.7/string.py
+++ b/lib-python/2.7/string.py
@@ -75,7 +75,7 @@
for i in range(256):
buf[i] = i
for i in range(n):
- buf[ord(fromstr[i])] = tostr[i]
+ buf[ord(fromstr[i])] = ord(tostr[i])
return str(buf)
diff --git a/lib-python/2.7/types.py b/lib-python/2.7/types.py
--- a/lib-python/2.7/types.py
+++ b/lib-python/2.7/types.py
@@ -71,6 +71,12 @@
FrameType = type(tb.tb_frame)
del tb
+# PyPy extension
+try:
+ FakeFrameType = type(next(sys._current_frames().itervalues()))
+except (AttributeError, StopIteration):
+ FakeFrameType = FrameType
+
SliceType = slice
EllipsisType = type(Ellipsis)
diff --git a/lib_pypy/_ctypes/basics.py b/lib_pypy/_ctypes/basics.py
--- a/lib_pypy/_ctypes/basics.py
+++ b/lib_pypy/_ctypes/basics.py
@@ -82,7 +82,7 @@
return False
def in_dll(self, dll, name):
- return self.from_address(dll._handle.getaddressindll(name))
+ return self.from_address(dll.__pypy_dll__.getaddressindll(name))
def from_buffer(self, obj, offset=0):
size = self._sizeofinstances()
diff --git a/lib_pypy/_ctypes/function.py b/lib_pypy/_ctypes/function.py
--- a/lib_pypy/_ctypes/function.py
+++ b/lib_pypy/_ctypes/function.py
@@ -430,7 +430,7 @@
ffires = restype.get_ffi_argtype()
return _ffi.FuncPtr.fromaddr(ptr, '', ffiargs, ffires, self._flags_)
- cdll = self.dll._handle
+ cdll = self.dll.__pypy_dll__
try:
ffi_argtypes = [argtype.get_ffi_argtype() for argtype in argtypes]
ffi_restype = restype.get_ffi_argtype()
diff --git a/lib_pypy/_ctypes/pointer.py b/lib_pypy/_ctypes/pointer.py
--- a/lib_pypy/_ctypes/pointer.py
+++ b/lib_pypy/_ctypes/pointer.py
@@ -142,6 +142,10 @@
ptr._buffer = tp._ffiarray(1, autofree=True)
ptr._buffer[0] = obj._buffer
result = ptr
+ elif isinstance(obj, bytes):
+ result = tp()
+ result._buffer[0] = buffer(obj)._pypy_raw_address()
+ return result
elif not (isinstance(obj, _CData) and type(obj)._is_pointer_like()):
raise TypeError("cast() argument 1 must be a pointer, not %s"
% (type(obj),))
diff --git a/lib_pypy/_ctypes/primitive.py b/lib_pypy/_ctypes/primitive.py
--- a/lib_pypy/_ctypes/primitive.py
+++ b/lib_pypy/_ctypes/primitive.py
@@ -61,6 +61,54 @@
pyobj_container = GlobalPyobjContainer()
+def swap_bytes(value, sizeof, typeof, get_or_set):
+ def swap_2():
+ return ((value >> 8) & 0x00FF) | ((value << 8) & 0xFF00)
+
+ def swap_4():
+ return ((value & 0x000000FF) << 24) | \
+ ((value & 0x0000FF00) << 8) | \
+ ((value & 0x00FF0000) >> 8) | \
+ ((value >> 24) & 0xFF)
+
+ def swap_8():
+ return ((value & 0x00000000000000FFL) << 56) | \
+ ((value & 0x000000000000FF00L) << 40) | \
+ ((value & 0x0000000000FF0000L) << 24) | \
+ ((value & 0x00000000FF000000L) << 8) | \
+ ((value & 0x000000FF00000000L) >> 8) | \
+ ((value & 0x0000FF0000000000L) >> 24) | \
+ ((value & 0x00FF000000000000L) >> 40) | \
+ ((value >> 56) & 0xFF)
+
+ def swap_double_float(typ):
+ from struct import pack, unpack
+ if get_or_set == 'set':
+ if sys.byteorder == 'little':
+ st = pack(''.join(['>', typ]), value)
+ else:
+ st = pack(''.join(['<', typ]), value)
+ return unpack(typ, st)[0]
+ else:
+ packed = pack(typ, value)
+ if sys.byteorder == 'little':
+ st = unpack(''.join(['>', typ]), packed)
+ else:
+ st = unpack(''.join(['<', typ]), packed)
+ return st[0]
+
+ if typeof in ('c_float', 'c_float_le', 'c_float_be'):
+ return swap_double_float('f')
+ elif typeof in ('c_double', 'c_double_le', 'c_double_be'):
+ return swap_double_float('d')
+ else:
+ if sizeof == 2:
+ return swap_2()
+ elif sizeof == 4:
+ return swap_4()
+ elif sizeof == 8:
+ return swap_8()
+
def generic_xxx_p_from_param(cls, value):
if value is None:
return cls(None)
@@ -271,6 +319,31 @@
def _as_ffi_pointer_(self, ffitype):
return as_ffi_pointer(self, ffitype)
result._as_ffi_pointer_ = _as_ffi_pointer_
+ if name[-2:] != '_p' and name[-3:] not in ('_le', '_be') \
+ and name not in ('c_wchar', '_SimpleCData', 'c_longdouble', 'c_bool', 'py_object'):
+ from sys import byteorder
+ if byteorder == 'big':
+ name += '_le'
+ swapped = self.__new__(self, name, bases, dct)
+ result.__ctype_le__ = swapped
+ result.__ctype_be__ = result
+ swapped.__ctype_be__ = result
+ swapped.__ctype_le__ = swapped
+ else:
+ name += '_be'
+ swapped = self.__new__(self, name, bases, dct)
+ result.__ctype_be__ = swapped
+ result.__ctype_le__ = result
+ swapped.__ctype_le__ = result
+ swapped.__ctype_be__ = swapped
+ from _ctypes import sizeof
+ def _getval(self):
+ return swap_bytes(self._buffer[0], sizeof(self), name, 'get')
+ def _setval(self, value):
+ d = result()
+ d.value = value
+ self._buffer[0] = swap_bytes(d.value, sizeof(self), name, 'set')
+ swapped.value = property(_getval, _setval)
return result
diff --git a/lib_pypy/_ctypes/structure.py b/lib_pypy/_ctypes/structure.py
--- a/lib_pypy/_ctypes/structure.py
+++ b/lib_pypy/_ctypes/structure.py
@@ -40,6 +40,22 @@
else:
rawfields.append((f[0], f[1]._ffishape_))
+ # hack for duplicate field names
+ already_seen = set()
+ names1 = names
+ names = []
+ for f in names1:
+ if f not in already_seen:
+ names.append(f)
+ already_seen.add(f)
+ already_seen = set()
+ for i in reversed(range(len(rawfields))):
+ if rawfields[i][0] in already_seen:
+ rawfields[i] = (('$DUP%d$%s' % (i, rawfields[i][0]),)
+ + rawfields[i][1:])
+ already_seen.add(rawfields[i][0])
+ # /hack
+
_set_shape(self, rawfields, self._is_union)
fields = {}
@@ -130,6 +146,7 @@
obj._buffer.__setattr__(self.name, arg)
+
def _set_shape(tp, rawfields, is_union=False):
tp._ffistruct_ = _rawffi.Structure(rawfields, is_union,
getattr(tp, '_pack_', 0))
@@ -224,19 +241,27 @@
res.__dict__['_index'] = -1
return res
-
class StructOrUnion(_CData):
__metaclass__ = StructOrUnionMeta
def __new__(cls, *args, **kwds):
from _ctypes import union
- self = super(_CData, cls).__new__(cls)
- if ('_abstract_' in cls.__dict__ or cls is Structure
+ if ('_abstract_' in cls.__dict__ or cls is Structure
or cls is union.Union):
raise TypeError("abstract class")
if hasattr(cls, '_swappedbytes_'):
- raise NotImplementedError("missing in PyPy: structure/union with "
- "swapped (non-native) byte ordering")
+ fields = [None] * len(cls._fields_)
+ for i in range(len(cls._fields_)):
+ if cls._fields_[i][1] == cls._fields_[i][1].__dict__.get('__ctype_be__', None):
+ swapped = cls._fields_[i][1].__dict__.get('__ctype_le__', cls._fields_[i][1])
+ else:
+ swapped = cls._fields_[i][1].__dict__.get('__ctype_be__', cls._fields_[i][1])
+ if len(cls._fields_[i]) < 3:
+ fields[i] = (cls._fields_[i][0], swapped)
+ else:
+ fields[i] = (cls._fields_[i][0], swapped, cls._fields_[i][2])
+ names_and_fields(cls, fields, _CData, cls.__dict__.get('_anonymous_', None))
+ self = super(_CData, cls).__new__(cls)
if hasattr(cls, '_ffistruct_'):
self.__dict__['_buffer'] = self._ffistruct_(autofree=True)
return self
diff --git a/lib_pypy/_tkinter/tklib_build.py b/lib_pypy/_tkinter/tklib_build.py
--- a/lib_pypy/_tkinter/tklib_build.py
+++ b/lib_pypy/_tkinter/tklib_build.py
@@ -22,12 +22,27 @@
linklibs = ['tcl', 'tk']
libdirs = []
else:
- for _ver in ['', '8.6', '8.5', '']:
+ # On some Linux distributions, the tcl and tk libraries are
+ # stored in /usr/include, so we must check this case also
+ libdirs = []
+ found = False
+ for _ver in ['', '8.6', '8.5']:
incdirs = ['/usr/include/tcl' + _ver]
linklibs = ['tcl' + _ver, 'tk' + _ver]
- libdirs = []
if os.path.isdir(incdirs[0]):
+ found = True
break
+ if not found:
+ for _ver in ['8.6', '8.5', '']:
+ incdirs = []
+ linklibs = ['tcl' + _ver, 'tk' + _ver]
+ if os.path.isfile(''.join(['/usr/lib/lib', linklibs[1], '.so'])):
+ found = True
+ break
+ if not found:
+ sys.stderr.write("*** TCL libraries not found! Falling back...\n")
+ incdirs = []
+ linklibs = ['tcl', 'tk']
config_ffi = FFI()
config_ffi.cdef("""
diff --git a/lib_pypy/cPickle.py b/lib_pypy/cPickle.py
--- a/lib_pypy/cPickle.py
+++ b/lib_pypy/cPickle.py
@@ -116,10 +116,20 @@
@builtinify
def dump(obj, file, protocol=None):
+ if protocol > HIGHEST_PROTOCOL:
+ # use cPickle error message, not pickle.py one
+ raise ValueError("pickle protocol %d asked for; "
+ "the highest available protocol is %d" % (
+ protocol, HIGHEST_PROTOCOL))
Pickler(file, protocol).dump(obj)
@builtinify
def dumps(obj, protocol=None):
+ if protocol > HIGHEST_PROTOCOL:
+ # use cPickle error message, not pickle.py one
+ raise ValueError("pickle protocol %d asked for; "
+ "the highest available protocol is %d" % (
+ protocol, HIGHEST_PROTOCOL))
file = StringIO()
Pickler(file, protocol).dump(obj)
return file.getvalue()
diff --git a/lib_pypy/cffi.egg-info/PKG-INFO b/lib_pypy/cffi.egg-info/PKG-INFO
--- a/lib_pypy/cffi.egg-info/PKG-INFO
+++ b/lib_pypy/cffi.egg-info/PKG-INFO
@@ -1,6 +1,6 @@
Metadata-Version: 1.1
Name: cffi
-Version: 1.11.0
+Version: 1.11.1
Summary: Foreign Function Interface for Python calling C code.
Home-page: http://cffi.readthedocs.org
Author: Armin Rigo, Maciej Fijalkowski
diff --git a/lib_pypy/cffi/__init__.py b/lib_pypy/cffi/__init__.py
--- a/lib_pypy/cffi/__init__.py
+++ b/lib_pypy/cffi/__init__.py
@@ -4,8 +4,8 @@
from .api import FFI
from .error import CDefError, FFIError, VerificationError, VerificationMissing
-__version__ = "1.11.0"
-__version_info__ = (1, 11, 0)
+__version__ = "1.11.1"
+__version_info__ = (1, 11, 1)
# The verifier module file names are based on the CRC32 of a string that
# contains the following version number. It may be older than __version__
diff --git a/lib_pypy/cffi/_cffi_include.h b/lib_pypy/cffi/_cffi_include.h
--- a/lib_pypy/cffi/_cffi_include.h
+++ b/lib_pypy/cffi/_cffi_include.h
@@ -95,6 +95,7 @@
#define _cffi_from_c_ulong PyLong_FromUnsignedLong
#define _cffi_from_c_longlong PyLong_FromLongLong
#define _cffi_from_c_ulonglong PyLong_FromUnsignedLongLong
+#define _cffi_from_c__Bool PyBool_FromLong
#define _cffi_to_c_double PyFloat_AsDouble
#define _cffi_to_c_float PyFloat_AsDouble
diff --git a/lib_pypy/cffi/_embedding.h b/lib_pypy/cffi/_embedding.h
--- a/lib_pypy/cffi/_embedding.h
+++ b/lib_pypy/cffi/_embedding.h
@@ -1,7 +1,12 @@
/***** Support code for embedding *****/
-#if defined(_MSC_VER)
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+
+#if defined(_WIN32)
# define CFFI_DLLEXPORT __declspec(dllexport)
#elif defined(__GNUC__)
# define CFFI_DLLEXPORT __attribute__((visibility("default")))
@@ -242,7 +247,7 @@
if (f != NULL && f != Py_None) {
PyFile_WriteString("\nFrom: " _CFFI_MODULE_NAME
- "\ncompiled with cffi version: 1.11.0"
+ "\ncompiled with cffi version: 1.11.1"
"\n_cffi_backend module: ", f);
modules = PyImport_GetModuleDict();
mod = PyDict_GetItemString(modules, "_cffi_backend");
@@ -525,3 +530,7 @@
#undef cffi_compare_and_swap
#undef cffi_write_barrier
#undef cffi_read_barrier
+
+#ifdef __cplusplus
+}
+#endif
diff --git a/lib_pypy/cffi/api.py b/lib_pypy/cffi/api.py
--- a/lib_pypy/cffi/api.py
+++ b/lib_pypy/cffi/api.py
@@ -394,12 +394,17 @@
replace_with = ' ' + replace_with
return self._backend.getcname(cdecl, replace_with)
- def gc(self, cdata, destructor):
+ def gc(self, cdata, destructor, size=0):
"""Return a new cdata object that points to the same
data. Later, when this new cdata object is garbage-collected,
'destructor(old_cdata_object)' will be called.
+
+ The optional 'size' gives an estimate of the size, used to
+ trigger the garbage collection more eagerly. So far only used
+ on PyPy. It tells the GC that the returned object keeps alive
+ roughly 'size' bytes of external memory.
"""
- return self._backend.gcp(cdata, destructor)
+ return self._backend.gcp(cdata, destructor, size)
def _get_cached_btype(self, type):
assert self._lock.acquire(False) is False
diff --git a/lib_pypy/cffi/backend_ctypes.py b/lib_pypy/cffi/backend_ctypes.py
--- a/lib_pypy/cffi/backend_ctypes.py
+++ b/lib_pypy/cffi/backend_ctypes.py
@@ -1002,7 +1002,7 @@
_weakref_cache_ref = None
- def gcp(self, cdata, destructor):
+ def gcp(self, cdata, destructor, size=0):
if self._weakref_cache_ref is None:
import weakref
class MyRef(weakref.ref):
diff --git a/lib_pypy/cffi/recompiler.py b/lib_pypy/cffi/recompiler.py
--- a/lib_pypy/cffi/recompiler.py
+++ b/lib_pypy/cffi/recompiler.py
@@ -412,6 +412,9 @@
prnt(' }')
prnt(' p[0] = (const void *)0x%x;' % self._version)
prnt(' p[1] = &_cffi_type_context;')
+ prnt('#if PY_MAJOR_VERSION >= 3')
+ prnt(' return NULL;')
+ prnt('#endif')
prnt('}')
# on Windows, distutils insists on putting init_cffi_xyz in
# 'export_symbols', so instead of fighting it, just give up and
@@ -578,7 +581,7 @@
def _convert_expr_from_c(self, tp, var, context):
if isinstance(tp, model.BasePrimitiveType):
- if tp.is_integer_type():
+ if tp.is_integer_type() and tp.name != '_Bool':
return '_cffi_from_c_int(%s, %s)' % (var, tp.name)
elif isinstance(tp, model.UnknownFloatType):
return '_cffi_from_c_double(%s)' % (var,)
diff --git a/lib_pypy/cffi/vengine_cpy.py b/lib_pypy/cffi/vengine_cpy.py
--- a/lib_pypy/cffi/vengine_cpy.py
+++ b/lib_pypy/cffi/vengine_cpy.py
@@ -296,7 +296,7 @@
def _convert_expr_from_c(self, tp, var, context):
if isinstance(tp, model.PrimitiveType):
- if tp.is_integer_type():
+ if tp.is_integer_type() and tp.name != '_Bool':
return '_cffi_from_c_int(%s, %s)' % (var, tp.name)
elif tp.name != 'long double':
return '_cffi_from_c_%s(%s)' % (tp.name.replace(' ', '_'), var)
@@ -872,6 +872,7 @@
#define _cffi_from_c_ulong PyLong_FromUnsignedLong
#define _cffi_from_c_longlong PyLong_FromLongLong
#define _cffi_from_c_ulonglong PyLong_FromUnsignedLongLong
+#define _cffi_from_c__Bool PyBool_FromLong
#define _cffi_to_c_double PyFloat_AsDouble
#define _cffi_to_c_float PyFloat_AsDouble
diff --git a/lib_pypy/pyrepl/historical_reader.py b/lib_pypy/pyrepl/historical_reader.py
--- a/lib_pypy/pyrepl/historical_reader.py
+++ b/lib_pypy/pyrepl/historical_reader.py
@@ -17,7 +17,7 @@
# CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN
# CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-from pyrepl import reader, commands
+from pyrepl import reader, commands, input
from pyrepl.reader import Reader as R
isearch_keymap = tuple(
@@ -214,7 +214,6 @@
isearch_forwards, isearch_backwards, operate_and_get_next]:
self.commands[c.__name__] = c
self.commands[c.__name__.replace('_', '-')] = c
- from pyrepl import input
self.isearch_trans = input.KeymapTranslator(
isearch_keymap, invalid_cls=isearch_end,
character_cls=isearch_add_character)
diff --git a/pypy/config/pypyoption.py b/pypy/config/pypyoption.py
--- a/pypy/config/pypyoption.py
+++ b/pypy/config/pypyoption.py
@@ -36,7 +36,7 @@
"cStringIO", "thread", "itertools", "pyexpat", "_ssl", "cpyext", "array",
"binascii", "_multiprocessing", '_warnings', "_collections",
"_multibytecodec", "micronumpy", "_continuation", "_cffi_backend",
- "_csv", "cppyy", "_pypyjson", "_jitlog"
+ "_csv", "_cppyy", "_pypyjson", "_jitlog"
])
from rpython.jit.backend import detect_cpu
@@ -67,10 +67,12 @@
if name in translation_modules:
translation_modules.remove(name)
- if "cppyy" in working_modules:
- working_modules.remove("cppyy") # not tested on win32
+ if "_cppyy" in working_modules:
+ working_modules.remove("_cppyy") # not tested on win32
if "faulthandler" in working_modules:
working_modules.remove("faulthandler") # missing details
+ if "_vmprof" in working_modules:
+ working_modules.remove("_vmprof") # FIXME: missing details
# The _locale module is needed by site.py on Windows
default_modules.add("_locale")
@@ -79,8 +81,8 @@
working_modules.remove('fcntl') # LOCK_NB not defined
working_modules.remove("_minimal_curses")
working_modules.remove("termios")
- if "cppyy" in working_modules:
- working_modules.remove("cppyy") # depends on ctypes
+ if "_cppyy" in working_modules:
+ working_modules.remove("_cppyy") # depends on ctypes
#if sys.platform.startswith("linux"):
# _mach = os.popen('uname -m', 'r').read().strip()
@@ -92,7 +94,7 @@
'_multiprocessing': [('objspace.usemodules.time', True),
('objspace.usemodules.thread', True)],
'cpyext': [('objspace.usemodules.array', True)],
- 'cppyy': [('objspace.usemodules.cpyext', True)],
+ '_cppyy': [('objspace.usemodules.cpyext', True)],
'faulthandler': [('objspace.usemodules._vmprof', True)],
}
module_suggests = {
@@ -224,11 +226,6 @@
"use specialised tuples",
default=False),
- BoolOption("withcelldict",
- "use dictionaries that are optimized for being used as module dicts",
- default=False,
- requires=[("objspace.honor__builtins__", False)]),
-
BoolOption("withliststrategies",
"enable optimized ways to store lists of primitives ",
default=True),
@@ -288,7 +285,7 @@
# extra optimizations with the JIT
if level == 'jit':
- config.objspace.std.suggest(withcelldict=True)
+ pass # none at the moment
def enable_allworkingmodules(config):
diff --git a/pypy/doc/build.rst b/pypy/doc/build.rst
--- a/pypy/doc/build.rst
+++ b/pypy/doc/build.rst
@@ -10,6 +10,18 @@
minutes on a fast machine -- and RAM-hungry. You will need **at least** 2 GB
of memory on a 32-bit machine and 4GB on a 64-bit machine.
+Before you start
+----------------
+
+Our normal development workflow avoids a full translation by using test-driven
+development. You can read more about how to develop PyPy here_, and latest
+translated (hopefully functional) binary packages are available on our
+buildbot's `nightly builds`_
+
+.. _here: getting-started-dev.html
+.. _`nightly builds`: http://buildbot.pypy.org/nightly
+
+You will need the build dependencies below to run the tests.
Clone the repository
--------------------
@@ -107,8 +119,15 @@
To run untranslated tests, you need the Boehm garbage collector libgc.
-On Debian and Ubuntu, this is the command to install all build-time
-dependencies::
+On recent Debian and Ubuntu (like 17.04), this is the command to install
+all build-time dependencies::
+
+ apt-get install gcc make libffi-dev pkg-config zlib1g-dev libbz2-dev \
+ libsqlite3-dev libncurses5-dev libexpat1-dev libssl-dev libgdbm-dev \
+ tk-dev libgc-dev python-cffi \
+ liblzma-dev libncursesw5-dev # these two only needed on PyPy3
+
+On older Debian and Ubuntu (12.04 to 16.04)::
apt-get install gcc make libffi-dev pkg-config libz-dev libbz2-dev \
libsqlite3-dev libncurses-dev libexpat1-dev libssl-dev libgdbm-dev \
@@ -140,22 +159,61 @@
Run the translation
-------------------
+We usually translate in the ``pypy/goal`` directory, so all the following
+commands assume your ``$pwd`` is there.
+
Translate with JIT::
- cd pypy/goal
pypy ../../rpython/bin/rpython --opt=jit
Translate without JIT::
- cd pypy/goal
pypy ../../rpython/bin/rpython --opt=2
+Note this translates pypy via the ``targetpypystandalone.py`` file, so these
+are shorthand for::
+
+ pypy ../../rpython/bin/rpython <rpython options> targetpypystandalone.py <pypy options>
+
+More help is availabe via ``--help`` at either option position, and more info
+can be found in the :doc:`config/index` section.
+
(You can use ``python`` instead of ``pypy`` here, which will take longer
but works too.)
-If everything works correctly this will create an executable ``pypy-c`` in the
-current directory. The executable behaves mostly like a normal Python
-interpreter (see :doc:`cpython_differences`).
+If everything works correctly this will:
+
+1. Run the rpython `translation chain`_, producing a database of the
+ entire pypy interpreter. This step is currently singe threaded, and RAM
+ hungry. As part of this step, the chain creates a large number of C code
+ files and a Makefile to compile them in a
+ directory controlled by the ``PYPY_USESSION_DIR`` environment variable.
+2. Create an executable ``pypy-c`` by running the Makefile. This step can
+ utilize all possible cores on the machine.
+3. Copy the needed binaries to the current directory.
+4. Generate c-extension modules for any cffi-based stdlib modules.
+
+
+The resulting executable behaves mostly like a normal Python
+interpreter (see :doc:`cpython_differences`), and is ready for testing, for
+use as a base interpreter for a new virtualenv, or for packaging into a binary
+suitable for installation on another machine running the same OS as the build
+machine.
+
+Note that step 4 is merely done as a convenience, any of the steps may be rerun
+without rerunning the previous steps.
+
+.. _`translation chain`: https://rpython.readthedocs.io/en/latest/translation.html
+
+
+Making a debug build of PyPy
+----------------------------
+
+If the Makefile is rerun with the lldebug or lldebug0 target, appropriate
+compilation flags are added to add debug info and reduce compiler optimizations
+to ``-O0`` respectively. If you stop in a debugger, you will see the
+very wordy machine-generated C code from the rpython translation step, which
+takes a little bit of reading to relate back to the rpython code.
Build cffi import libraries for the stdlib
------------------------------------------
@@ -169,14 +227,6 @@
.. _`out-of-line API mode`: http://cffi.readthedocs.org/en/latest/overview.html#real-example-api-level-out-of-line
-Translating with non-standard options
--------------------------------------
-
-It is possible to have non-standard features enabled for translation,
-but they are not really tested any more. Look, for example, at the
-:doc:`objspace proxies <objspace-proxies>` document.
-
-
Packaging (preparing for installation)
--------------------------------------
@@ -205,14 +255,16 @@
* PyPy 2.5.1 or earlier: normal users would see permission errors.
Installers need to run ``pypy -c "import gdbm"`` and other similar
- commands at install time; the exact list is in `package.py`_. Users
+ commands at install time; the exact list is in
+ :source:`pypy/tool/release/package.py <package.py>`. Users
seeing a broken installation of PyPy can fix it after-the-fact if they
have sudo rights, by running once e.g. ``sudo pypy -c "import gdbm"``.
* PyPy 2.6 and later: anyone would get ``ImportError: no module named
_gdbm_cffi``. Installers need to run ``pypy _gdbm_build.py`` in the
``lib_pypy`` directory during the installation process (plus others;
- see the exact list in `package.py`_). Users seeing a broken
+ see the exact list in :source:`pypy/tool/release/package.py <package.py>`).
+ Users seeing a broken
installation of PyPy can fix it after-the-fact, by running ``pypy
/path/to/lib_pypy/_gdbm_build.py``. This command produces a file
called ``_gdbm_cffi.pypy-41.so`` locally, which is a C extension
diff --git a/pypy/doc/config/objspace.std.withcelldict.txt b/pypy/doc/config/objspace.std.withcelldict.txt
deleted file mode 100644
--- a/pypy/doc/config/objspace.std.withcelldict.txt
+++ /dev/null
@@ -1,2 +0,0 @@
-Enable cell-dicts. This optimization is not helpful without the JIT. In the
-presence of the JIT, it greatly helps looking up globals.
diff --git a/pypy/doc/configuration.rst b/pypy/doc/configuration.rst
--- a/pypy/doc/configuration.rst
+++ b/pypy/doc/configuration.rst
@@ -188,4 +188,6 @@
can be found on the ``config`` attribute of all ``TranslationContext``
instances and are described in :source:`rpython/config/translationoption.py`. The interpreter options
are attached to the object space, also under the name ``config`` and are
-described in :source:`pypy/config/pypyoption.py`.
+described in :source:`pypy/config/pypyoption.py`. Both sets of options are
+documented in the :doc:`config/index` section.
+
diff --git a/pypy/doc/contributor.rst b/pypy/doc/contributor.rst
--- a/pypy/doc/contributor.rst
+++ b/pypy/doc/contributor.rst
@@ -27,8 +27,8 @@
Wim Lavrijsen
Eric van Riet Paap
Richard Emslie
+ Remi Meier
Alexander Schremmer
- Remi Meier
Dan Villiom Podlaski Christiansen
Lukas Diekmann
Sven Hager
@@ -69,6 +69,7 @@
Michael Foord
Stephan Diehl
Stefano Rivera
+ Jean-Paul Calderone
Stefan Schwarzer
Tomek Meka
Valentino Volonghi
@@ -77,14 +78,13 @@
Bob Ippolito
Bruno Gola
David Malcolm
- Jean-Paul Calderone
Squeaky
Edd Barrett
Timo Paulssen
Marius Gedminas
+ Nicolas Truessel
Alexandre Fayolle
Simon Burton
- Nicolas Truessel
Martin Matusiak
Laurence Tratt
Wenzhu Man
@@ -123,6 +123,7 @@
Stefan H. Muller
Tim Felgentreff
Eugene Oden
+ Dodan Mihai
Jeff Terrace
Henry Mason
Vasily Kuznetsov
@@ -149,11 +150,13 @@
Rocco Moretti
Gintautas Miliauskas
Lucian Branescu Mihaila
+ Mariano Anaya
anatoly techtonik
- Dodan Mihai
Karl Bartel
+ Stefan Beyer
Gabriel Lavoie
Jared Grubb
+ Alecsandru Patrascu
Olivier Dormond
Wouter van Heyst
Sebastian Pawluś
@@ -161,6 +164,7 @@
Victor Stinner
Andrews Medina
Aaron Iles
+ p_zieschang at yahoo.de
Toby Watson
Daniel Patrick
Stuart Williams
@@ -171,6 +175,7 @@
Michael Cheng
Mikael Schönenberg
Stanislaw Halik
+ Mihnea Saracin
Berkin Ilbeyi
Gasper Zejn
Faye Zhao
@@ -181,14 +186,12 @@
Jonathan David Riehl
Beatrice During
Alex Perry
- p_zieschang at yahoo.de
Robert Zaremba
Alan McIntyre
Alexander Sedov
Vaibhav Sood
Reuben Cummings
Attila Gobi
- Alecsandru Patrascu
Christopher Pope
Tristan Arthur
Christian Tismer
@@ -210,7 +213,6 @@
Jacek Generowicz
Sylvain Thenault
Jakub Stasiak
- Stefan Beyer
Andrew Dalke
Alejandro J. Cura
Vladimir Kryachko
@@ -242,6 +244,7 @@
Christoph Gerum
Miguel de Val Borro
Artur Lisiecki
+ afteryu
Toni Mattis
Laurens Van Houtven
Bobby Impollonia
@@ -272,6 +275,7 @@
Anna Katrina Dominguez
Kim Jin Su
Amber Brown
+ Anthony Sottile
Nate Bragg
Ben Darnell
Juan Francisco Cantero Hurtado
@@ -292,12 +296,14 @@
Mike Bayer
Rodrigo Araújo
Daniil Yarancev
+ Min RK
OlivierBlanvillain
Jonas Pfannschmidt
Zearin
Andrey Churin
Dan Crosta
reubano at gmail.com
+ Stanisław Halik
Julien Phalip
Roman Podoliaka
Eli Stevens
diff --git a/pypy/doc/cppyy.rst b/pypy/doc/cppyy.rst
deleted file mode 100644
--- a/pypy/doc/cppyy.rst
+++ /dev/null
@@ -1,672 +0,0 @@
-cppyy: C++ bindings for PyPy
-============================
-
-The cppyy module delivers dynamic Python-C++ bindings.
-It is designed for automation, high performance, scale, interactivity, and
-handling all of modern C++ (11, 14, etc.).
-It is based on `Cling`_ which, through `LLVM`_/`clang`_, provides C++
-reflection and interactivity.
-Reflection information is extracted from C++ header files.
-Cppyy itself is built into PyPy (an alternative exists for CPython), but
-it requires a `backend`_, installable through pip, to interface with Cling.
-
-.. _Cling: https://root.cern.ch/cling
-.. _LLVM: http://llvm.org/
-.. _clang: http://clang.llvm.org/
-.. _backend: https://pypi.python.org/pypi/PyPy-cppyy-backend
-
-
-Installation
-------------
-
-This assumes PyPy2.7 v5.7 or later; earlier versions use a Reflex-based cppyy
-module, which is no longer supported.
-Both the tooling and user-facing Python codes are very backwards compatible,
-however.
-Further dependencies are cmake (for general build), Python2.7 (for LLVM), and
-a modern C++ compiler (one that supports at least C++11).
-
-Assuming you have a recent enough version of PyPy installed, use pip to
-complete the installation of cppyy::
-
- $ MAKE_NPROCS=4 pypy-c -m pip install --verbose PyPy-cppyy-backend
-
-Set the number of parallel builds ('4' in this example, through the MAKE_NPROCS
-environment variable) to a number appropriate for your machine.
-The building process may take quite some time as it includes a customized
-version of LLVM as part of Cling, which is why --verbose is recommended so that
-you can see the build progress.
-
-The default installation will be under
-$PYTHONHOME/site-packages/cppyy_backend/lib,
-which needs to be added to your dynamic loader path (LD_LIBRARY_PATH).
-If you need the dictionary and class map generation tools (used in the examples
-below), you need to add $PYTHONHOME/site-packages/cppyy_backend/bin to your
-executable path (PATH).
-
-
-Basic bindings example
-----------------------
-
-These examples assume that cppyy_backend is pointed to by the environment
-variable CPPYYHOME, and that CPPYYHOME/lib is added to LD_LIBRARY_PATH and
-CPPYYHOME/bin to PATH.
-
-Let's first test with a trivial example whether all packages are properly
-installed and functional.
-Create a C++ header file with some class in it (all functions are made inline
-for convenience; if you have out-of-line code, link with it as appropriate)::
-
- $ cat MyClass.h
- class MyClass {
- public:
- MyClass(int i = -99) : m_myint(i) {}
-
- int GetMyInt() { return m_myint; }
- void SetMyInt(int i) { m_myint = i; }
-
- public:
- int m_myint;
- };
-
-Then, generate the bindings using ``genreflex`` (installed under
-cppyy_backend/bin in site_packages), and compile the code::
-
- $ genreflex MyClass.h
- $ g++ -std=c++11 -fPIC -rdynamic -O2 -shared -I$CPPYYHOME/include MyClass_rflx.cpp -o libMyClassDict.so -L$CPPYYHOME/lib -lCling
-
-Next, make sure that the library can be found through the dynamic lookup path
-(the ``LD_LIBRARY_PATH`` environment variable on Linux, ``PATH`` on Windows),
-for example by adding ".".
-Now you're ready to use the bindings.
-Since the bindings are designed to look pythonistic, it should be
-straightforward::
-
- $ pypy-c
- >>>> import cppyy
- >>>> cppyy.load_reflection_info("libMyClassDict.so")
- <CPPLibrary object at 0xb6fd7c4c>
- >>>> myinst = cppyy.gbl.MyClass(42)
- >>>> print myinst.GetMyInt()
- 42
- >>>> myinst.SetMyInt(33)
- >>>> print myinst.m_myint
- 33
- >>>> myinst.m_myint = 77
- >>>> print myinst.GetMyInt()
- 77
- >>>> help(cppyy.gbl.MyClass) # shows that normal python introspection works
-
-That's all there is to it!
-
-
-Automatic class loader
-----------------------
-
-There is one big problem in the code above, that prevents its use in a (large
-scale) production setting: the explicit loading of the reflection library.
-Clearly, if explicit load statements such as these show up in code downstream
-from the ``MyClass`` package, then that prevents the ``MyClass`` author from
-repackaging or even simply renaming the dictionary library.
-
-The solution is to make use of an automatic class loader, so that downstream
-code never has to call ``load_reflection_info()`` directly.
-The class loader makes use of so-called rootmap files, which ``genreflex``
-can produce.
-These files contain the list of available C++ classes and specify the library
-that needs to be loaded for their use (as an aside, this listing allows for a
-cross-check to see whether reflection info is generated for all classes that
-you expect).
-By convention, the rootmap files should be located next to the reflection info
-libraries, so that they can be found through the normal shared library search
-path.
-They can be concatenated together, or consist of a single rootmap file per
-library.
-For example::
-
- $ genreflex MyClass.h --rootmap=libMyClassDict.rootmap --rootmap-lib=libMyClassDict.so
- $ g++ -std=c++11 -fPIC -rdynamic -O2 -shared -I$CPPYYHOME/include MyClass_rflx.cpp -o libMyClassDict.so -L$CPPYYHOME/lib -lCling
-
-where the first option (``--rootmap``) specifies the output file name, and the
-second option (``--rootmap-lib``) the name of the reflection library where
-``MyClass`` will live.
-It is necessary to provide that name explicitly, since it is only in the
-separate linking step where this name is fixed.
-If the second option is not given, the library is assumed to be libMyClass.so,
-a name that is derived from the name of the header file.
-
-With the rootmap file in place, the above example can be rerun without explicit
-loading of the reflection info library::
-
- $ pypy-c
- >>>> import cppyy
- >>>> myinst = cppyy.gbl.MyClass(42)
- >>>> print myinst.GetMyInt()
- 42
- >>>> # etc. ...
-
-As a caveat, note that the class loader is currently limited to classes only.
-
-
-Advanced example
-----------------
-
-The following snippet of C++ is very contrived, to allow showing that such
-pathological code can be handled and to show how certain features play out in
-practice::
-
- $ cat MyAdvanced.h
- #include <string>
-
- class Base1 {
- public:
- Base1(int i) : m_i(i) {}
- virtual ~Base1() {}
- int m_i;
- };
-
- class Base2 {
- public:
- Base2(double d) : m_d(d) {}
- virtual ~Base2() {}
- double m_d;
- };
-
- class C;
-
- class Derived : public virtual Base1, public virtual Base2 {
- public:
- Derived(const std::string& name, int i, double d) : Base1(i), Base2(d), m_name(name) {}
- virtual C* gimeC() { return (C*)0; }
- std::string m_name;
- };
-
- Base2* BaseFactory(const std::string& name, int i, double d) {
- return new Derived(name, i, d);
- }
-
-This code is still only in a header file, with all functions inline, for
-convenience of the example.
-If the implementations live in a separate source file or shared library, the
-only change needed is to link those in when building the reflection library.
-
-If you were to run ``genreflex`` like above in the basic example, you will
-find that not all classes of interest will be reflected, nor will be the
-global factory function.
-In particular, ``std::string`` will be missing, since it is not defined in
-this header file, but in a header file that is included.
-In practical terms, general classes such as ``std::string`` should live in a
-core reflection set, but for the moment assume we want to have it in the
-reflection library that we are building for this example.
-
-The ``genreflex`` script can be steered using a so-called `selection file`_
-(see "Generating Reflex Dictionaries")
-which is a simple XML file specifying, either explicitly or by using a
-pattern, which classes, variables, namespaces, etc. to select from the given
-header file.
-With the aid of a selection file, a large project can be easily managed:
-simply ``#include`` all relevant headers into a single header file that is
-handed to ``genreflex``.
-In fact, if you hand multiple header files to ``genreflex``, then a selection
-file is almost obligatory: without it, only classes from the last header will
-be selected.
-Then, apply a selection file to pick up all the relevant classes.
-For our purposes, the following rather straightforward selection will do
-(the name ``lcgdict`` for the root is historical, but required)::
-
- $ cat MyAdvanced.xml
- <lcgdict>
- <class pattern="Base?" />
- <class name="Derived" />
- <class name="std::string" />
- <function name="BaseFactory" />
- </lcgdict>
-
-.. _selection file: https://root.cern.ch/how/how-use-reflex
-
-Now the reflection info can be generated and compiled::
-
- $ genreflex MyAdvanced.h --selection=MyAdvanced.xml
- $ g++ -std=c++11 -fPIC -rdynamic -O2 -shared -I$CPPYYHOME/include MyAdvanced_rflx.cpp -o libAdvExDict.so -L$CPPYYHOME/lib -lCling
-
-and subsequently be used from PyPy::
-
- >>>> import cppyy
- >>>> cppyy.load_reflection_info("libAdvExDict.so")
- <CPPLibrary object at 0x00007fdb48fc8120>
- >>>> d = cppyy.gbl.BaseFactory("name", 42, 3.14)
- >>>> type(d)
- <class '__main__.Derived'>
- >>>> isinstance(d, cppyy.gbl.Base1)
- True
- >>>> isinstance(d, cppyy.gbl.Base2)
- True
- >>>> d.m_i, d.m_d
- (42, 3.14)
- >>>> d.m_name == "name"
- True
- >>>>
-
-Again, that's all there is to it!
-
-A couple of things to note, though.
-If you look back at the C++ definition of the ``BaseFactory`` function,
-you will see that it declares the return type to be a ``Base2``, yet the
-bindings return an object of the actual type ``Derived``?
-This choice is made for a couple of reasons.
-First, it makes method dispatching easier: if bound objects are always their
-most derived type, then it is easy to calculate any offsets, if necessary.
-Second, it makes memory management easier: the combination of the type and
-the memory address uniquely identifies an object.
-That way, it can be recycled and object identity can be maintained if it is
-entered as a function argument into C++ and comes back to PyPy as a return
-value.
-Last, but not least, casting is decidedly unpythonistic.
-By always providing the most derived type known, casting becomes unnecessary.
-For example, the data member of ``Base2`` is simply directly available.
-Note also that the unreflected ``gimeC`` method of ``Derived`` does not
-preclude its use.
-It is only the ``gimeC`` method that is unusable as long as class ``C`` is
-unknown to the system.
-
-
-Features
---------
-
-The following is not meant to be an exhaustive list, since cppyy is still
-under active development.
-Furthermore, the intention is that every feature is as natural as possible on
-the python side, so if you find something missing in the list below, simply
-try it out.
-It is not always possible to provide exact mapping between python and C++
-(active memory management is one such case), but by and large, if the use of a
-feature does not strike you as obvious, it is more likely to simply be a bug.
-That is a strong statement to make, but also a worthy goal.
-For the C++ side of the examples, refer to this :doc:`example code <cppyy_example>`, which was
-bound using::
-
- $ genreflex example.h --deep --rootmap=libexampleDict.rootmap --rootmap-lib=libexampleDict.so
- $ g++ -std=c++11 -fPIC -rdynamic -O2 -shared -I$CPPYYHOME/include example_rflx.cpp -o libexampleDict.so -L$CPPYYHOME/lib -lCling
-
-* **abstract classes**: Are represented as python classes, since they are
- needed to complete the inheritance hierarchies, but will raise an exception
- if an attempt is made to instantiate from them.
- Example::
-
- >>>> from cppyy.gbl import AbstractClass, ConcreteClass
- >>>> a = AbstractClass()
- Traceback (most recent call last):
- File "<console>", line 1, in <module>
- TypeError: cannot instantiate abstract class 'AbstractClass'
- >>>> issubclass(ConcreteClass, AbstractClass)
- True
- >>>> c = ConcreteClass()
- >>>> isinstance(c, AbstractClass)
- True
- >>>>
-
-* **arrays**: Supported for builtin data types only, as used from module
- ``array``.
- Out-of-bounds checking is limited to those cases where the size is known at
- compile time (and hence part of the reflection info).
- Example::
-
- >>>> from cppyy.gbl import ConcreteClass
- >>>> from array import array
- >>>> c = ConcreteClass()
- >>>> c.array_method(array('d', [1., 2., 3., 4.]), 4)
- 1 2 3 4
- >>>>
-
-* **builtin data types**: Map onto the expected equivalent python types, with
- the caveat that there may be size differences, and thus it is possible that
- exceptions are raised if an overflow is detected.
-
-* **casting**: Is supposed to be unnecessary.
- Object pointer returns from functions provide the most derived class known
- in the hierarchy of the object being returned.
- This is important to preserve object identity as well as to make casting,
- a pure C++ feature after all, superfluous.
- Example::
-
- >>>> from cppyy.gbl import AbstractClass, ConcreteClass
- >>>> c = ConcreteClass()
- >>>> ConcreteClass.show_autocast.__doc__
- 'AbstractClass* ConcreteClass::show_autocast()'
- >>>> d = c.show_autocast()
- >>>> type(d)
- <class '__main__.ConcreteClass'>
- >>>>
-
- However, if need be, you can perform C++-style reinterpret_casts (i.e.
- without taking offsets into account), by taking and rebinding the address
- of an object::
-
- >>>> from cppyy import addressof, bind_object
- >>>> e = bind_object(addressof(d), AbstractClass)
- >>>> type(e)
- <class '__main__.AbstractClass'>
- >>>>
-
-* **classes and structs**: Get mapped onto python classes, where they can be
- instantiated as expected.
- If classes are inner classes or live in a namespace, their naming and
- location will reflect that.
- Example::
-
- >>>> from cppyy.gbl import ConcreteClass, Namespace
- >>>> ConcreteClass == Namespace.ConcreteClass
- False
- >>>> n = Namespace.ConcreteClass.NestedClass()
- >>>> type(n)
- <class '__main__.Namespace::ConcreteClass::NestedClass'>
- >>>>
-
-* **data members**: Public data members are represented as python properties
- and provide read and write access on instances as expected.
- Private and protected data members are not accessible.
- Example::
-
- >>>> from cppyy.gbl import ConcreteClass
- >>>> c = ConcreteClass()
- >>>> c.m_int
- 42
- >>>>
-
-* **default arguments**: C++ default arguments work as expected, but python
- keywords are not supported.
- It is technically possible to support keywords, but for the C++ interface,
- the formal argument names have no meaning and are not considered part of the
- API, hence it is not a good idea to use keywords.
- Example::
-
- >>>> from cppyy.gbl import ConcreteClass
- >>>> c = ConcreteClass() # uses default argument
- >>>> c.m_int
- 42
- >>>> c = ConcreteClass(13)
- >>>> c.m_int
- 13
- >>>>
-
-* **doc strings**: The doc string of a method or function contains the C++
- arguments and return types of all overloads of that name, as applicable.
- Example::
-
- >>>> from cppyy.gbl import ConcreteClass
- >>>> print ConcreteClass.array_method.__doc__
- void ConcreteClass::array_method(int*, int)
- void ConcreteClass::array_method(double*, int)
- >>>>
-
-* **enums**: Are translated as ints with no further checking.
-
-* **functions**: Work as expected and live in their appropriate namespace
- (which can be the global one, ``cppyy.gbl``).
-
-* **inheritance**: All combinations of inheritance on the C++ (single,
- multiple, virtual) are supported in the binding.
- However, new python classes can only use single inheritance from a bound C++
- class.
- Multiple inheritance would introduce two "this" pointers in the binding.
- This is a current, not a fundamental, limitation.
- The C++ side will not see any overridden methods on the python side, as
- cross-inheritance is planned but not yet supported.
- Example::
-
- >>>> from cppyy.gbl import ConcreteClass
- >>>> help(ConcreteClass)
- Help on class ConcreteClass in module __main__:
-
- class ConcreteClass(AbstractClass)
- | Method resolution order:
- | ConcreteClass
- | AbstractClass
- | cppyy.CPPObject
- | __builtin__.CPPInstance
- | __builtin__.object
- |
- | Methods defined here:
- |
- | ConcreteClass(self, *args)
- | ConcreteClass::ConcreteClass(const ConcreteClass&)
- | ConcreteClass::ConcreteClass(int)
- | ConcreteClass::ConcreteClass()
- |
- etc. ....
-
-* **memory**: C++ instances created by calling their constructor from python
- are owned by python.
- You can check/change the ownership with the _python_owns flag that every
- bound instance carries.
- Example::
-
- >>>> from cppyy.gbl import ConcreteClass
- >>>> c = ConcreteClass()
- >>>> c._python_owns # True: object created in Python
- True
- >>>>
-
-* **methods**: Are represented as python methods and work as expected.
- They are first class objects and can be bound to an instance.
- Virtual C++ methods work as expected.
- To select a specific virtual method, do like with normal python classes
- that override methods: select it from the class that you need, rather than
- calling the method on the instance.
- To select a specific overload, use the __dispatch__ special function, which
- takes the name of the desired method and its signature (which can be
- obtained from the doc string) as arguments.
-
-* **namespaces**: Are represented as python classes.
- Namespaces are more open-ended than classes, so sometimes initial access may
- result in updates as data and functions are looked up and constructed
- lazily.
- Thus the result of ``dir()`` on a namespace shows the classes available,
- even if they may not have been created yet.
- It does not show classes that could potentially be loaded by the class
- loader.
- Once created, namespaces are registered as modules, to allow importing from
- them.
- Namespace currently do not work with the class loader.
- Fixing these bootstrap problems is on the TODO list.
- The global namespace is ``cppyy.gbl``.
-
-* **NULL**: Is represented as ``cppyy.gbl.nullptr``.
- In C++11, the keyword ``nullptr`` is used to represent ``NULL``.
- For clarity of intent, it is recommended to use this instead of ``None``
- (or the integer ``0``, which can serve in some cases), as ``None`` is better
- understood as ``void`` in C++.
-
-* **operator conversions**: If defined in the C++ class and a python
- equivalent exists (i.e. all builtin integer and floating point types, as well
- as ``bool``), it will map onto that python conversion.
- Note that ``char*`` is mapped onto ``__str__``.
- Example::
-
- >>>> from cppyy.gbl import ConcreteClass
- >>>> print ConcreteClass()
- Hello operator const char*!
- >>>>
-
-* **operator overloads**: If defined in the C++ class and if a python
- equivalent is available (not always the case, think e.g. of ``operator||``),
- then they work as expected.
- Special care needs to be taken for global operator overloads in C++: first,
- make sure that they are actually reflected, especially for the global
- overloads for ``operator==`` and ``operator!=`` of STL vector iterators in
- the case of gcc (note that they are not needed to iterate over a vector).
- Second, make sure that reflection info is loaded in the proper order.
- I.e. that these global overloads are available before use.
-
-* **pointers**: For builtin data types, see arrays.
- For objects, a pointer to an object and an object looks the same, unless
- the pointer is a data member.
- In that case, assigning to the data member will cause a copy of the pointer
- and care should be taken about the object's life time.
- If a pointer is a global variable, the C++ side can replace the underlying
- object and the python side will immediately reflect that.
-
-* **PyObject***: Arguments and return types of ``PyObject*`` can be used, and
- passed on to CPython API calls.
- Since these CPython-like objects need to be created and tracked (this all
- happens through ``cpyext``) this interface is not particularly fast.
-
-* **static data members**: Are represented as python property objects on the
- class and the meta-class.
- Both read and write access is as expected.
-
-* **static methods**: Are represented as python's ``staticmethod`` objects
- and can be called both from the class as well as from instances.
-
-* **strings**: The std::string class is considered a builtin C++ type and
- mixes quite well with python's str.
- Python's str can be passed where a ``const char*`` is expected, and an str
- will be returned if the return type is ``const char*``.
-
-* **templated classes**: Are represented in a meta-class style in python.
- This may look a little bit confusing, but conceptually is rather natural.
- For example, given the class ``std::vector<int>``, the meta-class part would
- be ``std.vector``.
- Then, to get the instantiation on ``int``, do ``std.vector(int)`` and to
- create an instance of that class, do ``std.vector(int)()``::
-
- >>>> import cppyy
- >>>> cppyy.load_reflection_info('libexampleDict.so')
- >>>> cppyy.gbl.std.vector # template metatype
- <cppyy.CppyyTemplateType object at 0x00007fcdd330f1a0>
- >>>> cppyy.gbl.std.vector(int) # instantiates template -> class
- <class '__main__.std::vector<int>'>
- >>>> cppyy.gbl.std.vector(int)() # instantiates class -> object
- <__main__.std::vector<int> object at 0x00007fe480ba4bc0>
- >>>>
-
- Note that templates can be build up by handing actual types to the class
- instantiation (as done in this vector example), or by passing in the list of
- template arguments as a string.
- The former is a lot easier to work with if you have template instantiations
- using classes that themselves are templates in the arguments (think e.g a
- vector of vectors).
- All template classes must already exist in the loaded reflection info, they
- do not work (yet) with the class loader.
-
- For compatibility with other bindings generators, use of square brackets
- instead of parenthesis to instantiate templates is supported as well.
-
-* **templated functions**: Automatically participate in overloading and are
- used in the same way as other global functions.
-
-* **templated methods**: For now, require an explicit selection of the
- template parameters.
- This will be changed to allow them to participate in overloads as expected.
-
-* **typedefs**: Are simple python references to the actual classes to which
- they refer.
-
-* **unary operators**: Are supported if a python equivalent exists, and if the
- operator is defined in the C++ class.
-
-You can always find more detailed examples and see the full of supported
-features by looking at the tests in pypy/module/cppyy/test.
-
-If a feature or reflection info is missing, this is supposed to be handled
-gracefully.
-In fact, there are unit tests explicitly for this purpose (even as their use
-becomes less interesting over time, as the number of missing features
-decreases).
-Only when a missing feature is used, should there be an exception.
-For example, if no reflection info is available for a return type, then a
-class that has a method with that return type can still be used.
-Only that one specific method can not be used.
-
-
-Templates
----------
-
-Templates can be automatically instantiated, assuming the appropriate header
-files have been loaded or are accessible to the class loader.
-This is the case for example for all of STL.
-For example::
-
- $ cat MyTemplate.h
- #include <vector>
-
- class MyClass {
- public:
- MyClass(int i = -99) : m_i(i) {}
- MyClass(const MyClass& s) : m_i(s.m_i) {}
- MyClass& operator=(const MyClass& s) { m_i = s.m_i; return *this; }
- ~MyClass() {}
- int m_i;
- };
-
-Run the normal ``genreflex`` and compilation steps::
-
- $ genreflex MyTemplate.h --selection=MyTemplate.xml
- $ g++ -std=c++11 -fPIC -rdynamic -O2 -shared -I$CPPYYHOME/include MyTemplate_rflx.cpp -o libTemplateDict.so -L$CPPYYHOME/lib -lCling
-
-Subsequent use should be as expected.
-Note the meta-class style of "instantiating" the template::
-
- >>>> import cppyy
- >>>> cppyy.load_reflection_info("libTemplateDict.so")
- >>>> std = cppyy.gbl.std
- >>>> MyClass = cppyy.gbl.MyClass
- >>>> v = std.vector(MyClass)()
- >>>> v += [MyClass(1), MyClass(2), MyClass(3)]
- >>>> for m in v:
- .... print m.m_i,
- ....
- 1 2 3
- >>>>
-
-The arguments to the template instantiation can either be a string with the
-full list of arguments, or the explicit classes.
-The latter makes for easier code writing if the classes passed to the
-instantiation are themselves templates.
-
-
-The fast lane
--------------
-
-By default, cppyy will use direct function pointers through `CFFI`_ whenever
-possible. If this causes problems for you, you can disable it by setting the
-CPPYY_DISABLE_FASTPATH environment variable.
-
-.. _CFFI: https://cffi.readthedocs.io/en/latest/
-
-
-CPython
--------
-
-Most of the ideas in cppyy come originally from the `PyROOT`_ project, which
-contains a CPython-based cppyy.py module (with similar dependencies as the
-one that comes with PyPy).
-A standalone pip-installable version is planned, but for now you can install
-ROOT through your favorite distribution installer (available in the science
-section).
-
-.. _PyROOT: https://root.cern.ch/pyroot
-
-There are a couple of minor differences between the two versions of cppyy
-(the CPython version has a few more features).
-Work is on-going to integrate the nightly tests of both to make sure their
-feature sets are equalized.
-
-
-Python3
--------
-
-The CPython version of cppyy supports Python3, assuming your packager has
-build the backend for it.
-The cppyy module has not been tested with the `Py3k`_ version of PyPy.
-Note that the generated reflection information (from ``genreflex``) is fully
-independent of Python, and does not need to be rebuild when switching versions
-or interpreters.
-
-.. _Py3k: https://bitbucket.org/pypy/pypy/src/py3k
-
-
-.. toctree::
- :hidden:
-
- cppyy_example
diff --git a/pypy/doc/cppyy_example.rst b/pypy/doc/cppyy_example.rst
deleted file mode 100644
--- a/pypy/doc/cppyy_example.rst
+++ /dev/null
@@ -1,59 +0,0 @@
-File example.h
-==============
-
-::
-
- #include <iostream>
- #include <vector>
-
- class AbstractClass {
- public:
- virtual ~AbstractClass() {}
- virtual void abstract_method() = 0;
- };
-
- class ConcreteClass : AbstractClass {
- public:
- ConcreteClass(int n=42) : m_int(n) {}
- ~ConcreteClass() {}
-
- virtual void abstract_method() {
- std::cout << "called concrete method" << std::endl;
- }
-
- void array_method(int* ad, int size) {
- for (int i=0; i < size; ++i)
- std::cout << ad[i] << ' ';
- std::cout << std::endl;
- }
-
- void array_method(double* ad, int size) {
- for (int i=0; i < size; ++i)
- std::cout << ad[i] << ' ';
- std::cout << std::endl;
- }
-
- AbstractClass* show_autocast() {
- return this;
- }
-
- operator const char*() {
- return "Hello operator const char*!";
- }
-
- public:
- int m_int;
- };
-
- namespace Namespace {
-
- class ConcreteClass {
- public:
- class NestedClass {
- public:
- std::vector<int> m_v;
- };
-
- };
-
- } // namespace Namespace
diff --git a/pypy/doc/cpython_differences.rst b/pypy/doc/cpython_differences.rst
--- a/pypy/doc/cpython_differences.rst
+++ b/pypy/doc/cpython_differences.rst
@@ -330,6 +330,8 @@
- ``frozenset`` (empty frozenset only)
+ - unbound method objects (for Python 2 only)
+
This change requires some changes to ``id`` as well. ``id`` fulfills the
following condition: ``x is y <=> id(x) == id(y)``. Therefore ``id`` of the
above types will return a value that is computed from the argument, and can
@@ -427,7 +429,8 @@
* the ``__builtins__`` name is always referencing the ``__builtin__`` module,
never a dictionary as it sometimes is in CPython. Assigning to
- ``__builtins__`` has no effect.
+ ``__builtins__`` has no effect. (For usages of tools like
+ RestrictedPython, see `issue #2653`_.)
* directly calling the internal magic methods of a few built-in types
with invalid arguments may have a slightly different result. For
@@ -533,7 +536,12 @@
or ``float`` subtypes. Currently PyPy does not support the
``__class__`` attribute assignment for any non heaptype subtype.
+* In PyPy, module and class dictionaries are optimized under the assumption
+ that deleting attributes from them is rare. Because of this, e.g.
+ ``del foo.bar`` where ``foo`` is a module (or class) that contains the
+ function ``bar``, is significantly slower than CPython.
+
.. _`is ignored in PyPy`: http://bugs.python.org/issue14621
.. _`little point`: http://events.ccc.de/congress/2012/Fahrplan/events/5152.en.html
.. _`#2072`: https://bitbucket.org/pypy/pypy/issue/2072/
-
+.. _`issue #2653`: https://bitbucket.org/pypy/pypy/issues/2653/
diff --git a/pypy/doc/extending.rst b/pypy/doc/extending.rst
--- a/pypy/doc/extending.rst
+++ b/pypy/doc/extending.rst
@@ -12,7 +12,7 @@
* Write them in pure Python and use ctypes_.
-* Write them in C++ and bind them through :doc:`cppyy <cppyy>` using Cling.
+* Write them in C++ and bind them through cppyy_ using Cling.
* Write them as `RPython mixed modules`_.
@@ -61,29 +61,22 @@
.. _libffi: http://sourceware.org/libffi/
-Cling and cppyy
----------------
+cppyy
+-----
-The builtin :doc:`cppyy <cppyy>` module uses reflection information, provided by
-`Cling`_ (which needs to be `installed separately`_), of C/C++ code to
-automatically generate bindings at runtime.
-In Python, classes and functions are always runtime structures, so when they
-are generated matters not for performance.
-However, if the backend itself is capable of dynamic behavior, it is a much
-better functional match, allowing tighter integration and more natural
-language mappings.
+For C++, _cppyy_ is an automated bindings generator available for both
+PyPy and CPython.
More information about the pypy-commit
mailing list