[pypy-commit] pypy py3k-update: hg merge default
rlamy
pypy.commits at gmail.com
Sun May 1 13:11:32 EDT 2016
Author: Ronan Lamy <ronan.lamy at gmail.com>
Branch: py3k-update
Changeset: r84107:db30c99ce18e
Date: 2016-05-01 18:10 +0100
http://bitbucket.org/pypy/pypy/changeset/db30c99ce18e/
Log: hg merge default
diff --git a/.hgtags b/.hgtags
--- a/.hgtags
+++ b/.hgtags
@@ -21,3 +21,4 @@
246c9cf22037b11dc0e8c29ce3f291d3b8c5935a release-5.0
bbd45126bc691f669c4ebdfbd74456cd274c6b92 release-5.0.1
3260adbeba4a8b6659d1cc0d0b41f266769b74da release-5.1
+b0a649e90b6642251fb4a765fe5b27a97b1319a9 release-5.1.1
diff --git a/pypy/doc/faq.rst b/pypy/doc/faq.rst
--- a/pypy/doc/faq.rst
+++ b/pypy/doc/faq.rst
@@ -117,13 +117,22 @@
On which platforms does PyPy run?
---------------------------------
-PyPy is regularly and extensively tested on Linux machines. It mostly
+PyPy currently supports:
+
+ * **x86** machines on most common operating systems
+ (Linux 32/64 bits, Mac OS X 64 bits, Windows 32 bits, OpenBSD, FreeBSD),
+
+ * newer **ARM** hardware (ARMv6 or ARMv7, with VFPv3) running Linux,
+
+ * big- and little-endian variants of **PPC64** running Linux,
+
+ * **s390x** running Linux
+
+PyPy is regularly and extensively tested on Linux machines. It
works on Mac and Windows: it is tested there, but most of us are running
-Linux so fixes may depend on 3rd-party contributions. PyPy's JIT
-works on x86 (32-bit or 64-bit) and on ARM (ARMv6 or ARMv7).
-Support for POWER (64-bit) is stalled at the moment.
+Linux so fixes may depend on 3rd-party contributions.
-To bootstrap from sources, PyPy can use either CPython (2.6 or 2.7) or
+To bootstrap from sources, PyPy can use either CPython 2.7 or
another (e.g. older) PyPy. Cross-translation is not really supported:
e.g. to build a 32-bit PyPy, you need to have a 32-bit environment.
Cross-translation is only explicitly supported between a 32-bit Intel
diff --git a/pypy/doc/release-5.1.1.rst b/pypy/doc/release-5.1.1.rst
new file mode 100644
--- /dev/null
+++ b/pypy/doc/release-5.1.1.rst
@@ -0,0 +1,45 @@
+==========
+PyPy 5.1.1
+==========
+
+We have released a bugfix for PyPy 5.1, due to a regression_ in
+installing third-party packages dependent on numpy (using our numpy fork
+available at https://bitbucket.org/pypy/numpy ).
+
+Thanks to those who reported the issue. We also fixed a regression in
+translating PyPy which increased the memory required to translate. Improvement
+will be noticed by downstream packagers and those who translate rather than
+download pre-built binaries.
+
+.. _regression: https://bitbucket.org/pypy/pypy/issues/2282
+
+What is PyPy?
+=============
+
+PyPy is a very compliant Python interpreter, almost a drop-in replacement for
+CPython 2.7. It's fast (`PyPy and CPython 2.7.x`_ performance comparison)
+due to its integrated tracing JIT compiler.
+
+We also welcome developers of other
+`dynamic languages`_ to see what RPython can do for them.
+
+This release supports:
+
+ * **x86** machines on most common operating systems
+ (Linux 32/64, Mac OS X 64, Windows 32, OpenBSD, FreeBSD),
+
+ * newer **ARM** hardware (ARMv6 or ARMv7, with VFPv3) running Linux,
+
+ * big- and little-endian variants of **PPC64** running Linux,
+
+ * **s390x** running Linux
+
+.. _`PyPy and CPython 2.7.x`: http://speed.pypy.org
+.. _`dynamic languages`: http://pypyjs.org
+
+Please update, and continue to help us make PyPy better.
+
+Cheers
+
+The PyPy Team
+
diff --git a/pypy/doc/whatsnew-head.rst b/pypy/doc/whatsnew-head.rst
--- a/pypy/doc/whatsnew-head.rst
+++ b/pypy/doc/whatsnew-head.rst
@@ -24,7 +24,11 @@
remove-objspace-options.
.. branch: cpyext-for-merge
-Update cpyext C-API support:
+
+Update cpyext C-API support. After this branch, we are almost able to support
+upstream numpy via cpyext, so we created (yet another) fork of numpy at
+github.com/pypy/numpy with the needed changes. Among the significant changes
+to cpyext:
- allow c-snippet tests to be run with -A so we can verify we are compatible
- fix many edge cases exposed by fixing tests to run with -A
- issequence() logic matches cpython
@@ -40,6 +44,20 @@
- rewrite slot assignment for typeobjects
- improve tracking of PyObject to rpython object mapping
- support tp_as_{number, sequence, mapping, buffer} slots
-After this branch, we are almost able to support upstream numpy via cpyext, so
-we created (yet another) fork of numpy at github.com/pypy/numpy with the needed
-changes
+
+(makes the pypy-c bigger; this was fixed subsequently by the
+share-cpyext-cpython-api branch)
+
+.. branch: share-mapdict-methods-2
+
+Reduce generated code for subclasses by using the same function objects in all
+generated subclasses.
+
+.. branch: share-cpyext-cpython-api
+
+.. branch: cpyext-auto-gil
+
+CPyExt tweak: instead of "GIL not held when a CPython C extension module
+calls PyXxx", we now silently acquire/release the GIL. Helps with
+CPython C extension modules that call some PyXxx() functions without
+holding the GIL (arguably, they are theoretically buggy).
diff --git a/pypy/interpreter/app_main.py b/pypy/interpreter/app_main.py
--- a/pypy/interpreter/app_main.py
+++ b/pypy/interpreter/app_main.py
@@ -87,7 +87,11 @@
"""
try:
# run it
- f(*fargs, **fkwds)
+ try:
+ f(*fargs, **fkwds)
+ finally:
+ sys.settrace(None)
+ sys.setprofile(None)
except SystemExit as e:
handle_sys_exit(e)
except BaseException as e:
diff --git a/pypy/interpreter/test/test_typedef.py b/pypy/interpreter/test/test_typedef.py
--- a/pypy/interpreter/test/test_typedef.py
+++ b/pypy/interpreter/test/test_typedef.py
@@ -364,6 +364,26 @@
""")
assert seen == [1]
+ def test_mapdict_number_of_slots(self):
+ space = self.space
+ a, b, c = space.unpackiterable(space.appexec([], """():
+ class A(object):
+ pass
+ a = A()
+ a.x = 1
+ class B:
+ pass
+ b = B()
+ b.x = 1
+ class C(int):
+ pass
+ c = C(1)
+ c.x = 1
+ return a, b, c
+ """), 3)
+ assert not hasattr(a, "storage")
+ assert not hasattr(b, "storage")
+ assert hasattr(c, "storage")
class AppTestTypeDef:
diff --git a/pypy/interpreter/typedef.py b/pypy/interpreter/typedef.py
--- a/pypy/interpreter/typedef.py
+++ b/pypy/interpreter/typedef.py
@@ -103,44 +103,63 @@
# we need two subclasses of the app-level type, one to add mapdict, and then one
# to add del to not slow down the GC.
-def get_unique_interplevel_subclass(config, cls, needsdel=False):
+def get_unique_interplevel_subclass(space, cls, needsdel=False):
"NOT_RPYTHON: initialization-time only"
if hasattr(cls, '__del__') and getattr(cls, "handle_del_manually", False):
needsdel = False
assert cls.typedef.acceptable_as_base_class
- key = config, cls, needsdel
+ key = space, cls, needsdel
try:
return _subclass_cache[key]
except KeyError:
# XXX can save a class if cls already has a __del__
if needsdel:
- cls = get_unique_interplevel_subclass(config, cls, False)
- subcls = _getusercls(config, cls, needsdel)
+ cls = get_unique_interplevel_subclass(space, cls, False)
+ subcls = _getusercls(space, cls, needsdel)
assert key not in _subclass_cache
_subclass_cache[key] = subcls
return subcls
get_unique_interplevel_subclass._annspecialcase_ = "specialize:memo"
_subclass_cache = {}
-def _getusercls(config, cls, wants_del, reallywantdict=False):
+def _getusercls(space, cls, wants_del, reallywantdict=False):
from rpython.rlib import objectmodel
+ from pypy.objspace.std.objectobject import W_ObjectObject
+ from pypy.module.__builtin__.interp_classobj import W_InstanceObject
from pypy.objspace.std.mapdict import (BaseUserClassMapdict,
MapdictDictSupport, MapdictWeakrefSupport,
- _make_storage_mixin_size_n)
+ _make_storage_mixin_size_n, MapdictStorageMixin)
typedef = cls.typedef
name = cls.__name__ + "User"
- mixins_needed = [BaseUserClassMapdict, _make_storage_mixin_size_n()]
+ mixins_needed = []
+ if cls is W_ObjectObject or cls is W_InstanceObject:
+ mixins_needed.append(_make_storage_mixin_size_n())
+ else:
+ mixins_needed.append(MapdictStorageMixin)
+ copy_methods = [BaseUserClassMapdict]
if reallywantdict or not typedef.hasdict:
# the type has no dict, mapdict to provide the dict
- mixins_needed.append(MapdictDictSupport)
+ copy_methods.append(MapdictDictSupport)
name += "Dict"
if not typedef.weakrefable:
# the type does not support weakrefs yet, mapdict to provide weakref
# support
- mixins_needed.append(MapdictWeakrefSupport)
+ copy_methods.append(MapdictWeakrefSupport)
name += "Weakrefable"
if wants_del:
+ # This subclass comes with an app-level __del__. To handle
+ # it, we make an RPython-level __del__ method. This
+ # RPython-level method is called directly by the GC and it
+ # cannot do random things (calling the app-level __del__ would
+ # be "random things"). So instead, we just call here
+ # enqueue_for_destruction(), and the app-level __del__ will be
+ # called later at a safe point (typically between bytecodes).
+ # If there is also an inherited RPython-level __del__, it is
+ # called afterwards---not immediately! This base
+ # RPython-level __del__ is supposed to run only when the
+ # object is not reachable any more. NOTE: it doesn't fully
+ # work: see issue #2287.
name += "Del"
parent_destructor = getattr(cls, '__del__', None)
def call_parent_del(self):
@@ -148,14 +167,14 @@
parent_destructor(self)
def call_applevel_del(self):
assert isinstance(self, subcls)
- self.space.userdel(self)
+ space.userdel(self)
class Proto(object):
def __del__(self):
self.clear_all_weakrefs()
- self.enqueue_for_destruction(self.space, call_applevel_del,
+ self.enqueue_for_destruction(space, call_applevel_del,
'method __del__ of ')
if parent_destructor is not None:
- self.enqueue_for_destruction(self.space, call_parent_del,
+ self.enqueue_for_destruction(space, call_parent_del,
'internal destructor of ')
mixins_needed.append(Proto)
@@ -163,10 +182,17 @@
user_overridden_class = True
for base in mixins_needed:
objectmodel.import_from_mixin(base)
+ for copycls in copy_methods:
+ _copy_methods(copycls, subcls)
del subcls.base
subcls.__name__ = name
return subcls
+def _copy_methods(copycls, subcls):
+ for key, value in copycls.__dict__.items():
+ if (not key.startswith('__') or key == '__del__'):
+ setattr(subcls, key, value)
+
# ____________________________________________________________
diff --git a/pypy/module/_io/test/test_bufferedio.py b/pypy/module/_io/test/test_bufferedio.py
--- a/pypy/module/_io/test/test_bufferedio.py
+++ b/pypy/module/_io/test/test_bufferedio.py
@@ -318,7 +318,6 @@
class MyIO(_io.BufferedWriter):
def __del__(self):
record.append(1)
- super(MyIO, self).__del__()
def close(self):
record.append(2)
super(MyIO, self).close()
diff --git a/pypy/module/_io/test/test_io.py b/pypy/module/_io/test/test_io.py
--- a/pypy/module/_io/test/test_io.py
+++ b/pypy/module/_io/test/test_io.py
@@ -88,7 +88,6 @@
class MyIO(io.IOBase):
def __del__(self):
record.append(1)
- super(MyIO, self).__del__()
def close(self):
record.append(2)
super(MyIO, self).close()
diff --git a/pypy/module/cpyext/api.py b/pypy/module/cpyext/api.py
--- a/pypy/module/cpyext/api.py
+++ b/pypy/module/cpyext/api.py
@@ -10,6 +10,7 @@
from rpython.rtyper.lltypesystem import ll2ctypes
from rpython.rtyper.annlowlevel import llhelper
from rpython.rlib.objectmodel import we_are_translated, keepalive_until_here
+from rpython.rlib.objectmodel import dont_inline
from rpython.translator import cdir
from rpython.translator.tool.cbuild import ExternalCompilationInfo
from rpython.translator.gensupp import NameManager
@@ -255,7 +256,7 @@
class ApiFunction:
def __init__(self, argtypes, restype, callable, error=_NOT_SPECIFIED,
- c_name=None, gil=None, result_borrowed=False):
+ c_name=None, gil=None, result_borrowed=False, result_is_ll=False):
self.argtypes = argtypes
self.restype = restype
self.functype = lltype.Ptr(lltype.FuncType(argtypes, restype))
@@ -276,6 +277,9 @@
assert len(self.argnames) == len(self.argtypes)
self.gil = gil
self.result_borrowed = result_borrowed
+ self.result_is_ll = result_is_ll
+ if result_is_ll: # means 'returns a low-level PyObject pointer'
+ assert is_PyObject(restype)
#
def get_llhelper(space):
return llhelper(self.functype, self.get_wrapper(space))
@@ -300,7 +304,7 @@
DEFAULT_HEADER = 'pypy_decl.h'
def cpython_api(argtypes, restype, error=_NOT_SPECIFIED, header=DEFAULT_HEADER,
- gil=None, result_borrowed=False):
+ gil=None, result_borrowed=False, result_is_ll=False):
"""
Declares a function to be exported.
- `argtypes`, `restype` are lltypes and describe the function signature.
@@ -339,7 +343,8 @@
c_name = func_name
api_function = ApiFunction(argtypes, restype, func, error,
c_name=c_name, gil=gil,
- result_borrowed=result_borrowed)
+ result_borrowed=result_borrowed,
+ result_is_ll=result_is_ll)
func.api_func = api_function
if error is _NOT_SPECIFIED:
@@ -614,6 +619,9 @@
def is_PyObject(TYPE):
if not isinstance(TYPE, lltype.Ptr):
return False
+ if TYPE == PyObject:
+ return True
+ assert not isinstance(TYPE.TO, lltype.ForwardReference)
return hasattr(TYPE.TO, 'c_ob_refcnt') and hasattr(TYPE.TO, 'c_ob_type')
# a pointer to PyObject
@@ -670,37 +678,161 @@
pypy_debug_catch_fatal_exception = rffi.llexternal('pypy_debug_catch_fatal_exception', [], lltype.Void)
+
+# ____________________________________________________________
+
+
+class WrapperCache(object):
+ def __init__(self, space):
+ self.space = space
+ self.wrapper_gens = {} # {signature: WrapperGen()}
+ self.stats = [0, 0]
+
+class WrapperGen(object):
+ wrapper_second_level = None
+
+ def __init__(self, space, signature):
+ self.space = space
+ self.signature = signature
+ self.callable2name = []
+
+ def make_wrapper(self, callable):
+ self.callable2name.append((callable, callable.__name__))
+ if self.wrapper_second_level is None:
+ self.wrapper_second_level = make_wrapper_second_level(
+ self.space, self.callable2name, *self.signature)
+ wrapper_second_level = self.wrapper_second_level
+
+ def wrapper(*args):
+ # no GC here, not even any GC object
+ args += (callable,)
+ return wrapper_second_level(*args)
+
+ wrapper.__name__ = "wrapper for %r" % (callable, )
+ return wrapper
+
+
# Make the wrapper for the cases (1) and (2)
def make_wrapper(space, callable, gil=None):
"NOT_RPYTHON"
+ # This logic is obscure, because we try to avoid creating one
+ # big wrapper() function for every callable. Instead we create
+ # only one per "signature".
+
+ argnames = callable.api_func.argnames
+ argtypesw = zip(callable.api_func.argtypes,
+ [_name.startswith("w_") for _name in argnames])
+ error_value = getattr(callable.api_func, "error_value", CANNOT_FAIL)
+ if (isinstance(callable.api_func.restype, lltype.Ptr)
+ and error_value is not CANNOT_FAIL):
+ assert lltype.typeOf(error_value) == callable.api_func.restype
+ assert not error_value # only support error=NULL
+ error_value = 0 # because NULL is not hashable
+
+ if callable.api_func.result_is_ll:
+ result_kind = "L"
+ elif callable.api_func.result_borrowed:
+ result_kind = "B" # note: 'result_borrowed' is ignored if we also
+ else: # say 'result_is_ll=True' (in this case it's
+ result_kind = "." # up to you to handle refcounting anyway)
+
+ signature = (tuple(argtypesw),
+ callable.api_func.restype,
+ result_kind,
+ error_value,
+ gil)
+
+ cache = space.fromcache(WrapperCache)
+ cache.stats[1] += 1
+ try:
+ wrapper_gen = cache.wrapper_gens[signature]
+ except KeyError:
+ print signature
+ wrapper_gen = cache.wrapper_gens[signature] = WrapperGen(space,
+ signature)
+ cache.stats[0] += 1
+ #print 'Wrapper cache [wrappers/total]:', cache.stats
+ return wrapper_gen.make_wrapper(callable)
+
+
+ at dont_inline
+def deadlock_error(funcname):
+ fatalerror_notb("GIL deadlock detected when a CPython C extension "
+ "module calls '%s'" % (funcname,))
+
+ at dont_inline
+def no_gil_error(funcname):
+ fatalerror_notb("GIL not held when a CPython C extension "
+ "module calls '%s'" % (funcname,))
+
+ at dont_inline
+def not_supposed_to_fail(funcname):
+ raise SystemError("The function '%s' was not supposed to fail"
+ % (funcname,))
+
+ at dont_inline
+def unexpected_exception(funcname, e, tb):
+ print 'Fatal error in cpyext, CPython compatibility layer, calling',funcname
+ print 'Either report a bug or consider not using this particular extension'
+ if not we_are_translated():
+ if tb is None:
+ tb = sys.exc_info()[2]
+ import traceback
+ traceback.print_exc()
+ if sys.stdout == sys.__stdout__:
+ import pdb; pdb.post_mortem(tb)
+ # we can't do much here, since we're in ctypes, swallow
+ else:
+ print str(e)
+ pypy_debug_catch_fatal_exception()
+ assert False
+
+def make_wrapper_second_level(space, callable2name, argtypesw, restype,
+ result_kind, error_value, gil):
from rpython.rlib import rgil
- names = callable.api_func.argnames
- argtypes_enum_ui = unrolling_iterable(enumerate(zip(callable.api_func.argtypes,
- [name.startswith("w_") for name in names])))
- fatal_value = callable.api_func.restype._defl()
+ argtypes_enum_ui = unrolling_iterable(enumerate(argtypesw))
+ fatal_value = restype._defl()
+ gil_auto_workaround = (gil is None) # automatically detect when we don't
+ # have the GIL, and acquire/release it
gil_acquire = (gil == "acquire" or gil == "around")
gil_release = (gil == "release" or gil == "around")
pygilstate_ensure = (gil == "pygilstate_ensure")
pygilstate_release = (gil == "pygilstate_release")
assert (gil is None or gil_acquire or gil_release
or pygilstate_ensure or pygilstate_release)
- deadlock_error = ("GIL deadlock detected when a CPython C extension "
- "module calls %r" % (callable.__name__,))
- no_gil_error = ("GIL not held when a CPython C extension "
- "module calls %r" % (callable.__name__,))
+ expected_nb_args = len(argtypesw) + pygilstate_ensure
- @specialize.ll()
- def wrapper(*args):
+ if isinstance(restype, lltype.Ptr) and error_value == 0:
+ error_value = lltype.nullptr(restype.TO)
+ if error_value is not CANNOT_FAIL:
+ assert lltype.typeOf(error_value) == lltype.typeOf(fatal_value)
+
+ def invalid(err):
+ "NOT_RPYTHON: translation-time crash if this ends up being called"
+ raise ValueError(err)
+ invalid.__name__ = 'invalid_%s' % (callable2name[0][1],)
+
+ def nameof(callable):
+ for c, n in callable2name:
+ if c is callable:
+ return n
+ return '<unknown function>'
+ nameof._dont_inline_ = True
+
+ def wrapper_second_level(*args):
from pypy.module.cpyext.pyobject import make_ref, from_ref, is_pyobj
from pypy.module.cpyext.pyobject import as_pyobj
# we hope that malloc removal removes the newtuple() that is
# inserted exactly here by the varargs specializer
+ callable = args[-1]
+ args = args[:-1]
# see "Handling of the GIL" above (careful, we don't have the GIL here)
tid = rthread.get_or_make_ident()
- if gil_acquire:
+ _gil_auto = (gil_auto_workaround and cpyext_glob_tid_ptr[0] != tid)
+ if gil_acquire or _gil_auto:
if cpyext_glob_tid_ptr[0] == tid:
- fatalerror_notb(deadlock_error)
+ deadlock_error(nameof(callable))
rgil.acquire()
assert cpyext_glob_tid_ptr[0] == 0
elif pygilstate_ensure:
@@ -713,7 +845,7 @@
args += (pystate.PyGILState_UNLOCKED,)
else:
if cpyext_glob_tid_ptr[0] != tid:
- fatalerror_notb(no_gil_error)
+ no_gil_error(nameof(callable))
cpyext_glob_tid_ptr[0] = 0
rffi.stackcounter.stacks_counter += 1
@@ -724,8 +856,7 @@
try:
if not we_are_translated() and DEBUG_WRAPPER:
print >>sys.stderr, callable,
- assert len(args) == (len(callable.api_func.argtypes) +
- pygilstate_ensure)
+ assert len(args) == expected_nb_args
for i, (typ, is_wrapped) in argtypes_enum_ui:
arg = args[i]
if is_PyObject(typ) and is_wrapped:
@@ -759,41 +890,31 @@
failed = False
if failed:
- error_value = callable.api_func.error_value
if error_value is CANNOT_FAIL:
- raise SystemError("The function '%s' was not supposed to fail"
- % (callable.__name__,))
+ raise not_supposed_to_fail(nameof(callable))
retval = error_value
- elif is_PyObject(callable.api_func.restype):
+ elif is_PyObject(restype):
if is_pyobj(result):
- retval = result
+ if result_kind != "L":
+ raise invalid("missing result_is_ll=True")
else:
- if result is not None:
- if callable.api_func.result_borrowed:
- retval = as_pyobj(space, result)
- else:
- retval = make_ref(space, result)
- retval = rffi.cast(callable.api_func.restype, retval)
+ if result_kind == "L":
+ raise invalid("result_is_ll=True but not ll PyObject")
+ if result_kind == "B": # borrowed
+ result = as_pyobj(space, result)
else:
- retval = lltype.nullptr(PyObject.TO)
- elif callable.api_func.restype is not lltype.Void:
- retval = rffi.cast(callable.api_func.restype, result)
+ result = make_ref(space, result)
+ retval = rffi.cast(restype, result)
+
+ elif restype is not lltype.Void:
+ retval = rffi.cast(restype, result)
+
except Exception, e:
- print 'Fatal error in cpyext, CPython compatibility layer, calling', callable.__name__
- print 'Either report a bug or consider not using this particular extension'
- if not we_are_translated():
- if tb is None:
- tb = sys.exc_info()[2]
- import traceback
- traceback.print_exc()
- if sys.stdout == sys.__stdout__:
- import pdb; pdb.post_mortem(tb)
- # we can't do much here, since we're in ctypes, swallow
- else:
- print str(e)
- pypy_debug_catch_fatal_exception()
- assert False
+ unexpected_exception(nameof(callable), e, tb)
+ return fatal_value
+
+ assert lltype.typeOf(retval) == restype
rffi.stackcounter.stacks_counter -= 1
# see "Handling of the GIL" above
@@ -803,16 +924,16 @@
arg = rffi.cast(lltype.Signed, args[-1])
unlock = (arg == pystate.PyGILState_UNLOCKED)
else:
- unlock = gil_release
+ unlock = gil_release or _gil_auto
if unlock:
rgil.release()
else:
cpyext_glob_tid_ptr[0] = tid
return retval
- callable._always_inline_ = 'try'
- wrapper.__name__ = "wrapper for %r" % (callable, )
- return wrapper
+
+ wrapper_second_level._dont_inline_ = True
+ return wrapper_second_level
def process_va_name(name):
return name.replace('*', '_star')
diff --git a/pypy/module/cpyext/bytesobject.py b/pypy/module/cpyext/bytesobject.py
--- a/pypy/module/cpyext/bytesobject.py
+++ b/pypy/module/cpyext/bytesobject.py
@@ -6,7 +6,7 @@
from pypy.module.cpyext.pyerrors import PyErr_BadArgument
from pypy.module.cpyext.pyobject import (
PyObject, PyObjectP, Py_DecRef, make_ref, from_ref, track_reference,
- make_typedescr, get_typedescr, as_pyobj, Py_IncRef)
+ make_typedescr, get_typedescr, as_pyobj, Py_IncRef, get_w_obj_and_decref)
##
## Implementation of PyBytesObject
@@ -124,7 +124,7 @@
#_______________________________________________________________________
- at cpython_api([CONST_STRING, Py_ssize_t], PyObject)
+ at cpython_api([CONST_STRING, Py_ssize_t], PyObject, result_is_ll=True)
def PyBytes_FromStringAndSize(space, char_p, length):
if char_p:
s = rffi.charpsize2str(char_p, length)
@@ -221,7 +221,7 @@
def _PyBytes_Eq(space, w_str1, w_str2):
return space.eq_w(w_str1, w_str2)
- at cpython_api([PyObjectP, PyObject], lltype.Void)
+ at cpython_api([PyObjectP, PyObject], lltype.Void, error=None)
def PyBytes_Concat(space, ref, w_newpart):
"""Create a new string object in *string containing the contents of newpart
appended to string; the caller will own the new reference. The reference to
@@ -229,25 +229,25 @@
the old reference to string will still be discarded and the value of
*string will be set to NULL; the appropriate exception will be set."""
- if not ref[0]:
+ old = ref[0]
+ if not old:
return
- if w_newpart is None or not PyBytes_Check(space, ref[0]) or \
- not PyBytes_Check(space, w_newpart):
- Py_DecRef(space, ref[0])
- ref[0] = lltype.nullptr(PyObject.TO)
- return
- w_str = from_ref(space, ref[0])
- w_newstr = space.add(w_str, w_newpart)
- ref[0] = make_ref(space, w_newstr)
- Py_IncRef(space, ref[0])
+ ref[0] = lltype.nullptr(PyObject.TO)
+ w_str = get_w_obj_and_decref(space, old)
+ if w_newpart is not None and PyBytes_Check(space, old):
+ # XXX: should use buffer protocol
+ w_newstr = space.add(w_str, w_newpart)
+ ref[0] = make_ref(space, w_newstr)
- at cpython_api([PyObjectP, PyObject], lltype.Void)
+ at cpython_api([PyObjectP, PyObject], lltype.Void, error=None)
def PyBytes_ConcatAndDel(space, ref, newpart):
"""Create a new string object in *string containing the contents of newpart
appended to string. This version decrements the reference count of newpart."""
- PyBytes_Concat(space, ref, newpart)
- Py_DecRef(space, newpart)
+ try:
+ PyBytes_Concat(space, ref, newpart)
+ finally:
+ Py_DecRef(space, newpart)
@cpython_api([PyObject, PyObject], PyObject)
def _PyBytes_Join(space, w_sep, w_seq):
diff --git a/pypy/module/cpyext/frameobject.py b/pypy/module/cpyext/frameobject.py
--- a/pypy/module/cpyext/frameobject.py
+++ b/pypy/module/cpyext/frameobject.py
@@ -67,7 +67,8 @@
track_reference(space, py_obj, w_obj)
return w_obj
- at cpython_api([PyThreadState, PyCodeObject, PyObject, PyObject], PyFrameObject)
+ at cpython_api([PyThreadState, PyCodeObject, PyObject, PyObject], PyFrameObject,
+ result_is_ll=True)
def PyFrame_New(space, tstate, w_code, w_globals, w_locals):
typedescr = get_typedescr(PyFrame.typedef)
py_obj = typedescr.allocate(space, space.gettypeobject(PyFrame.typedef))
diff --git a/pypy/module/cpyext/ndarrayobject.py b/pypy/module/cpyext/ndarrayobject.py
--- a/pypy/module/cpyext/ndarrayobject.py
+++ b/pypy/module/cpyext/ndarrayobject.py
@@ -239,9 +239,7 @@
gufunctype = lltype.Ptr(ufuncs.GenericUfunc)
-# XXX single rffi.CArrayPtr(gufunctype) does not work, this does, is there
-# a problem with casting function pointers?
- at cpython_api([rffi.CArrayPtr(rffi.CArrayPtr(gufunctype)), rffi.VOIDP, rffi.CCHARP, Py_ssize_t, Py_ssize_t,
+ at cpython_api([rffi.CArrayPtr(gufunctype), rffi.VOIDP, rffi.CCHARP, Py_ssize_t, Py_ssize_t,
Py_ssize_t, Py_ssize_t, rffi.CCHARP, rffi.CCHARP, Py_ssize_t,
rffi.CCHARP], PyObject, header=HEADER)
def PyUFunc_FromFuncAndDataAndSignature(space, funcs, data, types, ntypes,
@@ -256,7 +254,7 @@
funcs_w = [None] * ntypes
dtypes_w = [None] * ntypes * (nin + nout)
for i in range(ntypes):
- funcs_w[i] = ufuncs.W_GenericUFuncCaller(rffi.cast(gufunctype, funcs[i]), data)
+ funcs_w[i] = ufuncs.W_GenericUFuncCaller(funcs[i], data)
for i in range(ntypes*(nin+nout)):
dtypes_w[i] = get_dtype_cache(space).dtypes_by_num[ord(types[i])]
w_funcs = space.newlist(funcs_w)
@@ -268,7 +266,7 @@
w_signature, w_identity, w_name, w_doc, stack_inputs=True)
return ufunc_generic
- at cpython_api([rffi.CArrayPtr(rffi.CArrayPtr(gufunctype)), rffi.VOIDP, rffi.CCHARP, Py_ssize_t, Py_ssize_t,
+ at cpython_api([rffi.CArrayPtr(gufunctype), rffi.VOIDP, rffi.CCHARP, Py_ssize_t, Py_ssize_t,
Py_ssize_t, Py_ssize_t, rffi.CCHARP, rffi.CCHARP, Py_ssize_t], PyObject, header=HEADER)
def PyUFunc_FromFuncAndData(space, funcs, data, types, ntypes,
nin, nout, identity, name, doc, check_return):
diff --git a/pypy/module/cpyext/object.py b/pypy/module/cpyext/object.py
--- a/pypy/module/cpyext/object.py
+++ b/pypy/module/cpyext/object.py
@@ -38,11 +38,11 @@
def PyObject_Free(space, ptr):
lltype.free(ptr, flavor='raw')
- at cpython_api([PyTypeObjectPtr], PyObject)
+ at cpython_api([PyTypeObjectPtr], PyObject, result_is_ll=True)
def _PyObject_New(space, type):
return _PyObject_NewVar(space, type, 0)
- at cpython_api([PyTypeObjectPtr, Py_ssize_t], PyObject)
+ at cpython_api([PyTypeObjectPtr, Py_ssize_t], PyObject, result_is_ll=True)
def _PyObject_NewVar(space, type, itemcount):
w_type = from_ref(space, rffi.cast(PyObject, type))
assert isinstance(w_type, W_TypeObject)
@@ -67,7 +67,7 @@
if pto.c_tp_flags & Py_TPFLAGS_HEAPTYPE:
Py_DecRef(space, rffi.cast(PyObject, pto))
- at cpython_api([PyTypeObjectPtr], PyObject)
+ at cpython_api([PyTypeObjectPtr], PyObject, result_is_ll=True)
def _PyObject_GC_New(space, type):
return _PyObject_New(space, type)
@@ -201,7 +201,7 @@
space.delitem(w_obj, w_key)
return 0
- at cpython_api([PyObject, PyTypeObjectPtr], PyObject)
+ at cpython_api([PyObject, PyTypeObjectPtr], PyObject, result_is_ll=True)
def PyObject_Init(space, obj, type):
"""Initialize a newly-allocated object op with its type and initial
reference. Returns the initialized object. If type indicates that the
@@ -215,7 +215,7 @@
obj.c_ob_refcnt = 1
return obj
- at cpython_api([PyVarObject, PyTypeObjectPtr, Py_ssize_t], PyObject)
+ at cpython_api([PyVarObject, PyTypeObjectPtr, Py_ssize_t], PyObject, result_is_ll=True)
def PyObject_InitVar(space, py_obj, type, size):
"""This does everything PyObject_Init() does, and also initializes the
length information for a variable-size object."""
@@ -305,7 +305,7 @@
w_res = PyObject_RichCompare(space, ref1, ref2, opid)
return int(space.is_true(w_res))
- at cpython_api([PyObject], PyObject)
+ at cpython_api([PyObject], PyObject, result_is_ll=True)
def PyObject_SelfIter(space, ref):
"""Undocumented function, this is what CPython does."""
Py_IncRef(space, ref)
diff --git a/pypy/module/cpyext/pystate.py b/pypy/module/cpyext/pystate.py
--- a/pypy/module/cpyext/pystate.py
+++ b/pypy/module/cpyext/pystate.py
@@ -172,8 +172,16 @@
py_fatalerror("PyThreadState_Get: no current thread")
return ts
- at cpython_api([], PyObject, error=CANNOT_FAIL)
+ at cpython_api([], PyObject, result_is_ll=True, error=CANNOT_FAIL)
def PyThreadState_GetDict(space):
+ """Return a dictionary in which extensions can store thread-specific state
+ information. Each extension should use a unique key to use to store state in
+ the dictionary. It is okay to call this function when no current thread state
+ is available. If this function returns NULL, no exception has been raised and
+ the caller should assume no current thread state is available.
+
+ Previously this could only be called when a current thread is active, and NULL
+ meant that an exception was raised."""
state = space.fromcache(InterpreterState)
return state.get_thread_state(space).c_dict
diff --git a/pypy/module/cpyext/test/test_bytesobject.py b/pypy/module/cpyext/test/test_bytesobject.py
--- a/pypy/module/cpyext/test/test_bytesobject.py
+++ b/pypy/module/cpyext/test/test_bytesobject.py
@@ -3,7 +3,7 @@
from pypy.module.cpyext.test.test_api import BaseApiTest
from pypy.module.cpyext.test.test_cpyext import AppTestCpythonExtensionBase
from pypy.module.cpyext.bytesobject import new_empty_str, PyBytesObject
-from pypy.module.cpyext.api import PyObjectP, PyObject, Py_ssize_tP
+from pypy.module.cpyext.api import PyObjectP, PyObject, Py_ssize_tP, generic_cpy_call
from pypy.module.cpyext.pyobject import Py_DecRef, from_ref, make_ref
from pypy.module.cpyext.typeobjectdefs import PyTypeObjectPtr
@@ -145,6 +145,7 @@
"""
PyObject ** v;
PyObject * left = PyTuple_GetItem(args, 0);
+ Py_INCREF(left); /* the reference will be stolen! */
v = &left;
PyBytes_Concat(v, PyTuple_GetItem(args, 1));
return *v;
@@ -221,6 +222,7 @@
assert space.bytes_w(from_ref(space, ptr[0])) == 'abcdef'
api.PyBytes_Concat(ptr, space.w_None)
assert not ptr[0]
+ api.PyErr_Clear()
ptr[0] = lltype.nullptr(PyObject.TO)
api.PyBytes_Concat(ptr, space.wrapbytes('def')) # should not crash
lltype.free(ptr, flavor='raw')
diff --git a/pypy/module/cpyext/test/test_datetime.py b/pypy/module/cpyext/test/test_datetime.py
--- a/pypy/module/cpyext/test/test_datetime.py
+++ b/pypy/module/cpyext/test/test_datetime.py
@@ -109,7 +109,7 @@
Py_RETURN_NONE;
"""
)
- ])
+ ], prologue='#include "datetime.h"\n')
import datetime
assert module.get_types() == (datetime.date,
datetime.datetime,
diff --git a/pypy/module/cpyext/test/test_dictobject.py b/pypy/module/cpyext/test/test_dictobject.py
--- a/pypy/module/cpyext/test/test_dictobject.py
+++ b/pypy/module/cpyext/test/test_dictobject.py
@@ -185,6 +185,7 @@
if (!PyArg_ParseTuple(args, "O", &dict))
return NULL;
proxydict = PyDictProxy_New(dict);
+#ifdef PYPY_VERSION // PyDictProxy_Check[Exact] are PyPy-specific.
if (!PyDictProxy_Check(proxydict)) {
Py_DECREF(proxydict);
PyErr_SetNone(PyExc_ValueError);
@@ -195,6 +196,7 @@
PyErr_SetNone(PyExc_ValueError);
return NULL;
}
+#endif // PYPY_VERSION
i = PyObject_Size(proxydict);
Py_DECREF(proxydict);
return PyLong_FromLong(i);
diff --git a/pypy/module/cpyext/test/test_ndarrayobject.py b/pypy/module/cpyext/test/test_ndarrayobject.py
--- a/pypy/module/cpyext/test/test_ndarrayobject.py
+++ b/pypy/module/cpyext/test/test_ndarrayobject.py
@@ -368,7 +368,7 @@
def test_ufunc(self):
if self.runappdirect:
from numpy import arange
- py.test.xfail('why does this segfault on cpython?')
+ py.test.xfail('segfaults on cpython: PyUFunc_API == NULL?')
else:
from _numpypy.multiarray import arange
mod = self.import_extension('foo', [
diff --git a/pypy/module/cpyext/test/test_pyerrors.py b/pypy/module/cpyext/test/test_pyerrors.py
--- a/pypy/module/cpyext/test/test_pyerrors.py
+++ b/pypy/module/cpyext/test/test_pyerrors.py
@@ -355,6 +355,8 @@
assert "in test_PyErr_Display\n" in output
assert "ZeroDivisionError" in output
+ @pytest.mark.skipif(True, reason=
+ "XXX seems to pass, but doesn't: 'py.test -s' shows errors in PyObject_Free")
def test_GetSetExcInfo(self):
import sys
if self.runappdirect and (sys.version_info.major < 3 or
diff --git a/pypy/module/cpyext/test/test_thread.py b/pypy/module/cpyext/test/test_thread.py
--- a/pypy/module/cpyext/test/test_thread.py
+++ b/pypy/module/cpyext/test/test_thread.py
@@ -1,9 +1,12 @@
-import py
+import sys
+
+import py, pytest
from pypy.module.cpyext.test.test_cpyext import AppTestCpythonExtensionBase
class AppTestThread(AppTestCpythonExtensionBase):
+ @pytest.mark.skipif('__pypy__' not in sys.builtin_module_names, reason='pypy only test')
def test_get_thread_ident(self):
module = self.import_extension('foo', [
("get_thread_ident", "METH_NOARGS",
@@ -30,6 +33,7 @@
assert results[0][0] != results[1][0]
+ @pytest.mark.skipif('__pypy__' not in sys.builtin_module_names, reason='pypy only test')
def test_acquire_lock(self):
module = self.import_extension('foo', [
("test_acquire_lock", "METH_NOARGS",
@@ -53,13 +57,14 @@
])
module.test_acquire_lock()
+ @pytest.mark.skipif('__pypy__' not in sys.builtin_module_names, reason='pypy only test')
def test_release_lock(self):
module = self.import_extension('foo', [
("test_release_lock", "METH_NOARGS",
"""
#ifndef PyThread_release_lock
#error "seems we are not accessing PyPy's functions"
-#endif
+#endif
PyThread_type_lock lock = PyThread_allocate_lock();
PyThread_acquire_lock(lock, 1);
PyThread_release_lock(lock);
@@ -74,6 +79,7 @@
])
module.test_release_lock()
+ @pytest.mark.skipif('__pypy__' not in sys.builtin_module_names, reason='pypy only test')
def test_tls(self):
module = self.import_extension('foo', [
("create_key", "METH_NOARGS",
diff --git a/pypy/module/cpyext/test/test_tupleobject.py b/pypy/module/cpyext/test/test_tupleobject.py
--- a/pypy/module/cpyext/test/test_tupleobject.py
+++ b/pypy/module/cpyext/test/test_tupleobject.py
@@ -84,7 +84,14 @@
"""
PyObject *item = PyTuple_New(0);
PyObject *t = PyTuple_New(1);
- if (t->ob_refcnt != 1 || item->ob_refcnt != 1) {
+#ifdef PYPY_VERSION
+ // PyPy starts even empty tuples with a refcount of 1.
+ const int initial_item_refcount = 1;
+#else
+ // CPython can cache ().
+ const int initial_item_refcount = item->ob_refcnt;
+#endif // PYPY_VERSION
+ if (t->ob_refcnt != 1 || item->ob_refcnt != initial_item_refcount) {
PyErr_SetString(PyExc_SystemError, "bad initial refcnt");
return NULL;
}
@@ -94,8 +101,8 @@
PyErr_SetString(PyExc_SystemError, "SetItem: t refcnt != 1");
return NULL;
}
- if (item->ob_refcnt != 1) {
- PyErr_SetString(PyExc_SystemError, "SetItem: item refcnt != 1");
+ if (item->ob_refcnt != initial_item_refcount) {
+ PyErr_SetString(PyExc_SystemError, "SetItem: item refcnt != initial_item_refcount");
return NULL;
}
@@ -109,8 +116,8 @@
PyErr_SetString(PyExc_SystemError, "GetItem: t refcnt != 1");
return NULL;
}
- if (item->ob_refcnt != 1) {
- PyErr_SetString(PyExc_SystemError, "GetItem: item refcnt != 1");
+ if (item->ob_refcnt != initial_item_refcount) {
+ PyErr_SetString(PyExc_SystemError, "GetItem: item refcnt != initial_item_refcount");
return NULL;
}
return t;
diff --git a/pypy/module/cpyext/test/test_unicodeobject.py b/pypy/module/cpyext/test/test_unicodeobject.py
--- a/pypy/module/cpyext/test/test_unicodeobject.py
+++ b/pypy/module/cpyext/test/test_unicodeobject.py
@@ -24,8 +24,11 @@
if(PyUnicode_GetSize(s) != 11) {
result = -PyUnicode_GetSize(s);
}
+#ifdef PYPY_VERSION
+ // Slightly silly test that tp_basicsize is reasonable.
if(s->ob_type->tp_basicsize != sizeof(void*)*6)
result = s->ob_type->tp_basicsize;
+#endif // PYPY_VERSION
Py_DECREF(s);
return PyLong_FromLong(result);
"""),
diff --git a/pypy/module/cpyext/test/test_version.py b/pypy/module/cpyext/test/test_version.py
--- a/pypy/module/cpyext/test/test_version.py
+++ b/pypy/module/cpyext/test/test_version.py
@@ -29,8 +29,6 @@
PyModule_AddIntConstant(m, "py_major_version", PY_MAJOR_VERSION);
PyModule_AddIntConstant(m, "py_minor_version", PY_MINOR_VERSION);
PyModule_AddIntConstant(m, "py_micro_version", PY_MICRO_VERSION);
- PyModule_AddStringConstant(m, "pypy_version", PYPY_VERSION);
- PyModule_AddIntConstant(m, "pypy_version_num", PYPY_VERSION_NUM);
return m;
}
"""
@@ -39,6 +37,18 @@
assert module.py_major_version == sys.version_info.major
assert module.py_minor_version == sys.version_info.minor
assert module.py_micro_version == sys.version_info.micro
+
+ @pytest.mark.skipif('__pypy__' not in sys.builtin_module_names, reason='pypy only test')
+ def test_pypy_versions(self):
+ import sys
+ init = """
+ if (Py_IsInitialized()) {
+ PyObject *m = Py_InitModule("foo", NULL);
+ PyModule_AddStringConstant(m, "pypy_version", PYPY_VERSION);
+ PyModule_AddIntConstant(m, "pypy_version_num", PYPY_VERSION_NUM);
+ }
+ """
+ module = self.import_module(name='foo', init=init)
v = sys.pypy_version_info
s = '%d.%d.%d' % (v[0], v[1], v[2])
if v.releaselevel != 'final':
diff --git a/pypy/module/cpyext/tupleobject.py b/pypy/module/cpyext/tupleobject.py
--- a/pypy/module/cpyext/tupleobject.py
+++ b/pypy/module/cpyext/tupleobject.py
@@ -127,7 +127,7 @@
#_______________________________________________________________________
- at cpython_api([Py_ssize_t], PyObject)
+ at cpython_api([Py_ssize_t], PyObject, result_is_ll=True)
def PyTuple_New(space, size):
return rffi.cast(PyObject, new_empty_tuple(space, size))
@@ -150,7 +150,8 @@
decref(space, old_ref)
return 0
- at cpython_api([PyObject, Py_ssize_t], PyObject, result_borrowed=True)
+ at cpython_api([PyObject, Py_ssize_t], PyObject,
+ result_borrowed=True, result_is_ll=True)
def PyTuple_GetItem(space, ref, index):
if not tuple_check_ref(space, ref):
PyErr_BadInternalCall(space)
diff --git a/pypy/module/cpyext/typeobject.py b/pypy/module/cpyext/typeobject.py
--- a/pypy/module/cpyext/typeobject.py
+++ b/pypy/module/cpyext/typeobject.py
@@ -701,7 +701,7 @@
w_type2 = from_ref(space, rffi.cast(PyObject, b))
return int(abstract_issubclass_w(space, w_type1, w_type2)) #XXX correct?
- at cpython_api([PyTypeObjectPtr, Py_ssize_t], PyObject)
+ at cpython_api([PyTypeObjectPtr, Py_ssize_t], PyObject, result_is_ll=True)
def PyType_GenericAlloc(space, type, nitems):
from pypy.module.cpyext.object import _PyObject_NewVar
return _PyObject_NewVar(space, type, nitems)
diff --git a/pypy/module/cpyext/unicodeobject.py b/pypy/module/cpyext/unicodeobject.py
--- a/pypy/module/cpyext/unicodeobject.py
+++ b/pypy/module/cpyext/unicodeobject.py
@@ -337,7 +337,7 @@
return unicodeobject.encode_object(space, w_unicode, 'unicode-escape', 'strict')
- at cpython_api([CONST_WSTRING, Py_ssize_t], PyObject)
+ at cpython_api([CONST_WSTRING, Py_ssize_t], PyObject, result_is_ll=True)
def PyUnicode_FromUnicode(space, wchar_p, length):
"""Create a Unicode Object from the Py_UNICODE buffer u of the given size. u
may be NULL which causes the contents to be undefined. It is the user's
@@ -351,14 +351,14 @@
else:
return rffi.cast(PyObject, new_empty_unicode(space, length))
- at cpython_api([CONST_WSTRING, Py_ssize_t], PyObject)
+ at cpython_api([CONST_WSTRING, Py_ssize_t], PyObject, result_is_ll=True)
def PyUnicode_FromWideChar(space, wchar_p, length):
"""Create a Unicode object from the wchar_t buffer w of the given size.
Return NULL on failure."""
# PyPy supposes Py_UNICODE == wchar_t
return PyUnicode_FromUnicode(space, wchar_p, length)
- at cpython_api([PyObject, CONST_STRING], PyObject)
+ at cpython_api([PyObject, CONST_STRING], PyObject, result_is_ll=True)
def _PyUnicode_AsDefaultEncodedString(space, w_unicode, errors):
return PyUnicode_AsEncodedString(space, w_unicode, lltype.nullptr(rffi.CCHARP.TO), errors)
@@ -532,7 +532,7 @@
w_str = PyUnicode_FromString(space, s)
return space.new_interned_w_str(w_str)
- at cpython_api([CONST_STRING, Py_ssize_t], PyObject)
+ at cpython_api([CONST_STRING, Py_ssize_t], PyObject, result_is_ll=True)
def PyUnicode_FromStringAndSize(space, s, size):
"""Create a Unicode Object from the char buffer u. The bytes will be
interpreted as being UTF-8 encoded. u may also be NULL which causes the
diff --git a/pypy/module/unicodedata/interp_ucd.py b/pypy/module/unicodedata/interp_ucd.py
--- a/pypy/module/unicodedata/interp_ucd.py
+++ b/pypy/module/unicodedata/interp_ucd.py
@@ -4,7 +4,7 @@
from pypy.interpreter.gateway import interp2app, unwrap_spec
from pypy.interpreter.baseobjspace import W_Root
-from pypy.interpreter.error import OperationError
+from pypy.interpreter.error import OperationError, oefmt
from pypy.interpreter.typedef import TypeDef, interp_attrproperty
from rpython.rlib.rarithmetic import r_longlong
from rpython.rlib.objectmodel import we_are_translated
@@ -34,8 +34,9 @@
# Target is wide build
def unichr_to_code_w(space, w_unichr):
if not space.isinstance_w(w_unichr, space.w_unicode):
- raise OperationError(space.w_TypeError, space.wrap(
- 'argument 1 must be unicode'))
+ raise oefmt(
+ space.w_TypeError, 'argument 1 must be unicode, not %T',
+ w_unichr)
if not we_are_translated() and sys.maxunicode == 0xFFFF:
# Host CPython is narrow build, accept surrogates
@@ -54,8 +55,9 @@
# Target is narrow build
def unichr_to_code_w(space, w_unichr):
if not space.isinstance_w(w_unichr, space.w_unicode):
- raise OperationError(space.w_TypeError, space.wrap(
- 'argument 1 must be unicode'))
+ raise oefmt(
+ space.w_TypeError, 'argument 1 must be unicode, not %T',
+ w_unichr)
if not we_are_translated() and sys.maxunicode > 0xFFFF:
# Host CPython is wide build, forbid surrogates
@@ -187,7 +189,9 @@
@unwrap_spec(form=str)
def normalize(self, space, form, w_unistr):
if not space.isinstance_w(w_unistr, space.w_unicode):
- raise OperationError(space.w_TypeError, space.wrap('argument 2 must be unicode'))
+ raise oefmt(
+ space.w_TypeError, 'argument 2 must be unicode, not %T',
+ w_unistr)
if form == 'NFC':
composed = True
decomposition = self._canon_decomposition
diff --git a/pypy/module/unicodedata/test/test_unicodedata.py b/pypy/module/unicodedata/test/test_unicodedata.py
--- a/pypy/module/unicodedata/test/test_unicodedata.py
+++ b/pypy/module/unicodedata/test/test_unicodedata.py
@@ -78,10 +78,15 @@
import unicodedata
assert unicodedata.lookup("GOTHIC LETTER FAIHU") == '\U00010346'
- def test_normalize(self):
+ def test_normalize_bad_argcount(self):
import unicodedata
raises(TypeError, unicodedata.normalize, 'x')
+ def test_normalize_nonunicode(self):
+ import unicodedata
+ exc_info = raises(TypeError, unicodedata.normalize, 'NFC', b'x')
+ assert 'must be unicode, not' in str(exc_info.value)
+
@py.test.mark.skipif("sys.maxunicode < 0x10ffff")
def test_normalize_wide(self):
import unicodedata
@@ -103,9 +108,10 @@
# For no reason, unicodedata.mirrored() returns an int, not a bool
assert repr(unicodedata.mirrored(' ')) == '0'
- def test_bidirectional(self):
+ def test_bidirectional_not_one_character(self):
import unicodedata
- raises(TypeError, unicodedata.bidirectional, 'xx')
+ exc_info = raises(TypeError, unicodedata.bidirectional, u'xx')
+ assert str(exc_info.value) == 'need a single Unicode character as parameter'
def test_aliases(self):
import unicodedata
diff --git a/pypy/objspace/std/mapdict.py b/pypy/objspace/std/mapdict.py
--- a/pypy/objspace/std/mapdict.py
+++ b/pypy/objspace/std/mapdict.py
@@ -277,7 +277,7 @@
def copy(self, obj):
result = Object()
result.space = self.space
- result._init_empty(self)
+ result._mapdict_init_empty(self)
return result
def length(self):
@@ -286,7 +286,7 @@
def set_terminator(self, obj, terminator):
result = Object()
result.space = self.space
- result._init_empty(terminator)
+ result._mapdict_init_empty(terminator)
return result
def remove_dict_entries(self, obj):
@@ -304,7 +304,7 @@
def materialize_r_dict(self, space, obj, dict_w):
result = Object()
result.space = space
- result._init_empty(self.devolved_dict_terminator)
+ result._mapdict_init_empty(self.devolved_dict_terminator)
return result
@@ -417,11 +417,6 @@
def __repr__(self):
return "<PlainAttribute %s %s %s %r>" % (self.name, self.index, self.storageindex, self.back)
-def _become(w_obj, new_obj):
- # this is like the _become method, really, but we cannot use that due to
- # RPython reasons
- w_obj._set_mapdict_storage_and_map(new_obj.storage, new_obj.map)
-
class MapAttrCache(object):
def __init__(self, space):
SIZE = 1 << space.config.objspace.std.methodcachesizeexp
@@ -457,22 +452,12 @@
# everything that's needed to use mapdict for a user subclass at all.
# This immediately makes slots possible.
- # assumes presence of _init_empty, _mapdict_read_storage,
+ # assumes presence of _get_mapdict_map, _set_mapdict_map
+ # _mapdict_init_empty, _mapdict_read_storage,
# _mapdict_write_storage, _mapdict_storage_length,
# _set_mapdict_storage_and_map
# _____________________________________________
- # methods needed for mapdict
-
- def _become(self, new_obj):
- self._set_mapdict_storage_and_map(new_obj.storage, new_obj.map)
-
- def _get_mapdict_map(self):
- return jit.promote(self.map)
- def _set_mapdict_map(self, map):
- self.map = map
-
- # _____________________________________________
# objspace interface
# class access
@@ -482,13 +467,13 @@
def setclass(self, space, w_cls):
new_obj = self._get_mapdict_map().set_terminator(self, w_cls.terminator)
- self._become(new_obj)
+ self._set_mapdict_storage_and_map(new_obj.storage, new_obj.map)
def user_setup(self, space, w_subtype):
self.space = space
assert (not self.typedef.hasdict or
isinstance(w_subtype.terminator, NoDictTerminator))
- self._init_empty(w_subtype.terminator)
+ self._mapdict_init_empty(w_subtype.terminator)
# methods needed for slots
@@ -506,7 +491,7 @@
new_obj = self._get_mapdict_map().delete(self, "slot", index)
if new_obj is None:
return False
- self._become(new_obj)
+ self._set_mapdict_storage_and_map(new_obj.storage, new_obj.map)
return True
@@ -547,7 +532,7 @@
new_obj = self._get_mapdict_map().delete(self, attrname, DICT)
if new_obj is None:
return False
- self._become(new_obj)
+ self._set_mapdict_storage_and_map(new_obj.storage, new_obj.map)
return True
def getdict(self, space):
@@ -597,7 +582,12 @@
assert flag
class MapdictStorageMixin(object):
- def _init_empty(self, map):
+ def _get_mapdict_map(self):
+ return jit.promote(self.map)
+ def _set_mapdict_map(self, map):
+ self.map = map
+
+ def _mapdict_init_empty(self, map):
from rpython.rlib.debug import make_sure_not_resized
self.map = map
self.storage = make_sure_not_resized([None] * map.size_estimate())
@@ -611,6 +601,7 @@
def _mapdict_storage_length(self):
return len(self.storage)
+
def _set_mapdict_storage_and_map(self, storage, map):
self.storage = storage
self.map = map
@@ -641,7 +632,11 @@
rangenmin1 = unroll.unrolling_iterable(range(nmin1))
valnmin1 = "_value%s" % nmin1
class subcls(object):
- def _init_empty(self, map):
+ def _get_mapdict_map(self):
+ return jit.promote(self.map)
+ def _set_mapdict_map(self, map):
+ self.map = map
+ def _mapdict_init_empty(self, map):
for i in rangenmin1:
setattr(self, "_value%s" % i, None)
setattr(self, valnmin1, erase_item(None))
@@ -729,7 +724,7 @@
def get_empty_storage(self):
w_result = Object()
terminator = self.space.fromcache(get_terminator_for_dicts)
- w_result._init_empty(terminator)
+ w_result._mapdict_init_empty(terminator)
return self.erase(w_result)
def switch_to_object_strategy(self, w_dict):
@@ -809,7 +804,7 @@
def clear(self, w_dict):
w_obj = self.unerase(w_dict.dstorage)
new_obj = w_obj._get_mapdict_map().remove_dict_entries(w_obj)
- _become(w_obj, new_obj)
+ w_obj._set_mapdict_storage_and_map(new_obj.storage, new_obj.map)
def popitem(self, w_dict):
curr = self.unerase(w_dict.dstorage)._get_mapdict_map().search(DICT)
@@ -834,7 +829,7 @@
def materialize_r_dict(space, obj, dict_w):
map = obj._get_mapdict_map()
new_obj = map.materialize_r_dict(space, obj, dict_w)
- _become(obj, new_obj)
+ obj._set_mapdict_storage_and_map(new_obj.storage, new_obj.map)
class MapDictIteratorKeys(BaseKeyIterator):
def __init__(self, space, strategy, w_dict):
diff --git a/pypy/objspace/std/objspace.py b/pypy/objspace/std/objspace.py
--- a/pypy/objspace/std/objspace.py
+++ b/pypy/objspace/std/objspace.py
@@ -374,7 +374,7 @@
cls = cls.typedef.applevel_subclasses_base
#
subcls = get_unique_interplevel_subclass(
- self.config, cls, w_subtype.needsdel)
+ self, cls, w_subtype.needsdel)
instance = instantiate(subcls)
assert isinstance(instance, cls)
instance.user_setup(self, w_subtype)
diff --git a/pypy/tool/release/repackage.sh b/pypy/tool/release/repackage.sh
--- a/pypy/tool/release/repackage.sh
+++ b/pypy/tool/release/repackage.sh
@@ -1,7 +1,7 @@
# Edit these appropriately before running this script
maj=5
min=1
-rev=0
+rev=1
branchname=release-$maj.x # ==OR== release-$maj.$min.x
tagname=release-$maj.$min # ==OR== release-$maj.$min.$rev
diff --git a/rpython/rtyper/lltypesystem/ll2ctypes.py b/rpython/rtyper/lltypesystem/ll2ctypes.py
--- a/rpython/rtyper/lltypesystem/ll2ctypes.py
+++ b/rpython/rtyper/lltypesystem/ll2ctypes.py
@@ -231,17 +231,7 @@
assert max_n >= 0
ITEM = A.OF
ctypes_item = get_ctypes_type(ITEM, delayed_builders)
- # Python 2.5 ctypes can raise OverflowError on 64-bit builds
- for n in [maxint, 2**31]:
- MAX_SIZE = n/64
- try:
- PtrType = ctypes.POINTER(MAX_SIZE * ctypes_item)
- except (OverflowError, AttributeError), e:
- pass # ^^^ bah, blame ctypes
- else:
- break
- else:
- raise e
+ ctypes_item_ptr = ctypes.POINTER(ctypes_item)
class CArray(ctypes.Structure):
if is_emulated_long:
@@ -265,35 +255,9 @@
bigarray.length = n
return bigarray
- _ptrtype = None
-
- @classmethod
- def _get_ptrtype(cls):
- if cls._ptrtype:
- return cls._ptrtype
- # ctypes can raise OverflowError on 64-bit builds
- # on windows it raises AttributeError even for 2**31 (_length_ missing)
- if _MS_WINDOWS:
- other_limit = 2**31-1
- else:
- other_limit = 2**31
- for n in [maxint, other_limit]:
- cls.MAX_SIZE = n / ctypes.sizeof(ctypes_item)
- try:
- cls._ptrtype = ctypes.POINTER(cls.MAX_SIZE * ctypes_item)
- except (OverflowError, AttributeError), e:
- pass
- else:
- break
- else:
- raise e
- return cls._ptrtype
-
def _indexable(self, index):
- PtrType = self._get_ptrtype()
- assert index + 1 < self.MAX_SIZE
- p = ctypes.cast(ctypes.pointer(self.items), PtrType)
- return p.contents
+ p = ctypes.cast(self.items, ctypes_item_ptr)
+ return p
def _getitem(self, index, boundscheck=True):
if boundscheck:
@@ -1045,12 +1009,22 @@
container = _array_of_known_length(T.TO)
container._storage = type(cobj)(cobj.contents)
elif isinstance(T.TO, lltype.FuncType):
+ # cobj is a CFunctionType object. We naively think
+ # that it should be a function pointer. No no no. If
+ # it was read out of an array, say, then it is a *pointer*
+ # to a function pointer. In other words, the read doesn't
+ # read anything, it just takes the address of the function
+ # pointer inside the array. If later the array is modified
+ # or goes out of scope, then we crash. CTypes is fun.
+ # It works if we cast it now to an int and back.
cobjkey = intmask(ctypes.cast(cobj, ctypes.c_void_p).value)
if cobjkey in _int2obj:
container = _int2obj[cobjkey]
else:
+ name = getattr(cobj, '__name__', '?')
+ cobj = ctypes.cast(cobjkey, type(cobj))
_callable = get_ctypes_trampoline(T.TO, cobj)
- return lltype.functionptr(T.TO, getattr(cobj, '__name__', '?'),
+ return lltype.functionptr(T.TO, name,
_callable=_callable)
elif isinstance(T.TO, lltype.OpaqueType):
if T == llmemory.GCREF:
diff --git a/rpython/rtyper/lltypesystem/test/test_ll2ctypes.py b/rpython/rtyper/lltypesystem/test/test_ll2ctypes.py
--- a/rpython/rtyper/lltypesystem/test/test_ll2ctypes.py
+++ b/rpython/rtyper/lltypesystem/test/test_ll2ctypes.py
@@ -1405,6 +1405,45 @@
a2 = ctypes2lltype(lltype.Ptr(A), lltype2ctypes(a))
assert a2._obj.getitem(0)._obj._parentstructure() is a2._obj
+ def test_array_of_function_pointers(self):
+ c_source = py.code.Source(r"""
+ #include "src/precommondefs.h"
+ #include <stdio.h>
+
+ typedef int(*funcptr_t)(void);
+ static int forty_two(void) { return 42; }
+ static int forty_three(void) { return 43; }
+ static funcptr_t testarray[2];
+ RPY_EXPORTED void runtest(void cb(funcptr_t *)) {
+ testarray[0] = &forty_two;
+ testarray[1] = &forty_three;
+ fprintf(stderr, "&forty_two = %p\n", testarray[0]);
+ fprintf(stderr, "&forty_three = %p\n", testarray[1]);
+ cb(testarray);
+ testarray[0] = 0;
+ testarray[1] = 0;
+ }
+ """)
+ eci = ExternalCompilationInfo(include_dirs=[cdir],
+ separate_module_sources=[c_source])
+
+ PtrF = lltype.Ptr(lltype.FuncType([], rffi.INT))
+ ArrayPtrF = rffi.CArrayPtr(PtrF)
+ CALLBACK = rffi.CCallback([ArrayPtrF], lltype.Void)
+
+ runtest = rffi.llexternal('runtest', [CALLBACK], lltype.Void,
+ compilation_info=eci)
+ seen = []
+
+ def callback(testarray):
+ seen.append(testarray[0]) # read a PtrF out of testarray
+ seen.append(testarray[1])
+
+ runtest(callback)
+ assert seen[0]() == 42
+ assert seen[1]() == 43
+
+
class TestPlatform(object):
def test_lib_on_libpaths(self):
from rpython.translator.platform import platform
diff --git a/rpython/rtyper/rpbc.py b/rpython/rtyper/rpbc.py
--- a/rpython/rtyper/rpbc.py
+++ b/rpython/rtyper/rpbc.py
@@ -544,6 +544,21 @@
ll_compress = compression_function(r_set)
return llops.gendirectcall(ll_compress, v)
+class __extend__(pairtype(FunctionReprBase, FunctionReprBase)):
+ def rtype_is_((robj1, robj2), hop):
+ if hop.s_result.is_constant():
+ return inputconst(Bool, hop.s_result.const)
+ s_pbc = annmodel.unionof(robj1.s_pbc, robj2.s_pbc)
+ r_pbc = hop.rtyper.getrepr(s_pbc)
+ v1, v2 = hop.inputargs(r_pbc, r_pbc)
+ assert v1.concretetype == v2.concretetype
+ if v1.concretetype == Char:
+ return hop.genop('char_eq', [v1, v2], resulttype=Bool)
+ elif isinstance(v1.concretetype, Ptr):
+ return hop.genop('ptr_eq', [v1, v2], resulttype=Bool)
+ else:
+ raise TyperError("unknown type %r" % (v1.concretetype,))
+
def conversion_table(r_from, r_to):
if r_to in r_from._conversion_tables:
diff --git a/rpython/rtyper/test/test_rpbc.py b/rpython/rtyper/test/test_rpbc.py
--- a/rpython/rtyper/test/test_rpbc.py
+++ b/rpython/rtyper/test/test_rpbc.py
@@ -1497,6 +1497,47 @@
res = self.interpret(f, [2])
assert res == False
+ def test_is_among_functions_2(self):
+ def g1(): pass
+ def g2(): pass
+ def f(n):
+ if n > 5:
+ g = g2
+ else:
+ g = g1
+ g()
+ return g is g2
+ res = self.interpret(f, [2])
+ assert res == False
+ res = self.interpret(f, [8])
+ assert res == True
+
+ def test_is_among_functions_3(self):
+ def g0(): pass
+ def g1(): pass
+ def g2(): pass
+ def g3(): pass
+ def g4(): pass
+ def g5(): pass
+ def g6(): pass
+ def g7(): pass
+ glist = [g0, g1, g2, g3, g4, g5, g6, g7]
+ def f(n):
+ if n > 5:
+ g = g2
+ else:
+ g = g1
+ h = glist[n]
+ g()
+ h()
+ return g is h
+ res = self.interpret(f, [2])
+ assert res == False
+ res = self.interpret(f, [1])
+ assert res == True
+ res = self.interpret(f, [6])
+ assert res == False
+
def test_shrink_pbc_set(self):
def g1():
return 10
More information about the pypy-commit
mailing list