From pypy.commits at gmail.com Mon Feb 1 03:12:08 2016 From: pypy.commits at gmail.com (plan_rich) Date: Mon, 01 Feb 2016 00:12:08 -0800 (PST) Subject: [pypy-commit] pypy s390x-backend: catchup with default Message-ID: <56af1358.890bc30a.6174b.fffffcfa@mx.google.com> Author: Richard Plangger Branch: s390x-backend Changeset: r82023:f3b48b1063f6 Date: 2016-02-01 09:11 +0100 http://bitbucket.org/pypy/pypy/changeset/f3b48b1063f6/ Log: catchup with default diff too long, truncating to 2000 out of 2165 lines diff --git a/pypy/doc/faq.rst b/pypy/doc/faq.rst --- a/pypy/doc/faq.rst +++ b/pypy/doc/faq.rst @@ -54,7 +54,8 @@ It is quite common nowadays that xyz is available on PyPI_ and installable with ``pip install xyz``. The simplest solution is to `use virtualenv (as documented here)`_. Then enter (activate) the virtualenv -and type: ``pip install xyz``. +and type: ``pip install xyz``. If you don't know or don't want virtualenv, +you can also install ``pip`` globally by saying ``pypy -m ensurepip``. If you get errors from the C compiler, the module is a CPython C Extension module using unsupported features. `See below.`_ diff --git a/pypy/doc/whatsnew-head.rst b/pypy/doc/whatsnew-head.rst --- a/pypy/doc/whatsnew-head.rst +++ b/pypy/doc/whatsnew-head.rst @@ -133,6 +133,13 @@ `rpython/jit/metainterp/optimizeopt/pure.py`, which can result in better codegen for traces containing a large number of pure getfield operations. +.. branch: exctrans + +Try to ensure that no new functions get annotated during the 'source_c' phase. +Refactor sandboxing to operate at a higher level. + +.. branch: cpyext-bootstrap + .. branch: memop-simplify3 Further simplifying the backend operations malloc_cond_varsize and zero_array. 
diff --git a/pypy/module/cpyext/__init__.py b/pypy/module/cpyext/__init__.py --- a/pypy/module/cpyext/__init__.py +++ b/pypy/module/cpyext/__init__.py @@ -36,7 +36,6 @@ import pypy.module.cpyext.object import pypy.module.cpyext.stringobject import pypy.module.cpyext.tupleobject -import pypy.module.cpyext.ndarrayobject import pypy.module.cpyext.setobject import pypy.module.cpyext.dictobject import pypy.module.cpyext.intobject diff --git a/pypy/module/cpyext/api.py b/pypy/module/cpyext/api.py --- a/pypy/module/cpyext/api.py +++ b/pypy/module/cpyext/api.py @@ -143,7 +143,7 @@ target.chmod(0444) # make the file read-only, to make sure that nobody # edits it by mistake -def copy_header_files(dstdir): +def copy_header_files(dstdir, copy_numpy_headers): # XXX: 20 lines of code to recursively copy a directory, really?? assert dstdir.check(dir=True) headers = include_dir.listdir('*.h') + include_dir.listdir('*.inl') @@ -151,15 +151,16 @@ headers.append(udir.join(name)) _copy_header_files(headers, dstdir) - try: - dstdir.mkdir('numpy') - except py.error.EEXIST: - pass - numpy_dstdir = dstdir / 'numpy' + if copy_numpy_headers: + try: + dstdir.mkdir('numpy') + except py.error.EEXIST: + pass + numpy_dstdir = dstdir / 'numpy' - numpy_include_dir = include_dir / 'numpy' - numpy_headers = numpy_include_dir.listdir('*.h') + numpy_include_dir.listdir('*.inl') - _copy_header_files(numpy_headers, numpy_dstdir) + numpy_include_dir = include_dir / 'numpy' + numpy_headers = numpy_include_dir.listdir('*.h') + numpy_include_dir.listdir('*.inl') + _copy_header_files(numpy_headers, numpy_dstdir) class NotSpecified(object): @@ -442,8 +443,8 @@ TYPES = {} GLOBALS = { # this needs to include all prebuilt pto, otherwise segfaults occur '_Py_NoneStruct#': ('PyObject*', 'space.w_None'), - '_Py_TrueStruct#': ('PyObject*', 'space.w_True'), - '_Py_ZeroStruct#': ('PyObject*', 'space.w_False'), + '_Py_TrueStruct#': ('PyIntObject*', 'space.w_True'), + '_Py_ZeroStruct#': ('PyIntObject*', 
'space.w_False'), '_Py_NotImplementedStruct#': ('PyObject*', 'space.w_NotImplemented'), '_Py_EllipsisObject#': ('PyObject*', 'space.w_Ellipsis'), 'PyDateTimeAPI': ('PyDateTime_CAPI*', 'None'), @@ -482,7 +483,6 @@ "PyComplex_Type": "space.w_complex", "PyByteArray_Type": "space.w_bytearray", "PyMemoryView_Type": "space.w_memoryview", - "PyArray_Type": "space.gettypeobject(W_NDimArray.typedef)", "PyBaseObject_Type": "space.w_object", 'PyNone_Type': 'space.type(space.w_None)', 'PyNotImplemented_Type': 'space.type(space.w_NotImplemented)', @@ -506,7 +506,9 @@ def get_structtype_for_ctype(ctype): from pypy.module.cpyext.typeobjectdefs import PyTypeObjectPtr from pypy.module.cpyext.cdatetime import PyDateTime_CAPI + from pypy.module.cpyext.intobject import PyIntObject return {"PyObject*": PyObject, "PyTypeObject*": PyTypeObjectPtr, + "PyIntObject*": PyIntObject, "PyDateTime_CAPI*": lltype.Ptr(PyDateTime_CAPI)}[ctype] PyTypeObject = lltype.ForwardReference() @@ -771,6 +773,8 @@ "NOT_RPYTHON" from pypy.module.cpyext.pyobject import make_ref + use_micronumpy = setup_micronumpy(space) + export_symbols = list(FUNCTIONS) + SYMBOLS_C + list(GLOBALS) from rpython.translator.c.database import LowLevelDatabase db = LowLevelDatabase() @@ -828,6 +832,7 @@ space.fromcache(State).install_dll(eci) # populate static data + builder = StaticObjectBuilder(space) for name, (typ, expr) in GLOBALS.iteritems(): from pypy.module import cpyext w_obj = eval(expr) @@ -852,7 +857,7 @@ assert False, "Unknown static pointer: %s %s" % (typ, name) ptr.value = ctypes.cast(ll2ctypes.lltype2ctypes(value), ctypes.c_void_p).value - elif typ in ('PyObject*', 'PyTypeObject*'): + elif typ in ('PyObject*', 'PyTypeObject*', 'PyIntObject*'): if name.startswith('PyPyExc_') or name.startswith('cpyexttestExc_'): # we already have the pointer in_dll = ll2ctypes.get_ctypes_type(PyObject).in_dll(bridge, name) @@ -861,17 +866,10 @@ # we have a structure, get its address in_dll = 
ll2ctypes.get_ctypes_type(PyObject.TO).in_dll(bridge, name) py_obj = ll2ctypes.ctypes2lltype(PyObject, ctypes.pointer(in_dll)) - from pypy.module.cpyext.pyobject import ( - track_reference, get_typedescr) - w_type = space.type(w_obj) - typedescr = get_typedescr(w_type.instancetypedef) - py_obj.c_ob_refcnt = 1 - py_obj.c_ob_type = rffi.cast(PyTypeObjectPtr, - make_ref(space, w_type)) - typedescr.attach(space, py_obj, w_obj) - track_reference(space, py_obj, w_obj) + builder.prepare(py_obj, w_obj) else: assert False, "Unknown static object: %s %s" % (typ, name) + builder.attach_all() pypyAPI = ctypes.POINTER(ctypes.c_void_p).in_dll(bridge, 'pypyAPI') @@ -888,6 +886,36 @@ setup_init_functions(eci, translating=False) return modulename.new(ext='') + +class StaticObjectBuilder: + def __init__(self, space): + self.space = space + self.to_attach = [] + + def prepare(self, py_obj, w_obj): + from pypy.module.cpyext.pyobject import track_reference + py_obj.c_ob_refcnt = 1 + track_reference(self.space, py_obj, w_obj) + self.to_attach.append((py_obj, w_obj)) + + def attach_all(self): + from pypy.module.cpyext.pyobject import get_typedescr, make_ref + from pypy.module.cpyext.typeobject import finish_type_1, finish_type_2 + space = self.space + space._cpyext_type_init = [] + for py_obj, w_obj in self.to_attach: + w_type = space.type(w_obj) + typedescr = get_typedescr(w_type.instancetypedef) + py_obj.c_ob_type = rffi.cast(PyTypeObjectPtr, + make_ref(space, w_type)) + typedescr.attach(space, py_obj, w_obj) + cpyext_type_init = space._cpyext_type_init + del space._cpyext_type_init + for pto, w_type in cpyext_type_init: + finish_type_1(space, pto) + finish_type_2(space, pto, w_type) + + def mangle_name(prefix, name): if name.startswith('Py'): return prefix + name[2:] @@ -983,6 +1011,24 @@ pypy_decl_h.write('\n'.join(pypy_decls)) return functions +separate_module_files = [source_dir / "varargwrapper.c", + source_dir / "pyerrors.c", + source_dir / "modsupport.c", + source_dir / 
"getargs.c", + source_dir / "abstract.c", + source_dir / "stringobject.c", + source_dir / "mysnprintf.c", + source_dir / "pythonrun.c", + source_dir / "sysmodule.c", + source_dir / "bufferobject.c", + source_dir / "cobject.c", + source_dir / "structseq.c", + source_dir / "capsule.c", + source_dir / "pysignals.c", + source_dir / "pythread.c", + source_dir / "missing.c", + ] + def build_eci(building_bridge, export_symbols, code): "NOT_RPYTHON" # Build code and get pointer to the structure @@ -1036,24 +1082,7 @@ eci = ExternalCompilationInfo( include_dirs=include_dirs, - separate_module_files=[source_dir / "varargwrapper.c", - source_dir / "pyerrors.c", - source_dir / "modsupport.c", - source_dir / "getargs.c", - source_dir / "abstract.c", - source_dir / "stringobject.c", - source_dir / "mysnprintf.c", - source_dir / "pythonrun.c", - source_dir / "sysmodule.c", - source_dir / "bufferobject.c", - source_dir / "cobject.c", - source_dir / "structseq.c", - source_dir / "capsule.c", - source_dir / "pysignals.c", - source_dir / "pythread.c", - source_dir / "ndarrayobject.c", - source_dir / "missing.c", - ], + separate_module_files= separate_module_files, separate_module_sources=separate_module_sources, compile_extra=compile_extra, **kwds @@ -1061,10 +1090,22 @@ return eci +def setup_micronumpy(space): + use_micronumpy = space.config.objspace.usemodules.micronumpy + if not use_micronumpy: + return use_micronumpy + # import to register api functions by side-effect + import pypy.module.cpyext.ndarrayobject + global GLOBALS, SYMBOLS_C, separate_module_files + GLOBALS["PyArray_Type#"]= ('PyTypeObject*', "space.gettypeobject(W_NDimArray.typedef)") + SYMBOLS_C += ['PyArray_Type', '_PyArray_FILLWBYTE', '_PyArray_ZEROS'] + separate_module_files.append(source_dir / "ndarrayobject.c") + return use_micronumpy def setup_library(space): "NOT_RPYTHON" from pypy.module.cpyext.pyobject import make_ref + use_micronumpy = setup_micronumpy(space) export_symbols = list(FUNCTIONS) + SYMBOLS_C + 
list(GLOBALS) from rpython.translator.c.database import LowLevelDatabase @@ -1082,14 +1123,33 @@ run_bootstrap_functions(space) setup_va_functions(eci) + from pypy.module import cpyext # for eval() below + + # Set up the types. Needs a special case, because of the + # immediate cycle involving 'c_ob_type', and because we don't + # want these types to be Py_TPFLAGS_HEAPTYPE. + static_types = {} + for name, (typ, expr) in GLOBALS.items(): + if typ == 'PyTypeObject*': + pto = lltype.malloc(PyTypeObject, immortal=True, + zero=True, flavor='raw') + pto.c_ob_refcnt = 1 + pto.c_tp_basicsize = -1 + static_types[name] = pto + builder = StaticObjectBuilder(space) + for name, pto in static_types.items(): + pto.c_ob_type = static_types['PyType_Type#'] + w_type = eval(GLOBALS[name][1]) + builder.prepare(rffi.cast(PyObject, pto), w_type) + builder.attach_all() + # populate static data for name, (typ, expr) in GLOBALS.iteritems(): name = name.replace("#", "") if name.startswith('PyExc_'): name = '_' + name - from pypy.module import cpyext w_obj = eval(expr) - if typ in ('PyObject*', 'PyTypeObject*'): + if typ in ('PyObject*', 'PyTypeObject*', 'PyIntObject*'): struct_ptr = make_ref(space, w_obj) elif typ == 'PyDateTime_CAPI*': continue @@ -1106,7 +1166,7 @@ setup_init_functions(eci, translating=True) trunk_include = pypydir.dirpath() / 'include' - copy_header_files(trunk_include) + copy_header_files(trunk_include, use_micronumpy) def _load_from_cffi(space, name, path, initptr): from pypy.module._cffi_backend import cffi1_module diff --git a/pypy/module/cpyext/include/stringobject.h b/pypy/module/cpyext/include/stringobject.h --- a/pypy/module/cpyext/include/stringobject.h +++ b/pypy/module/cpyext/include/stringobject.h @@ -7,8 +7,8 @@ extern "C" { #endif -#define PyString_GET_SIZE(op) PyString_Size(op) -#define PyString_AS_STRING(op) PyString_AsString(op) +#define PyString_GET_SIZE(op) PyString_Size((PyObject*)(op)) +#define PyString_AS_STRING(op) 
PyString_AsString((PyObject*)(op)) typedef struct { PyObject_HEAD diff --git a/pypy/module/cpyext/test/test_api.py b/pypy/module/cpyext/test/test_api.py --- a/pypy/module/cpyext/test/test_api.py +++ b/pypy/module/cpyext/test/test_api.py @@ -98,7 +98,7 @@ def test_copy_header_files(tmpdir): - api.copy_header_files(tmpdir) + api.copy_header_files(tmpdir, True) def check(name): f = tmpdir.join(name) assert f.check(file=True) diff --git a/pypy/module/cpyext/test/test_typeobject.py b/pypy/module/cpyext/test/test_typeobject.py --- a/pypy/module/cpyext/test/test_typeobject.py +++ b/pypy/module/cpyext/test/test_typeobject.py @@ -374,6 +374,11 @@ module = self.import_extension('foo', [ ("test_type", "METH_O", ''' + /* "args->ob_type" is a strange way to get at 'type', + which should have a different tp_getattro/tp_setattro + than its tp_base, which is 'object'. + */ + if (!args->ob_type->tp_setattro) { PyErr_SetString(PyExc_ValueError, "missing tp_setattro"); @@ -382,8 +387,12 @@ if (args->ob_type->tp_setattro == args->ob_type->tp_base->tp_setattro) { - PyErr_SetString(PyExc_ValueError, "recursive tp_setattro"); - return NULL; + /* Note that unlike CPython, in PyPy 'type.tp_setattro' + is the same function as 'object.tp_setattro'. This + test used to check that it was not, but that was an + artifact of the bootstrap logic only---in the final + C sources I checked and they are indeed the same. + So we ignore this problem here. 
*/ } if (!args->ob_type->tp_getattro) { diff --git a/pypy/module/cpyext/typeobject.py b/pypy/module/cpyext/typeobject.py --- a/pypy/module/cpyext/typeobject.py +++ b/pypy/module/cpyext/typeobject.py @@ -146,7 +146,7 @@ assert len(slot_names) == 2 struct = getattr(pto, slot_names[0]) if not struct: - assert not space.config.translating + #assert not space.config.translating assert not pto.c_tp_flags & Py_TPFLAGS_HEAPTYPE if slot_names[0] == 'c_tp_as_number': STRUCT_TYPE = PyNumberMethods @@ -310,55 +310,6 @@ realize=type_realize, dealloc=type_dealloc) - # some types are difficult to create because of cycles. - # - object.ob_type = type - # - type.ob_type = type - # - tuple.ob_type = type - # - type.tp_base = object - # - tuple.tp_base = object - # - type.tp_bases is a tuple - # - object.tp_bases is a tuple - # - tuple.tp_bases is a tuple - - # insert null placeholders to please create_ref() - track_reference(space, lltype.nullptr(PyObject.TO), space.w_type) - track_reference(space, lltype.nullptr(PyObject.TO), space.w_object) - track_reference(space, lltype.nullptr(PyObject.TO), space.w_tuple) - track_reference(space, lltype.nullptr(PyObject.TO), space.w_str) - - # create the objects - py_type = create_ref(space, space.w_type) - py_object = create_ref(space, space.w_object) - py_tuple = create_ref(space, space.w_tuple) - py_str = create_ref(space, space.w_str) - # XXX py_str is not initialized here correctly, because we are - # not tracking it, it gets an empty c_ob_type from py_basestring - - # form cycles - pto_type = rffi.cast(PyTypeObjectPtr, py_type) - py_type.c_ob_type = pto_type - py_object.c_ob_type = pto_type - py_tuple.c_ob_type = pto_type - - pto_object = rffi.cast(PyTypeObjectPtr, py_object) - pto_type.c_tp_base = pto_object - pto_tuple = rffi.cast(PyTypeObjectPtr, py_tuple) - pto_tuple.c_tp_base = pto_object - - pto_type.c_tp_bases.c_ob_type = pto_tuple - pto_object.c_tp_bases.c_ob_type = pto_tuple - pto_tuple.c_tp_bases.c_ob_type = pto_tuple - - for 
typ in (py_type, py_object, py_tuple, py_str): - heaptype = rffi.cast(PyHeapTypeObject, typ) - heaptype.c_ht_name.c_ob_type = pto_type - - # Restore the mapping - track_reference(space, py_type, space.w_type, replace=True) - track_reference(space, py_object, space.w_object, replace=True) - track_reference(space, py_tuple, space.w_tuple, replace=True) - track_reference(space, py_str, space.w_str, replace=True) - @cpython_api([PyObject], lltype.Void, external=False) def subtype_dealloc(space, obj): @@ -476,6 +427,8 @@ pto.c_tp_as_sequence = heaptype.c_as_sequence pto.c_tp_as_mapping = heaptype.c_as_mapping pto.c_tp_as_buffer = heaptype.c_as_buffer + pto.c_tp_basicsize = -1 # hopefully this makes malloc bail out + pto.c_tp_itemsize = 0 return rffi.cast(PyObject, heaptype) @@ -511,8 +464,6 @@ pto.c_tp_name = PyString_AsString(space, heaptype.c_ht_name) else: pto.c_tp_name = rffi.str2charp(w_type.name) - pto.c_tp_basicsize = -1 # hopefully this makes malloc bail out - pto.c_tp_itemsize = 0 # uninitialized fields: # c_tp_print, c_tp_getattr, c_tp_setattr # XXX implement @@ -520,8 +471,11 @@ w_base = best_base(space, w_type.bases_w) pto.c_tp_base = rffi.cast(PyTypeObjectPtr, make_ref(space, w_base)) - finish_type_1(space, pto) - finish_type_2(space, pto, w_type) + if hasattr(space, '_cpyext_type_init'): + space._cpyext_type_init.append((pto, w_type)) + else: + finish_type_1(space, pto) + finish_type_2(space, pto, w_type) pto.c_tp_basicsize = rffi.sizeof(typedescr.basestruct) if pto.c_tp_base: diff --git a/pypy/objspace/std/mapdict.py b/pypy/objspace/std/mapdict.py --- a/pypy/objspace/std/mapdict.py +++ b/pypy/objspace/std/mapdict.py @@ -116,7 +116,7 @@ def _find_map_attr(self, name, index): while isinstance(self, PlainAttribute): - if name == self.name and index == self.index: + if index == self.index and name == self.name: return self self = self.back return None @@ -156,7 +156,6 @@ jit.isconstant(name) and jit.isconstant(index)) def add_attr(self, obj, name, index, 
w_value): - # grumble, jit needs this attr = self._get_new_attr(name, index) oldattr = obj._get_mapdict_map() if not jit.we_are_jitted(): @@ -296,7 +295,7 @@ new_obj._get_mapdict_map().add_attr(new_obj, self.name, self.index, w_value) def delete(self, obj, name, index): - if name == self.name and index == self.index: + if index == self.index and name == self.name: # ok, attribute is deleted if not self.ever_mutated: self.ever_mutated = True diff --git a/pypy/objspace/std/setobject.py b/pypy/objspace/std/setobject.py --- a/pypy/objspace/std/setobject.py +++ b/pypy/objspace/std/setobject.py @@ -942,7 +942,7 @@ return False if w_set.length() == 0: return True - # it's possible to have 0-lenght strategy that's not empty + # it's possible to have 0-length strategy that's not empty if w_set.strategy is w_other.strategy: return self._issubset_unwrapped(w_set, w_other) if not self.may_contain_equal_elements(w_other.strategy): diff --git a/rpython/memory/gctransform/boehm.py b/rpython/memory/gctransform/boehm.py --- a/rpython/memory/gctransform/boehm.py +++ b/rpython/memory/gctransform/boehm.py @@ -74,7 +74,7 @@ def gct_fv_gc_malloc_varsize(self, hop, flags, TYPE, v_length, c_const_size, c_item_size, c_offset_to_length): - # XXX same behavior for zero=True: in theory that's wrong + # XXX same behavior for zero=True: in theory that's wrong if c_offset_to_length is None: v_raw = hop.genop("direct_call", [self.malloc_varsize_no_length_ptr, v_length, @@ -156,6 +156,11 @@ resulttype = lltype.Signed) hop.genop('int_invert', [v_int], resultvar=hop.spaceop.result) + def gcheader_initdata(self, defnode): + hdr = lltype.malloc(self.HDR, immortal=True) + hdr.hash = lltype.identityhash_nocache(defnode.obj._as_ptr()) + return hdr._obj + ########## weakrefs ########## # Boehm: weakref objects are small structures containing only a Boehm diff --git a/rpython/memory/gctransform/framework.py b/rpython/memory/gctransform/framework.py --- a/rpython/memory/gctransform/framework.py +++ 
b/rpython/memory/gctransform/framework.py @@ -288,7 +288,6 @@ s_gcref = SomePtr(llmemory.GCREF) gcdata = self.gcdata - translator = self.translator #use the GC flag to find which malloc method to use #malloc_zero_filled == Ture -> malloc_fixedsize/varsize_clear #malloc_zero_filled == Flase -> malloc_fixedsize/varsize @@ -322,7 +321,7 @@ GCClass.malloc_varsize.im_func, [s_gc, s_typeid16] + [annmodel.SomeInteger(nonneg=True) for i in range(4)], s_gcref) - + self.collect_ptr = getfn(GCClass.collect.im_func, [s_gc, annmodel.SomeInteger()], annmodel.s_None) self.can_move_ptr = getfn(GCClass.can_move.im_func, @@ -1385,7 +1384,7 @@ [v] + previous_steps + [c_name, c_null]) else: llops.genop('bare_setfield', [v, c_name, c_null]) - + return elif isinstance(TYPE, lltype.Array): ITEM = TYPE.OF @@ -1412,6 +1411,25 @@ resulttype=llmemory.Address) llops.genop('raw_memclear', [v_adr, v_totalsize]) + def gcheader_initdata(self, defnode): + o = lltype.top_container(defnode.obj) + needs_hash = self.get_prebuilt_hash(o) is not None + hdr = self.gc_header_for(o, needs_hash) + return hdr._obj + + def get_prebuilt_hash(self, obj): + # for prebuilt objects that need to have their hash stored and + # restored. Note that only structures that are StructNodes all + # the way have their hash stored (and not e.g. structs with var- + # sized arrays at the end). 'obj' must be the top_container. 
+ TYPE = lltype.typeOf(obj) + if not isinstance(TYPE, lltype.GcStruct): + return None + if TYPE._is_varsize(): + return None + return getattr(obj, '_hash_cache_', None) + + class TransformerLayoutBuilder(gctypelayout.TypeLayoutBuilder): diff --git a/rpython/memory/gctransform/refcounting.py b/rpython/memory/gctransform/refcounting.py --- a/rpython/memory/gctransform/refcounting.py +++ b/rpython/memory/gctransform/refcounting.py @@ -285,3 +285,7 @@ resulttype=llmemory.Address) hop.genop("direct_call", [self.identityhash_ptr, v_adr], resultvar=hop.spaceop.result) + + def gcheader_initdata(self, defnode): + top = lltype.top_container(defnode.obj) + return self.gcheaderbuilder.header_of_object(top)._obj diff --git a/rpython/memory/gctransform/test/test_framework.py b/rpython/memory/gctransform/test/test_framework.py --- a/rpython/memory/gctransform/test/test_framework.py +++ b/rpython/memory/gctransform/test/test_framework.py @@ -40,7 +40,7 @@ t.config.translation.gc = "minimark" cbuild = CStandaloneBuilder(t, entrypoint, t.config, gcpolicy=FrameworkGcPolicy2) - db = cbuild.generate_graphs_for_llinterp() + db = cbuild.build_database() entrypointptr = cbuild.getentrypointptr() entrygraph = entrypointptr._obj.graph @@ -69,7 +69,7 @@ return -x t = rtype(g, [int]) gg = graphof(t, g) - assert not CollectAnalyzer(t).analyze_direct_call(gg) + assert not CollectAnalyzer(t).analyze_direct_call(gg) def test_cancollect_external(): fext1 = rffi.llexternal('fext1', [], lltype.Void, releasegil=False) @@ -110,12 +110,12 @@ def entrypoint(argv): return g() + 2 - + t = rtype(entrypoint, [s_list_of_strings]) t.config.translation.gc = "minimark" cbuild = CStandaloneBuilder(t, entrypoint, t.config, gcpolicy=FrameworkGcPolicy2) - db = cbuild.generate_graphs_for_llinterp() + db = cbuild.build_database() def test_no_collect_detection(): from rpython.rlib import rgc @@ -134,12 +134,13 @@ def entrypoint(argv): return g() + 2 - + t = rtype(entrypoint, [s_list_of_strings]) 
t.config.translation.gc = "minimark" cbuild = CStandaloneBuilder(t, entrypoint, t.config, gcpolicy=FrameworkGcPolicy2) - f = py.test.raises(Exception, cbuild.generate_graphs_for_llinterp) + with py.test.raises(Exception) as f: + cbuild.build_database() expected = "'no_collect' function can trigger collection: ") def test_has_0002(): + if platform.name == 'msvc': + py.test.skip('no m.lib in msvc') assert rffi_platform.has("pow", "#include ", libraries=["m"]) def test_has_0003(): """multiple libraries""" + if platform.name == 'msvc': + py.test.skip('no m.lib in msvc') assert rffi_platform.has("pow", "#include ", libraries=["m", "c"]) def test_has_0004(): diff --git a/rpython/translator/c/database.py b/rpython/translator/c/database.py --- a/rpython/translator/c/database.py +++ b/rpython/translator/c/database.py @@ -1,3 +1,4 @@ +from collections import OrderedDict from rpython.rtyper.lltypesystem.lltype import (Primitive, Ptr, typeOf, RuntimeTypeInfo, Struct, Array, FuncType, Void, ContainerType, OpaqueType, @@ -8,9 +9,9 @@ from rpython.rtyper.lltypesystem import llgroup from rpython.tool.sourcetools import valid_identifier from rpython.translator.c.primitive import PrimitiveName, PrimitiveType -from rpython.translator.c.node import StructDefNode, ArrayDefNode -from rpython.translator.c.node import FixedSizeArrayDefNode, BareBoneArrayDefNode -from rpython.translator.c.node import ContainerNodeFactory, ExtTypeOpaqueDefNode +from rpython.translator.c.node import ( + StructDefNode, ArrayDefNode, FixedSizeArrayDefNode, BareBoneArrayDefNode, + ContainerNodeFactory, ExtTypeOpaqueDefNode, FuncNode) from rpython.translator.c.support import cdecl, CNameManager from rpython.translator.c.support import log, barebonearray from rpython.translator.c.extfunc import do_the_getting @@ -28,6 +29,7 @@ def __init__(self, translator=None, standalone=False, gcpolicyclass=None, + exctransformer=None, thread_enabled=False, sandbox=False): self.translator = translator @@ -36,6 +38,7 @@ if 
gcpolicyclass is None: gcpolicyclass = gc.RefcountingGcPolicy self.gcpolicy = gcpolicyclass(self, thread_enabled) + self.exctransformer = exctransformer self.structdefnodes = {} self.pendingsetupnodes = [] @@ -45,7 +48,7 @@ self.delayedfunctionptrs = [] self.completedcontainers = 0 self.containerstats = {} - self.helper2ptr = {} + self.helpers = OrderedDict() # late_initializations is for when the value you want to # assign to a constant object is something C doesn't think is @@ -53,12 +56,8 @@ self.late_initializations = [] self.namespace = CNameManager() - if translator is None or translator.rtyper is None: - self.exctransformer = None - else: - self.exctransformer = translator.getexceptiontransformer() if translator is not None: - self.gctransformer = self.gcpolicy.gettransformer() + self.gctransformer = self.gcpolicy.gettransformer(translator) self.completed = False self.instrument_ncounter = 0 @@ -348,6 +347,8 @@ assert not self.delayedfunctionptrs self.completed = True + if self.gctransformer is not None and self.gctransformer.inline: + self.gctransformer.inline_helpers(self.all_graphs()) if show_progress: dump() log.database("Completed") @@ -379,30 +380,10 @@ produce(node) return result - def need_sandboxing(self, fnobj): - if not self.sandbox: - return False - if hasattr(fnobj, '_safe_not_sandboxed'): - return not fnobj._safe_not_sandboxed - elif getattr(getattr(fnobj, '_callable', None), - '_sandbox_external_name', None): - return True - else: - return "if_external" - - def prepare_inline_helpers(self): - all_nodes = self.globalcontainers() - funcnodes = [node for node in all_nodes if node.nodekind == 'func'] - graphs = [] - for node in funcnodes: - for graph in node.graphs_to_patch(): - graphs.append(graph) - self.gctransformer.prepare_inline_helpers(graphs) - def all_graphs(self): graphs = [] for node in self.containerlist: - if node.nodekind == 'func': + if isinstance(node, FuncNode): for graph in node.graphs_to_patch(): graphs.append(graph) return 
graphs diff --git a/rpython/translator/c/external.py b/rpython/translator/c/external.py deleted file mode 100644 --- a/rpython/translator/c/external.py +++ /dev/null @@ -1,54 +0,0 @@ -from rpython.rtyper.lltypesystem.lltype import typeOf, Void -from rpython.translator.c.support import USESLOTS # set to False if necessary while refactoring -from rpython.translator.c.support import cdecl, somelettersfrom - -class CExternalFunctionCodeGenerator(object): - if USESLOTS: - __slots__ = """db fnptr FUNCTYPE argtypenames resulttypename""".split() - - def __init__(self, fnptr, db): - self.fnptr = fnptr - self.db = db - self.FUNCTYPE = typeOf(fnptr) - assert Void not in self.FUNCTYPE.ARGS - self.argtypenames = [db.gettype(T) for T in self.FUNCTYPE.ARGS] - self.resulttypename = db.gettype(self.FUNCTYPE.RESULT) - - def graphs_to_patch(self): - return [] - - def name(self, cname): #virtual - return cname - - def argnames(self): - return ['%s%d' % (somelettersfrom(self.argtypenames[i]), i) - for i in range(len(self.argtypenames))] - - def allconstantvalues(self): - return [] - - def implementation_begin(self): - pass - - def cfunction_declarations(self): - if self.FUNCTYPE.RESULT is not Void: - yield '%s;' % cdecl(self.resulttypename, 'result') - - def cfunction_body(self): - try: - convert_params = self.fnptr.convert_params - except AttributeError: - convert_params = lambda backend, args: [arg for _,arg in args] - call = '%s(%s)' % (self.fnptr._name, ', '.join(convert_params("c", zip(self.FUNCTYPE.ARGS, self.argnames())))) - if self.FUNCTYPE.RESULT is not Void: - yield 'result = %s;' % call - yield 'if (PyErr_Occurred()) RPyConvertExceptionFromCPython();' - yield 'return result;' - else: - yield '%s;' % call - yield 'if (PyErr_Occurred()) RPyConvertExceptionFromCPython();' - - def implementation_end(self): - pass - -assert not USESLOTS or '__dict__' not in dir(CExternalFunctionCodeGenerator) diff --git a/rpython/translator/c/extfunc.py b/rpython/translator/c/extfunc.py --- 
a/rpython/translator/c/extfunc.py +++ b/rpython/translator/c/extfunc.py @@ -1,23 +1,23 @@ import types from rpython.flowspace.model import FunctionGraph -from rpython.rtyper.lltypesystem import lltype, rstr, rlist +from rpython.annotator.listdef import s_list_of_strings +from rpython.rtyper.lltypesystem import lltype, rlist from rpython.rtyper.lltypesystem.rstr import STR, mallocstr from rpython.translator.c.support import cdecl def find_list_of_str(rtyper): - for r in rtyper.reprs.itervalues(): - if isinstance(r, rlist.ListRepr) and r.item_repr is rstr.string_repr: - return r.lowleveltype.TO - return None + r_strlist = rtyper.getrepr(s_list_of_strings) + rtyper.call_all_setups() + return r_strlist.lowleveltype.TO + def predeclare_common_types(db, rtyper): # Common types yield ('RPyString', STR) LIST_OF_STR = find_list_of_str(rtyper) - if LIST_OF_STR is not None: - yield ('RPyListOfString', LIST_OF_STR) + yield ('RPyListOfString', LIST_OF_STR) def predeclare_utility_functions(db, rtyper): # Common utility functions @@ -32,40 +32,38 @@ # returned directly as results LIST_OF_STR = find_list_of_str(rtyper) - if LIST_OF_STR is not None: - p = lltype.Ptr(LIST_OF_STR) + p = lltype.Ptr(LIST_OF_STR) - def _RPyListOfString_New(length=lltype.Signed): - return LIST_OF_STR.ll_newlist(length) + def _RPyListOfString_New(length=lltype.Signed): + return LIST_OF_STR.ll_newlist(length) - def _RPyListOfString_SetItem(l=p, - index=lltype.Signed, - newstring=lltype.Ptr(STR)): - rlist.ll_setitem_nonneg(rlist.dum_nocheck, l, index, newstring) + def _RPyListOfString_SetItem(l=p, + index=lltype.Signed, + newstring=lltype.Ptr(STR)): + rlist.ll_setitem_nonneg(rlist.dum_nocheck, l, index, newstring) - def _RPyListOfString_GetItem(l=p, - index=lltype.Signed): - return rlist.ll_getitem_fast(l, index) + def _RPyListOfString_GetItem(l=p, + index=lltype.Signed): + return rlist.ll_getitem_fast(l, index) - def _RPyListOfString_Length(l=p): - return rlist.ll_length(l) + def 
_RPyListOfString_Length(l=p): + return rlist.ll_length(l) for fname, f in locals().items(): if isinstance(f, types.FunctionType): # XXX this is painful :( - if (LIST_OF_STR, fname) in db.helper2ptr: - yield (fname, db.helper2ptr[LIST_OF_STR, fname]) + if fname in db.helpers: + yield (fname, db.helpers[fname]) else: # hack: the defaults give the type of the arguments graph = rtyper.annotate_helper(f, f.func_defaults) - db.helper2ptr[LIST_OF_STR, fname] = graph + db.helpers[fname] = graph yield (fname, graph) -def predeclare_exception_data(db, rtyper): +def predeclare_exception_data(exctransformer, rtyper): # Exception-related types and constants exceptiondata = rtyper.exceptiondata - exctransformer = db.exctransformer yield ('RPYTHON_EXCEPTION_VTABLE', exceptiondata.lltype_of_exception_type) yield ('RPYTHON_EXCEPTION', exceptiondata.lltype_of_exception_value) @@ -93,19 +91,19 @@ def predeclare_all(db, rtyper): for fn in [predeclare_common_types, predeclare_utility_functions, - predeclare_exception_data, ]: for t in fn(db, rtyper): yield t + exctransformer = db.exctransformer + for t in predeclare_exception_data(exctransformer, rtyper): + yield t + def get_all(db, rtyper): - for fn in [predeclare_common_types, - predeclare_utility_functions, - predeclare_exception_data, - ]: - for t in fn(db, rtyper): - yield t[1] + for name, fnptr in predeclare_all(db, rtyper): + yield fnptr + # ____________________________________________________________ diff --git a/rpython/translator/c/funcgen.py b/rpython/translator/c/funcgen.py --- a/rpython/translator/c/funcgen.py +++ b/rpython/translator/c/funcgen.py @@ -1,9 +1,8 @@ import sys -from rpython.translator.c.support import USESLOTS # set to False if necessary while refactoring from rpython.translator.c.support import cdecl from rpython.translator.c.support import llvalue_from_constant, gen_assignments from rpython.translator.c.support import c_string_constant, barebonearray -from rpython.flowspace.model import Variable, Constant, 
copygraph +from rpython.flowspace.model import Variable, Constant from rpython.rtyper.lltypesystem.lltype import (Ptr, Void, Bool, Signed, Unsigned, SignedLongLong, Float, UnsignedLongLong, Char, UniChar, ContainerType, Array, FixedSizeArray, ForwardReference, FuncType) @@ -19,39 +18,30 @@ KEEP_INLINED_GRAPHS = False +def make_funcgen(graph, db, exception_policy, functionname): + graph._seen_by_the_backend = True + # apply the exception transformation + if db.exctransformer: + db.exctransformer.create_exception_handling(graph) + # apply the gc transformation + if db.gctransformer: + db.gctransformer.transform_graph(graph) + return FunctionCodeGenerator(graph, db, exception_policy, functionname) + class FunctionCodeGenerator(object): """ Collects information about a function which we have to generate from a flow graph. """ - if USESLOTS: - __slots__ = """graph db gcpolicy - exception_policy - more_ll_values - vars all_cached_consts - illtypes - functionname - blocknum - innerloops - oldgraph""".split() - - def __init__(self, graph, db, exception_policy=None, functionname=None): - graph._seen_by_the_backend = True + def __init__(self, graph, db, exception_policy, functionname): self.graph = graph self.db = db self.gcpolicy = db.gcpolicy self.exception_policy = exception_policy self.functionname = functionname - # apply the exception transformation - if self.db.exctransformer: - self.db.exctransformer.create_exception_handling(self.graph) - # apply the gc transformation - if self.db.gctransformer: - self.db.gctransformer.transform_graph(self.graph) - #self.graph.show() + self.collect_var_and_types() - for v in self.vars: T = v.concretetype # obscure: skip forward references and hope for the best @@ -84,12 +74,6 @@ self.more_ll_values.append(link.llexitcase) elif link.exitcase is not None: mix.append(Constant(link.exitcase)) - if self.exception_policy == "CPython": - v, exc_cleanup_ops = self.graph.exc_cleanup - mix.append(v) - for cleanupop in exc_cleanup_ops: - 
mix.extend(cleanupop.args) - mix.append(cleanupop.result) uniquemix = [] seen = identity_dict() @@ -99,20 +83,7 @@ seen[v] = True self.vars = uniquemix - def name(self, cname): #virtual - return cname - - def patch_graph(self, copy_graph): - graph = self.graph - if self.db.gctransformer and self.db.gctransformer.inline: - if copy_graph: - graph = copygraph(graph, shallow=True) - self.db.gctransformer.inline_helpers(graph) - return graph - def implementation_begin(self): - self.oldgraph = self.graph - self.graph = self.patch_graph(copy_graph=True) SSI_to_SSA(self.graph) self.collect_var_and_types() self.blocknum = {} @@ -138,8 +109,6 @@ self.vars = None self.blocknum = None self.innerloops = None - self.graph = self.oldgraph - del self.oldgraph def argnames(self): return [LOCALVAR % v.name for v in self.graph.getargs()] @@ -247,8 +216,6 @@ yield '}' link = block.exits[0] assert link.exitcase in (False, True) - #yield 'assert(%s == %s);' % (self.expr(block.exitswitch), - # self.genc.nameofvalue(link.exitcase, ct)) for op in self.gen_link(link): yield op elif TYPE in (Signed, Unsigned, SignedLongLong, @@ -894,14 +861,11 @@ def getdebugfunctionname(self): name = self.functionname - if not name: - return "?" if name.startswith('pypy_g_'): name = name[7:] return name def OP_DEBUG_RECORD_TRACEBACK(self, op): - #if self.functionname is None, we print "?" 
as the argument */ return 'PYPY_DEBUG_RECORD_TRACEBACK("%s");' % ( self.getdebugfunctionname(),) @@ -941,5 +905,3 @@ cdecl(typename, ''), self.expr(op.args[0]), self.expr(op.result)) - -assert not USESLOTS or '__dict__' not in dir(FunctionCodeGenerator) diff --git a/rpython/translator/c/gc.py b/rpython/translator/c/gc.py --- a/rpython/translator/c/gc.py +++ b/rpython/translator/c/gc.py @@ -1,8 +1,7 @@ import sys from rpython.flowspace.model import Constant -from rpython.rtyper.lltypesystem import lltype -from rpython.rtyper.lltypesystem.lltype import (typeOf, RttiStruct, - RuntimeTypeInfo, top_container) +from rpython.rtyper.lltypesystem.lltype import (RttiStruct, + RuntimeTypeInfo) from rpython.translator.c.node import ContainerNode from rpython.translator.c.support import cdecl from rpython.translator.tool.cbuild import ExternalCompilationInfo @@ -18,23 +17,12 @@ return defnode.db.gctransformer.HDR return None - def common_gcheader_initdata(self, defnode): - if defnode.db.gctransformer is not None: - raise NotImplementedError - return None - def struct_gcheader_definition(self, defnode): return self.common_gcheader_definition(defnode) - def struct_gcheader_initdata(self, defnode): - return self.common_gcheader_initdata(defnode) - def array_gcheader_definition(self, defnode): return self.common_gcheader_definition(defnode) - def array_gcheader_initdata(self, defnode): - return self.common_gcheader_initdata(defnode) - def compilation_info(self): if not self.db: return ExternalCompilationInfo() @@ -46,9 +34,6 @@ ] ) - def get_prebuilt_hash(self, obj): - return None - def need_no_typeptr(self): return False @@ -109,16 +94,9 @@ class RefcountingGcPolicy(BasicGcPolicy): - def gettransformer(self): + def gettransformer(self, translator): from rpython.memory.gctransform import refcounting - return refcounting.RefcountingGCTransformer(self.db.translator) - - def common_gcheader_initdata(self, defnode): - if defnode.db.gctransformer is not None: - gct = 
defnode.db.gctransformer - top = top_container(defnode.obj) - return gct.gcheaderbuilder.header_of_object(top)._obj - return None + return refcounting.RefcountingGCTransformer(translator) # for structs @@ -197,16 +175,9 @@ class BoehmGcPolicy(BasicGcPolicy): - def gettransformer(self): + def gettransformer(self, translator): from rpython.memory.gctransform import boehm - return boehm.BoehmGCTransformer(self.db.translator) - - def common_gcheader_initdata(self, defnode): - if defnode.db.gctransformer is not None: - hdr = lltype.malloc(defnode.db.gctransformer.HDR, immortal=True) - hdr.hash = lltype.identityhash_nocache(defnode.obj._as_ptr()) - return hdr._obj - return None + return boehm.BoehmGCTransformer(translator) def array_setup(self, arraydefnode): pass @@ -313,9 +284,9 @@ class BasicFrameworkGcPolicy(BasicGcPolicy): - def gettransformer(self): + def gettransformer(self, translator): if hasattr(self, 'transformerclass'): # for rpython/memory tests - return self.transformerclass(self.db.translator) + return self.transformerclass(translator) raise NotImplementedError def struct_setup(self, structdefnode, rtti): @@ -362,24 +333,6 @@ args = [funcgen.expr(v) for v in op.args] return '%s = %s; /* for moving GCs */' % (args[1], args[0]) - def common_gcheader_initdata(self, defnode): - o = top_container(defnode.obj) - needs_hash = self.get_prebuilt_hash(o) is not None - hdr = defnode.db.gctransformer.gc_header_for(o, needs_hash) - return hdr._obj - - def get_prebuilt_hash(self, obj): - # for prebuilt objects that need to have their hash stored and - # restored. Note that only structures that are StructNodes all - # the way have their hash stored (and not e.g. structs with var- - # sized arrays at the end). 'obj' must be the top_container. 
- TYPE = typeOf(obj) - if not isinstance(TYPE, lltype.GcStruct): - return None - if TYPE._is_varsize(): - return None - return getattr(obj, '_hash_cache_', None) - def need_no_typeptr(self): config = self.db.translator.config return config.translation.gcremovetypeptr @@ -440,15 +393,15 @@ class ShadowStackFrameworkGcPolicy(BasicFrameworkGcPolicy): - def gettransformer(self): + def gettransformer(self, translator): from rpython.memory.gctransform import shadowstack - return shadowstack.ShadowStackFrameworkGCTransformer(self.db.translator) + return shadowstack.ShadowStackFrameworkGCTransformer(translator) class AsmGcRootFrameworkGcPolicy(BasicFrameworkGcPolicy): - def gettransformer(self): + def gettransformer(self, translator): from rpython.memory.gctransform import asmgcroot - return asmgcroot.AsmGcRootFrameworkGCTransformer(self.db.translator) + return asmgcroot.AsmGcRootFrameworkGCTransformer(translator) def GC_KEEPALIVE(self, funcgen, v): return 'pypy_asm_keepalive(%s);' % funcgen.expr(v) diff --git a/rpython/translator/c/genc.py b/rpython/translator/c/genc.py --- a/rpython/translator/c/genc.py +++ b/rpython/translator/c/genc.py @@ -126,8 +126,10 @@ if not self.standalone: raise NotImplementedError("--gcrootfinder=asmgcc requires standalone") + exctransformer = translator.getexceptiontransformer() db = LowLevelDatabase(translator, standalone=self.standalone, gcpolicyclass=gcpolicyclass, + exctransformer=exctransformer, thread_enabled=self.config.translation.thread, sandbox=self.config.translation.sandbox) self.db = db @@ -193,22 +195,8 @@ DEBUG_DEFINES = {'RPY_ASSERT': 1, 'RPY_LL_ASSERT': 1} - def generate_graphs_for_llinterp(self, db=None): - # prepare the graphs as when the source is generated, but without - # actually generating the source. 
- if db is None: - db = self.build_database() - graphs = db.all_graphs() - db.gctransformer.prepare_inline_helpers(graphs) - for node in db.containerlist: - if hasattr(node, 'funcgens'): - for funcgen in node.funcgens: - funcgen.patch_graph(copy_graph=False) - return db - def generate_source(self, db=None, defines={}, exe_name=None): assert self.c_source_filename is None - if db is None: db = self.build_database() pf = self.getentrypointptr() @@ -846,7 +834,6 @@ # sg = SourceGenerator(database) sg.set_strategy(targetdir, split) - database.prepare_inline_helpers() sg.gen_readable_parts_of_source(f) headers_to_precompile = sg.headers_to_precompile[:] headers_to_precompile.insert(0, incfilename) diff --git a/rpython/translator/c/node.py b/rpython/translator/c/node.py --- a/rpython/translator/c/node.py +++ b/rpython/translator/c/node.py @@ -3,8 +3,7 @@ Void, OpaqueType, Float, RuntimeTypeInfo, getRuntimeTypeInfo, Char, _subarray) from rpython.rtyper.lltypesystem import llmemory, llgroup -from rpython.translator.c.funcgen import FunctionCodeGenerator -from rpython.translator.c.external import CExternalFunctionCodeGenerator +from rpython.translator.c.funcgen import make_funcgen from rpython.translator.c.support import USESLOTS # set to False if necessary while refactoring from rpython.translator.c.support import cdecl, forward_cdecl, somelettersfrom from rpython.translator.c.support import c_char_array_constant, barebonearray @@ -540,7 +539,17 @@ class StructNode(ContainerNode): nodekind = 'struct' if USESLOTS: - __slots__ = () + __slots__ = ('gc_init',) + + def __init__(self, db, T, obj): + ContainerNode.__init__(self, db, T, obj) + if needs_gcheader(T): + gct = self.db.gctransformer + if gct is not None: + self.gc_init = gct.gcheader_initdata(self) + db.getcontainernode(self.gc_init) + else: + self.gc_init = None def basename(self): T = self.getTYPE() @@ -567,8 +576,7 @@ data = [] if needs_gcheader(T): - gc_init = self.db.gcpolicy.struct_gcheader_initdata(self) - 
data.append(('gcheader', gc_init)) + data.append(('gcheader', self.gc_init)) for name in defnode.fieldnames: data.append((name, getattr(self.obj, name))) @@ -641,7 +649,7 @@ def implementation(self): hash_typename = self.get_hash_typename() - hash = self.db.gcpolicy.get_prebuilt_hash(self.obj) + hash = self.db.gctransformer.get_prebuilt_hash(self.obj) assert hash is not None lines = list(self.initializationexpr()) lines.insert(0, '%s = { {' % ( @@ -651,7 +659,8 @@ return lines def gcstructnode_factory(db, T, obj): - if db.gcpolicy.get_prebuilt_hash(obj) is not None: + if (db.gctransformer and + db.gctransformer.get_prebuilt_hash(obj) is not None): cls = GcStructNodeWithHash else: cls = StructNode @@ -661,7 +670,17 @@ class ArrayNode(ContainerNode): nodekind = 'array' if USESLOTS: - __slots__ = () + __slots__ = ('gc_init',) + + def __init__(self, db, T, obj): + ContainerNode.__init__(self, db, T, obj) + if needs_gcheader(T): + gct = self.db.gctransformer + if gct is not None: + self.gc_init = gct.gcheader_initdata(self) + db.getcontainernode(self.gc_init) + else: + self.gc_init = None def getptrname(self): if barebonearray(self.getTYPE()): @@ -681,8 +700,7 @@ T = self.getTYPE() yield '{' if needs_gcheader(T): - gc_init = self.db.gcpolicy.array_gcheader_initdata(self) - lines = generic_initializationexpr(self.db, gc_init, 'gcheader', + lines = generic_initializationexpr(self.db, self.gc_init, 'gcheader', '%sgcheader' % (decoration,)) for line in lines: yield line @@ -781,81 +799,64 @@ comma = '' expr += comma i = expr.find('\n') - if i<0: i = len(expr) + if i < 0: + i = len(expr) expr = '%s\t/* %s */%s' % (expr[:i], decoration, expr[i:]) return expr.split('\n') # ____________________________________________________________ -class FuncNode(ContainerNode): +class FuncNodeBase(ContainerNode): nodekind = 'func' eci_name = 'compilation_info' # there not so many node of this kind, slots should not # be necessary - - def __init__(self, db, T, obj, forcename=None): + def 
__init__(self, db, T, obj, ptrname): Node.__init__(self, db) self.globalcontainer = True self.T = T self.obj = obj - callable = getattr(obj, '_callable', None) - if (callable is not None and - getattr(callable, 'c_name', None) is not None): - self.name = forcename or obj._callable.c_name - elif getattr(obj, 'external', None) == 'C' and not db.need_sandboxing(obj): - self.name = forcename or self.basename() - else: - self.name = (forcename or - db.namespace.uniquename('g_' + self.basename())) - self.make_funcgens() + self.name = ptrname self.typename = db.gettype(T) #, who_asks=self) def getptrname(self): return self.name - def make_funcgens(self): - self.funcgens = select_function_code_generators(self.obj, self.db, self.name) - if self.funcgens: - argnames = self.funcgens[0].argnames() #Assume identical for all funcgens - self.implementationtypename = self.db.gettype(self.T, argnames=argnames) - self._funccodegen_owner = self.funcgens[0] - else: - self._funccodegen_owner = None - def basename(self): return self.obj._name + +class FuncNode(FuncNodeBase): + def __init__(self, db, T, obj, ptrname): + FuncNodeBase.__init__(self, db, T, obj, ptrname) + exception_policy = getattr(obj, 'exception_policy', None) + self.funcgen = make_funcgen(obj.graph, db, exception_policy, ptrname) + argnames = self.funcgen.argnames() + self.implementationtypename = db.gettype(T, argnames=argnames) + self._funccodegen_owner = self.funcgen + def enum_dependencies(self): - if not self.funcgens: - return [] - return self.funcgens[0].allconstantvalues() #Assume identical for all funcgens + return self.funcgen.allconstantvalues() def forward_declaration(self): callable = getattr(self.obj, '_callable', None) is_exported = getattr(callable, 'exported_symbol', False) - for funcgen in self.funcgens: - yield '%s;' % ( - forward_cdecl(self.implementationtypename, - funcgen.name(self.name), self.db.standalone, - is_exported=is_exported)) + yield '%s;' % ( + forward_cdecl(self.implementationtypename, 
+ self.name, self.db.standalone, is_exported=is_exported)) + + def graphs_to_patch(self): + for i in self.funcgen.graphs_to_patch(): + yield i def implementation(self): - for funcgen in self.funcgens: - for s in self.funcgen_implementation(funcgen): - yield s - - def graphs_to_patch(self): - for funcgen in self.funcgens: - for i in funcgen.graphs_to_patch(): - yield i - - def funcgen_implementation(self, funcgen): + funcgen = self.funcgen funcgen.implementation_begin() # recompute implementationtypename as the argnames may have changed argnames = funcgen.argnames() implementationtypename = self.db.gettype(self.T, argnames=argnames) - yield '%s {' % cdecl(implementationtypename, funcgen.name(self.name)) + yield '%s {' % cdecl(implementationtypename, self.name) # # declare the local variables # @@ -866,7 +867,7 @@ while start < len(localnames): # pack the local declarations over as few lines as possible total = lengths[start] + 8 - end = start+1 + end = start + 1 while total + lengths[end] < 77: total += lengths[end] + 1 end += 1 @@ -897,44 +898,55 @@ del bodyiter funcgen.implementation_end() -def sandbox_stub(fnobj, db): - # unexpected external function for --sandbox translation: replace it - # with a "Not Implemented" stub. To support these functions, port them - # to the new style registry (e.g. rpython.module.ll_os.RegisterOs). - from rpython.translator.sandbox import rsandbox - graph = rsandbox.get_external_function_sandbox_graph(fnobj, db, - force_stub=True) - return [FunctionCodeGenerator(graph, db)] +class ExternalFuncNode(FuncNodeBase): + def __init__(self, db, T, obj, ptrname): + FuncNodeBase.__init__(self, db, T, obj, ptrname) + self._funccodegen_owner = None -def sandbox_transform(fnobj, db): - # for --sandbox: replace a function like os_open_llimpl() with - # code that communicates with the external process to ask it to - # perform the operation. 
- from rpython.translator.sandbox import rsandbox - graph = rsandbox.get_external_function_sandbox_graph(fnobj, db) - return [FunctionCodeGenerator(graph, db)] + def enum_dependencies(self): + return [] -def select_function_code_generators(fnobj, db, functionname): - sandbox = db.need_sandboxing(fnobj) - if hasattr(fnobj, 'graph'): - if sandbox and sandbox != "if_external": - # apply the sandbox transformation - return sandbox_transform(fnobj, db) - exception_policy = getattr(fnobj, 'exception_policy', None) - return [FunctionCodeGenerator(fnobj.graph, db, exception_policy, - functionname)] - elif getattr(fnobj, 'external', None) is not None: - if sandbox: - return sandbox_stub(fnobj, db) - elif fnobj.external == 'C': - return [] - else: - assert fnobj.external == 'CPython' - return [CExternalFunctionCodeGenerator(fnobj, db)] - elif hasattr(fnobj._callable, "c_name"): - return [] # this case should only be used for entrypoints + def forward_declaration(self): + return [] + + def implementation(self): + return [] + +def new_funcnode(db, T, obj, forcename=None): + if db.sandbox: + if (getattr(obj, 'external', None) is not None and + not obj._safe_not_sandboxed): + from rpython.translator.sandbox import rsandbox + obj.__dict__['graph'] = rsandbox.get_sandbox_stub( + obj, db.translator.rtyper) + obj.__dict__.pop('_safe_not_sandboxed', None) + obj.__dict__.pop('external', None) + if forcename: + name = forcename else: - raise ValueError("don't know how to generate code for %r" % (fnobj,)) + name = _select_name(db, obj) + if hasattr(obj, 'graph'): + return FuncNode(db, T, obj, name) + elif getattr(obj, 'external', None) is not None: + assert obj.external == 'C' + if db.sandbox: + assert obj._safe_not_sandboxed + return ExternalFuncNode(db, T, obj, name) + elif hasattr(obj._callable, "c_name"): + return ExternalFuncNode(db, T, obj, name) # this case should only be used for entrypoints + else: + raise ValueError("don't know how to generate code for %r" % (obj,)) + + +def 
_select_name(db, obj): + try: + return obj._callable.c_name + except AttributeError: + pass + if getattr(obj, 'external', None) == 'C': + return obj._name + return db.namespace.uniquename('g_' + obj._name) + class ExtType_OpaqueNode(ContainerNode): nodekind = 'rpyopaque' @@ -1044,7 +1056,7 @@ Array: ArrayNode, GcArray: ArrayNode, FixedSizeArray: FixedSizeArrayNode, - FuncType: FuncNode, + FuncType: new_funcnode, OpaqueType: opaquenode_factory, llmemory._WeakRefType: weakrefnode_factory, llgroup.GroupType: GroupNode, diff --git a/rpython/translator/c/test/test_database.py b/rpython/translator/c/test/test_database.py --- a/rpython/translator/c/test/test_database.py +++ b/rpython/translator/c/test/test_database.py @@ -9,8 +9,6 @@ def dump_on_stdout(database): - if database.gctransformer: - database.prepare_inline_helpers() print '/*********************************/' structdeflist = database.getstructdeflist() for node in structdeflist: @@ -171,7 +169,7 @@ F = FuncType([Signed], Signed) f = functionptr(F, "f", graph=graph) - db = LowLevelDatabase(t) + db = LowLevelDatabase(t, exctransformer=t.getexceptiontransformer()) db.get(f) db.complete() dump_on_stdout(db) @@ -186,7 +184,7 @@ return p.x * p.y t, graph = makegraph(ll_f, [int]) - db = LowLevelDatabase(t) + db = LowLevelDatabase(t, exctransformer=t.getexceptiontransformer()) db.get(getfunctionptr(graph)) db.complete() dump_on_stdout(db) @@ -207,7 +205,7 @@ return s.ptr1.x * s.ptr2.x t, graph = makegraph(ll_f, [int]) - db = LowLevelDatabase(t) + db = LowLevelDatabase(t, exctransformer=t.getexceptiontransformer()) db.get(getfunctionptr(graph)) db.complete() dump_on_stdout(db) diff --git a/rpython/translator/c/test/test_refcount.py b/rpython/translator/c/test/test_refcount.py --- a/rpython/translator/c/test/test_refcount.py +++ b/rpython/translator/c/test/test_refcount.py @@ -106,37 +106,6 @@ assert fn(1) == 4 assert fn(0) == 5 - def test_del_basic(self): - py.test.skip("xxx fix or kill") - S = lltype.GcStruct('S', 
('x', lltype.Signed), rtti=True) - TRASH = lltype.GcStruct('TRASH', ('x', lltype.Signed)) - GLOBAL = lltype.Struct('GLOBAL', ('x', lltype.Signed)) - glob = lltype.malloc(GLOBAL, immortal=True) - def destructor(s): - glob.x = s.x + 1 - def type_info_S(s): - return lltype.getRuntimeTypeInfo(S) - - def g(n): - s = lltype.malloc(S) - s.x = n - # now 's' should go away - def entrypoint(n): - g(n) - # llop.gc__collect(lltype.Void) - return glob.x - - t = TranslationContext() - t.buildannotator().build_types(entrypoint, [int]) - rtyper = t.buildrtyper() - destrptr = rtyper.annotate_helper_fn(destructor, [lltype.Ptr(S)]) - rtyper.attachRuntimeTypeInfoFunc(S, type_info_S, destrptr=destrptr) From pypy.commits at gmail.com Mon Feb 1 03:57:39 2016 From: pypy.commits at gmail.com (amauryfa) Date: Mon, 01 Feb 2016 00:57:39 -0800 (PST) Subject: [pypy-commit] pypy py3.3: sqlite: add connection.set_trace_callback Message-ID: <56af1e03.c3e01c0a.4279f.7e7e@mx.google.com> Author: Amaury Forgeot d'Arc Branch: py3.3 Changeset: r82024:f471a84da866 Date: 2016-02-01 01:03 +0100 http://bitbucket.org/pypy/pypy/changeset/f471a84da866/ Log: sqlite: add connection.set_trace_callback diff --git a/lib_pypy/_sqlite3.py b/lib_pypy/_sqlite3.py --- a/lib_pypy/_sqlite3.py +++ b/lib_pypy/_sqlite3.py @@ -624,6 +624,22 @@ _lib.sqlite3_progress_handler(self._db, nsteps, progress_handler, _ffi.NULL) + @_check_thread_wrap + @_check_closed_wrap + def set_trace_callback(self, callable): + if callable is None: + trace_callback = _ffi.NULL + else: + try: + trace_callback = self.__func_cache[callable] + except KeyError: + @_ffi.callback("void(void*, const char*)") + def trace_callback(userdata, statement): + stmt = _ffi.string(statement).decode('utf-8') + callable(stmt) + self.__func_cache[callable] = trace_callback + _lib.sqlite3_trace(self._db, trace_callback, _ffi.NULL) + if sys.version_info[0] >= 3: def __get_in_transaction(self): return self._in_transaction diff --git a/lib_pypy/_sqlite3_build.py 
b/lib_pypy/_sqlite3_build.py --- a/lib_pypy/_sqlite3_build.py +++ b/lib_pypy/_sqlite3_build.py @@ -159,6 +159,7 @@ const char *sqlite3_column_decltype(sqlite3_stmt*,int); void sqlite3_progress_handler(sqlite3*, int, int(*)(void*), void*); +void sqlite3_trace(sqlite3*, void(*)(void*, const char*), void*); int sqlite3_create_collation( sqlite3*, const char *zName, From pypy.commits at gmail.com Mon Feb 1 03:57:41 2016 From: pypy.commits at gmail.com (amauryfa) Date: Mon, 01 Feb 2016 00:57:41 -0800 (PST) Subject: [pypy-commit] pypy py3.3: Add pickle support to iter(dequeue()) Message-ID: <56af1e05.162f1c0a.77b37.7d2e@mx.google.com> Author: Amaury Forgeot d'Arc Branch: py3.3 Changeset: r82025:61cf3585f0f7 Date: 2016-02-01 01:08 +0100 http://bitbucket.org/pypy/pypy/changeset/61cf3585f0f7/ Log: Add pickle support to iter(dequeue()) diff --git a/pypy/module/_collections/__init__.py b/pypy/module/_collections/__init__.py --- a/pypy/module/_collections/__init__.py +++ b/pypy/module/_collections/__init__.py @@ -12,6 +12,8 @@ interpleveldefs = { 'deque' : 'interp_deque.W_Deque', + 'deque_iterator' : 'interp_deque.W_DequeIter', + 'deque_reverse_iterator' : 'interp_deque.W_DequeRevIter', '__missing__': 'interp_defaultdict.missing', } diff --git a/pypy/module/_collections/interp_deque.py b/pypy/module/_collections/interp_deque.py --- a/pypy/module/_collections/interp_deque.py +++ b/pypy/module/_collections/interp_deque.py @@ -537,7 +537,7 @@ self.index = ri return w_x -W_DequeIter.typedef = TypeDef("deque_iterator", +W_DequeIter.typedef = TypeDef("_collections.deque_iterator", __iter__ = interp2app(W_DequeIter.iter), __length_hint__ = interp2app(W_DequeIter.length), __next__ = interp2app(W_DequeIter.next), @@ -580,7 +580,7 @@ self.index = ri return w_x -W_DequeRevIter.typedef = TypeDef("deque_reverse_iterator", +W_DequeRevIter.typedef = TypeDef("_collections.deque_reverse_iterator", __iter__ = interp2app(W_DequeRevIter.iter), __length_hint__ = interp2app(W_DequeRevIter.length), 
__next__ = interp2app(W_DequeRevIter.next), From pypy.commits at gmail.com Mon Feb 1 03:57:43 2016 From: pypy.commits at gmail.com (amauryfa) Date: Mon, 01 Feb 2016 00:57:43 -0800 (PST) Subject: [pypy-commit] pypy py3.3: Add a more complex test for e.__context__, directly from CPython. Message-ID: <56af1e07.2a06c20a.5b39a.0d76@mx.google.com> Author: Amaury Forgeot d'Arc Branch: py3.3 Changeset: r82026:9067ded8c01d Date: 2016-02-01 09:39 +0100 http://bitbucket.org/pypy/pypy/changeset/9067ded8c01d/ Log: Add a more complex test for e.__context__, directly from CPython. diff --git a/pypy/interpreter/error.py b/pypy/interpreter/error.py --- a/pypy/interpreter/error.py +++ b/pypy/interpreter/error.py @@ -350,8 +350,9 @@ self.normalize_exception(space) w_value = self.get_w_value(space) w_last = last_exception.get_w_value(space) - w_context = setup_context(space, w_value, w_last, lazy=True) - space.setattr(w_value, space.wrap('__context__'), w_context) + if not space.is_w(w_value, w_last): + w_context = setup_context(space, w_value, w_last, lazy=True) + space.setattr(w_value, space.wrap('__context__'), w_context) def setup_context(space, w_exc, w_last, lazy=False): diff --git a/pypy/interpreter/test/test_raise.py b/pypy/interpreter/test/test_raise.py --- a/pypy/interpreter/test/test_raise.py +++ b/pypy/interpreter/test/test_raise.py @@ -408,6 +408,25 @@ except: func() + def testCauseSyntax(self): + """ + try: + try: + try: + raise TypeError + except Exception: + raise ValueError from None + except ValueError as exc: + assert exc.__cause__ is None + assert exc.__suppress_context__ is True + exc.__suppress_context__ = False + raise exc + except ValueError as exc: + e = exc + assert e.__cause__ is None + assert e.__suppress_context__ is False + assert isinstance(e.__context__, TypeError) + """ class AppTestTraceback: From pypy.commits at gmail.com Mon Feb 1 05:48:21 2016 From: pypy.commits at gmail.com (mjacob) Date: Mon, 01 Feb 2016 02:48:21 -0800 (PST) Subject: 
[pypy-commit] pypy py3k: 2to3 Message-ID: <56af37f5.0357c20a.58781.3d1a@mx.google.com> Author: Manuel Jacob Branch: py3k Changeset: r82027:91eaf91dbb4c Date: 2016-02-01 11:45 +0100 http://bitbucket.org/pypy/pypy/changeset/91eaf91dbb4c/ Log: 2to3 diff --git a/lib_pypy/greenlet.py b/lib_pypy/greenlet.py --- a/lib_pypy/greenlet.py +++ b/lib_pypy/greenlet.py @@ -203,7 +203,7 @@ try: if hasattr(_tls, 'trace'): _run_trace_callback('throw') - raise exc, value, tb + raise __pypy__.normalize_exc(exc, value, tb) except GreenletExit as e: res = e finally: From pypy.commits at gmail.com Mon Feb 1 05:52:06 2016 From: pypy.commits at gmail.com (mjacob) Date: Mon, 01 Feb 2016 02:52:06 -0800 (PST) Subject: [pypy-commit] pypy default: Add lib_pypy/_libmpdec/ to default's .hgignore as well. Message-ID: <56af38d6.42cbc20a.1c2fa.353f@mx.google.com> Author: Manuel Jacob Branch: Changeset: r82028:22349fa2fc33 Date: 2016-02-01 11:51 +0100 http://bitbucket.org/pypy/pypy/changeset/22349fa2fc33/ Log: Add lib_pypy/_libmpdec/ to default's .hgignore as well. 
diff --git a/.hgignore b/.hgignore --- a/.hgignore +++ b/.hgignore @@ -75,6 +75,7 @@ ^lib_pypy/__pycache__$ ^lib_pypy/ctypes_config_cache/_.+_cache\.py$ ^lib_pypy/ctypes_config_cache/_.+_.+_\.py$ +^lib_pypy/_libmpdec/.+.o$ ^rpython/translator/cli/query-descriptions$ ^pypy/doc/discussion/.+\.html$ ^include/.+\.h$ From pypy.commits at gmail.com Mon Feb 1 06:51:10 2016 From: pypy.commits at gmail.com (mjacob) Date: Mon, 01 Feb 2016 03:51:10 -0800 (PST) Subject: [pypy-commit] pypy py3k: hg merge default Message-ID: <56af46ae.85e41c0a.eeabe.ffffc78f@mx.google.com> Author: Manuel Jacob Branch: py3k Changeset: r82029:fd7884dc77a3 Date: 2016-02-01 12:50 +0100 http://bitbucket.org/pypy/pypy/changeset/fd7884dc77a3/ Log: hg merge default diff too long, truncating to 2000 out of 16652 lines diff --git a/.gitignore b/.gitignore --- a/.gitignore +++ b/.gitignore @@ -29,4 +29,4 @@ release/ !pypy/tool/release/ rpython/_cache/ -__pycache__/ +.cache/ diff --git a/.hgignore b/.hgignore --- a/.hgignore +++ b/.hgignore @@ -75,6 +75,7 @@ ^lib_pypy/__pycache__$ ^lib_pypy/ctypes_config_cache/_.+_cache\.py$ ^lib_pypy/ctypes_config_cache/_.+_.+_\.py$ +^lib_pypy/_libmpdec/.+.o$ ^rpython/translator/cli/query-descriptions$ ^pypy/doc/discussion/.+\.html$ ^include/.+\.h$ diff --git a/LICENSE b/LICENSE --- a/LICENSE +++ b/LICENSE @@ -28,7 +28,7 @@ DEALINGS IN THE SOFTWARE. -PyPy Copyright holders 2003-2015 +PyPy Copyright holders 2003-2016 ----------------------------------- Except when otherwise stated (look for LICENSE files or information at diff --git a/Makefile b/Makefile --- a/Makefile +++ b/Makefile @@ -39,5 +39,5 @@ # runs. We cannot get their original value either: # http://lists.gnu.org/archive/html/help-make/2010-08/msg00106.html -cffi_imports: +cffi_imports: pypy-c PYTHONPATH=. 
./pypy-c pypy/tool/build_cffi_imports.py diff --git a/lib-python/2.7/distutils/command/build_ext.py b/lib-python/2.7/distutils/command/build_ext.py --- a/lib-python/2.7/distutils/command/build_ext.py +++ b/lib-python/2.7/distutils/command/build_ext.py @@ -685,13 +685,17 @@ # the previous version of this code did. This should work for # CPython too. The point is that on PyPy with cpyext, the # config var 'SO' is just ".so" but we want to return - # ".pypy-VERSION.so" instead. - so_ext = _get_c_extension_suffix() + # ".pypy-VERSION.so" instead. Note a further tweak for cffi's + # embedding mode: if EXT_SUFFIX is also defined, use that + # directly. + so_ext = get_config_var('EXT_SUFFIX') if so_ext is None: - so_ext = get_config_var('SO') # fall-back - # extensions in debug_mode are named 'module_d.pyd' under windows - if os.name == 'nt' and self.debug: - so_ext = '_d.pyd' + so_ext = _get_c_extension_suffix() + if so_ext is None: + so_ext = get_config_var('SO') # fall-back + # extensions in debug_mode are named 'module_d.pyd' under windows + if os.name == 'nt' and self.debug: + so_ext = '_d.pyd' return os.path.join(*ext_path) + so_ext def get_export_symbols (self, ext): diff --git a/lib-python/2.7/pickle.py b/lib-python/2.7/pickle.py --- a/lib-python/2.7/pickle.py +++ b/lib-python/2.7/pickle.py @@ -1376,6 +1376,7 @@ def decode_long(data): r"""Decode a long from a two's complement little-endian binary string. + This is overriden on PyPy by a RPython version that has linear complexity. 
>>> decode_long('') 0L @@ -1402,6 +1403,11 @@ n -= 1L << (nbytes * 8) return n +try: + from __pypy__ import decode_long +except ImportError: + pass + # Shorthands try: diff --git a/lib-python/2.7/sysconfig.py b/lib-python/2.7/sysconfig.py --- a/lib-python/2.7/sysconfig.py +++ b/lib-python/2.7/sysconfig.py @@ -524,6 +524,13 @@ import _osx_support _osx_support.customize_config_vars(_CONFIG_VARS) + # PyPy: + import imp + for suffix, mode, type_ in imp.get_suffixes(): + if type_ == imp.C_EXTENSION: + _CONFIG_VARS['SOABI'] = suffix.split('.')[1] + break + if args: vals = [] for name in args: diff --git a/lib-python/2.7/test/capath/0e4015b9.0 b/lib-python/2.7/test/capath/0e4015b9.0 new file mode 100644 --- /dev/null +++ b/lib-python/2.7/test/capath/0e4015b9.0 @@ -0,0 +1,16 @@ +-----BEGIN CERTIFICATE----- +MIIClTCCAf6gAwIBAgIJAKGU95wKR8pTMA0GCSqGSIb3DQEBBQUAMHAxCzAJBgNV +BAYTAlhZMRcwFQYDVQQHDA5DYXN0bGUgQW50aHJheDEjMCEGA1UECgwaUHl0aG9u +IFNvZnR3YXJlIEZvdW5kYXRpb24xIzAhBgNVBAMMGnNlbGYtc2lnbmVkLnB5dGhv +bnRlc3QubmV0MB4XDTE0MTEwMjE4MDkyOVoXDTI0MTAzMDE4MDkyOVowcDELMAkG +A1UEBhMCWFkxFzAVBgNVBAcMDkNhc3RsZSBBbnRocmF4MSMwIQYDVQQKDBpQeXRo +b24gU29mdHdhcmUgRm91bmRhdGlvbjEjMCEGA1UEAwwac2VsZi1zaWduZWQucHl0 +aG9udGVzdC5uZXQwgZ8wDQYJKoZIhvcNAQEBBQADgY0AMIGJAoGBANDXQXW9tjyZ +Xt0Iv2tLL1+jinr4wGg36ioLDLFkMf+2Y1GL0v0BnKYG4N1OKlAU15LXGeGer8vm +Sv/yIvmdrELvhAbbo3w4a9TMYQA4XkIVLdvu3mvNOAet+8PMJxn26dbDhG809ALv +EHY57lQsBS3G59RZyBPVqAqmImWNJnVzAgMBAAGjNzA1MCUGA1UdEQQeMByCGnNl +bGYtc2lnbmVkLnB5dGhvbnRlc3QubmV0MAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcN +AQEFBQADgYEAIuzAhgMouJpNdf3URCHIineyoSt6WK/9+eyUcjlKOrDoXNZaD72h +TXMeKYoWvJyVcSLKL8ckPtDobgP2OTt0UkyAaj0n+ZHaqq1lH2yVfGUA1ILJv515 +C8BqbvVZuqm3i7ygmw3bqE/lYMgOrYtXXnqOrz6nvsE6Yc9V9rFflOM= +-----END CERTIFICATE----- diff --git a/lib-python/2.7/test/capath/ce7b8643.0 b/lib-python/2.7/test/capath/ce7b8643.0 new file mode 100644 --- /dev/null +++ b/lib-python/2.7/test/capath/ce7b8643.0 @@ -0,0 +1,16 @@ +-----BEGIN CERTIFICATE----- 
+MIIClTCCAf6gAwIBAgIJAKGU95wKR8pTMA0GCSqGSIb3DQEBBQUAMHAxCzAJBgNV +BAYTAlhZMRcwFQYDVQQHDA5DYXN0bGUgQW50aHJheDEjMCEGA1UECgwaUHl0aG9u +IFNvZnR3YXJlIEZvdW5kYXRpb24xIzAhBgNVBAMMGnNlbGYtc2lnbmVkLnB5dGhv +bnRlc3QubmV0MB4XDTE0MTEwMjE4MDkyOVoXDTI0MTAzMDE4MDkyOVowcDELMAkG +A1UEBhMCWFkxFzAVBgNVBAcMDkNhc3RsZSBBbnRocmF4MSMwIQYDVQQKDBpQeXRo +b24gU29mdHdhcmUgRm91bmRhdGlvbjEjMCEGA1UEAwwac2VsZi1zaWduZWQucHl0 +aG9udGVzdC5uZXQwgZ8wDQYJKoZIhvcNAQEBBQADgY0AMIGJAoGBANDXQXW9tjyZ +Xt0Iv2tLL1+jinr4wGg36ioLDLFkMf+2Y1GL0v0BnKYG4N1OKlAU15LXGeGer8vm +Sv/yIvmdrELvhAbbo3w4a9TMYQA4XkIVLdvu3mvNOAet+8PMJxn26dbDhG809ALv +EHY57lQsBS3G59RZyBPVqAqmImWNJnVzAgMBAAGjNzA1MCUGA1UdEQQeMByCGnNl +bGYtc2lnbmVkLnB5dGhvbnRlc3QubmV0MAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcN +AQEFBQADgYEAIuzAhgMouJpNdf3URCHIineyoSt6WK/9+eyUcjlKOrDoXNZaD72h +TXMeKYoWvJyVcSLKL8ckPtDobgP2OTt0UkyAaj0n+ZHaqq1lH2yVfGUA1ILJv515 +C8BqbvVZuqm3i7ygmw3bqE/lYMgOrYtXXnqOrz6nvsE6Yc9V9rFflOM= +-----END CERTIFICATE----- diff --git a/lib-python/2.7/test/https_svn_python_org_root.pem b/lib-python/2.7/test/https_svn_python_org_root.pem deleted file mode 100644 --- a/lib-python/2.7/test/https_svn_python_org_root.pem +++ /dev/null @@ -1,41 +0,0 @@ ------BEGIN CERTIFICATE----- -MIIHPTCCBSWgAwIBAgIBADANBgkqhkiG9w0BAQQFADB5MRAwDgYDVQQKEwdSb290 -IENBMR4wHAYDVQQLExVodHRwOi8vd3d3LmNhY2VydC5vcmcxIjAgBgNVBAMTGUNB -IENlcnQgU2lnbmluZyBBdXRob3JpdHkxITAfBgkqhkiG9w0BCQEWEnN1cHBvcnRA -Y2FjZXJ0Lm9yZzAeFw0wMzAzMzAxMjI5NDlaFw0zMzAzMjkxMjI5NDlaMHkxEDAO -BgNVBAoTB1Jvb3QgQ0ExHjAcBgNVBAsTFWh0dHA6Ly93d3cuY2FjZXJ0Lm9yZzEi -MCAGA1UEAxMZQ0EgQ2VydCBTaWduaW5nIEF1dGhvcml0eTEhMB8GCSqGSIb3DQEJ -ARYSc3VwcG9ydEBjYWNlcnQub3JnMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIIC -CgKCAgEAziLA4kZ97DYoB1CW8qAzQIxL8TtmPzHlawI229Z89vGIj053NgVBlfkJ -8BLPRoZzYLdufujAWGSuzbCtRRcMY/pnCujW0r8+55jE8Ez64AO7NV1sId6eINm6 -zWYyN3L69wj1x81YyY7nDl7qPv4coRQKFWyGhFtkZip6qUtTefWIonvuLwphK42y -fk1WpRPs6tqSnqxEQR5YYGUFZvjARL3LlPdCfgv3ZWiYUQXw8wWRBB0bF4LsyFe7 -w2t6iPGwcswlWyCR7BYCEo8y6RcYSNDHBS4CMEK4JZwFaz+qOqfrU0j36NK2B5jc 
-G8Y0f3/JHIJ6BVgrCFvzOKKrF11myZjXnhCLotLddJr3cQxyYN/Nb5gznZY0dj4k -epKwDpUeb+agRThHqtdB7Uq3EvbXG4OKDy7YCbZZ16oE/9KTfWgu3YtLq1i6L43q -laegw1SJpfvbi1EinbLDvhG+LJGGi5Z4rSDTii8aP8bQUWWHIbEZAWV/RRyH9XzQ -QUxPKZgh/TMfdQwEUfoZd9vUFBzugcMd9Zi3aQaRIt0AUMyBMawSB3s42mhb5ivU -fslfrejrckzzAeVLIL+aplfKkQABi6F1ITe1Yw1nPkZPcCBnzsXWWdsC4PDSy826 -YreQQejdIOQpvGQpQsgi3Hia/0PsmBsJUUtaWsJx8cTLc6nloQsCAwEAAaOCAc4w -ggHKMB0GA1UdDgQWBBQWtTIb1Mfz4OaO873SsDrusjkY0TCBowYDVR0jBIGbMIGY -gBQWtTIb1Mfz4OaO873SsDrusjkY0aF9pHsweTEQMA4GA1UEChMHUm9vdCBDQTEe -MBwGA1UECxMVaHR0cDovL3d3dy5jYWNlcnQub3JnMSIwIAYDVQQDExlDQSBDZXJ0 -IFNpZ25pbmcgQXV0aG9yaXR5MSEwHwYJKoZIhvcNAQkBFhJzdXBwb3J0QGNhY2Vy -dC5vcmeCAQAwDwYDVR0TAQH/BAUwAwEB/zAyBgNVHR8EKzApMCegJaAjhiFodHRw -czovL3d3dy5jYWNlcnQub3JnL3Jldm9rZS5jcmwwMAYJYIZIAYb4QgEEBCMWIWh0 -dHBzOi8vd3d3LmNhY2VydC5vcmcvcmV2b2tlLmNybDA0BglghkgBhvhCAQgEJxYl -aHR0cDovL3d3dy5jYWNlcnQub3JnL2luZGV4LnBocD9pZD0xMDBWBglghkgBhvhC -AQ0ESRZHVG8gZ2V0IHlvdXIgb3duIGNlcnRpZmljYXRlIGZvciBGUkVFIGhlYWQg -b3ZlciB0byBodHRwOi8vd3d3LmNhY2VydC5vcmcwDQYJKoZIhvcNAQEEBQADggIB -ACjH7pyCArpcgBLKNQodgW+JapnM8mgPf6fhjViVPr3yBsOQWqy1YPaZQwGjiHCc -nWKdpIevZ1gNMDY75q1I08t0AoZxPuIrA2jxNGJARjtT6ij0rPtmlVOKTV39O9lg -18p5aTuxZZKmxoGCXJzN600BiqXfEVWqFcofN8CCmHBh22p8lqOOLlQ+TyGpkO/c -gr/c6EWtTZBzCDyUZbAEmXZ/4rzCahWqlwQ3JNgelE5tDlG+1sSPypZt90Pf6DBl -Jzt7u0NDY8RD97LsaMzhGY4i+5jhe1o+ATc7iwiwovOVThrLm82asduycPAtStvY -sONvRUgzEv/+PDIqVPfE94rwiCPCR/5kenHA0R6mY7AHfqQv0wGP3J8rtsYIqQ+T -SCX8Ev2fQtzzxD72V7DX3WnRBnc0CkvSyqD/HMaMyRa+xMwyN2hzXwj7UfdJUzYF -CpUCTPJ5GhD22Dp1nPMd8aINcGeGG7MW9S/lpOt5hvk9C8JzC6WZrG/8Z7jlLwum -GCSNe9FINSkYQKyTYOGWhlC0elnYjyELn8+CkcY7v2vcB5G5l1YjqrZslMZIBjzk -zk6q5PYvCdxTby78dOs6Y5nCpqyJvKeyRKANihDjbPIky/qbn3BHLt4Ui9SyIAmW -omTxJBzcoTWcFbLUvFUufQb1nA5V9FrWk9p2rSVzTMVD ------END CERTIFICATE----- diff --git a/lib-python/2.7/test/selfsigned_pythontestdotnet.pem b/lib-python/2.7/test/selfsigned_pythontestdotnet.pem --- a/lib-python/2.7/test/selfsigned_pythontestdotnet.pem +++ 
b/lib-python/2.7/test/selfsigned_pythontestdotnet.pem @@ -1,5 +1,5 @@ -----BEGIN CERTIFICATE----- -MIIChzCCAfCgAwIBAgIJAKGU95wKR8pSMA0GCSqGSIb3DQEBBQUAMHAxCzAJBgNV +MIIClTCCAf6gAwIBAgIJAKGU95wKR8pTMA0GCSqGSIb3DQEBBQUAMHAxCzAJBgNV BAYTAlhZMRcwFQYDVQQHDA5DYXN0bGUgQW50aHJheDEjMCEGA1UECgwaUHl0aG9u IFNvZnR3YXJlIEZvdW5kYXRpb24xIzAhBgNVBAMMGnNlbGYtc2lnbmVkLnB5dGhv bnRlc3QubmV0MB4XDTE0MTEwMjE4MDkyOVoXDTI0MTAzMDE4MDkyOVowcDELMAkG @@ -8,9 +8,9 @@ aG9udGVzdC5uZXQwgZ8wDQYJKoZIhvcNAQEBBQADgY0AMIGJAoGBANDXQXW9tjyZ Xt0Iv2tLL1+jinr4wGg36ioLDLFkMf+2Y1GL0v0BnKYG4N1OKlAU15LXGeGer8vm Sv/yIvmdrELvhAbbo3w4a9TMYQA4XkIVLdvu3mvNOAet+8PMJxn26dbDhG809ALv -EHY57lQsBS3G59RZyBPVqAqmImWNJnVzAgMBAAGjKTAnMCUGA1UdEQQeMByCGnNl -bGYtc2lnbmVkLnB5dGhvbnRlc3QubmV0MA0GCSqGSIb3DQEBBQUAA4GBAIOXmdtM -eG9qzP9TiXW/Gc/zI4cBfdCpC+Y4gOfC9bQUC7hefix4iO3+iZjgy3X/FaRxUUoV -HKiXcXIaWqTSUWp45cSh0MbwZXudp6JIAptzdAhvvCrPKeC9i9GvxsPD4LtDAL97 -vSaxQBezA7hdxZd90/EeyMgVZgAnTCnvAWX9 +EHY57lQsBS3G59RZyBPVqAqmImWNJnVzAgMBAAGjNzA1MCUGA1UdEQQeMByCGnNl +bGYtc2lnbmVkLnB5dGhvbnRlc3QubmV0MAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcN +AQEFBQADgYEAIuzAhgMouJpNdf3URCHIineyoSt6WK/9+eyUcjlKOrDoXNZaD72h +TXMeKYoWvJyVcSLKL8ckPtDobgP2OTt0UkyAaj0n+ZHaqq1lH2yVfGUA1ILJv515 +C8BqbvVZuqm3i7ygmw3bqE/lYMgOrYtXXnqOrz6nvsE6Yc9V9rFflOM= -----END CERTIFICATE----- diff --git a/lib-python/2.7/test/test_ssl.py b/lib-python/2.7/test/test_ssl.py --- a/lib-python/2.7/test/test_ssl.py +++ b/lib-python/2.7/test/test_ssl.py @@ -57,7 +57,8 @@ SIGNED_CERTFILE2 = data_file("keycert4.pem") SIGNING_CA = data_file("pycacert.pem") -SVN_PYTHON_ORG_ROOT_CERT = data_file("https_svn_python_org_root.pem") +REMOTE_HOST = "self-signed.pythontest.net" +REMOTE_ROOT_CERT = data_file("selfsigned_pythontestdotnet.pem") EMPTYCERT = data_file("nullcert.pem") BADCERT = data_file("badcert.pem") @@ -244,7 +245,7 @@ self.assertEqual(p['subjectAltName'], san) def test_DER_to_PEM(self): - with open(SVN_PYTHON_ORG_ROOT_CERT, 'r') as f: + with open(CAFILE_CACERT, 'r') as f: pem = f.read() d1 = 
ssl.PEM_cert_to_DER_cert(pem) p2 = ssl.DER_cert_to_PEM_cert(d1) @@ -792,7 +793,7 @@ # Mismatching key and cert ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1) with self.assertRaisesRegexp(ssl.SSLError, "key values mismatch"): - ctx.load_cert_chain(SVN_PYTHON_ORG_ROOT_CERT, ONLYKEY) + ctx.load_cert_chain(CAFILE_CACERT, ONLYKEY) # Password protected key and cert ctx.load_cert_chain(CERTFILE_PROTECTED, password=KEY_PASSWORD) ctx.load_cert_chain(CERTFILE_PROTECTED, password=KEY_PASSWORD.encode()) @@ -1013,7 +1014,7 @@ ctx.load_verify_locations(CERTFILE) self.assertEqual(ctx.cert_store_stats(), {'x509_ca': 0, 'crl': 0, 'x509': 1}) - ctx.load_verify_locations(SVN_PYTHON_ORG_ROOT_CERT) + ctx.load_verify_locations(CAFILE_CACERT) self.assertEqual(ctx.cert_store_stats(), {'x509_ca': 1, 'crl': 0, 'x509': 2}) @@ -1023,8 +1024,8 @@ # CERTFILE is not flagged as X509v3 Basic Constraints: CA:TRUE ctx.load_verify_locations(CERTFILE) self.assertEqual(ctx.get_ca_certs(), []) - # but SVN_PYTHON_ORG_ROOT_CERT is a CA cert - ctx.load_verify_locations(SVN_PYTHON_ORG_ROOT_CERT) + # but CAFILE_CACERT is a CA cert + ctx.load_verify_locations(CAFILE_CACERT) self.assertEqual(ctx.get_ca_certs(), [{'issuer': ((('organizationName', 'Root CA'),), (('organizationalUnitName', 'http://www.cacert.org'),), @@ -1040,7 +1041,7 @@ (('emailAddress', 'support at cacert.org'),)), 'version': 3}]) - with open(SVN_PYTHON_ORG_ROOT_CERT) as f: + with open(CAFILE_CACERT) as f: pem = f.read() der = ssl.PEM_cert_to_DER_cert(pem) self.assertEqual(ctx.get_ca_certs(True), [der]) @@ -1215,11 +1216,11 @@ class NetworkedTests(unittest.TestCase): def test_connect(self): - with support.transient_internet("svn.python.org"): + with support.transient_internet(REMOTE_HOST): s = ssl.wrap_socket(socket.socket(socket.AF_INET), cert_reqs=ssl.CERT_NONE) try: - s.connect(("svn.python.org", 443)) + s.connect((REMOTE_HOST, 443)) self.assertEqual({}, s.getpeercert()) finally: s.close() @@ -1228,27 +1229,27 @@ s = 
ssl.wrap_socket(socket.socket(socket.AF_INET), cert_reqs=ssl.CERT_REQUIRED) self.assertRaisesRegexp(ssl.SSLError, "certificate verify failed", - s.connect, ("svn.python.org", 443)) + s.connect, (REMOTE_HOST, 443)) s.close() # this should succeed because we specify the root cert s = ssl.wrap_socket(socket.socket(socket.AF_INET), cert_reqs=ssl.CERT_REQUIRED, - ca_certs=SVN_PYTHON_ORG_ROOT_CERT) + ca_certs=REMOTE_ROOT_CERT) try: - s.connect(("svn.python.org", 443)) + s.connect((REMOTE_HOST, 443)) self.assertTrue(s.getpeercert()) finally: s.close() def test_connect_ex(self): # Issue #11326: check connect_ex() implementation - with support.transient_internet("svn.python.org"): + with support.transient_internet(REMOTE_HOST): s = ssl.wrap_socket(socket.socket(socket.AF_INET), cert_reqs=ssl.CERT_REQUIRED, - ca_certs=SVN_PYTHON_ORG_ROOT_CERT) + ca_certs=REMOTE_ROOT_CERT) try: - self.assertEqual(0, s.connect_ex(("svn.python.org", 443))) + self.assertEqual(0, s.connect_ex((REMOTE_HOST, 443))) self.assertTrue(s.getpeercert()) finally: s.close() @@ -1256,14 +1257,14 @@ def test_non_blocking_connect_ex(self): # Issue #11326: non-blocking connect_ex() should allow handshake # to proceed after the socket gets ready. - with support.transient_internet("svn.python.org"): + with support.transient_internet(REMOTE_HOST): s = ssl.wrap_socket(socket.socket(socket.AF_INET), cert_reqs=ssl.CERT_REQUIRED, - ca_certs=SVN_PYTHON_ORG_ROOT_CERT, + ca_certs=REMOTE_ROOT_CERT, do_handshake_on_connect=False) try: s.setblocking(False) - rc = s.connect_ex(('svn.python.org', 443)) + rc = s.connect_ex((REMOTE_HOST, 443)) # EWOULDBLOCK under Windows, EINPROGRESS elsewhere self.assertIn(rc, (0, errno.EINPROGRESS, errno.EWOULDBLOCK)) # Wait for connect to finish @@ -1285,58 +1286,62 @@ def test_timeout_connect_ex(self): # Issue #12065: on a timeout, connect_ex() should return the original # errno (mimicking the behaviour of non-SSL sockets). 
- with support.transient_internet("svn.python.org"): + with support.transient_internet(REMOTE_HOST): s = ssl.wrap_socket(socket.socket(socket.AF_INET), cert_reqs=ssl.CERT_REQUIRED, - ca_certs=SVN_PYTHON_ORG_ROOT_CERT, + ca_certs=REMOTE_ROOT_CERT, do_handshake_on_connect=False) try: s.settimeout(0.0000001) - rc = s.connect_ex(('svn.python.org', 443)) + rc = s.connect_ex((REMOTE_HOST, 443)) if rc == 0: - self.skipTest("svn.python.org responded too quickly") + self.skipTest("REMOTE_HOST responded too quickly") self.assertIn(rc, (errno.EAGAIN, errno.EWOULDBLOCK)) finally: s.close() def test_connect_ex_error(self): - with support.transient_internet("svn.python.org"): + with support.transient_internet(REMOTE_HOST): s = ssl.wrap_socket(socket.socket(socket.AF_INET), cert_reqs=ssl.CERT_REQUIRED, - ca_certs=SVN_PYTHON_ORG_ROOT_CERT) + ca_certs=REMOTE_ROOT_CERT) try: - rc = s.connect_ex(("svn.python.org", 444)) + rc = s.connect_ex((REMOTE_HOST, 444)) # Issue #19919: Windows machines or VMs hosted on Windows # machines sometimes return EWOULDBLOCK. 
- self.assertIn(rc, (errno.ECONNREFUSED, errno.EWOULDBLOCK)) + errors = ( + errno.ECONNREFUSED, errno.EHOSTUNREACH, errno.ETIMEDOUT, + errno.EWOULDBLOCK, + ) + self.assertIn(rc, errors) finally: s.close() def test_connect_with_context(self): - with support.transient_internet("svn.python.org"): + with support.transient_internet(REMOTE_HOST): # Same as test_connect, but with a separately created context ctx = ssl.SSLContext(ssl.PROTOCOL_SSLv23) s = ctx.wrap_socket(socket.socket(socket.AF_INET)) - s.connect(("svn.python.org", 443)) + s.connect((REMOTE_HOST, 443)) try: self.assertEqual({}, s.getpeercert()) finally: s.close() # Same with a server hostname s = ctx.wrap_socket(socket.socket(socket.AF_INET), - server_hostname="svn.python.org") - s.connect(("svn.python.org", 443)) + server_hostname=REMOTE_HOST) + s.connect((REMOTE_HOST, 443)) s.close() # This should fail because we have no verification certs ctx.verify_mode = ssl.CERT_REQUIRED s = ctx.wrap_socket(socket.socket(socket.AF_INET)) self.assertRaisesRegexp(ssl.SSLError, "certificate verify failed", - s.connect, ("svn.python.org", 443)) + s.connect, (REMOTE_HOST, 443)) s.close() # This should succeed because we specify the root cert - ctx.load_verify_locations(SVN_PYTHON_ORG_ROOT_CERT) + ctx.load_verify_locations(REMOTE_ROOT_CERT) s = ctx.wrap_socket(socket.socket(socket.AF_INET)) - s.connect(("svn.python.org", 443)) + s.connect((REMOTE_HOST, 443)) try: cert = s.getpeercert() self.assertTrue(cert) @@ -1349,12 +1354,12 @@ # OpenSSL 0.9.8n and 1.0.0, as a result the capath directory must # contain both versions of each certificate (same content, different # filename) for this test to be portable across OpenSSL releases. 
- with support.transient_internet("svn.python.org"): + with support.transient_internet(REMOTE_HOST): ctx = ssl.SSLContext(ssl.PROTOCOL_SSLv23) ctx.verify_mode = ssl.CERT_REQUIRED ctx.load_verify_locations(capath=CAPATH) s = ctx.wrap_socket(socket.socket(socket.AF_INET)) - s.connect(("svn.python.org", 443)) + s.connect((REMOTE_HOST, 443)) try: cert = s.getpeercert() self.assertTrue(cert) @@ -1365,7 +1370,7 @@ ctx.verify_mode = ssl.CERT_REQUIRED ctx.load_verify_locations(capath=BYTES_CAPATH) s = ctx.wrap_socket(socket.socket(socket.AF_INET)) - s.connect(("svn.python.org", 443)) + s.connect((REMOTE_HOST, 443)) try: cert = s.getpeercert() self.assertTrue(cert) @@ -1373,15 +1378,15 @@ s.close() def test_connect_cadata(self): - with open(CAFILE_CACERT) as f: + with open(REMOTE_ROOT_CERT) as f: pem = f.read().decode('ascii') der = ssl.PEM_cert_to_DER_cert(pem) - with support.transient_internet("svn.python.org"): + with support.transient_internet(REMOTE_HOST): ctx = ssl.SSLContext(ssl.PROTOCOL_SSLv23) ctx.verify_mode = ssl.CERT_REQUIRED ctx.load_verify_locations(cadata=pem) with closing(ctx.wrap_socket(socket.socket(socket.AF_INET))) as s: - s.connect(("svn.python.org", 443)) + s.connect((REMOTE_HOST, 443)) cert = s.getpeercert() self.assertTrue(cert) @@ -1390,7 +1395,7 @@ ctx.verify_mode = ssl.CERT_REQUIRED ctx.load_verify_locations(cadata=der) with closing(ctx.wrap_socket(socket.socket(socket.AF_INET))) as s: - s.connect(("svn.python.org", 443)) + s.connect((REMOTE_HOST, 443)) cert = s.getpeercert() self.assertTrue(cert) @@ -1399,9 +1404,9 @@ # Issue #5238: creating a file-like object with makefile() shouldn't # delay closing the underlying "real socket" (here tested with its # file descriptor, hence skipping the test under Windows). 
- with support.transient_internet("svn.python.org"): + with support.transient_internet(REMOTE_HOST): ss = ssl.wrap_socket(socket.socket(socket.AF_INET)) - ss.connect(("svn.python.org", 443)) + ss.connect((REMOTE_HOST, 443)) fd = ss.fileno() f = ss.makefile() f.close() @@ -1415,9 +1420,9 @@ self.assertEqual(e.exception.errno, errno.EBADF) def test_non_blocking_handshake(self): - with support.transient_internet("svn.python.org"): + with support.transient_internet(REMOTE_HOST): s = socket.socket(socket.AF_INET) - s.connect(("svn.python.org", 443)) + s.connect((REMOTE_HOST, 443)) s.setblocking(False) s = ssl.wrap_socket(s, cert_reqs=ssl.CERT_NONE, @@ -1460,12 +1465,12 @@ if support.verbose: sys.stdout.write("\nVerified certificate for %s:%s is\n%s\n" % (host, port ,pem)) - _test_get_server_certificate('svn.python.org', 443, SVN_PYTHON_ORG_ROOT_CERT) + _test_get_server_certificate(REMOTE_HOST, 443, REMOTE_ROOT_CERT) if support.IPV6_ENABLED: _test_get_server_certificate('ipv6.google.com', 443) def test_ciphers(self): - remote = ("svn.python.org", 443) + remote = (REMOTE_HOST, 443) with support.transient_internet(remote[0]): with closing(ssl.wrap_socket(socket.socket(socket.AF_INET), cert_reqs=ssl.CERT_NONE, ciphers="ALL")) as s: @@ -1510,13 +1515,13 @@ def test_get_ca_certs_capath(self): # capath certs are loaded on request - with support.transient_internet("svn.python.org"): + with support.transient_internet(REMOTE_HOST): ctx = ssl.SSLContext(ssl.PROTOCOL_SSLv23) ctx.verify_mode = ssl.CERT_REQUIRED ctx.load_verify_locations(capath=CAPATH) self.assertEqual(ctx.get_ca_certs(), []) s = ctx.wrap_socket(socket.socket(socket.AF_INET)) - s.connect(("svn.python.org", 443)) + s.connect((REMOTE_HOST, 443)) try: cert = s.getpeercert() self.assertTrue(cert) @@ -1527,12 +1532,12 @@ @needs_sni def test_context_setget(self): # Check that the context of a connected socket can be replaced. 
- with support.transient_internet("svn.python.org"): + with support.transient_internet(REMOTE_HOST): ctx1 = ssl.SSLContext(ssl.PROTOCOL_TLSv1) ctx2 = ssl.SSLContext(ssl.PROTOCOL_SSLv23) s = socket.socket(socket.AF_INET) with closing(ctx1.wrap_socket(s)) as ss: - ss.connect(("svn.python.org", 443)) + ss.connect((REMOTE_HOST, 443)) self.assertIs(ss.context, ctx1) self.assertIs(ss._sslobj.context, ctx1) ss.context = ctx2 @@ -3026,7 +3031,7 @@ pass for filename in [ - CERTFILE, SVN_PYTHON_ORG_ROOT_CERT, BYTES_CERTFILE, + CERTFILE, REMOTE_ROOT_CERT, BYTES_CERTFILE, ONLYCERT, ONLYKEY, BYTES_ONLYCERT, BYTES_ONLYKEY, SIGNED_CERTFILE, SIGNED_CERTFILE2, SIGNING_CA, BADCERT, BADKEY, EMPTYCERT]: diff --git a/lib_pypy/cffi.egg-info/PKG-INFO b/lib_pypy/cffi.egg-info/PKG-INFO --- a/lib_pypy/cffi.egg-info/PKG-INFO +++ b/lib_pypy/cffi.egg-info/PKG-INFO @@ -1,6 +1,6 @@ Metadata-Version: 1.1 Name: cffi -Version: 1.4.0 +Version: 1.5.0 Summary: Foreign Function Interface for Python calling C code. Home-page: http://cffi.readthedocs.org Author: Armin Rigo, Maciej Fijalkowski diff --git a/lib_pypy/cffi/__init__.py b/lib_pypy/cffi/__init__.py --- a/lib_pypy/cffi/__init__.py +++ b/lib_pypy/cffi/__init__.py @@ -4,8 +4,8 @@ from .api import FFI, CDefError, FFIError from .ffiplatform import VerificationError, VerificationMissing -__version__ = "1.4.0" -__version_info__ = (1, 4, 0) +__version__ = "1.5.0" +__version_info__ = (1, 5, 0) # The verifier module file names are based on the CRC32 of a string that # contains the following version number. 
It may be older than __version__ diff --git a/lib_pypy/cffi/_cffi_include.h b/lib_pypy/cffi/_cffi_include.h --- a/lib_pypy/cffi/_cffi_include.h +++ b/lib_pypy/cffi/_cffi_include.h @@ -146,8 +146,9 @@ ((Py_ssize_t(*)(CTypeDescrObject *, PyObject *, char **))_cffi_exports[23]) #define _cffi_convert_array_from_object \ ((int(*)(char *, CTypeDescrObject *, PyObject *))_cffi_exports[24]) +#define _CFFI_CPIDX 25 #define _cffi_call_python \ - ((void(*)(struct _cffi_externpy_s *, char *))_cffi_exports[25]) + ((void(*)(struct _cffi_externpy_s *, char *))_cffi_exports[_CFFI_CPIDX]) #define _CFFI_NUM_EXPORTS 26 typedef struct _ctypedescr CTypeDescrObject; @@ -206,7 +207,8 @@ /********** end CPython-specific section **********/ #else _CFFI_UNUSED_FN -static void (*_cffi_call_python)(struct _cffi_externpy_s *, char *); +static void (*_cffi_call_python_org)(struct _cffi_externpy_s *, char *); +# define _cffi_call_python _cffi_call_python_org #endif diff --git a/lib_pypy/cffi/api.py b/lib_pypy/cffi/api.py --- a/lib_pypy/cffi/api.py +++ b/lib_pypy/cffi/api.py @@ -74,6 +74,7 @@ self._windows_unicode = None self._init_once_cache = {} self._cdef_version = None + self._embedding = None if hasattr(backend, 'set_ffi'): backend.set_ffi(self) for name in backend.__dict__: @@ -101,13 +102,21 @@ If 'packed' is specified as True, all structs declared inside this cdef are packed, i.e. laid out without any field alignment at all. 
""" + self._cdef(csource, override=override, packed=packed) + + def embedding_api(self, csource, packed=False): + self._cdef(csource, packed=packed, dllexport=True) + if self._embedding is None: + self._embedding = '' + + def _cdef(self, csource, override=False, **options): if not isinstance(csource, str): # unicode, on Python 2 if not isinstance(csource, basestring): raise TypeError("cdef() argument must be a string") csource = csource.encode('ascii') with self._lock: self._cdef_version = object() - self._parser.parse(csource, override=override, packed=packed) + self._parser.parse(csource, override=override, **options) self._cdefsources.append(csource) if override: for cache in self._function_caches: @@ -533,6 +542,31 @@ ('_UNICODE', '1')] kwds['define_macros'] = defmacros + def _apply_embedding_fix(self, kwds): + # must include an argument like "-lpython2.7" for the compiler + if '__pypy__' in sys.builtin_module_names: + if hasattr(sys, 'prefix'): + import os + libdir = os.path.join(sys.prefix, 'bin') + dirs = kwds.setdefault('library_dirs', []) + if libdir not in dirs: + dirs.append(libdir) + pythonlib = "pypy-c" + else: + if sys.platform == "win32": + template = "python%d%d" + if sys.flags.debug: + template = template + '_d' + else: + template = "python%d.%d" + pythonlib = (template % + (sys.hexversion >> 24, (sys.hexversion >> 16) & 0xff)) + if hasattr(sys, 'abiflags'): + pythonlib += sys.abiflags + libraries = kwds.setdefault('libraries', []) + if pythonlib not in libraries: + libraries.append(pythonlib) + def set_source(self, module_name, source, source_extension='.c', **kwds): if hasattr(self, '_assigned_source'): raise ValueError("set_source() cannot be called several times " @@ -592,14 +626,23 @@ recompile(self, module_name, source, c_file=filename, call_c_compiler=False, **kwds) - def compile(self, tmpdir='.', verbose=0): + def compile(self, tmpdir='.', verbose=0, target=None): + """The 'target' argument gives the final file name of the + compiled DLL. 
Use '*' to force distutils' choice, suitable for + regular CPython C API modules. Use a file name ending in '.*' + to ask for the system's default extension for dynamic libraries + (.so/.dll). + + The default is '*' when building a non-embedded C API extension, + and (module_name + '.*') when building an embedded library. + """ from .recompiler import recompile # if not hasattr(self, '_assigned_source'): raise ValueError("set_source() must be called before compile()") module_name, source, source_extension, kwds = self._assigned_source return recompile(self, module_name, source, tmpdir=tmpdir, - source_extension=source_extension, + target=target, source_extension=source_extension, compiler_verbose=verbose, **kwds) def init_once(self, func, tag): @@ -626,6 +669,32 @@ self._init_once_cache[tag] = (True, result) return result + def embedding_init_code(self, pysource): + if self._embedding: + raise ValueError("embedding_init_code() can only be called once") + # fix 'pysource' before it gets dumped into the C file: + # - remove empty lines at the beginning, so it starts at "line 1" + # - dedent, if all non-empty lines are indented + # - check for SyntaxErrors + import re + match = re.match(r'\s*\n', pysource) + if match: + pysource = pysource[match.end():] + lines = pysource.splitlines() or [''] + prefix = re.match(r'\s*', lines[0]).group() + for i in range(1, len(lines)): + line = lines[i] + if line.rstrip(): + while not line.startswith(prefix): + prefix = prefix[:-1] + i = len(prefix) + lines = [line[i:]+'\n' for line in lines] + pysource = ''.join(lines) + # + compile(pysource, "cffi_init", "exec") + # + self._embedding = pysource + def _load_backend_lib(backend, name, flags): if name is None: diff --git a/lib_pypy/cffi/cparser.py b/lib_pypy/cffi/cparser.py --- a/lib_pypy/cffi/cparser.py +++ b/lib_pypy/cffi/cparser.py @@ -220,8 +220,7 @@ self._included_declarations = set() self._anonymous_counter = 0 self._structnode2type = weakref.WeakKeyDictionary() - self._override 
= False - self._packed = False + self._options = None self._int_constants = {} self._recomplete = [] self._uses_new_feature = None @@ -281,16 +280,15 @@ msg = 'parse error\n%s' % (msg,) raise api.CDefError(msg) - def parse(self, csource, override=False, packed=False): - prev_override = self._override - prev_packed = self._packed + def parse(self, csource, override=False, packed=False, dllexport=False): + prev_options = self._options try: - self._override = override - self._packed = packed + self._options = {'override': override, + 'packed': packed, + 'dllexport': dllexport} self._internal_parse(csource) finally: - self._override = prev_override - self._packed = prev_packed + self._options = prev_options def _internal_parse(self, csource): ast, macros, csource = self._parse(csource) @@ -376,10 +374,13 @@ def _declare_function(self, tp, quals, decl): tp = self._get_type_pointer(tp, quals) - if self._inside_extern_python: - self._declare('extern_python ' + decl.name, tp) + if self._options['dllexport']: + tag = 'dllexport_python ' + elif self._inside_extern_python: + tag = 'extern_python ' else: - self._declare('function ' + decl.name, tp) + tag = 'function ' + self._declare(tag + decl.name, tp) def _parse_decl(self, decl): node = decl.type @@ -449,7 +450,7 @@ prevobj, prevquals = self._declarations[name] if prevobj is obj and prevquals == quals: return - if not self._override: + if not self._options['override']: raise api.FFIError( "multiple declarations of %s (for interactive usage, " "try cdef(xx, override=True))" % (name,)) @@ -728,7 +729,7 @@ if isinstance(tp, model.StructType) and tp.partial: raise NotImplementedError("%s: using both bitfields and '...;'" % (tp,)) - tp.packed = self._packed + tp.packed = self._options['packed'] if tp.completed: # must be re-completed: it is not opaque any more tp.completed = 0 self._recomplete.append(tp) diff --git a/lib_pypy/cffi/ffiplatform.py b/lib_pypy/cffi/ffiplatform.py --- a/lib_pypy/cffi/ffiplatform.py +++ 
b/lib_pypy/cffi/ffiplatform.py @@ -21,12 +21,14 @@ allsources.append(os.path.normpath(src)) return Extension(name=modname, sources=allsources, **kwds) -def compile(tmpdir, ext, compiler_verbose=0): +def compile(tmpdir, ext, compiler_verbose=0, target_extension=None, + embedding=False): """Compile a C extension module using distutils.""" saved_environ = os.environ.copy() try: - outputfilename = _build(tmpdir, ext, compiler_verbose) + outputfilename = _build(tmpdir, ext, compiler_verbose, + target_extension, embedding) outputfilename = os.path.abspath(outputfilename) finally: # workaround for a distutils bugs where some env vars can @@ -36,7 +38,32 @@ os.environ[key] = value return outputfilename -def _build(tmpdir, ext, compiler_verbose=0): +def _save_val(name): + import distutils.sysconfig + config_vars = distutils.sysconfig.get_config_vars() + return config_vars.get(name, Ellipsis) + +def _restore_val(name, value): + import distutils.sysconfig + config_vars = distutils.sysconfig.get_config_vars() + config_vars[name] = value + if value is Ellipsis: + del config_vars[name] + +def _win32_hack_for_embedding(): + from distutils.msvc9compiler import MSVCCompiler + if not hasattr(MSVCCompiler, '_remove_visual_c_ref_CFFI_BAK'): + MSVCCompiler._remove_visual_c_ref_CFFI_BAK = \ + MSVCCompiler._remove_visual_c_ref + MSVCCompiler._remove_visual_c_ref = lambda self,manifest_file: manifest_file + +def _win32_unhack_for_embedding(): + from distutils.msvc9compiler import MSVCCompiler + MSVCCompiler._remove_visual_c_ref = \ + MSVCCompiler._remove_visual_c_ref_CFFI_BAK + +def _build(tmpdir, ext, compiler_verbose=0, target_extension=None, + embedding=False): # XXX compact but horrible :-( from distutils.core import Distribution import distutils.errors, distutils.log @@ -49,18 +76,29 @@ options['build_temp'] = ('ffiplatform', tmpdir) # try: + if sys.platform == 'win32' and embedding: + _win32_hack_for_embedding() old_level = distutils.log.set_threshold(0) or 0 + old_SO = 
_save_val('SO') + old_EXT_SUFFIX = _save_val('EXT_SUFFIX') try: + if target_extension is not None: + _restore_val('SO', target_extension) + _restore_val('EXT_SUFFIX', target_extension) distutils.log.set_verbosity(compiler_verbose) dist.run_command('build_ext') + cmd_obj = dist.get_command_obj('build_ext') + [soname] = cmd_obj.get_outputs() finally: distutils.log.set_threshold(old_level) + _restore_val('SO', old_SO) + _restore_val('EXT_SUFFIX', old_EXT_SUFFIX) + if sys.platform == 'win32' and embedding: + _win32_unhack_for_embedding() except (distutils.errors.CompileError, distutils.errors.LinkError) as e: raise VerificationError('%s: %s' % (e.__class__.__name__, e)) # - cmd_obj = dist.get_command_obj('build_ext') - [soname] = cmd_obj.get_outputs() return soname try: diff --git a/lib_pypy/cffi/recompiler.py b/lib_pypy/cffi/recompiler.py --- a/lib_pypy/cffi/recompiler.py +++ b/lib_pypy/cffi/recompiler.py @@ -3,6 +3,7 @@ from .cffi_opcode import * VERSION = "0x2601" +VERSION_EMBEDDED = "0x2701" class GlobalExpr: @@ -281,6 +282,29 @@ lines[i:i+1] = self._rel_readlines('parse_c_type.h') prnt(''.join(lines)) # + # if we have ffi._embedding != None, we give it here as a macro + # and include an extra file + base_module_name = self.module_name.split('.')[-1] + if self.ffi._embedding is not None: + prnt('#define _CFFI_MODULE_NAME "%s"' % (self.module_name,)) + prnt('#define _CFFI_PYTHON_STARTUP_CODE %s' % + (self._string_literal(self.ffi._embedding),)) + prnt('#ifdef PYPY_VERSION') + prnt('# define _CFFI_PYTHON_STARTUP_FUNC _cffi_pypyinit_%s' % ( + base_module_name,)) + prnt('#elif PY_MAJOR_VERSION >= 3') + prnt('# define _CFFI_PYTHON_STARTUP_FUNC PyInit_%s' % ( + base_module_name,)) + prnt('#else') + prnt('# define _CFFI_PYTHON_STARTUP_FUNC init%s' % ( + base_module_name,)) + prnt('#endif') + lines = self._rel_readlines('_embedding.h') + prnt(''.join(lines)) + version = VERSION_EMBEDDED + else: + version = VERSION + # # then paste the C source given by the user, verbatim. 
prnt('/************************************************************/') prnt() @@ -365,17 +389,16 @@ prnt() # # the init function - base_module_name = self.module_name.split('.')[-1] prnt('#ifdef PYPY_VERSION') prnt('PyMODINIT_FUNC') prnt('_cffi_pypyinit_%s(const void *p[])' % (base_module_name,)) prnt('{') if self._num_externpy: prnt(' if (((intptr_t)p[0]) >= 0x0A03) {') - prnt(' _cffi_call_python = ' + prnt(' _cffi_call_python_org = ' '(void(*)(struct _cffi_externpy_s *, char *))p[1];') prnt(' }') - prnt(' p[0] = (const void *)%s;' % VERSION) + prnt(' p[0] = (const void *)%s;' % version) prnt(' p[1] = &_cffi_type_context;') prnt('}') # on Windows, distutils insists on putting init_cffi_xyz in @@ -394,14 +417,14 @@ prnt('PyInit_%s(void)' % (base_module_name,)) prnt('{') prnt(' return _cffi_init("%s", %s, &_cffi_type_context);' % ( - self.module_name, VERSION)) + self.module_name, version)) prnt('}') prnt('#else') prnt('PyMODINIT_FUNC') prnt('init%s(void)' % (base_module_name,)) prnt('{') prnt(' _cffi_init("%s", %s, &_cffi_type_context);' % ( - self.module_name, VERSION)) + self.module_name, version)) prnt('}') prnt('#endif') @@ -1123,7 +1146,10 @@ assert isinstance(tp, model.FunctionPtrType) self._do_collect_type(tp) - def _generate_cpy_extern_python_decl(self, tp, name): + def _generate_cpy_dllexport_python_collecttype(self, tp, name): + self._generate_cpy_extern_python_collecttype(tp, name) + + def _generate_cpy_extern_python_decl(self, tp, name, dllexport=False): prnt = self._prnt if isinstance(tp.result, model.VoidType): size_of_result = '0' @@ -1156,7 +1182,11 @@ size_of_a = 'sizeof(%s) > %d ? 
sizeof(%s) : %d' % ( tp.result.get_c_name(''), size_of_a, tp.result.get_c_name(''), size_of_a) - prnt('static %s' % tp.result.get_c_name(name_and_arguments)) + if dllexport: + tag = 'CFFI_DLLEXPORT' + else: + tag = 'static' + prnt('%s %s' % (tag, tp.result.get_c_name(name_and_arguments))) prnt('{') prnt(' char a[%s];' % size_of_a) prnt(' char *p = a;') @@ -1174,6 +1204,9 @@ prnt() self._num_externpy += 1 + def _generate_cpy_dllexport_python_decl(self, tp, name): + self._generate_cpy_extern_python_decl(tp, name, dllexport=True) + def _generate_cpy_extern_python_ctx(self, tp, name): if self.target_is_python: raise ffiplatform.VerificationError( @@ -1185,6 +1218,21 @@ self._lsts["global"].append( GlobalExpr(name, '&_cffi_externpy__%s' % name, type_op, name)) + def _generate_cpy_dllexport_python_ctx(self, tp, name): + self._generate_cpy_extern_python_ctx(tp, name) + + def _string_literal(self, s): + def _char_repr(c): + # escape with a '\' the characters '\', '"' or (for trigraphs) '?' + if c in '\\"?': return '\\' + c + if ' ' <= c < '\x7F': return c + if c == '\n': return '\\n' + return '\\%03o' % ord(c) + lines = [] + for line in s.splitlines(True): + lines.append('"%s"' % ''.join([_char_repr(c) for c in line])) + return ' \\\n'.join(lines) + # ---------- # emitting the opcodes for individual types @@ -1311,12 +1359,15 @@ def recompile(ffi, module_name, preamble, tmpdir='.', call_c_compiler=True, c_file=None, source_extension='.c', extradir=None, - compiler_verbose=1, **kwds): + compiler_verbose=1, target=None, **kwds): if not isinstance(module_name, str): module_name = module_name.encode('ascii') if ffi._windows_unicode: ffi._apply_windows_unicode(kwds) if preamble is not None: + embedding = (ffi._embedding is not None) + if embedding: + ffi._apply_embedding_fix(kwds) if c_file is None: c_file, parts = _modname_to_file(tmpdir, module_name, source_extension) @@ -1325,13 +1376,40 @@ ext_c_file = os.path.join(*parts) else: ext_c_file = c_file - ext = 
ffiplatform.get_extension(ext_c_file, module_name, **kwds) + # + if target is None: + if embedding: + target = '%s.*' % module_name + else: + target = '*' + if target == '*': + target_module_name = module_name + target_extension = None # use default + else: + if target.endswith('.*'): + target = target[:-2] + if sys.platform == 'win32': + target += '.dll' + else: + target += '.so' + # split along the first '.' (not the last one, otherwise the + # preceeding dots are interpreted as splitting package names) + index = target.find('.') + if index < 0: + raise ValueError("target argument %r should be a file name " + "containing a '.'" % (target,)) + target_module_name = target[:index] + target_extension = target[index:] + # + ext = ffiplatform.get_extension(ext_c_file, target_module_name, **kwds) updated = make_c_source(ffi, module_name, preamble, c_file) if call_c_compiler: cwd = os.getcwd() try: os.chdir(tmpdir) - outputfilename = ffiplatform.compile('.', ext, compiler_verbose) + outputfilename = ffiplatform.compile('.', ext, compiler_verbose, + target_extension, + embedding=embedding) finally: os.chdir(cwd) return outputfilename diff --git a/pypy/doc/embedding.rst b/pypy/doc/embedding.rst --- a/pypy/doc/embedding.rst +++ b/pypy/doc/embedding.rst @@ -10,6 +10,15 @@ with a ``libpypy-c.so`` or ``pypy-c.dll`` file. This is the default in recent versions of PyPy. +.. note:: + + The interface described in this page is kept for backward compatibility. + From PyPy 4.1, it is recommended to use instead CFFI's `native embedding + support,`__ which gives a simpler approach that works on CPython as well + as PyPy. + +.. __: http://cffi.readthedocs.org/en/latest/embedding.html + The resulting shared library exports very few functions, however they are enough to accomplish everything you need, provided you follow a few principles. The API is: @@ -130,8 +139,13 @@ More complete example --------------------- -.. 
note:: This example depends on pypy_execute_source_ptr which is not available - in PyPy <= 2.2.1. +.. note:: Note that we do not make use of ``extern "Python"``, the new + way to do callbacks in CFFI 1.4: this is because these examples use + the ABI mode, not the API mode, and with the ABI mode you still have + to use ``ffi.callback()``. It is work in progress to integrate + ``extern "Python"`` with the idea of embedding (and it is expected + to ultimately lead to a better way to do embedding than the one + described here, and that would work equally well on CPython and PyPy). Typically we need something more to do than simply execute source. The following is a fully fledged example, please consult cffi documentation for details. diff --git a/pypy/doc/faq.rst b/pypy/doc/faq.rst --- a/pypy/doc/faq.rst +++ b/pypy/doc/faq.rst @@ -54,7 +54,8 @@ It is quite common nowadays that xyz is available on PyPI_ and installable with ``pip install xyz``. The simplest solution is to `use virtualenv (as documented here)`_. Then enter (activate) the virtualenv -and type: ``pip install xyz``. +and type: ``pip install xyz``. If you don't know or don't want virtualenv, +you can also install ``pip`` globally by saying ``pypy -m ensurepip``. If you get errors from the C compiler, the module is a CPython C Extension module using unsupported features. `See below.`_ diff --git a/pypy/doc/getting-started-dev.rst b/pypy/doc/getting-started-dev.rst --- a/pypy/doc/getting-started-dev.rst +++ b/pypy/doc/getting-started-dev.rst @@ -19,7 +19,9 @@ * Clone this new repo (i.e. the fork) to your local machine with the command ``hg clone ssh://hg at bitbucket.org/yourname/pypy``. It is a very slow - operation but only ever needs to be done once. If you already cloned + operation but only ever needs to be done once. See also + http://pypy.org/download.html#building-from-source . 
+ If you already cloned ``https://bitbucket.org/pypy/pypy`` before, even if some time ago, then you can reuse the same clone by editing the file ``.hg/hgrc`` in your clone to contain the line ``default = diff --git a/pypy/doc/how-to-contribute.rst b/pypy/doc/how-to-contribute.rst --- a/pypy/doc/how-to-contribute.rst +++ b/pypy/doc/how-to-contribute.rst @@ -67,8 +67,8 @@ **module** directory contains extension modules written in RPython * **rpython compiler** that resides in ``rpython/annotator`` and - ``rpython/rtyper`` directories. Consult :doc:`introduction to RPython ` for - further reading + ``rpython/rtyper`` directories. Consult `Getting Started with RPython`_ + for further reading * **JIT generator** lives in ``rpython/jit`` directory. optimizations live in ``rpython/jit/metainterp/optimizeopt``, the main JIT in @@ -80,3 +80,14 @@ The rest of directories serve specific niche goal and are unlikely a good entry point. + + +More documentation +------------------ + +* `Getting Started Developing With PyPy`_ + +* `Getting Started with RPython`_ + +.. _`Getting Started Developing With PyPy`: getting-started-dev.html +.. _`Getting started with RPython`: http://rpython.readthedocs.org/en/latest/getting-started.html diff --git a/pypy/doc/stm.rst b/pypy/doc/stm.rst --- a/pypy/doc/stm.rst +++ b/pypy/doc/stm.rst @@ -83,30 +83,27 @@ **pypy-stm requires 64-bit Linux for now.** -Development is done in the branch `stmgc-c7`_. If you are only -interested in trying it out, you can download a Ubuntu binary here__ -(``pypy-stm-2.*.tar.bz2``, for Ubuntu 12.04-14.04). The current version -supports four "segments", which means that it will run up to four -threads in parallel. (Development recently switched to `stmgc-c8`_, -but that is not ready for trying out yet.) +Development is done in the branch `stmgc-c8`_. If you are only +interested in trying it out, please pester us until we upload a recent +prebuilt binary. 
The current version supports four "segments", which +means that it will run up to four threads in parallel. To build a version from sources, you first need to compile a custom -version of clang(!); we recommend downloading `llvm and clang like -described here`__, but at revision 201645 (use ``svn co -r 201645 `` -for all checkouts). Then apply all the patches in `this directory`__: -they are fixes for a clang-only feature that hasn't been used so heavily -in the past (without the patches, you get crashes of clang). Then get -the branch `stmgc-c7`_ of PyPy and run:: +version of gcc(!). See the instructions here: +https://bitbucket.org/pypy/stmgc/src/default/gcc-seg-gs/ +(Note that these patches are being incorporated into gcc. It is likely +that future versions of gcc will not need to be patched any more.) + +Then get the branch `stmgc-c8`_ of PyPy and run:: cd pypy/goal ../../rpython/bin/rpython -Ojit --stm - PYTHONPATH=. ./pypy-c pypy/tool/build_cffi_imports.py -.. _`stmgc-c7`: https://bitbucket.org/pypy/pypy/src/stmgc-c7/ +At the end, this will try to compile the generated C code by calling +``gcc-seg-gs``, which must be the script you installed in the +instructions above. + .. _`stmgc-c8`: https://bitbucket.org/pypy/pypy/src/stmgc-c8/ -.. __: https://bitbucket.org/pypy/pypy/downloads/ -.. __: http://clang.llvm.org/get_started.html -.. __: https://bitbucket.org/pypy/stmgc/src/default/c7/llvmfix/ .. _caveats: @@ -114,6 +111,12 @@ Current status (stmgc-c7) ------------------------- +.. warning:: + + THIS PAGE IS OLD, THE REST IS ABOUT STMGC-C7 WHEREAS THE CURRENT + DEVELOPMENT WORK IS DONE ON STMGC-C8 + + * **NEW:** It seems to work fine, without crashing any more. Please `report any crash`_ you find (or other bugs). diff --git a/pypy/doc/whatsnew-head.rst b/pypy/doc/whatsnew-head.rst --- a/pypy/doc/whatsnew-head.rst +++ b/pypy/doc/whatsnew-head.rst @@ -5,6 +5,8 @@ .. this is a revision shortly after release-4.0.1 .. 
startrev: 4b5c840d0da2 +Fixed ``_PyLong_FromByteArray()``, which was buggy. + .. branch: numpy-1.10 Fix tests to run cleanly with -A and start to fix micronumpy for upstream numpy @@ -44,6 +46,9 @@ .. branch: fix-setslice-can-resize +Make rlist's ll_listsetslice() able to resize the target list to help +simplify objspace/std/listobject.py. Was issue #2196. + .. branch: anntype2 A somewhat random bunch of changes and fixes following up on branch 'anntype'. Highlights: @@ -73,3 +78,64 @@ Move wrappers for OS functions from `rpython/rtyper` to `rpython/rlib` and turn them into regular RPython functions. Most RPython-compatible `os.*` functions are now directly accessible as `rpython.rposix.*`. + +.. branch: always-enable-gil + +Simplify a bit the GIL handling in non-jitted code. Fixes issue #2205. + +.. branch: flowspace-cleanups + +Trivial cleanups in flowspace.operation : fix comment & duplicated method + +.. branch: test-AF_NETLINK + +Add a test for pre-existing AF_NETLINK support. Was part of issue #1942. + +.. branch: small-cleanups-misc + +Trivial misc cleanups: typo, whitespace, obsolete comments + +.. branch: cpyext-slotdefs +.. branch: fix-missing-canraise +.. branch: whatsnew + +.. branch: fix-2211 + +Fix the cryptic exception message when attempting to use extended slicing +in rpython. Was issue #2211. + +.. branch: ec-keepalive + +Optimize the case where, in a new C-created thread, we keep invoking +short-running Python callbacks. (CFFI on CPython has a hack to achieve +the same result.) This can also be seen as a bug fix: previously, +thread-local objects would be reset between two such calls. + +.. branch: globals-quasiimmut + +Optimize global lookups. + +.. branch: cffi-static-callback-embedding + +Updated to CFFI 1.5, which supports a new way to do embedding. +Deprecates http://pypy.readthedocs.org/en/latest/embedding.html. + +.. branch: fix-cpython-ssl-tests-2.7 + +Fix SSL tests by importing cpython's patch + +.. 
branch: remove-getfield-pure + +Remove pure variants of ``getfield_gc_*`` operations from the JIT. Relevant +optimizations instead consult the field descriptor to determine the purity of +the operation. Additionally, pure ``getfield`` operations are now handled +entirely by `rpython/jit/metainterp/optimizeopt/heap.py` rather than +`rpython/jit/metainterp/optimizeopt/pure.py`, which can result in better codegen +for traces containing a large number of pure getfield operations. + +.. branch: exctrans + +Try to ensure that no new functions get annotated during the 'source_c' phase. +Refactor sandboxing to operate at a higher level. + +.. branch: cpyext-bootstrap diff --git a/pypy/goal/targetpypystandalone.py b/pypy/goal/targetpypystandalone.py --- a/pypy/goal/targetpypystandalone.py +++ b/pypy/goal/targetpypystandalone.py @@ -91,13 +91,6 @@ from rpython.rlib.entrypoint import entrypoint_highlevel from rpython.rtyper.lltypesystem import rffi, lltype - w_pathsetter = space.appexec([], """(): - def f(path): - import sys - sys.path[:] = path - return f - """) - @entrypoint_highlevel('main', [rffi.CCHARP, rffi.INT], c_name='pypy_setup_home') def pypy_setup_home(ll_home, verbose): @@ -116,7 +109,10 @@ " not found in '%s' or in any parent directory" % home1) return rffi.cast(rffi.INT, 1) space.startup() - space.call_function(w_pathsetter, w_path) + space.appexec([w_path], """(path): + import sys + sys.path[:] = path + """) # import site try: space.setattr(space.getbuiltinmodule('sys'), @@ -156,6 +152,9 @@ return os_thread.setup_threads(space) os_thread.bootstrapper.acquire(space, None, None) + # XXX this doesn't really work. 
Don't use os.fork(), and + # if your embedder program uses fork(), don't use any PyPy + # code in the fork rthread.gc_thread_start() os_thread.bootstrapper.nbthreads += 1 os_thread.bootstrapper.release() diff --git a/pypy/interpreter/astcompiler/test/test_compiler.py b/pypy/interpreter/astcompiler/test/test_compiler.py --- a/pypy/interpreter/astcompiler/test/test_compiler.py +++ b/pypy/interpreter/astcompiler/test/test_compiler.py @@ -1019,6 +1019,8 @@ def test_dont_fold_equal_code_objects(self): yield self.st, "f=lambda:1;g=lambda:1.0;x=g()", 'type(x)', float + yield (self.st, "x=(lambda: (-0.0, 0.0), lambda: (0.0, -0.0))[1]()", + 'repr(x)', '(0.0, -0.0)') def test_raise_from(self): test = """if 1: diff --git a/pypy/interpreter/eval.py b/pypy/interpreter/eval.py --- a/pypy/interpreter/eval.py +++ b/pypy/interpreter/eval.py @@ -9,8 +9,8 @@ class Code(W_Root): """A code is a compiled version of some source code. Abstract base class.""" - _immutable_ = True hidden_applevel = False + _immutable_fields_ = ['co_name', 'fast_natural_arity', 'hidden_applevel'] # n >= 0 : arity # FLATPYCALL = 0x100 diff --git a/pypy/interpreter/pycode.py b/pypy/interpreter/pycode.py --- a/pypy/interpreter/pycode.py +++ b/pypy/interpreter/pycode.py @@ -72,10 +72,12 @@ class PyCode(eval.Code): "CPython-style code objects." 
- _immutable_ = True - _immutable_fields_ = ["co_consts_w[*]", "co_names_w[*]", "co_varnames[*]", - "co_freevars[*]", "co_cellvars[*]", - "_args_as_cellvars[*]"] + _immutable_fields_ = ["_signature", "co_argcount", "co_kwonlyargcount", "co_cellvars[*]", + "co_code", "co_consts_w[*]", "co_filename", + "co_firstlineno", "co_flags", "co_freevars[*]", + "co_lnotab", "co_names_w[*]", "co_nlocals", + "co_stacksize", "co_varnames[*]", + "_args_as_cellvars[*]", "w_globals?"] def __init__(self, space, argcount, kwonlyargcount, nlocals, stacksize, flags, code, consts, names, varnames, filename, @@ -104,6 +106,10 @@ self.co_name = name self.co_firstlineno = firstlineno self.co_lnotab = lnotab + # store the first globals object that the code object is run in in + # here. if a frame is run in that globals object, it does not need to + # store it at all + self.w_globals = None self.hidden_applevel = hidden_applevel self.magic = magic self._signature = cpython_code_signature(self) @@ -111,6 +117,14 @@ self._init_ready() self.new_code_hook() + def frame_stores_global(self, w_globals): + if self.w_globals is None: + self.w_globals = w_globals + return False + if self.w_globals is w_globals: + return False + return True + def new_code_hook(self): code_hook = self.space.fromcache(CodeHookCache)._code_hook if code_hook is not None: diff --git a/pypy/interpreter/pyframe.py b/pypy/interpreter/pyframe.py --- a/pypy/interpreter/pyframe.py +++ b/pypy/interpreter/pyframe.py @@ -36,6 +36,7 @@ def __init__(self, pycode): self.f_lineno = pycode.co_firstlineno + self.w_globals = pycode.w_globals class PyFrame(W_Root): """Represents a frame for a regular Python function @@ -67,7 +68,6 @@ escaped = False # see mark_as_escaped() debugdata = None - w_globals = None pycode = None # code object executed by that frame locals_cells_stack_w = None # the list of all locals, cells and the valuestack valuestackdepth = 0 # number of items on valuestack @@ -90,8 +90,9 @@ self = hint(self, 
access_directly=True, fresh_virtualizable=True) assert isinstance(code, pycode.PyCode) self.space = space - self.w_globals = w_globals self.pycode = code + if code.frame_stores_global(w_globals): + self.getorcreatedebug().w_globals = w_globals ncellvars = len(code.co_cellvars) nfreevars = len(code.co_freevars) size = code.co_nlocals + ncellvars + nfreevars + code.co_stacksize @@ -116,6 +117,12 @@ self.debugdata = FrameDebugData(self.pycode) return self.debugdata + def get_w_globals(self): + debugdata = self.getdebug() + if debugdata is not None: + return debugdata.w_globals + return jit.promote(self.pycode).w_globals + def get_w_f_trace(self): d = self.getdebug() if d is None: @@ -201,8 +208,9 @@ if flags & pycode.CO_NEWLOCALS: self.getorcreatedebug().w_locals = self.space.newdict(module=True) else: - assert self.w_globals is not None - self.getorcreatedebug().w_locals = self.w_globals + w_globals = self.get_w_globals() + assert w_globals is not None + self.getorcreatedebug().w_locals = w_globals ncellvars = len(code.co_cellvars) nfreevars = len(code.co_freevars) @@ -449,7 +457,7 @@ w_blockstack, w_exc_value, # last_exception w_tb, # - self.w_globals, + self.get_w_globals(), w(self.last_instr), w(self.frame_finished_execution), w(f_lineno), @@ -658,6 +666,11 @@ def fget_getdictscope(self, space): return self.getdictscope() + def fget_w_globals(self, space): + # bit silly, but GetSetProperty passes a space + return self.get_w_globals() + + ### line numbers ### def fget_f_lineno(self, space): diff --git a/pypy/interpreter/pyopcode.py b/pypy/interpreter/pyopcode.py --- a/pypy/interpreter/pyopcode.py +++ b/pypy/interpreter/pyopcode.py @@ -832,16 +832,16 @@ def STORE_GLOBAL(self, nameindex, next_instr): varname = self.getname_u(nameindex) w_newvalue = self.popvalue() - self.space.setitem_str(self.w_globals, varname, w_newvalue) + self.space.setitem_str(self.get_w_globals(), varname, w_newvalue) def DELETE_GLOBAL(self, nameindex, next_instr): w_varname = 
self.getname_w(nameindex) - self.space.delitem(self.w_globals, w_varname) + self.space.delitem(self.get_w_globals(), w_varname) def LOAD_NAME(self, nameindex, next_instr): w_varname = self.getname_w(nameindex) varname = self.space.identifier_w(w_varname) - if self.getorcreatedebug().w_locals is not self.w_globals: + if self.getorcreatedebug().w_locals is not self.get_w_globals(): w_value = self.space.finditem_str(self.getorcreatedebug().w_locals, varname) if w_value is not None: @@ -855,7 +855,7 @@ self.pushvalue(w_value) def _load_global(self, varname): - w_value = self.space.finditem_str(self.w_globals, varname) + w_value = self.space.finditem_str(self.get_w_globals(), varname) if w_value is None: # not in the globals, now look in the built-ins w_value = self.get_builtin().getdictvalue(self.space, varname) @@ -986,7 +986,7 @@ w_locals = d.w_locals if w_locals is None: # CPython does this w_locals = space.w_None - w_globals = self.w_globals + w_globals = self.get_w_globals() if w_flag is None: w_obj = space.call_function(w_import, w_modulename, w_globals, w_locals, w_fromlist) @@ -1221,7 +1221,7 @@ w_name = self.popvalue() w_def = self.popvalue() space.setitem(w_kw_defs, w_def, w_name) - fn = function.Function(space, codeobj, self.w_globals, defaultarguments, + fn = function.Function(space, codeobj, self.get_w_globals(), defaultarguments, w_kw_defs, freevars, w_ann) self.pushvalue(space.wrap(fn)) @@ -1610,7 +1610,7 @@ if space.is_none(w_locals): w_locals = w_globals else: - w_globals = caller.w_globals + w_globals = caller.get_w_globals() if space.is_none(w_locals): w_locals = caller.getdictscope() elif space.is_none(w_locals): diff --git a/pypy/interpreter/test/test_pyframe.py b/pypy/interpreter/test/test_pyframe.py --- a/pypy/interpreter/test/test_pyframe.py +++ b/pypy/interpreter/test/test_pyframe.py @@ -34,6 +34,7 @@ import sys f = sys._getframe() assert f.f_globals is globals() + raises(AttributeError, "f.f_globals = globals()") def test_f_builtins(self): 
import sys, builtins diff --git a/pypy/interpreter/typedef.py b/pypy/interpreter/typedef.py --- a/pypy/interpreter/typedef.py +++ b/pypy/interpreter/typedef.py @@ -790,7 +790,7 @@ f_restricted = GetSetProperty(PyFrame.fget_f_restricted), f_code = GetSetProperty(PyFrame.fget_code), f_locals = GetSetProperty(PyFrame.fget_getdictscope), - f_globals = interp_attrproperty_w('w_globals', cls=PyFrame), + f_globals = GetSetProperty(PyFrame.fget_w_globals), ) assert not PyFrame.typedef.acceptable_as_base_class # no __new__ diff --git a/pypy/module/__builtin__/interp_inspect.py b/pypy/module/__builtin__/interp_inspect.py --- a/pypy/module/__builtin__/interp_inspect.py +++ b/pypy/module/__builtin__/interp_inspect.py @@ -2,7 +2,7 @@ def globals(space): "Return the dictionary containing the current scope's global variables." ec = space.getexecutioncontext() - return ec.gettopframe_nohidden().w_globals + return ec.gettopframe_nohidden().get_w_globals() def locals(space): """Return a dictionary containing the current scope's local variables. 
diff --git a/pypy/module/__pypy__/__init__.py b/pypy/module/__pypy__/__init__.py --- a/pypy/module/__pypy__/__init__.py +++ b/pypy/module/__pypy__/__init__.py @@ -87,6 +87,7 @@ 'set_code_callback' : 'interp_magic.set_code_callback', 'save_module_content_for_future_reload': 'interp_magic.save_module_content_for_future_reload', + 'decode_long' : 'interp_magic.decode_long', 'normalize_exc' : 'interp_magic.normalize_exc', } diff --git a/pypy/module/__pypy__/interp_magic.py b/pypy/module/__pypy__/interp_magic.py --- a/pypy/module/__pypy__/interp_magic.py +++ b/pypy/module/__pypy__/interp_magic.py @@ -1,4 +1,4 @@ -from pypy.interpreter.error import OperationError, wrap_oserror +from pypy.interpreter.error import OperationError, oefmt, wrap_oserror from pypy.interpreter.gateway import WrappedDefault, unwrap_spec from pypy.interpreter.pycode import CodeHookCache from pypy.interpreter.pyframe import PyFrame @@ -83,7 +83,7 @@ Return the underlying strategy currently used by a dict, list or set object """ if isinstance(w_obj, W_DictMultiObject): - name = w_obj.strategy.__class__.__name__ + name = w_obj.get_strategy().__class__.__name__ elif isinstance(w_obj, W_ListObject): name = w_obj.strategy.__class__.__name__ elif isinstance(w_obj, W_BaseSetObject): @@ -139,6 +139,15 @@ else: cache._code_hook = w_callable + at unwrap_spec(string=str, byteorder=str, signed=int) +def decode_long(space, string, byteorder='little', signed=1): + from rpython.rlib.rbigint import rbigint, InvalidEndiannessError + try: + result = rbigint.frombytes(string, byteorder, bool(signed)) + except InvalidEndiannessError: + raise oefmt(space.w_ValueError, "invalid byteorder argument") + return space.newlong_from_rbigint(result) + @unwrap_spec(w_value=WrappedDefault(None), w_tb=WrappedDefault(None)) def normalize_exc(space, w_type, w_value=None, w_tb=None): operr = OperationError(w_type, w_value, w_tb) diff --git a/pypy/module/__pypy__/test/test_magic.py b/pypy/module/__pypy__/test/test_magic.py --- 
a/pypy/module/__pypy__/test/test_magic.py +++ b/pypy/module/__pypy__/test/test_magic.py @@ -30,4 +30,20 @@ """ in d finally: __pypy__.set_code_callback(None) - assert d['f'].__code__ in l \ No newline at end of file + assert d['f'].__code__ in l + + def test_decode_long(self): + from __pypy__ import decode_long + assert decode_long('') == 0 + assert decode_long('\xff\x00') == 255 + assert decode_long('\xff\x7f') == 32767 + assert decode_long('\x00\xff') == -256 + assert decode_long('\x00\x80') == -32768 + assert decode_long('\x80') == -128 + assert decode_long('\x7f') == 127 + assert decode_long('\x55' * 97) == (1 << (97 * 8)) // 3 + assert decode_long('\x00\x80', 'big') == 128 + assert decode_long('\xff\x7f', 'little', False) == 32767 + assert decode_long('\x00\x80', 'little', False) == 32768 + assert decode_long('\x00\x80', 'little', True) == -32768 + raises(ValueError, decode_long, '', 'foo') diff --git a/pypy/module/_cffi_backend/__init__.py b/pypy/module/_cffi_backend/__init__.py --- a/pypy/module/_cffi_backend/__init__.py +++ b/pypy/module/_cffi_backend/__init__.py @@ -1,8 +1,9 @@ import sys from pypy.interpreter.mixedmodule import MixedModule -from rpython.rlib import rdynload, clibffi +from rpython.rlib import rdynload, clibffi, entrypoint +from rpython.rtyper.lltypesystem import rffi -VERSION = "1.4.0" +VERSION = "1.5.0" FFI_DEFAULT_ABI = clibffi.FFI_DEFAULT_ABI try: @@ -65,6 +66,10 @@ if has_stdcall: interpleveldefs['FFI_STDCALL'] = 'space.wrap(%d)' % FFI_STDCALL + def startup(self, space): + from pypy.module._cffi_backend import embedding + embedding.glob.space = space + def get_dict_rtld_constants(): found = {} @@ -78,3 +83,11 @@ for _name, _value in get_dict_rtld_constants().items(): Module.interpleveldefs[_name] = 'space.wrap(%d)' % _value + + +# write this entrypoint() here, to make sure it is registered early enough + at entrypoint.entrypoint_highlevel('main', [rffi.INT, rffi.VOIDP], + c_name='pypy_init_embedded_cffi_module') +def 
pypy_init_embedded_cffi_module(version, init_struct): + from pypy.module._cffi_backend import embedding + return embedding.pypy_init_embedded_cffi_module(version, init_struct) diff --git a/pypy/module/_cffi_backend/call_python.py b/pypy/module/_cffi_backend/call_python.py --- a/pypy/module/_cffi_backend/call_python.py +++ b/pypy/module/_cffi_backend/call_python.py @@ -40,10 +40,9 @@ at least 8 bytes in size. """ from pypy.module._cffi_backend.ccallback import reveal_callback + from rpython.rlib import rgil - after = rffi.aroundstate.after - if after: - after() + rgil.acquire() rffi.stackcounter.stacks_counter += 1 llop.gc_stack_bottom(lltype.Void) # marker for trackgcroot.py @@ -71,9 +70,7 @@ cerrno._errno_before(rffi.RFFI_ERR_ALL | rffi.RFFI_ALT_ERRNO) rffi.stackcounter.stacks_counter -= 1 - before = rffi.aroundstate.before - if before: - before() + rgil.release() def get_ll_cffi_call_python(): diff --git a/pypy/module/_cffi_backend/cffi1_module.py b/pypy/module/_cffi_backend/cffi1_module.py --- a/pypy/module/_cffi_backend/cffi1_module.py +++ b/pypy/module/_cffi_backend/cffi1_module.py @@ -2,24 +2,25 @@ from pypy.interpreter.error import oefmt from pypy.interpreter.module import Module +from pypy.module import _cffi_backend from pypy.module._cffi_backend import parse_c_type from pypy.module._cffi_backend.ffi_obj import W_FFIObject from pypy.module._cffi_backend.lib_obj import W_LibObject VERSION_MIN = 0x2601 -VERSION_MAX = 0x26FF +VERSION_MAX = 0x27FF VERSION_EXPORT = 0x0A03 -initfunctype = lltype.Ptr(lltype.FuncType([rffi.VOIDPP], lltype.Void)) +INITFUNCPTR = lltype.Ptr(lltype.FuncType([rffi.VOIDPP], lltype.Void)) def load_cffi1_module(space, name, path, initptr): # This is called from pypy.module.cpyext.api.load_extension_module() from pypy.module._cffi_backend.call_python import get_ll_cffi_call_python - initfunc = rffi.cast(initfunctype, initptr) + initfunc = rffi.cast(INITFUNCPTR, initptr) with lltype.scoped_alloc(rffi.VOIDPP.TO, 16, zero=True) as p: p[0] = 
rffi.cast(rffi.VOIDP, VERSION_EXPORT) p[1] = rffi.cast(rffi.VOIDP, get_ll_cffi_call_python()) @@ -27,8 +28,10 @@ version = rffi.cast(lltype.Signed, p[0]) if not (VERSION_MIN <= version <= VERSION_MAX): raise oefmt(space.w_ImportError, - "cffi extension module '%s' has unknown version %s", - name, hex(version)) + "cffi extension module '%s' uses an unknown version tag %s. " + "This module might need a more recent version of PyPy. " + "The current PyPy provides CFFI %s.", + name, hex(version), _cffi_backend.VERSION) src_ctx = rffi.cast(parse_c_type.PCTX, p[1]) ffi = W_FFIObject(space, src_ctx) @@ -38,7 +41,8 @@ w_name = space.wrap(name) module = Module(space, w_name) - module.setdictvalue(space, '__file__', space.wrap(path)) + if path is not None: + module.setdictvalue(space, '__file__', space.wrap(path)) module.setdictvalue(space, 'ffi', space.wrap(ffi)) module.setdictvalue(space, 'lib', space.wrap(lib)) w_modules_dict = space.sys.get('modules') diff --git a/pypy/module/_cffi_backend/cglob.py b/pypy/module/_cffi_backend/cglob.py --- a/pypy/module/_cffi_backend/cglob.py +++ b/pypy/module/_cffi_backend/cglob.py @@ -3,6 +3,7 @@ from pypy.interpreter.typedef import TypeDef from pypy.module._cffi_backend.cdataobj import W_CData from pypy.module._cffi_backend import newtype +from rpython.rlib import rgil from rpython.rlib.objectmodel import we_are_translated from rpython.rtyper.lltypesystem import lltype, rffi from rpython.translator.tool.cbuild import ExternalCompilationInfo @@ -26,7 +27,9 @@ if not we_are_translated(): FNPTR = rffi.CCallback([], rffi.VOIDP) fetch_addr = rffi.cast(FNPTR, self.fetch_addr) + rgil.release() result = fetch_addr() + rgil.acquire() else: # careful in translated versions: we need to call fetch_addr, # but in a GIL-releasing way. 
The easiest is to invoke a diff --git a/pypy/module/_cffi_backend/ctypefunc.py b/pypy/module/_cffi_backend/ctypefunc.py --- a/pypy/module/_cffi_backend/ctypefunc.py +++ b/pypy/module/_cffi_backend/ctypefunc.py @@ -423,7 +423,9 @@ exchange_offset += rffi.getintfield(self.atypes[i], 'c_size') # store the exchange data size - cif_descr.exchange_size = exchange_offset + # we also align it to the next multiple of 8, in an attempt to + # work around bugs(?) of libffi (see cffi issue #241) + cif_descr.exchange_size = self.align_arg(exchange_offset) def fb_extra_fields(self, cif_descr): cif_descr.abi = self.fabi diff --git a/pypy/module/_cffi_backend/embedding.py b/pypy/module/_cffi_backend/embedding.py new file mode 100644 --- /dev/null +++ b/pypy/module/_cffi_backend/embedding.py @@ -0,0 +1,146 @@ +import os +from rpython.rtyper.lltypesystem import lltype, rffi +from rpython.translator.tool.cbuild import ExternalCompilationInfo + +from pypy.interpreter.error import OperationError, oefmt + +# ____________________________________________________________ + + +EMBED_VERSION_MIN = 0xB011 +EMBED_VERSION_MAX = 0xB0FF + +STDERR = 2 +INITSTRUCTPTR = lltype.Ptr(lltype.Struct('CFFI_INIT', + ('name', rffi.CCHARP), + ('func', rffi.VOIDP), + ('code', rffi.CCHARP))) + +def load_embedded_cffi_module(space, version, init_struct): + from pypy.module._cffi_backend.cffi1_module import load_cffi1_module + declare_c_function() # translation-time hint only: + # declare _cffi_carefully_make_gil() + # + version = rffi.cast(lltype.Signed, version) + if not (EMBED_VERSION_MIN <= version <= EMBED_VERSION_MAX): + raise oefmt(space.w_ImportError, + "cffi embedded module has got unknown version tag %s", + hex(version)) + # + if space.config.objspace.usemodules.thread: + from pypy.module.thread import os_thread + os_thread.setup_threads(space) + # + name = rffi.charp2str(init_struct.name) + load_cffi1_module(space, name, None, init_struct.func) + code = rffi.charp2str(init_struct.code) + compiler = 
space.createcompiler() + pycode = compiler.compile(code, "" % name, 'exec', 0) + w_globals = space.newdict(module=True) + space.setitem_str(w_globals, "__builtins__", space.wrap(space.builtin)) + pycode.exec_code(space, w_globals, w_globals) + + +class Global: + pass +glob = Global() + +def pypy_init_embedded_cffi_module(version, init_struct): + # called from __init__.py + name = "?" + try: + init_struct = rffi.cast(INITSTRUCTPTR, init_struct) + name = rffi.charp2str(init_struct.name) + # + space = glob.space + must_leave = False + try: + must_leave = space.threadlocals.try_enter_thread(space) + load_embedded_cffi_module(space, version, init_struct) + res = 0 + except OperationError, operr: + operr.write_unraisable(space, "initialization of '%s'" % name, + with_traceback=True) + space.appexec([], r"""(): + import sys + sys.stderr.write('pypy version: %s.%s.%s\n' % + sys.pypy_version_info[:3]) + sys.stderr.write('sys.path: %r\n' % (sys.path,)) + """) + res = -1 + if must_leave: + space.threadlocals.leave_thread(space) + except Exception, e: + # oups! last-level attempt to recover. 
+ try: + os.write(STDERR, "From initialization of '") + os.write(STDERR, name) + os.write(STDERR, "':\n") + os.write(STDERR, str(e)) + os.write(STDERR, "\n") + except: + pass + res = -1 + return rffi.cast(rffi.INT, res) + +# ____________________________________________________________ + + +eci = ExternalCompilationInfo(separate_module_sources=[ +r""" +/* XXX Windows missing */ +#include +#include +#include + +RPY_EXPORTED void rpython_startup_code(void); +RPY_EXPORTED int pypy_setup_home(char *, int); + +static unsigned char _cffi_ready = 0; +static const char *volatile _cffi_module_name; + +static void _cffi_init_error(const char *msg, const char *extra) +{ + fprintf(stderr, + "\nPyPy initialization failure when loading module '%s':\n%s%s\n", + _cffi_module_name, msg, extra); +} + +static void _cffi_init(void) +{ + Dl_info info; + char *home; + + rpython_startup_code(); + RPyGilAllocate(); + + if (dladdr(&_cffi_init, &info) == 0) { + _cffi_init_error("dladdr() failed: ", dlerror()); + return; + } + home = realpath(info.dli_fname, NULL); + if (pypy_setup_home(home, 1) != 0) { + _cffi_init_error("pypy_setup_home() failed", ""); + return; + } + _cffi_ready = 1; +} + +RPY_EXPORTED +int pypy_carefully_make_gil(const char *name) +{ + /* For CFFI: this initializes the GIL and loads the home path. + It can be called completely concurrently from unrelated threads. + It assumes that we don't hold the GIL before (if it exists), and we + don't hold it afterwards. 
+ */ + static pthread_once_t once_control = PTHREAD_ONCE_INIT; + + _cffi_module_name = name; /* not really thread-safe, but better than + nothing */ + pthread_once(&once_control, _cffi_init); + return (int)_cffi_ready - 1; +} +"""]) + +declare_c_function = rffi.llexternal_use_eci(eci) diff --git a/pypy/module/_cffi_backend/test/_backend_test_c.py b/pypy/module/_cffi_backend/test/_backend_test_c.py --- a/pypy/module/_cffi_backend/test/_backend_test_c.py +++ b/pypy/module/_cffi_backend/test/_backend_test_c.py @@ -1,7 +1,7 @@ # ____________________________________________________________ import sys -assert __version__ == "1.4.0", ("This test_c.py file is for testing a version" +assert __version__ == "1.5.0", ("This test_c.py file is for testing a version" " of cffi that differs from the one that we" " get from 'import _cffi_backend'") if sys.version_info < (3,): diff --git a/pypy/module/_continuation/interp_continuation.py b/pypy/module/_continuation/interp_continuation.py --- a/pypy/module/_continuation/interp_continuation.py +++ b/pypy/module/_continuation/interp_continuation.py @@ -195,7 +195,7 @@ class SThread(StackletThread): def __init__(self, space, ec): - StackletThread.__init__(self, space.config) + StackletThread.__init__(self) From pypy.commits at gmail.com Mon Feb 1 07:10:47 2016 From: pypy.commits at gmail.com (plan_rich) Date: Mon, 01 Feb 2016 04:10:47 -0800 (PST) Subject: [pypy-commit] pypy s390x-backend: replaced test case parsetuple(..."i"...) with "l", Message-ID: <56af4b47.50371c0a.72df9.ffffcc35@mx.google.com> Author: Richard Plangger Branch: s390x-backend Changeset: r82030:7be63f4c3bc8 Date: 2016-02-01 13:08 +0100 http://bitbucket.org/pypy/pypy/changeset/7be63f4c3bc8/ Log: replaced test case parsetuple(..."i"...) 
with "l", memory fence was place one instruction too late, replaced checkpoint sync with serialization (which is more lightweight, and we do not make use of check points diff --git a/pypy/module/cpyext/test/test_typeobject.py b/pypy/module/cpyext/test/test_typeobject.py --- a/pypy/module/cpyext/test/test_typeobject.py +++ b/pypy/module/cpyext/test/test_typeobject.py @@ -606,7 +606,7 @@ long intval; PyObject *name; - if (!PyArg_ParseTuple(args, "i", &intval)) + if (!PyArg_ParseTuple(args, "l", &intval)) return NULL; IntLike_Type.tp_as_number = &intlike_as_number; diff --git a/rpython/jit/backend/zarch/callbuilder.py b/rpython/jit/backend/zarch/callbuilder.py --- a/rpython/jit/backend/zarch/callbuilder.py +++ b/rpython/jit/backend/zarch/callbuilder.py @@ -143,7 +143,7 @@ # in this mode, RSHADOWOLD happens to contain the shadowstack # top at this point, so reuse it instead of loading it again # RSHADOWOLD is moved to the scratch reg just before restoring r8 - ssreg = None # r.SCRATCH + ssreg = r.SCRATCH self.asm._reload_frame_if_necessary(self.mc, shadowstack_reg=ssreg) def emit_raw_call(self): @@ -197,7 +197,7 @@ # 6 registers, 1 for a floating point return value! # registered by prepare_arguments! # - # Save this thread's shadowstack pointer into r29, for later comparison + # Save this thread's shadowstack pointer into r8, for later comparison gcrootmap = self.asm.cpu.gc_ll_descr.gcrootmap if gcrootmap: if gcrootmap.is_shadow_stack: @@ -207,9 +207,9 @@ # # change 'rpy_fastgil' to 0 (it should be non-zero right now) self.mc.load_imm(RFASTGILPTR, fastgil) - self.mc.LGHI(r.SCRATCH, l.imm(0)) + self.mc.XGR(r.SCRATCH, r.SCRATCH) + self.mc.sync() self.mc.STG(r.SCRATCH, l.addr(0, RFASTGILPTR)) - self.mc.sync() # renders the store visible to other cpus def move_real_result_and_call_reacqgil_addr(self, fastgil): @@ -217,24 +217,24 @@ # try to reacquire the lock. 
The following registers are still # valid from before the call: + RSHADOWOLD = self.RSHADOWOLD # r8: previous val of root_stack_top RSHADOWPTR = self.RSHADOWPTR # r9: &root_stack_top RFASTGILPTR = self.RFASTGILPTR # r10: &fastgil - RSHADOWOLD = self.RSHADOWOLD # r12: previous val of root_stack_top - # Equivalent of 'r12 = __sync_lock_test_and_set(&rpy_fastgil, 1);' + # Equivalent of 'r13 = __sync_lock_test_and_set(&rpy_fastgil, 1);' self.mc.LGHI(r.SCRATCH, l.imm(1)) retry_label = self.mc.currpos() - # compare and swap, only succeeds if the the contents of the - # lock is equal to r12 (= 0) - self.mc.LG(r.r12, l.addr(0, RFASTGILPTR)) - self.mc.CSG(r.r12, r.SCRATCH, l.addr(0, RFASTGILPTR)) # try to claim lock + self.mc.LG(r.r13, l.addr(0, RFASTGILPTR)) + self.mc.CSG(r.r13, r.SCRATCH, l.addr(0, RFASTGILPTR)) # try to claim lock self.mc.BRC(c.NE, l.imm(retry_label - self.mc.currpos())) # retry if failed - self.mc.sync() - self.mc.CGHI(r.r12, l.imm0) + # CSG performs a serialization + + self.mc.CGHI(r.r13, l.imm0) b1_location = self.mc.currpos() - self.mc.trap() # boehm: patched with a BEQ: jump if r12 is zero - self.mc.write('\x00'*4) # shadowstack: patched with BNE instead + # boehm: patched with a BEQ: jump if r13 is zero + # shadowstack: patched with BNE instead + self.mc.reserve_cond_jump() gcrootmap = self.asm.cpu.gc_ll_descr.gcrootmap if gcrootmap: @@ -247,13 +247,12 @@ self.mc.CGR(RSHADOWPTR, RSHADOWOLD) bne_location = b1_location b1_location = self.mc.currpos() - self.mc.trap() - self.mc.write('\x00'*4) + self.mc.reserve_cond_jump() # revert the rpy_fastgil acquired above, so that the # general 'reacqgil_addr' below can acquire it again... 
- # (here, r12 is conveniently zero) - self.mc.STG(r.r12, l.addr(0,RFASTGILPTR)) + # (here, r13 is conveniently zero) + self.mc.STG(r.r13, l.addr(0,RFASTGILPTR)) pmc = OverwritingBuilder(self.mc, bne_location, 1) pmc.BRCL(c.NE, l.imm(self.mc.currpos() - bne_location)) @@ -268,7 +267,7 @@ # save 1 word below the stack pointer pos = STD_FRAME_SIZE_IN_BYTES if reg.is_core_reg(): - self.mc.STG(reg, l.addr(pos-1*WORD, r.SP)) + self.mc.LGR(RSAVEDRES, reg) elif reg.is_fp_reg(): self.mc.STD(reg, l.addr(pos-1*WORD, r.SP)) self.mc.load_imm(self.mc.RAW_CALL_REG, self.asm.reacqgil_addr) @@ -276,7 +275,7 @@ if reg is not None: pos = STD_FRAME_SIZE_IN_BYTES if reg.is_core_reg(): - self.mc.LG(reg, l.addr(pos-1*WORD, r.SP)) + self.mc.LGR(reg, RSAVEDRES) elif reg.is_fp_reg(): self.mc.LD(reg, l.addr(pos-1*WORD, r.SP)) @@ -285,6 +284,9 @@ pmc.BRCL(c.EQ, l.imm(self.mc.currpos() - b1_location)) pmc.overwrite() + if gcrootmap: + if gcrootmap.is_shadow_stack and self.is_call_release_gil: + self.mc.LGR(r.SCRATCH, RSHADOWOLD) pos = STD_FRAME_SIZE_IN_BYTES - 7*WORD self.mc.LMG(r.r8, r.r13, l.addr(pos, r.SP)) diff --git a/rpython/jit/backend/zarch/codebuilder.py b/rpython/jit/backend/zarch/codebuilder.py --- a/rpython/jit/backend/zarch/codebuilder.py +++ b/rpython/jit/backend/zarch/codebuilder.py @@ -190,7 +190,10 @@ def sync(self): # see sync. section of the zarch manual! - self.BCR_rr(0xf,0) + # 0xf creates a checkpoint which is not needed. + # we never want to restore the checkpoint, we only + # want to create a memory fence (i.e. 
serialization) + self.BCR_rr(0xe,0) def raw_call(self, call_reg=r.RETURN): """Emit a call to the address stored in the register 'call_reg', diff --git a/rpython/jit/backend/zarch/test/test_assembler.py b/rpython/jit/backend/zarch/test/test_assembler.py --- a/rpython/jit/backend/zarch/test/test_assembler.py +++ b/rpython/jit/backend/zarch/test/test_assembler.py @@ -119,6 +119,13 @@ assert assembler.asm_operations[i] \ is AssemblerZARCH.emit_int_add.im_func + def test_sync(self): + self.a.mc.XGR(r.r2, r.r2) + self.a.mc.sync() + self.a.mc.BCR(con.ANY, r.r14) + assert run_asm(self.a) == 0 + + def test_byte_count_instr(self): assert self.mc.BRC_byte_count == 4 assert self.mc.LG_byte_count == 6 From pypy.commits at gmail.com Mon Feb 1 07:55:04 2016 From: pypy.commits at gmail.com (plan_rich) Date: Mon, 01 Feb 2016 04:55:04 -0800 (PST) Subject: [pypy-commit] pypy s390x-backend: shadowold was a pointer not the value (which it should have been) and the comparison compared the pointers not the values Message-ID: <56af55a8.08e11c0a.77354.ffffdbe2@mx.google.com> Author: Richard Plangger Branch: s390x-backend Changeset: r82031:5fe38e24272c Date: 2016-02-01 13:52 +0100 http://bitbucket.org/pypy/pypy/changeset/5fe38e24272c/ Log: shadowold was a pointer not the value (which it should have been) and the comparison compared the pointers not the values diff --git a/rpython/jit/backend/zarch/callbuilder.py b/rpython/jit/backend/zarch/callbuilder.py --- a/rpython/jit/backend/zarch/callbuilder.py +++ b/rpython/jit/backend/zarch/callbuilder.py @@ -203,7 +203,7 @@ if gcrootmap.is_shadow_stack: rst = gcrootmap.get_root_stack_top_addr() self.mc.load_imm(RSHADOWPTR, rst) - self.mc.LGR(RSHADOWOLD, RSHADOWPTR) + self.mc.load(RSHADOWOLD, RSHADOWPTR, 0) # # change 'rpy_fastgil' to 0 (it should be non-zero right now) self.mc.load_imm(RFASTGILPTR, fastgil) @@ -244,7 +244,8 @@ # thread. 
So here we check if the shadowstack pointer # is still the same as before we released the GIL (saved # in RSHADOWOLD), and if not, we fall back to 'reacqgil_addr'. - self.mc.CGR(RSHADOWPTR, RSHADOWOLD) + self.load(r.r11, RSHADOWPTR, 0) + self.mc.CGR(r.r11, RSHADOWOLD) bne_location = b1_location b1_location = self.mc.currpos() self.mc.reserve_cond_jump() @@ -291,6 +292,8 @@ self.mc.LMG(r.r8, r.r13, l.addr(pos, r.SP)) def write_real_errno(self, save_err): + # r11 is saved in call_releasegil_addr_and_move_real_arguments, + # thus can be used freely here! if save_err & rffi.RFFI_READSAVED_ERRNO: # Just before a call, read '*_errno' and write it into the # real 'errno'. @@ -314,14 +317,14 @@ def read_real_errno(self, save_err): if save_err & rffi.RFFI_SAVE_ERRNO: # Just after a call, read the real 'errno' and save a copy of - # it inside our thread-local '*_errno'. Registers r4-r10 + # it inside our thread-local '*_errno'. Registers r3-r6 # never contain anything after the call. if save_err & rffi.RFFI_ALT_ERRNO: rpy_errno = llerrno.get_alt_errno_offset(self.asm.cpu) else: rpy_errno = llerrno.get_rpy_errno_offset(self.asm.cpu) p_errno = llerrno.get_p_errno_offset(self.asm.cpu) - self.mc.LG(r.r12, l.addr(THREADLOCAL_ADDR_OFFSET, r.SP)) - self.mc.LG(r.r11, l.addr(p_errno, r.r12)) - self.mc.LGF(r.r11, l.addr(0, r.r11)) - self.mc.STY(r.r11, l.addr(rpy_errno, r.r12)) + self.mc.LG(r.r3, l.addr(THREADLOCAL_ADDR_OFFSET, r.SP)) + self.mc.LG(r.r4, l.addr(p_errno, r.r3)) + self.mc.LGF(r.r4, l.addr(0, r.r4)) + self.mc.STY(r.r4, l.addr(rpy_errno, r.r3)) diff --git a/rpython/jit/backend/zarch/opassembler.py b/rpython/jit/backend/zarch/opassembler.py --- a/rpython/jit/backend/zarch/opassembler.py +++ b/rpython/jit/backend/zarch/opassembler.py @@ -495,8 +495,7 @@ mc.NILL(r.SCRATCH, l.imm(mask & 0xFF)) jz_location = mc.get_relative_pos() - mc.trap() # patched later with 'EQ' - mc.write('\x00' * 4) + mc.reserve_cond_jump() # patched later with 'EQ' # for cond_call_gc_wb_array, also add 
another fast path: # if GCFLAG_CARDS_SET, then we can just set one bit and be done @@ -505,8 +504,7 @@ mc.LGR(r.SCRATCH, r.SCRATCH2) mc.NILL(r.SCRATCH, l.imm(card_marking_mask & 0xFF)) js_location = mc.get_relative_pos() - mc.trap() # patched later with 'NE' - mc.write('\x00' * 4) + mc.reserve_cond_jump() # patched later with 'NE' else: js_location = 0 From pypy.commits at gmail.com Mon Feb 1 08:16:49 2016 From: pypy.commits at gmail.com (plan_rich) Date: Mon, 01 Feb 2016 05:16:49 -0800 (PST) Subject: [pypy-commit] pypy s390x-backend: ups, need to call method load on code builder, not assembler Message-ID: <56af5ac1.c177c20a.46fc9.ffff8eaa@mx.google.com> Author: Richard Plangger Branch: s390x-backend Changeset: r82032:035eb5778ead Date: 2016-02-01 13:56 +0100 http://bitbucket.org/pypy/pypy/changeset/035eb5778ead/ Log: ups, need to call method load on code builder, not assembler diff --git a/rpython/jit/backend/zarch/callbuilder.py b/rpython/jit/backend/zarch/callbuilder.py --- a/rpython/jit/backend/zarch/callbuilder.py +++ b/rpython/jit/backend/zarch/callbuilder.py @@ -244,7 +244,7 @@ # thread. So here we check if the shadowstack pointer # is still the same as before we released the GIL (saved # in RSHADOWOLD), and if not, we fall back to 'reacqgil_addr'. 
- self.load(r.r11, RSHADOWPTR, 0) + self.mc.load(r.r11, RSHADOWPTR, 0) self.mc.CGR(r.r11, RSHADOWOLD) bne_location = b1_location b1_location = self.mc.currpos() From pypy.commits at gmail.com Mon Feb 1 08:22:44 2016 From: pypy.commits at gmail.com (plan_rich) Date: Mon, 01 Feb 2016 05:22:44 -0800 (PST) Subject: [pypy-commit] pypy s390x-backend: format and exchanged some registers, no breaking changes Message-ID: <56af5c24.2179c20a.2d3e6.75b5@mx.google.com> Author: Richard Plangger Branch: s390x-backend Changeset: r82033:46f5a9e87aae Date: 2016-02-01 14:21 +0100 http://bitbucket.org/pypy/pypy/changeset/46f5a9e87aae/ Log: format and exchanged some registers, no breaking changes diff --git a/rpython/jit/backend/zarch/callbuilder.py b/rpython/jit/backend/zarch/callbuilder.py --- a/rpython/jit/backend/zarch/callbuilder.py +++ b/rpython/jit/backend/zarch/callbuilder.py @@ -244,8 +244,8 @@ # thread. So here we check if the shadowstack pointer # is still the same as before we released the GIL (saved # in RSHADOWOLD), and if not, we fall back to 'reacqgil_addr'. - self.mc.load(r.r11, RSHADOWPTR, 0) - self.mc.CGR(r.r11, RSHADOWOLD) + self.mc.load(r.SCRATCH, RSHADOWPTR, 0) + self.mc.CGR(r.SCRATCH, RSHADOWOLD) bne_location = b1_location b1_location = self.mc.currpos() self.mc.reserve_cond_jump() @@ -253,7 +253,7 @@ # revert the rpy_fastgil acquired above, so that the # general 'reacqgil_addr' below can acquire it again... 
# (here, r13 is conveniently zero) - self.mc.STG(r.r13, l.addr(0,RFASTGILPTR)) + self.mc.STG(r.r13, l.addr(0, RFASTGILPTR)) pmc = OverwritingBuilder(self.mc, bne_location, 1) pmc.BRCL(c.NE, l.imm(self.mc.currpos() - bne_location)) From pypy.commits at gmail.com Mon Feb 1 11:23:05 2016 From: pypy.commits at gmail.com (rlamy) Date: Mon, 01 Feb 2016 08:23:05 -0800 (PST) Subject: [pypy-commit] pypy py3k: PyIntObject does not exist in py3k Message-ID: <56af8669.01cdc20a.16151.ffffc3f3@mx.google.com> Author: Ronan Lamy Branch: py3k Changeset: r82034:f57006f9cb96 Date: 2016-02-01 16:21 +0000 http://bitbucket.org/pypy/pypy/changeset/f57006f9cb96/ Log: PyIntObject does not exist in py3k diff --git a/pypy/module/cpyext/api.py b/pypy/module/cpyext/api.py --- a/pypy/module/cpyext/api.py +++ b/pypy/module/cpyext/api.py @@ -506,9 +506,7 @@ def get_structtype_for_ctype(ctype): from pypy.module.cpyext.typeobjectdefs import PyTypeObjectPtr from pypy.module.cpyext.cdatetime import PyDateTime_CAPI - from pypy.module.cpyext.intobject import PyIntObject return {"PyObject*": PyObject, "PyTypeObject*": PyTypeObjectPtr, - "PyIntObject*": PyIntObject, "PyDateTime_CAPI*": lltype.Ptr(PyDateTime_CAPI)}[ctype] PyTypeObject = lltype.ForwardReference() @@ -1097,7 +1095,7 @@ if not use_micronumpy: return use_micronumpy # import to register api functions by side-effect - import pypy.module.cpyext.ndarrayobject + import pypy.module.cpyext.ndarrayobject global GLOBALS, SYMBOLS_C, separate_module_files GLOBALS["PyArray_Type#"]= ('PyTypeObject*', "space.gettypeobject(W_NDimArray.typedef)") SYMBOLS_C += ['PyArray_Type', '_PyArray_FILLWBYTE', '_PyArray_ZEROS'] From pypy.commits at gmail.com Mon Feb 1 11:31:13 2016 From: pypy.commits at gmail.com (rlamy) Date: Mon, 01 Feb 2016 08:31:13 -0800 (PST) Subject: [pypy-commit] pypy py3k: fix Message-ID: <56af8851.284cc20a.c92bf.ffffcaa2@mx.google.com> Author: Ronan Lamy Branch: py3k Changeset: r82035:2f9a601ed994 Date: 2016-02-01 16:29 +0000 
http://bitbucket.org/pypy/pypy/changeset/2f9a601ed994/ Log: fix diff --git a/pypy/module/cpyext/api.py b/pypy/module/cpyext/api.py --- a/pypy/module/cpyext/api.py +++ b/pypy/module/cpyext/api.py @@ -445,8 +445,8 @@ TYPES = {} GLOBALS = { # this needs to include all prebuilt pto, otherwise segfaults occur '_Py_NoneStruct#': ('PyObject*', 'space.w_None'), - '_Py_TrueStruct#': ('PyIntObject*', 'space.w_True'), - '_Py_ZeroStruct#': ('PyIntObject*', 'space.w_False'), + '_Py_TrueStruct#': ('PyObject*', 'space.w_True'), + '_Py_ZeroStruct#': ('PyObject*', 'space.w_False'), '_Py_NotImplementedStruct#': ('PyObject*', 'space.w_NotImplemented'), '_Py_EllipsisObject#': ('PyObject*', 'space.w_Ellipsis'), 'PyDateTimeAPI': ('PyDateTime_CAPI*', 'None'), @@ -855,7 +855,7 @@ assert False, "Unknown static pointer: %s %s" % (typ, name) ptr.value = ctypes.cast(ll2ctypes.lltype2ctypes(value), ctypes.c_void_p).value - elif typ in ('PyObject*', 'PyTypeObject*', 'PyIntObject*'): + elif typ in ('PyObject*', 'PyTypeObject*'): if name.startswith('PyPyExc_') or name.startswith('cpyexttestExc_'): # we already have the pointer in_dll = ll2ctypes.get_ctypes_type(PyObject).in_dll(bridge, name) @@ -1149,7 +1149,7 @@ if name.startswith('PyExc_'): name = '_' + name w_obj = eval(expr) - if typ in ('PyObject*', 'PyTypeObject*', 'PyIntObject*'): + if typ in ('PyObject*', 'PyTypeObject*'): struct_ptr = make_ref(space, w_obj) elif typ == 'PyDateTime_CAPI*': continue From pypy.commits at gmail.com Mon Feb 1 11:44:39 2016 From: pypy.commits at gmail.com (plan_rich) Date: Mon, 01 Feb 2016 08:44:39 -0800 (PST) Subject: [pypy-commit] pypy s390x-backend: for test case zrpy_gc_direct seems that it can prove that float storage is always 0, thus it will not compile for those two Message-ID: <56af8b77.cb371c0a.d64e2.4509@mx.google.com> Author: Richard Plangger Branch: s390x-backend Changeset: r82036:72e40b33ebc5 Date: 2016-02-01 17:43 +0100 http://bitbucket.org/pypy/pypy/changeset/72e40b33ebc5/ Log: for test case 
zrpy_gc_direct seems that it can prove that float storage is always 0, thus it will not compile for those two diff --git a/rpython/jit/backend/zarch/pool.py b/rpython/jit/backend/zarch/pool.py --- a/rpython/jit/backend/zarch/pool.py +++ b/rpython/jit/backend/zarch/pool.py @@ -104,6 +104,8 @@ def unique_value(self, val): if val.type == FLOAT: + if val.getfloat() == 0.0: + return 0 return float2longlong(val.getfloat()) elif val.type == INT: return rffi.cast(lltype.Signed, val.getint()) From pypy.commits at gmail.com Mon Feb 1 12:04:15 2016 From: pypy.commits at gmail.com (rlamy) Date: Mon, 01 Feb 2016 09:04:15 -0800 (PST) Subject: [pypy-commit] pypy py3k: fix merge Message-ID: <56af900f.44e21c0a.c655.7435@mx.google.com> Author: Ronan Lamy Branch: py3k Changeset: r82037:fbbd055f16fa Date: 2016-02-01 17:03 +0000 http://bitbucket.org/pypy/pypy/changeset/fbbd055f16fa/ Log: fix merge diff --git a/pypy/objspace/std/dictmultiobject.py b/pypy/objspace/std/dictmultiobject.py --- a/pypy/objspace/std/dictmultiobject.py +++ b/pypy/objspace/std/dictmultiobject.py @@ -130,8 +130,8 @@ if ulist is not None: strategy = space.fromcache(UnicodeDictStrategy) storage = strategy.get_storage_fromkeys(ulist, w_fill) - w_dict = space.allocate_instance(W_DictMultiObject, w_type) - W_DictMultiObject.__init__(w_dict, space, strategy, storage) + w_dict = space.allocate_instance(W_DictObject, w_type) + W_DictObject.__init__(w_dict, space, strategy, storage) else: w_dict = W_DictMultiObject.allocate_and_init_instance(space, w_type) From pypy.commits at gmail.com Mon Feb 1 17:21:10 2016 From: pypy.commits at gmail.com (mattip) Date: Mon, 01 Feb 2016 14:21:10 -0800 (PST) Subject: [pypy-commit] cffi embedding-pypy-win32: prepend dll path on win32 Message-ID: <56afda56.a5c9c20a.42161.55e7@mx.google.com> Author: mattip Branch: embedding-pypy-win32 Changeset: r2617:d623a21d479c Date: 2016-02-02 00:20 +0200 http://bitbucket.org/cffi/cffi/changeset/d623a21d479c/ Log: prepend dll path on win32 diff --git 
a/testing/embedding/test_basic.py b/testing/embedding/test_basic.py --- a/testing/embedding/test_basic.py +++ b/testing/embedding/test_basic.py @@ -121,7 +121,7 @@ if sys.platform == 'win32': _path = os.environ.get('PATH') # for libpypy-c.dll or Python27.dll - _path += ';' + os.path.split(sys.executable)[0] + _path = os.path.split(sys.executable)[0] + ';' + _path env_extra['PATH'] = _path else: libpath = os.environ.get('LD_LIBRARY_PATH') From pypy.commits at gmail.com Mon Feb 1 17:25:36 2016 From: pypy.commits at gmail.com (mattip) Date: Mon, 01 Feb 2016 14:25:36 -0800 (PST) Subject: [pypy-commit] pypy cffi-embedding-win32: force binary mode io for win32, with side effect of making io unbuffered Message-ID: <56afdb60.8e301c0a.511ad.ffffbd23@mx.google.com> Author: mattip Branch: cffi-embedding-win32 Changeset: r82038:e1b9c0216be7 Date: 2016-02-02 00:24 +0200 http://bitbucket.org/pypy/pypy/changeset/e1b9c0216be7/ Log: force binary mode io for win32, with side effect of making io unbuffered diff --git a/pypy/goal/targetpypystandalone.py b/pypy/goal/targetpypystandalone.py --- a/pypy/goal/targetpypystandalone.py +++ b/pypy/goal/targetpypystandalone.py @@ -105,6 +105,10 @@ space.appexec([w_path], """(path): import sys sys.path[:] = path + import os + sys.stdin = sys.__stdin__ = os.fdopen(0, 'rb', 0) + sys.stdout = sys.__stdout__ = os.fdopen(1, 'wb', 0) + sys.stderr = sys.__stderr__ = os.fdopen(2, 'wb', 0) """) # import site try: From pypy.commits at gmail.com Tue Feb 2 05:24:38 2016 From: pypy.commits at gmail.com (arigo) Date: Tue, 02 Feb 2016 02:24:38 -0800 (PST) Subject: [pypy-commit] extradoc extradoc: copy slides from fosdem2016 Message-ID: <56b083e6.08e11c0a.e188e.ffffb4d6@mx.google.com> Author: Armin Rigo Branch: extradoc Changeset: r5599:daa5d98e6cad Date: 2016-02-02 10:52 +0100 http://bitbucket.org/pypy/extradoc/changeset/daa5d98e6cad/ Log: copy slides from fosdem2016 diff --git a/talk/swisspython2016/slides.rst b/talk/swisspython2016/slides.rst new file mode 
100644 --- /dev/null +++ b/talk/swisspython2016/slides.rst @@ -0,0 +1,405 @@ +============= +CFFI and PyPy +============= + + +CFFI +==== + +* successful project according to PyPI + +* 3.4 million downloads last month + +* total 19.2 millions, 27th place on `pypi-ranking.info` + + - Django is 28th + +* some high-visibility projects have switched to it (Cryptography) + + +PyPy +==== + +* harder to say, but probably not so successful + +* more later + + +CFFI +==== + + + +CFFI +==== + +* call C from Python + +* CFFI = C Foreign Function Interface + +* shares ideas from Cython, ctypes, and LuaJIT's FFI + + +CFFI demo +========= + +:: + + $ man getpwuid + + SYNOPSIS + #include + #include + + struct passwd *getpwnam(const char *name); + + +CFFI demo +========= + +:: + + . + . + . + The passwd structure is defined in as follows: + + struct passwd { + char *pw_name; /* username */ + char *pw_passwd; /* user password */ + uid_t pw_uid; /* user ID */ + . + . + . + + +CFFI demo +========= + +:: + + from cffi import FFI + ffi = cffi.FFI() + + ffi.cdef(""" + typedef int... uid_t; + struct passwd { + uid_t pw_uid; + ...; + }; + struct passwd *getpwnam(const char *name); + """) + + +CFFI demo +========= + +:: + + ffi.set_source("_pwuid_cffi", """ + #include + #include + """) + + ffi.compile() + +------- ^^ put that in pwuid_build.py + + +CFFI demo +========= + +:: + + python pwuid_build.py + +creates ``_pwuid_cffi.so`` + + +CFFI demo +========= + +:: + + from _pwuid_cffi import lib + + print lib.getpwnam("arigo").pw_uid + + +CFFI demo +========= + +:: + + from _pwuid_cffi import ffi, lib + +* ``lib`` gives access to all functions from the cdef + +* ``ffi`` gives access to a few general helpers, e.g. 
+ + - ``ffi.cast("float", 42)`` + + - ``p = ffi.new("struct passwd *")`` + + - ``p = ffi.new("char[10]"); p[0] = 'X'; s = lib.getpwnam(p)`` + + - ``p = ffi.new_handle(random_obj); ...; random_obj = ffi.from_handle(p)`` + + +CFFI +==== + +* supports more or less the whole C + +* there is more than my short explanation suggests + +* read the docs: http://cffi.readthedocs.org/ + + + +PyPy +==== + + +PyPy +==== + +* a Python interpreter + +* different from the standard, which is CPython + +* main goal of PyPy: speed + + +PyPy +==== + +:: + + $ pypy + + Python 2.7.10 (5f8302b8bf9f, Nov 18 2015, 10:46:46) + [PyPy 4.0.1 with GCC 4.8.4] on linux2 + Type "help", "copyright", "credits" or "license" for more information. + >>>> 2+3 + 5 + >>>> + + +PyPy +==== + +* run ``pypy my_program.py`` + +* starts working like an interpreter + +* then a Just-in-Time Compiler kicks in + +* generate and execute machine code from the Python program + +* good or great speed-ups for the majority of long-running code + + +PyPy +==== + +* different techniques than CPython also for "garbage collection" + +* works very well (arguably better than CPython's reference counting) + + +PyPy: Garbage Collection +======================== + +* "**moving,** generational, incremental GC" + +* objects don't have reference counters + +* allocated in a "nursery" + +* when nursery full, find surviving nursery objects and move them out + +* usually work on nursery objects only (fast), but rarely also perform + a full GC + + +PyPy: C extensions +================== + +* PyPy works great for running Python + +* less great when there are CPython C extension modules involved + + +PyPy: C extensions +================== + +* not directly possible: we have moving, non-reference-counted objects, + and the C code expects non-moving, reference-counted objects + + +PyPy: C extensions +================== + +* PyPy has still some support for them, called its ``cpyext`` module + +* similar to IronPython's Ironclad + +* emulate 
all objects for C extensions with a shadow, non-movable, + reference-counted object + + +PyPy: C extensions +================== + +* ``cpyext`` is slow + +* ``cpyext`` is actually *really, really* slow + + - but we're working on making it *only* slow + + +PyPy: C extensions +================== + +* ``cpyext`` will "often" work, but there are a some high-profile C + extension modules that are not supported so far + +* notably, ``numpy`` + +* (it is future work) + + +PyPy: ad +======== + +* but, hey, if you need performance out of Python and don't rely + critically on C extension modules, then give PyPy a try + + - typical area where it works well: web services + + +CPython C API: the problem +========================== + +* CPython comes with a C API + +* very large number of functions + +* assumes objects don't move + +* assumes a "reference counting" model + + +CPython C API +============= + +* actually, the API is some large subset of the functions inside + CPython itself + + +CPython C API +============= + +* easy to use from C + +* historically, part of the success of Python + + +CPython C API +============= + +* further successful tools build on top of that API: + + - SWIG + - Cython + - and other binding generators + - now CFFI + + +CFFI +==== + +* but CFFI is a bit different + + - it does not expose any part of the CPython C API + + - everything is done with a minimal API on the ``ffi`` object + which is closer to C + + - ``ffi.cast()``, ``ffi.new()``, etc. 
+ + - that means it can be directly ported + + +CFFI and PyPy +============= + +* we have a PyPy version of CFFI + +* the demos I have given above work equally well on CPython or on PyPy + +* (supporting PyPy was part of the core motivation behind CFFI) + + +CFFI: performance +================= + +* in PyPy, JIT compiler speeds up calls, so it's very fast + +* in CPython, it doesn't occur, but it is still reasonable when + compared with alternatives + +* main issue is that we write more code in Python with CFFI, + which makes it slower on CPython---but not really on PyPy + + +CFFI: summary +============= + +* call C from Python + +* works natively on CPython and on PyPy + + - and easy to port to other Python implementations + +* supports CPython 2.6, 2.7, 3.2 to 3.5, and + is integrated with PyPy + + +CFFI +==== + +* independent on the particular details of the Python implementation + + - using CFFI, you call C functions and manipulate C-pointer-like + objects directly from Python + + - you do in Python all logic involving Python objects + + - there are no (official) ways around this API to call the CPython C + API, and none are needed + + +CFFI +==== + +* two reasons to switch to it ``:-)`` + + - easy and cool + + - better supported on non-CPython implementations + + +CFFI: latest news +================= + +* support for "embedding" Python inside some other non-Python program + + - now you really never need the CPython C API any more + + +CFFI +==== + +http://cffi.readthedocs.org/ From pypy.commits at gmail.com Tue Feb 2 05:24:40 2016 From: pypy.commits at gmail.com (arigo) Date: Tue, 02 Feb 2016 02:24:40 -0800 (PST) Subject: [pypy-commit] extradoc extradoc: expand a bit the cffi part Message-ID: <56b083e8.11301c0a.43dba.ffffb412@mx.google.com> Author: Armin Rigo Branch: extradoc Changeset: r5600:6c7e299c3d71 Date: 2016-02-02 11:24 +0100 http://bitbucket.org/pypy/extradoc/changeset/6c7e299c3d71/ Log: expand a bit the cffi part diff --git 
a/talk/swisspython2016/slides.rst b/talk/swisspython2016/slides.rst --- a/talk/swisspython2016/slides.rst +++ b/talk/swisspython2016/slides.rst @@ -135,15 +135,83 @@ * ``lib`` gives access to all functions from the cdef -* ``ffi`` gives access to a few general helpers, e.g. +* ``ffi`` gives access to a few general helpers - - ``ffi.cast("float", 42)`` - - ``p = ffi.new("struct passwd *")`` +ffi.cdef() +========== - - ``p = ffi.new("char[10]"); p[0] = 'X'; s = lib.getpwnam(p)`` +:: - - ``p = ffi.new_handle(random_obj); ...; random_obj = ffi.from_handle(p)`` + ffi.cdef(""" + int foo1(int a, int b); + + typedef ... Window; + Window *make_window(int, int, int); + void hide_window(Window *); + """) + + +ffi.new() +========= + +:: + + >>> p = ffi.new("char[]", "Some string") + >>> p + + >>> p[1] + 'o' + >>> q = lib.getpwnam(p) + >>> q + + >>> q.pw_uid + 500 + +ffi.cast() +========== + +:: + + >>> p = lib.getpwnam("root") + >>> p + + >>> ffi.cast("void *", p) + + >>> ffi.cast("long", p) + 305419896 + >>> hex(_) + 0x12345678 + +ffi.new_handle() +================ + +:: + + >>> h1 = ffi.new_handle(some_object) + >>> h1 + > + >>> lib.same_away(h1) + + >>> h2 = lib.fish_again() + >>> h2 + + >>> ffi.from_handle(h2) + + +ffi.string() +============ + +:: + + >>> p + + >>> p.pw_uid + 500 + >>> p.pw_name + + >>> ffi.string(p.pw_name) + "username" CFFI @@ -151,7 +219,7 @@ * supports more or less the whole C -* there is more than my short explanation suggests +* there is more than my short explanations suggests * read the docs: http://cffi.readthedocs.org/ From pypy.commits at gmail.com Tue Feb 2 06:04:13 2016 From: pypy.commits at gmail.com (antubbs) Date: Tue, 02 Feb 2016 03:04:13 -0800 (PST) Subject: [pypy-commit] pypy default: Adding support for f_locals to frameobject in CPyExt. 
Message-ID: <56b08d2d.4c5c1c0a.24191.ffff8704@mx.google.com> Author: Aaron Tubbs Branch: Changeset: r82039:3280ef85d9b7 Date: 2016-02-01 15:44 -0800 http://bitbucket.org/pypy/pypy/changeset/3280ef85d9b7/ Log: Adding support for f_locals to frameobject in CPyExt. diff --git a/pypy/module/cpyext/frameobject.py b/pypy/module/cpyext/frameobject.py --- a/pypy/module/cpyext/frameobject.py +++ b/pypy/module/cpyext/frameobject.py @@ -17,6 +17,7 @@ PyFrameObjectFields = (PyObjectFields + (("f_code", PyCodeObject), ("f_globals", PyObject), + ("f_locals", PyObject), ("f_lineno", rffi.INT), )) cpython_struct("PyFrameObject", PyFrameObjectFields, PyFrameObjectStruct) @@ -35,6 +36,7 @@ py_frame = rffi.cast(PyFrameObject, py_obj) py_frame.c_f_code = rffi.cast(PyCodeObject, make_ref(space, frame.pycode)) py_frame.c_f_globals = make_ref(space, frame.get_w_globals()) + py_frame.c_f_locals = make_ref(space, frame.get_w_locals()) rffi.setintfield(py_frame, 'c_f_lineno', frame.getorcreatedebug().f_lineno) @cpython_api([PyObject], lltype.Void, external=False) @@ -43,6 +45,7 @@ py_code = rffi.cast(PyObject, py_frame.c_f_code) Py_DecRef(space, py_code) Py_DecRef(space, py_frame.c_f_globals) + Py_DecRef(space, py_frame.c_f_locals) from pypy.module.cpyext.object import PyObject_dealloc PyObject_dealloc(space, py_obj) @@ -72,6 +75,7 @@ space.interp_w(PyCode, w_code) # sanity check py_frame.c_f_code = rffi.cast(PyCodeObject, make_ref(space, w_code)) py_frame.c_f_globals = make_ref(space, w_globals) + py_frame.c_f_locals = make_ref(space, w_locals) return py_frame @cpython_api([PyFrameObject], rffi.INT_real, error=-1) diff --git a/pypy/module/cpyext/include/frameobject.h b/pypy/module/cpyext/include/frameobject.h --- a/pypy/module/cpyext/include/frameobject.h +++ b/pypy/module/cpyext/include/frameobject.h @@ -8,6 +8,7 @@ PyObject_HEAD PyCodeObject *f_code; PyObject *f_globals; + PyObject *f_locals; int f_lineno; } PyFrameObject; diff --git a/pypy/module/cpyext/test/test_frameobject.py 
b/pypy/module/cpyext/test/test_frameobject.py --- a/pypy/module/cpyext/test/test_frameobject.py +++ b/pypy/module/cpyext/test/test_frameobject.py @@ -9,6 +9,7 @@ PyObject *py_srcfile = PyString_FromString("filename"); PyObject *py_funcname = PyString_FromString("funcname"); PyObject *py_globals = PyDict_New(); + PyObject *py_locals = PyDict_New(); PyObject *empty_string = PyString_FromString(""); PyObject *empty_tuple = PyTuple_New(0); PyCodeObject *py_code; @@ -39,7 +40,7 @@ PyThreadState_Get(), /*PyThreadState *tstate,*/ py_code, /*PyCodeObject *code,*/ py_globals, /*PyObject *globals,*/ - 0 /*PyObject *locals*/ + py_locals /*PyObject *locals*/ ); if (!py_frame) goto bad; py_frame->f_lineno = 48; /* Does not work with CPython */ @@ -51,6 +52,7 @@ Py_XDECREF(empty_string); Py_XDECREF(empty_tuple); Py_XDECREF(py_globals); + Py_XDECREF(py_locals); Py_XDECREF(py_code); Py_XDECREF(py_frame); return NULL; From pypy.commits at gmail.com Tue Feb 2 06:04:15 2016 From: pypy.commits at gmail.com (antubbs) Date: Tue, 02 Feb 2016 03:04:15 -0800 (PST) Subject: [pypy-commit] pypy default: Merged pypy/pypy into default Message-ID: <56b08d2f.2851c20a.765b9.568f@mx.google.com> Author: Aaron Tubbs Branch: Changeset: r82040:bd63983137b5 Date: 2016-02-01 15:55 -0800 http://bitbucket.org/pypy/pypy/changeset/bd63983137b5/ Log: Merged pypy/pypy into default diff --git a/.hgignore b/.hgignore --- a/.hgignore +++ b/.hgignore @@ -75,6 +75,7 @@ ^lib_pypy/__pycache__$ ^lib_pypy/ctypes_config_cache/_.+_cache\.py$ ^lib_pypy/ctypes_config_cache/_.+_.+_\.py$ +^lib_pypy/_libmpdec/.+.o$ ^rpython/translator/cli/query-descriptions$ ^pypy/doc/discussion/.+\.html$ ^include/.+\.h$ diff --git a/pypy/config/pypyoption.py b/pypy/config/pypyoption.py --- a/pypy/config/pypyoption.py +++ b/pypy/config/pypyoption.py @@ -85,8 +85,7 @@ module_dependencies = { '_multiprocessing': [('objspace.usemodules.time', True), ('objspace.usemodules.thread', True)], - 'cpyext': [('objspace.usemodules.array', True), - 
('objspace.usemodules.micronumpy', True)], + 'cpyext': [('objspace.usemodules.array', True)], 'cppyy': [('objspace.usemodules.cpyext', True)], } module_suggests = { diff --git a/pypy/module/cpyext/__init__.py b/pypy/module/cpyext/__init__.py --- a/pypy/module/cpyext/__init__.py +++ b/pypy/module/cpyext/__init__.py @@ -36,7 +36,6 @@ import pypy.module.cpyext.object import pypy.module.cpyext.stringobject import pypy.module.cpyext.tupleobject -import pypy.module.cpyext.ndarrayobject import pypy.module.cpyext.setobject import pypy.module.cpyext.dictobject import pypy.module.cpyext.intobject diff --git a/pypy/module/cpyext/api.py b/pypy/module/cpyext/api.py --- a/pypy/module/cpyext/api.py +++ b/pypy/module/cpyext/api.py @@ -143,7 +143,7 @@ target.chmod(0444) # make the file read-only, to make sure that nobody # edits it by mistake -def copy_header_files(dstdir): +def copy_header_files(dstdir, copy_numpy_headers): # XXX: 20 lines of code to recursively copy a directory, really?? assert dstdir.check(dir=True) headers = include_dir.listdir('*.h') + include_dir.listdir('*.inl') @@ -151,15 +151,16 @@ headers.append(udir.join(name)) _copy_header_files(headers, dstdir) - try: - dstdir.mkdir('numpy') - except py.error.EEXIST: - pass - numpy_dstdir = dstdir / 'numpy' + if copy_numpy_headers: + try: + dstdir.mkdir('numpy') + except py.error.EEXIST: + pass + numpy_dstdir = dstdir / 'numpy' - numpy_include_dir = include_dir / 'numpy' - numpy_headers = numpy_include_dir.listdir('*.h') + numpy_include_dir.listdir('*.inl') - _copy_header_files(numpy_headers, numpy_dstdir) + numpy_include_dir = include_dir / 'numpy' + numpy_headers = numpy_include_dir.listdir('*.h') + numpy_include_dir.listdir('*.inl') + _copy_header_files(numpy_headers, numpy_dstdir) class NotSpecified(object): @@ -482,7 +483,6 @@ "PyComplex_Type": "space.w_complex", "PyByteArray_Type": "space.w_bytearray", "PyMemoryView_Type": "space.w_memoryview", - "PyArray_Type": "space.gettypeobject(W_NDimArray.typedef)", 
"PyBaseObject_Type": "space.w_object", 'PyNone_Type': 'space.type(space.w_None)', 'PyNotImplemented_Type': 'space.type(space.w_NotImplemented)', @@ -773,6 +773,8 @@ "NOT_RPYTHON" from pypy.module.cpyext.pyobject import make_ref + use_micronumpy = setup_micronumpy(space) + export_symbols = list(FUNCTIONS) + SYMBOLS_C + list(GLOBALS) from rpython.translator.c.database import LowLevelDatabase db = LowLevelDatabase() @@ -1009,6 +1011,24 @@ pypy_decl_h.write('\n'.join(pypy_decls)) return functions +separate_module_files = [source_dir / "varargwrapper.c", + source_dir / "pyerrors.c", + source_dir / "modsupport.c", + source_dir / "getargs.c", + source_dir / "abstract.c", + source_dir / "stringobject.c", + source_dir / "mysnprintf.c", + source_dir / "pythonrun.c", + source_dir / "sysmodule.c", + source_dir / "bufferobject.c", + source_dir / "cobject.c", + source_dir / "structseq.c", + source_dir / "capsule.c", + source_dir / "pysignals.c", + source_dir / "pythread.c", + source_dir / "missing.c", + ] + def build_eci(building_bridge, export_symbols, code): "NOT_RPYTHON" # Build code and get pointer to the structure @@ -1062,24 +1082,7 @@ eci = ExternalCompilationInfo( include_dirs=include_dirs, - separate_module_files=[source_dir / "varargwrapper.c", - source_dir / "pyerrors.c", - source_dir / "modsupport.c", - source_dir / "getargs.c", - source_dir / "abstract.c", - source_dir / "stringobject.c", - source_dir / "mysnprintf.c", - source_dir / "pythonrun.c", - source_dir / "sysmodule.c", - source_dir / "bufferobject.c", - source_dir / "cobject.c", - source_dir / "structseq.c", - source_dir / "capsule.c", - source_dir / "pysignals.c", - source_dir / "pythread.c", - source_dir / "ndarrayobject.c", - source_dir / "missing.c", - ], + separate_module_files= separate_module_files, separate_module_sources=separate_module_sources, compile_extra=compile_extra, **kwds @@ -1087,10 +1090,22 @@ return eci +def setup_micronumpy(space): + use_micronumpy = 
space.config.objspace.usemodules.micronumpy + if not use_micronumpy: + return use_micronumpy + # import to register api functions by side-effect + import pypy.module.cpyext.ndarrayobject + global GLOBALS, SYMBOLS_C, separate_module_files + GLOBALS["PyArray_Type#"]= ('PyTypeObject*', "space.gettypeobject(W_NDimArray.typedef)") + SYMBOLS_C += ['PyArray_Type', '_PyArray_FILLWBYTE', '_PyArray_ZEROS'] + separate_module_files.append(source_dir / "ndarrayobject.c") + return use_micronumpy def setup_library(space): "NOT_RPYTHON" from pypy.module.cpyext.pyobject import make_ref + use_micronumpy = setup_micronumpy(space) export_symbols = list(FUNCTIONS) + SYMBOLS_C + list(GLOBALS) from rpython.translator.c.database import LowLevelDatabase @@ -1151,7 +1166,7 @@ setup_init_functions(eci, translating=True) trunk_include = pypydir.dirpath() / 'include' - copy_header_files(trunk_include) + copy_header_files(trunk_include, use_micronumpy) def _load_from_cffi(space, name, path, initptr): from pypy.module._cffi_backend import cffi1_module diff --git a/pypy/module/cpyext/include/stringobject.h b/pypy/module/cpyext/include/stringobject.h --- a/pypy/module/cpyext/include/stringobject.h +++ b/pypy/module/cpyext/include/stringobject.h @@ -7,8 +7,8 @@ extern "C" { #endif -#define PyString_GET_SIZE(op) PyString_Size(op) -#define PyString_AS_STRING(op) PyString_AsString(op) +#define PyString_GET_SIZE(op) PyString_Size((PyObject*)(op)) +#define PyString_AS_STRING(op) PyString_AsString((PyObject*)(op)) typedef struct { PyObject_HEAD diff --git a/pypy/module/cpyext/test/test_api.py b/pypy/module/cpyext/test/test_api.py --- a/pypy/module/cpyext/test/test_api.py +++ b/pypy/module/cpyext/test/test_api.py @@ -98,7 +98,7 @@ def test_copy_header_files(tmpdir): - api.copy_header_files(tmpdir) + api.copy_header_files(tmpdir, True) def check(name): f = tmpdir.join(name) assert f.check(file=True) diff --git a/pypy/objspace/std/mapdict.py b/pypy/objspace/std/mapdict.py --- a/pypy/objspace/std/mapdict.py 
+++ b/pypy/objspace/std/mapdict.py @@ -116,7 +116,7 @@ def _find_map_attr(self, name, index): while isinstance(self, PlainAttribute): - if name == self.name and index == self.index: + if index == self.index and name == self.name: return self self = self.back return None @@ -156,7 +156,6 @@ jit.isconstant(name) and jit.isconstant(index)) def add_attr(self, obj, name, index, w_value): - # grumble, jit needs this attr = self._get_new_attr(name, index) oldattr = obj._get_mapdict_map() if not jit.we_are_jitted(): @@ -296,7 +295,7 @@ new_obj._get_mapdict_map().add_attr(new_obj, self.name, self.index, w_value) def delete(self, obj, name, index): - if name == self.name and index == self.index: + if index == self.index and name == self.name: # ok, attribute is deleted if not self.ever_mutated: self.ever_mutated = True diff --git a/pypy/objspace/std/setobject.py b/pypy/objspace/std/setobject.py --- a/pypy/objspace/std/setobject.py +++ b/pypy/objspace/std/setobject.py @@ -942,7 +942,7 @@ return False if w_set.length() == 0: return True - # it's possible to have 0-lenght strategy that's not empty + # it's possible to have 0-length strategy that's not empty if w_set.strategy is w_other.strategy: return self._issubset_unwrapped(w_set, w_other) if not self.may_contain_equal_elements(w_other.strategy): diff --git a/rpython/rtyper/tool/test/test_rffi_platform.py b/rpython/rtyper/tool/test/test_rffi_platform.py --- a/rpython/rtyper/tool/test/test_rffi_platform.py +++ b/rpython/rtyper/tool/test/test_rffi_platform.py @@ -277,10 +277,14 @@ assert not rffi_platform.has("x", "#include ") def test_has_0002(): + if platform.name == 'msvc': + py.test.skip('no m.lib in msvc') assert rffi_platform.has("pow", "#include ", libraries=["m"]) def test_has_0003(): """multiple libraries""" + if platform.name == 'msvc': + py.test.skip('no m.lib in msvc') assert rffi_platform.has("pow", "#include ", libraries=["m", "c"]) def test_has_0004(): From pypy.commits at gmail.com Tue Feb 2 06:09:46 2016 From: 
pypy.commits at gmail.com (arigo) Date: Tue, 02 Feb 2016 03:09:46 -0800 (PST) Subject: [pypy-commit] extradoc extradoc: hack hack hack to make rst2beamer output correct latex Message-ID: <56b08e7a.42cbc20a.37573.59ff@mx.google.com> Author: Armin Rigo Branch: extradoc Changeset: r5601:a0f714ccd3d0 Date: 2016-02-02 12:09 +0100 http://bitbucket.org/pypy/extradoc/changeset/a0f714ccd3d0/ Log: hack hack hack to make rst2beamer output correct latex diff --git a/talk/swisspython2016/slides.rst b/talk/swisspython2016/slides.rst --- a/talk/swisspython2016/slides.rst +++ b/talk/swisspython2016/slides.rst @@ -2,6 +2,15 @@ CFFI and PyPy ============= +.. raw:: latex + + \catcode`\|=13 + \def|{\hskip 1cm} + + \let\foobarbaz=> + \catcode`\>=13 + \def>{\foobarbaz\relax} + CFFI ==== @@ -45,13 +54,19 @@ :: - $ man getpwuid + | $ man getpwuid - SYNOPSIS - #include - #include + | - struct passwd *getpwnam(const char *name); + | SYNOPSIS + + | | #include + + | | #include + + | | + + | | struct passwd *getpwnam(const char *); CFFI demo @@ -59,36 +74,59 @@ :: - . - . - . - The passwd structure is defined in as follows: + | . - struct passwd { - char *pw_name; /* username */ - char *pw_passwd; /* user password */ - uid_t pw_uid; /* user ID */ - . - . - . + | . + | . + + | The passwd structure is defined in + + | as follows: + + | + + | struct passwd { + + | | char *pw_name; /* username */ + + | | char *pw_passwd; /* user password */ + + | | uid_t pw_uid; /* user ID */ + + | . + + | . + + | . + CFFI demo ========= :: - from cffi import FFI - ffi = cffi.FFI() + | from cffi import FFI - ffi.cdef(""" - typedef int... uid_t; - struct passwd { - uid_t pw_uid; - ...; - }; - struct passwd *getpwnam(const char *name); - """) + | ffi = cffi.FFI() + + | + + | ffi.cdef(""" + + | | typedef int... 
uid_t; + + | | struct passwd { + + | | | uid_t pw_uid; + + | | | ...; + + | | }; + + | | struct passwd *getpwnam(const char *); + + | """) CFFI demo @@ -96,14 +134,21 @@ :: - ffi.set_source("_pwuid_cffi", """ - #include - #include - """) + | ffi.set_source("_pwuid_cffi", """ + + | | #include + + | | #include + + | """) - ffi.compile() + | + + | ffi.compile() -------- ^^ put that in pwuid_build.py + | + +... and put that in pwuid_build.py CFFI demo @@ -111,9 +156,11 @@ :: - python pwuid_build.py + | python pwuid_build.py -creates ``_pwuid_cffi.so`` + | + +creates _pwuid_cffi.so CFFI demo @@ -123,7 +170,7 @@ from _pwuid_cffi import lib - print lib.getpwnam("arigo").pw_uid + print lib.getpwnam("username").pw_uid CFFI demo @@ -143,13 +190,19 @@ :: - ffi.cdef(""" - int foo1(int a, int b); + | ffi.cdef(""" + + | | int foo1(int a, int b); - typedef ... Window; - Window *make_window(int, int, int); - void hide_window(Window *); - """) + | | + + | | typedef ... Window; + + | | Window *make_window(int w, int h); + + | | void hide_window(Window *); + + | """) ffi.new() @@ -157,61 +210,116 @@ :: - >>> p = ffi.new("char[]", "Some string") - >>> p - - >>> p[1] - 'o' - >>> q = lib.getpwnam(p) - >>> q - - >>> q.pw_uid - 500 + | >>> p = ffi.new("char[]", "Some string") + + | >>> p + + | + + | + + | >>> p[1] + + | 'o' + + | + + | >>> q = lib.getpwnam(p) + + | >>> q + + | + + | + + | >>> q.pw_uid + + | 500 ffi.cast() ========== :: - >>> p = lib.getpwnam("root") - >>> p - - >>> ffi.cast("void *", p) - - >>> ffi.cast("long", p) - 305419896 - >>> hex(_) - 0x12345678 + | >>> p = lib.getpwnam("root") + + | >>> p + + | + + | + + | >>> ffi.cast("void *", p) + + | + + | + + | >>> ffi.cast("long", p) + + | 305419896 + + | >>> hex(_) + + | 0x12345678 + ffi.new_handle() ================ :: - >>> h1 = ffi.new_handle(some_object) - >>> h1 - > - >>> lib.same_away(h1) + | >>> h1 = ffi.new_handle(some_object) + + | >>> h1 + + | >> h2 = lib.fish_again() - >>> h2 - - >>> ffi.from_handle(h2) - + | | | 
| | > + + | >>> lib.save_away(h1) + + | + + | >>> h2 = lib.fish_again() + + | >>> h2 + + | + + | + + | >>> ffi.from_handle(h2) + + | + ffi.string() ============ :: - >>> p - - >>> p.pw_uid - 500 - >>> p.pw_name - - >>> ffi.string(p.pw_name) - "username" + | >>> p + + | + + | + + | >>> p.pw_uid + + | 500 + + | + + | >>> p.pw_name + + | + + | + + | >>> ffi.string(p.pw_name) + + | "username" CFFI @@ -244,14 +352,19 @@ :: - $ pypy + | $ pypy - Python 2.7.10 (5f8302b8bf9f, Nov 18 2015, 10:46:46) - [PyPy 4.0.1 with GCC 4.8.4] on linux2 - Type "help", "copyright", "credits" or "license" for more information. - >>>> 2+3 - 5 - >>>> + | Python 2.7.10 (5f8302b8bf9f, Nov 18 2015, + + | [PyPy 4.0.1 with GCC 4.8.4] on linux2 + + | Type "help", "copyright", "credits" or + + | >>>> 2+3 + + | 5 + + | >>>> PyPy From pypy.commits at gmail.com Tue Feb 2 06:30:43 2016 From: pypy.commits at gmail.com (arigo) Date: Tue, 02 Feb 2016 03:30:43 -0800 (PST) Subject: [pypy-commit] extradoc extradoc: tweaks tweaks tweaks Message-ID: <56b09363.c74fc20a.c32fb.618c@mx.google.com> Author: Armin Rigo Branch: extradoc Changeset: r5602:bbe9486a3f32 Date: 2016-02-02 12:30 +0100 http://bitbucket.org/pypy/extradoc/changeset/bbe9486a3f32/ Log: tweaks tweaks tweaks diff --git a/talk/swisspython2016/Makefile b/talk/swisspython2016/Makefile new file mode 100644 --- /dev/null +++ b/talk/swisspython2016/Makefile @@ -0,0 +1,6 @@ +slides.pdf: slides.tex author.latex + pdflatex $< + +slides.tex: slides.rst + rst2beamer.py slides.rst > slides.tex + sed 's/\\date{}/\\input{author.latex}/' -i slides.tex || exit diff --git a/talk/swisspython2016/author.latex b/talk/swisspython2016/author.latex new file mode 100644 --- /dev/null +++ b/talk/swisspython2016/author.latex @@ -0,0 +1,7 @@ +\definecolor{rrblitbackground}{rgb}{0.4, 0.0, 0.0} + +\title[CFFI and PyPy]{CFFI and PyPy} +\author[Armin Rigo]{Armin Rigo} + +\institute{Swiss Python Summit 2016} +\date{Feb 2016} diff --git a/talk/swisspython2016/slides.rst 
b/talk/swisspython2016/slides.rst --- a/talk/swisspython2016/slides.rst +++ b/talk/swisspython2016/slides.rst @@ -1,6 +1,6 @@ -============= +==================================================== CFFI and PyPy -============= +==================================================== .. raw:: latex @@ -10,6 +10,7 @@ \let\foobarbaz=> \catcode`\>=13 \def>{\foobarbaz\relax} + CFFI From pypy.commits at gmail.com Tue Feb 2 07:39:17 2016 From: pypy.commits at gmail.com (plan_rich) Date: Tue, 02 Feb 2016 04:39:17 -0800 (PST) Subject: [pypy-commit] pypy s390x-backend: size_alignment_pos now supports bitfields for big endian platforms, some minor refactorings and simplifications Message-ID: <56b0a375.6953c20a.c061.7c3c@mx.google.com> Author: Richard Plangger Branch: s390x-backend Changeset: r82041:254804e6eb46 Date: 2016-02-02 13:38 +0100 http://bitbucket.org/pypy/pypy/changeset/254804e6eb46/ Log: size_alignment_pos now supports bitfields for big endian platforms, some minor refactorings and simplifications diff --git a/pypy/module/_rawffi/structure.py b/pypy/module/_rawffi/structure.py --- a/pypy/module/_rawffi/structure.py +++ b/pypy/module/_rawffi/structure.py @@ -18,6 +18,9 @@ from rpython.rlib.rarithmetic import intmask, signedtype, r_uint, \ r_ulonglong from rpython.rtyper.lltypesystem import lltype, rffi +import sys + +IS_BIG_ENDIAN = sys.byteorder == 'big' @@ -114,20 +117,32 @@ size += intmask(fieldsize) bitsizes.append(fieldsize) elif field_type == NEW_BITFIELD: - bitsizes.append((bitsize << 16) + bitoffset) + if IS_BIG_ENDIAN: + off = last_size - bitoffset - bitsize + else: + off = bitoffset + bitsizes.append((bitsize << 16) + off) bitoffset = bitsize size = round_up(size, fieldalignment) pos.append(size) size += fieldsize elif field_type == CONT_BITFIELD: - bitsizes.append((bitsize << 16) + bitoffset) + if IS_BIG_ENDIAN: + off = last_size - bitoffset - bitsize + else: + off = bitoffset + bitsizes.append((bitsize << 16) + off) bitoffset += bitsize # offset is already 
updated for the NEXT field pos.append(size - fieldsize) elif field_type == EXPAND_BITFIELD: size += fieldsize - last_size / 8 last_size = fieldsize * 8 - bitsizes.append((bitsize << 16) + bitoffset) + if IS_BIG_ENDIAN: + off = last_size - bitoffset - bitsize + else: + off = bitoffset + bitsizes.append((bitsize << 16) + off) bitoffset += bitsize # offset is already updated for the NEXT field pos.append(size - fieldsize) diff --git a/rpython/jit/backend/zarch/assembler.py b/rpython/jit/backend/zarch/assembler.py --- a/rpython/jit/backend/zarch/assembler.py +++ b/rpython/jit/backend/zarch/assembler.py @@ -466,21 +466,17 @@ if kind == 'fixed': # compute the size we want - # r5 is saved to the jit frame - # RES == r2! - mc.LGR(r.r5, r.RSZ) - mc.SGR(r.r5, r.RES) - mc.LGR(r.r2, r.r5) + mc.SGRK(r.r2, r.RSZ, r.RES) if hasattr(self.cpu.gc_ll_descr, 'passes_frame'): # for tests only mc.LGR(r.r3, r.SPP) elif kind == 'str' or kind == 'unicode': - pass # length is already in r3 + pass # length is already in r2 else: # arguments to the called function are [itemsize, tid, length] # itemsize is already in r2 + mc.LGR(r.r4, r.RSZ) # length mc.LGR(r.r3, r.SCRATCH2) # tid - mc.LGR(r.r4, r.RSZ) # length # Do the call addr = rffi.cast(lltype.Signed, addr) @@ -498,11 +494,11 @@ self._pop_fp_regs_from_jitframe(mc) nursery_free_adr = self.cpu.gc_ll_descr.get_nursery_free_addr() - self.mc.load_imm(r.SCRATCH, nursery_free_adr) + self.mc.load_imm(r.r1, nursery_free_adr) - # r.SCRATCH is now the address of nursery_free + # r.r1 is now the address of nursery_free # r.RES is still the result of the call done above - # r.RSZ is loaded from [SCRATCH], to make the caller's store a no-op here + # r.RSZ is loaded from [r1], to make the caller's store a no-op here mc.load(r.RSZ, r.r1, 0) # mc.restore_link() @@ -1283,6 +1279,8 @@ # no frame needed, r14 is saved on the jitframe mc.branch_absolute(self.malloc_slowpath) + # here r1 holds nursery_free_addr + offset = mc.currpos() - fast_jmp_pos pmc = 
OverwritingBuilder(mc, fast_jmp_pos, 1) pmc.BRC(c.LE, l.imm(offset)) # jump if LE (not GT), predicted to be true @@ -1362,15 +1360,13 @@ force_realignment = (itemsize % WORD) != 0 if force_realignment: constsize += WORD - 1 - if lengthloc is not r.RSZ: - mc.LGR(r.RSZ, lengthloc) - mc.AGFI(r.RSZ, l.imm(constsize)) + mc.AGHIK(r.RSZ, lengthloc, l.imm(constsize)) if force_realignment: # "& ~(WORD-1)" mc.LGHI(r.SCRATCH2, l.imm(~(WORD-1))) mc.NGR(r.RSZ, r.SCRATCH2) - mc.AGR(r.RSZ, r.RES) + mc.AGRK(r.RSZ, r.RES, r.RSZ) # now RSZ contains the total size in bytes, rounded up to a multiple # of WORD, plus nursery_free_adr @@ -1393,14 +1389,6 @@ # save the gcmap self.load_gcmap(mc, r.r1, gcmap) # - # load the argument(s) - if kind == rewrite.FLAG_ARRAY: - mc.LGR(r.RSZ, lengthloc) - mc.load_imm(r.RES, itemsize) - mc.load_imm(r.SCRATCH2, arraydescr.tid) - else: - mc.LGR(r.RES, lengthloc) - # # load the function into r14 and jump if kind == rewrite.FLAG_ARRAY: addr = self.malloc_slowpath_varsize @@ -1411,6 +1399,15 @@ else: raise AssertionError(kind) # + # load the argument(s) + if kind == rewrite.FLAG_ARRAY: + mc.LGR(r.RSZ, lengthloc) + mc.load_imm(r.RES, itemsize) + mc.load_imm(r.SCRATCH2, arraydescr.tid) + else: + mc.LGR(r.RES, lengthloc) + # + # # call! 
mc.branch_absolute(addr) diff --git a/rpython/jit/backend/zarch/codebuilder.py b/rpython/jit/backend/zarch/codebuilder.py --- a/rpython/jit/backend/zarch/codebuilder.py +++ b/rpython/jit/backend/zarch/codebuilder.py @@ -175,7 +175,7 @@ else: # this is not put into the constant pool, because it # is an immediate value that cannot easily be forseen - self.LGFI(dest_reg, l.imm(word & 0xFFFFffff)) + self.IILF(dest_reg, l.imm(word & 0xFFFFffff)) self.IIHF(dest_reg, l.imm((word >> 32) & 0xFFFFffff)) def load_imm_plus(self, dest_reg, word): diff --git a/rpython/jit/backend/zarch/instruction_builder.py b/rpython/jit/backend/zarch/instruction_builder.py --- a/rpython/jit/backend/zarch/instruction_builder.py +++ b/rpython/jit/backend/zarch/instruction_builder.py @@ -308,6 +308,17 @@ self.write_i16(imm16 & BIT_MASK_16) return encode_ri +def build_rie_d(mnemonic, (opcode1,opcode2)): + @builder.arguments('r,r,i16') + def encode_rie_d(self, reg1, reg2, imm16): + self.writechar(opcode1) + byte = (reg1 & BIT_MASK_4) << 4 | (reg2 & BIT_MASK_4) + self.writechar(chr(byte)) + self.write_i16(imm16 & BIT_MASK_16) + self.writechar(chr(0x0)) + self.writechar(opcode2) + return encode_rie_d + def build_rie_e(mnemonic, (opcode1,opcode2)): br = is_branch_relative(mnemonic) @builder.arguments('r,r,i16') diff --git a/rpython/jit/backend/zarch/instructions.py b/rpython/jit/backend/zarch/instructions.py --- a/rpython/jit/backend/zarch/instructions.py +++ b/rpython/jit/backend/zarch/instructions.py @@ -13,6 +13,7 @@ # add 'AR': ('rr', ['\x1A']), 'AGR': ('rre', ['\xB9','\x08']), + 'AGRK': ('rrf_a', ['\xB9','\xE8']), 'AGFR': ('rre', ['\xB9','\x18']), 'A': ('rx', ['\x5A']), 'AGFI': ('ril', ['\xC2','\x08']), @@ -60,6 +61,7 @@ 'AGF': ('rxy', ['\xE3','\x18']), 'AHI': ('ri', ['\xA7','\x0A']), 'AGHI': ('ri', ['\xA7','\x0B']), + 'AGHIK': ('rie_d', ['\xEC','\xD9']), # comparision @@ -150,6 +152,7 @@ 'LGB': ('rxy', ['\xE3','\x77']), 'LLGC': ('rxy', ['\xE3','\x90']), 'LARL': ('ril', ['\xC0','\x00'], 
'r/m,h32'), + 'IILF': ('ril', ['\xC0','\x09'], 'r,u32'), 'IIHF': ('ril', ['\xC0','\x08'], 'r,u32'), # load on condition @@ -252,6 +255,9 @@ 'CEB': ('rxe', ['\xED','\x09'], 'r,bidl,-'), 'CDB': ('rxe', ['\xED','\x19'], 'r,bidl,-'), + # compare and trap + 'CGRT': ('rrf_c', ['\xB9','\x60']), + # complement & positive 'LPDBR': ('rre', ['\xB3','\x10']), 'LCDBR': ('rre', ['\xB3','\x13']), diff --git a/rpython/jit/backend/zarch/opassembler.py b/rpython/jit/backend/zarch/opassembler.py --- a/rpython/jit/backend/zarch/opassembler.py +++ b/rpython/jit/backend/zarch/opassembler.py @@ -679,12 +679,17 @@ def emit_guard_nonnull_class(self, op, arglocs, regalloc): self.mc.cmp_op(arglocs[0], l.imm(1), imm=True, signed=False) + patch_pos = self.mc.currpos() self.mc.reserve_cond_jump(short=True) + self._cmp_guard_class(op, arglocs, regalloc) + #self.mc.CGRT(r.SCRATCH, r.SCRATCH2, c.NE) + pmc = OverwritingBuilder(self.mc, patch_pos, 1) pmc.BRC(c.LT, l.imm(self.mc.currpos() - patch_pos)) pmc.overwrite() + self.guard_success_cc = c.EQ self._emit_guard(op, arglocs[2:]) diff --git a/rpython/jit/backend/zarch/registers.py b/rpython/jit/backend/zarch/registers.py --- a/rpython/jit/backend/zarch/registers.py +++ b/rpython/jit/backend/zarch/registers.py @@ -17,7 +17,7 @@ SCRATCH2 = r0 GPR_RETURN = r2 RES = r2 -RSZ = r6 +RSZ = r12 # do not use a volatile register [f0,f1,f2,f3,f4,f5,f6,f7,f8, f9,f10,f11,f12,f13,f14,f15] = fpregisters diff --git a/rpython/jit/backend/zarch/test/test_assembler.py b/rpython/jit/backend/zarch/test/test_assembler.py --- a/rpython/jit/backend/zarch/test/test_assembler.py +++ b/rpython/jit/backend/zarch/test/test_assembler.py @@ -125,6 +125,20 @@ self.a.mc.BCR(con.ANY, r.r14) assert run_asm(self.a) == 0 + def test_load_64bit(self): + self.a.mc.load_imm(r.r2, 0x0fffFFFF) + self.a.mc.BCR(con.ANY, r.r14) + assert run_asm(self.a) == 0x0fffFFFF + + def test_load_64bit_2(self): + self.a.mc.load_imm(r.r2, 0xffffFFFF) + self.a.mc.BCR(con.ANY, r.r14) + assert run_asm(self.a) 
== 0xffffFFFF + + def test_load_64bit_3(self): + self.a.mc.load_imm(r.r2, 2177165728) + self.a.mc.BCR(con.ANY, r.r14) + assert run_asm(self.a) == 2177165728 def test_byte_count_instr(self): assert self.mc.BRC_byte_count == 4 From pypy.commits at gmail.com Tue Feb 2 11:18:36 2016 From: pypy.commits at gmail.com (plan_rich) Date: Tue, 02 Feb 2016 08:18:36 -0800 (PST) Subject: [pypy-commit] pypy s390x-backend: reading level=2 cache for estimation size of nursery Message-ID: <56b0d6dc.25fac20a.7fb8.ffffd29f@mx.google.com> Author: Richard Plangger Branch: s390x-backend Changeset: r82042:037ac225f6c1 Date: 2016-02-02 17:17 +0100 http://bitbucket.org/pypy/pypy/changeset/037ac225f6c1/ Log: reading level=2 cache for estimation size of nursery diff --git a/rpython/jit/backend/ppc/callbuilder.py b/rpython/jit/backend/ppc/callbuilder.py --- a/rpython/jit/backend/ppc/callbuilder.py +++ b/rpython/jit/backend/ppc/callbuilder.py @@ -98,7 +98,7 @@ # We must also copy fnloc into FNREG non_float_locs.append(self.fnloc) - non_float_regs.append(self.mc.RAW_CALL_REG) # r2 or r12 + non_float_regs.append(self.mc.RAW_CALL_REG) if float_locs: assert len(float_locs) <= len(self.FPR_ARGS) diff --git a/rpython/jit/backend/zarch/callbuilder.py b/rpython/jit/backend/zarch/callbuilder.py --- a/rpython/jit/backend/zarch/callbuilder.py +++ b/rpython/jit/backend/zarch/callbuilder.py @@ -62,7 +62,6 @@ # called function will in turn call further functions (which must be passed the # address of the new frame). 
This stack grows downwards from high addresses # """ - self.subtracted_to_sp = 0 gpr_regs = 0 fpr_regs = 0 @@ -88,11 +87,6 @@ if self.is_call_release_gil: self.subtracted_to_sp += 8*WORD base += 8*WORD - # one additional word for remap frame layout - # regalloc_push will overwrite -8(r.SP) and destroy - # a parameter if we would not reserve that space - # base += WORD - # TODO self.subtracted_to_sp += WORD for idx,i in enumerate(stack_params): loc = arglocs[i] offset = STD_FRAME_SIZE_IN_BYTES - base + 8 * idx @@ -149,7 +143,7 @@ def emit_raw_call(self): # always allocate a stack frame for the new function # save the SP back chain - #self.mc.STG(r.SP, l.addr(-self.subtracted_to_sp, r.SP)) + self.mc.STG(r.SP, l.addr(-self.subtracted_to_sp, r.SP)) # move the frame pointer if self.subtracted_to_sp != 0: self.mc.LAY(r.SP, l.addr(-self.subtracted_to_sp, r.SP)) @@ -194,8 +188,6 @@ # pos = STD_FRAME_SIZE_IN_BYTES - 7*WORD self.mc.STMG(r.r8, r.r13, l.addr(pos, r.SP)) - # 6 registers, 1 for a floating point return value! - # registered by prepare_arguments! 
# # Save this thread's shadowstack pointer into r8, for later comparison gcrootmap = self.asm.cpu.gc_ll_descr.gcrootmap @@ -266,19 +258,17 @@ PARAM_SAVE_AREA_OFFSET = 0 if reg is not None: # save 1 word below the stack pointer - pos = STD_FRAME_SIZE_IN_BYTES if reg.is_core_reg(): self.mc.LGR(RSAVEDRES, reg) elif reg.is_fp_reg(): - self.mc.STD(reg, l.addr(pos-1*WORD, r.SP)) + self.mc.STD(reg, l.addr(16*WORD, r.SP)) self.mc.load_imm(self.mc.RAW_CALL_REG, self.asm.reacqgil_addr) self.mc.raw_call() if reg is not None: - pos = STD_FRAME_SIZE_IN_BYTES if reg.is_core_reg(): self.mc.LGR(reg, RSAVEDRES) elif reg.is_fp_reg(): - self.mc.LD(reg, l.addr(pos-1*WORD, r.SP)) + self.mc.LD(reg, l.addr(16*WORD, r.SP)) # replace b1_location with BEQ(here) pmc = OverwritingBuilder(self.mc, b1_location, 1) diff --git a/rpython/jit/backend/zarch/codebuilder.py b/rpython/jit/backend/zarch/codebuilder.py --- a/rpython/jit/backend/zarch/codebuilder.py +++ b/rpython/jit/backend/zarch/codebuilder.py @@ -189,11 +189,7 @@ return diff def sync(self): - # see sync. section of the zarch manual! - # 0xf creates a checkpoint which is not needed. - # we never want to restore the checkpoint, we only - # want to create a memory fence (i.e. 
serialization) - self.BCR_rr(0xe,0) + self.BCR_rr(0xf,0) def raw_call(self, call_reg=r.RETURN): """Emit a call to the address stored in the register 'call_reg', diff --git a/rpython/jit/backend/zarch/instruction_builder.py b/rpython/jit/backend/zarch/instruction_builder.py --- a/rpython/jit/backend/zarch/instruction_builder.py +++ b/rpython/jit/backend/zarch/instruction_builder.py @@ -191,6 +191,13 @@ self.write_i32(imm32 & BIT_MASK_32) return encode_ri +def build_s(mnemonic, (opcode1,opcode2)): + @builder.arguments('bd') + def encode_s(self, base_displace): + self.writechar(opcode1) + self.writechar(opcode2) + encode_base_displace(self, base_displace) + return encode_s def build_si(mnemonic, (opcode,)): @builder.arguments('bd,u8') diff --git a/rpython/jit/backend/zarch/instructions.py b/rpython/jit/backend/zarch/instructions.py --- a/rpython/jit/backend/zarch/instructions.py +++ b/rpython/jit/backend/zarch/instructions.py @@ -288,6 +288,8 @@ 'SVC': ('i', ['\x0A']), 'TRAP2': ('e', ['\x01','\xFF']), + + 'STFLE': ('s', ['\xB2','\xB0']), } all_mnemonic_codes.update(arith_mnemonic_codes) all_mnemonic_codes.update(logic_mnemonic_codes) diff --git a/rpython/jit/backend/zarch/test/test_assembler.py b/rpython/jit/backend/zarch/test/test_assembler.py --- a/rpython/jit/backend/zarch/test/test_assembler.py +++ b/rpython/jit/backend/zarch/test/test_assembler.py @@ -144,6 +144,19 @@ assert self.mc.BRC_byte_count == 4 assert self.mc.LG_byte_count == 6 + def test_facility(self): + adr = self.a.datablockwrapper.malloc_aligned(16, 16) + self.a.mc.load_imm(r.r2, adr) + self.a.mc.STFLE(loc.addr(0,r.r2)) + self.a.mc.BCR(con.ANY, r.r14) + run_asm(self.a) + fac_data = rffi.cast(rffi.CArrayPtr(rffi.ULONG), adr) + f64 = bin(fac_data[0])[2:] + s64 = bin(fac_data[1])[2:] + print(f64) + print(s64) + assert f64[18] == '1' # long displacement facility + def test_load_small_int_to_reg(self): self.a.mc.LGHI(r.r2, loc.imm(123)) self.a.jmpto(r.r14) diff --git a/rpython/memory/gc/env.py 
b/rpython/memory/gc/env.py --- a/rpython/memory/gc/env.py +++ b/rpython/memory/gc/env.py @@ -137,6 +137,8 @@ return get_L2cache_linux2_cpuinfo() if arch in ('alpha', 'ppc'): return get_L2cache_linux2_cpuinfo(label='L2 cache') + if arch in ('s390x'): + return get_L2cache_linux2_cpuinfo_s390x() if arch == 'ia64': return get_L2cache_linux2_ia64() if arch in ('parisc', 'parisc64'): @@ -208,6 +210,67 @@ "Warning: cannot find your CPU L2 cache size in /proc/cpuinfo") return -1 +def get_L2cache_linux2_cpuinfo_s390x(filename="/proc/cpuinfo", label='cache3'): + debug_start("gc-hardware") + L2cache = sys.maxint + try: + fd = os.open(filename, os.O_RDONLY, 0644) + try: + data = [] + while True: + buf = os.read(fd, 4096) + if not buf: + break + data.append(buf) + finally: + os.close(fd) + except OSError: + pass + else: + data = ''.join(data) + linepos = 0 + while True: + start = _findend(data, '\n' + label, linepos) + if start < 0: + break # done + linepos = _findend(data, '\n', start) + if linepos < 0: + break # no end-of-line?? + # *** data[start:linepos] == " : level=2 type=Instruction scope=Private size=2048K ..." + start = _skipspace(data, start) + if data[start] != ':': + continue + # *** data[start:linepos] == ": level=2 type=Instruction scope=Private size=2048K ..." + start = _skipspace(data, start + 1) + # *** data[start:linepos] == "level=2 type=Instruction scope=Private size=2048K ..." 
+ start += 44 + end = start + while '0' <= data[end] <= '9': + end += 1 + # *** data[start:end] == "2048" + if start == end: + continue + number = int(data[start:end]) + # *** data[end:linepos] == " KB\n" + end = _skipspace(data, end) + if data[end] not in ('K', 'k'): # assume kilobytes for now + continue + number = number * 1024 + # for now we look for the smallest of the L2 caches of the CPUs + if number < L2cache: + L2cache = number + + debug_print("L2cache =", L2cache) + debug_stop("gc-hardware") + + if L2cache < sys.maxint: + return L2cache + else: + # Print a top-level warning even in non-debug builds + llop.debug_print(lltype.Void, + "Warning: cannot find your CPU L2 cache size in /proc/cpuinfo") + return -1 + def get_L2cache_linux2_sparc(): debug_start("gc-hardware") cpu = 0 diff --git a/rpython/memory/gc/test/test_env.py b/rpython/memory/gc/test/test_env.py --- a/rpython/memory/gc/test/test_env.py +++ b/rpython/memory/gc/test/test_env.py @@ -161,3 +161,22 @@ """) result = env.get_L2cache_linux2_cpuinfo(str(filepath)) assert result == 3072 * 1024 + +def test_estimate_best_nursery_size_linux2_s390x(): + filepath = udir.join('estimate_best_nursery_size_linux2') + filepath.write("""\ +vendor_id : IBM/S390 +# processors : 2 +bogomips per cpu: 20325.00 +features : esan3 zarch stfle msa ldisp eimm dfp etf3eh highgprs +cache0 : level=1 type=Data scope=Private size=128K line_size=256 associativity=8 +cache1 : level=1 type=Instruction scope=Private size=96K line_size=256 associativity=6 +cache2 : level=2 type=Data scope=Private size=2048K line_size=256 associativity=8 +cache3 : level=2 type=Instruction scope=Private size=2048K line_size=256 associativity=8 +cache4 : level=3 type=Unified scope=Shared size=65536K line_size=256 associativity=16 +cache5 : level=4 type=Unified scope=Shared size=491520K line_size=256 associativity=30 +processor 0: version = FF, identification = 026A77, machine = 2964 +processor 1: version = FF, identification = 026A77, machine = 2964 
+""") + result = env.get_L2cache_linux2_cpuinfo_s390x(str(filepath)) + assert result == 2048 * 1024 From pypy.commits at gmail.com Tue Feb 2 12:43:56 2016 From: pypy.commits at gmail.com (plan_rich) Date: Tue, 02 Feb 2016 09:43:56 -0800 (PST) Subject: [pypy-commit] pypy s390x-backend: added an explicit memory barrier after compare and swap Message-ID: <56b0eadc.4b921c0a.11bed.6411@mx.google.com> Author: Richard Plangger Branch: s390x-backend Changeset: r82043:897d3634e91c Date: 2016-02-02 18:42 +0100 http://bitbucket.org/pypy/pypy/changeset/897d3634e91c/ Log: added an explicit memory barrier after compare and swap diff --git a/rpython/jit/backend/zarch/callbuilder.py b/rpython/jit/backend/zarch/callbuilder.py --- a/rpython/jit/backend/zarch/callbuilder.py +++ b/rpython/jit/backend/zarch/callbuilder.py @@ -221,6 +221,8 @@ self.mc.BRC(c.NE, l.imm(retry_label - self.mc.currpos())) # retry if failed # CSG performs a serialization + # but be sure (testing) + self.mc.sync() self.mc.CGHI(r.r13, l.imm0) b1_location = self.mc.currpos() From pypy.commits at gmail.com Tue Feb 2 13:18:58 2016 From: pypy.commits at gmail.com (mjacob) Date: Tue, 02 Feb 2016 10:18:58 -0800 (PST) Subject: [pypy-commit] pypy py3k: 2to3 Message-ID: <56b0f312.8ee61c0a.f9002.7248@mx.google.com> Author: Manuel Jacob Branch: py3k Changeset: r82044:8645b9cf5eab Date: 2016-02-02 18:30 +0100 http://bitbucket.org/pypy/pypy/changeset/8645b9cf5eab/ Log: 2to3 diff --git a/pypy/module/__pypy__/test/test_magic.py b/pypy/module/__pypy__/test/test_magic.py --- a/pypy/module/__pypy__/test/test_magic.py +++ b/pypy/module/__pypy__/test/test_magic.py @@ -24,26 +24,26 @@ __pypy__.set_code_callback(callable) d = {} try: - exec """ + exec(""" def f(): pass -""" in d +""", d) finally: __pypy__.set_code_callback(None) assert d['f'].__code__ in l def test_decode_long(self): from __pypy__ import decode_long - assert decode_long('') == 0 - assert decode_long('\xff\x00') == 255 - assert decode_long('\xff\x7f') == 32767 - 
assert decode_long('\x00\xff') == -256 - assert decode_long('\x00\x80') == -32768 - assert decode_long('\x80') == -128 - assert decode_long('\x7f') == 127 - assert decode_long('\x55' * 97) == (1 << (97 * 8)) // 3 - assert decode_long('\x00\x80', 'big') == 128 - assert decode_long('\xff\x7f', 'little', False) == 32767 - assert decode_long('\x00\x80', 'little', False) == 32768 - assert decode_long('\x00\x80', 'little', True) == -32768 + assert decode_long(b'') == 0 + assert decode_long(b'\xff\x00') == 255 + assert decode_long(b'\xff\x7f') == 32767 + assert decode_long(b'\x00\xff') == -256 + assert decode_long(b'\x00\x80') == -32768 + assert decode_long(b'\x80') == -128 + assert decode_long(b'\x7f') == 127 + assert decode_long(b'\x55' * 97) == (1 << (97 * 8)) // 3 + assert decode_long(b'\x00\x80', 'big') == 128 + assert decode_long(b'\xff\x7f', 'little', False) == 32767 + assert decode_long(b'\x00\x80', 'little', False) == 32768 + assert decode_long(b'\x00\x80', 'little', True) == -32768 raises(ValueError, decode_long, '', 'foo') From pypy.commits at gmail.com Tue Feb 2 13:19:00 2016 From: pypy.commits at gmail.com (mjacob) Date: Tue, 02 Feb 2016 10:19:00 -0800 (PST) Subject: [pypy-commit] pypy py3k: 2to3 Message-ID: <56b0f314.89bd1c0a.202b4.5ee9@mx.google.com> Author: Manuel Jacob Branch: py3k Changeset: r82045:60029cb06b5b Date: 2016-02-02 19:09 +0100 http://bitbucket.org/pypy/pypy/changeset/60029cb06b5b/ Log: 2to3 diff --git a/pypy/module/_cffi_backend/test/test_recompiler.py b/pypy/module/_cffi_backend/test/test_recompiler.py --- a/pypy/module/_cffi_backend/test/test_recompiler.py +++ b/pypy/module/_cffi_backend/test/test_recompiler.py @@ -1046,7 +1046,7 @@ # @ffi.callback("int *(*)(void)") def get_my_value(): - return values + it.next() + return values + next(it) lib.get_my_value = get_my_value # values[0] = 41 @@ -1390,7 +1390,7 @@ def getvalue(self): if self._result is None: os.close(self._wr) - self._result = os.read(self._rd, 4096) + self._result = 
os.read(self._rd, 4096).decode() os.close(self._rd) # xxx hack away these lines while self._result.startswith('[platform:execute]'): @@ -1456,11 +1456,11 @@ baz1 = ffi.def_extern()(baz) assert baz1 is baz seen = [] - baz(40L, 4L) - res = lib.baz(50L, 8L) + baz(40, 4) + res = lib.baz(50, 8) assert res is None - assert seen == [("Baz", 40L, 4L), ("Baz", 50, 8)] - assert type(seen[0][1]) is type(seen[0][2]) is long + assert seen == [("Baz", 40, 4), ("Baz", 50, 8)] + assert type(seen[0][1]) is type(seen[0][2]) is int assert type(seen[1][1]) is type(seen[1][2]) is int @ffi.def_extern(name="bok") From pypy.commits at gmail.com Tue Feb 2 13:19:02 2016 From: pypy.commits at gmail.com (mjacob) Date: Tue, 02 Feb 2016 10:19:02 -0800 (PST) Subject: [pypy-commit] pypy py3k: 2to3 Message-ID: <56b0f316.42cbc20a.37573.0a85@mx.google.com> Author: Manuel Jacob Branch: py3k Changeset: r82046:77bfed7253ce Date: 2016-02-02 19:14 +0100 http://bitbucket.org/pypy/pypy/changeset/77bfed7253ce/ Log: 2to3 diff --git a/pypy/module/_socket/test/test_sock_app.py b/pypy/module/_socket/test/test_sock_app.py --- a/pypy/module/_socket/test/test_sock_app.py +++ b/pypy/module/_socket/test/test_sock_app.py @@ -651,7 +651,7 @@ def test_connect_to_kernel_netlink_routing_socket(self): import _socket, os s = _socket.socket(_socket.AF_NETLINK, _socket.SOCK_DGRAM, _socket.NETLINK_ROUTE) - assert s.getsockname() == (0L, 0L) + assert s.getsockname() == (0, 0) s.bind((0, 0)) a, b = s.getsockname() assert a == os.getpid() From pypy.commits at gmail.com Tue Feb 2 13:19:03 2016 From: pypy.commits at gmail.com (mjacob) Date: Tue, 02 Feb 2016 10:19:03 -0800 (PST) Subject: [pypy-commit] pypy py3k: 2to3 Message-ID: <56b0f317.88d31c0a.90cb5.740f@mx.google.com> Author: Manuel Jacob Branch: py3k Changeset: r82047:fbb1c9fd86c0 Date: 2016-02-02 19:17 +0100 http://bitbucket.org/pypy/pypy/changeset/fbb1c9fd86c0/ Log: 2to3 diff --git a/pypy/module/itertools/test/test_itertools.py b/pypy/module/itertools/test/test_itertools.py 
--- a/pypy/module/itertools/test/test_itertools.py +++ b/pypy/module/itertools/test/test_itertools.py @@ -201,8 +201,8 @@ # CPython implementation allows floats it = itertools.islice([1, 2, 3, 4, 5], 0.0, 3.0, 2.0) for x in [1, 3]: - assert it.next() == x - raises(StopIteration, it.next) + assert next(it) == x + raises(StopIteration, next, it) it = itertools.islice([1, 2, 3], 0, None) for x in [1, 2, 3]: From pypy.commits at gmail.com Tue Feb 2 13:51:25 2016 From: pypy.commits at gmail.com (mjacob) Date: Tue, 02 Feb 2016 10:51:25 -0800 (PST) Subject: [pypy-commit] pypy py3.3: hg merge py3k Message-ID: <56b0faad.c74fc20a.c32fb.1747@mx.google.com> Author: Manuel Jacob Branch: py3.3 Changeset: r82048:1d68aca1032f Date: 2016-02-02 19:50 +0100 http://bitbucket.org/pypy/pypy/changeset/1d68aca1032f/ Log: hg merge py3k diff too long, truncating to 2000 out of 16702 lines diff --git a/.gitignore b/.gitignore --- a/.gitignore +++ b/.gitignore @@ -29,4 +29,4 @@ release/ !pypy/tool/release/ rpython/_cache/ -__pycache__/ +.cache/ diff --git a/LICENSE b/LICENSE --- a/LICENSE +++ b/LICENSE @@ -28,7 +28,7 @@ DEALINGS IN THE SOFTWARE. -PyPy Copyright holders 2003-2015 +PyPy Copyright holders 2003-2016 ----------------------------------- Except when otherwise stated (look for LICENSE files or information at diff --git a/Makefile b/Makefile --- a/Makefile +++ b/Makefile @@ -39,5 +39,5 @@ # runs. We cannot get their original value either: # http://lists.gnu.org/archive/html/help-make/2010-08/msg00106.html -cffi_imports: +cffi_imports: pypy-c PYTHONPATH=. ./pypy-c pypy/tool/build_cffi_imports.py diff --git a/lib-python/2.7/distutils/command/build_ext.py b/lib-python/2.7/distutils/command/build_ext.py --- a/lib-python/2.7/distutils/command/build_ext.py +++ b/lib-python/2.7/distutils/command/build_ext.py @@ -685,13 +685,17 @@ # the previous version of this code did. This should work for # CPython too. 
The point is that on PyPy with cpyext, the # config var 'SO' is just ".so" but we want to return - # ".pypy-VERSION.so" instead. - so_ext = _get_c_extension_suffix() + # ".pypy-VERSION.so" instead. Note a further tweak for cffi's + # embedding mode: if EXT_SUFFIX is also defined, use that + # directly. + so_ext = get_config_var('EXT_SUFFIX') if so_ext is None: - so_ext = get_config_var('SO') # fall-back - # extensions in debug_mode are named 'module_d.pyd' under windows - if os.name == 'nt' and self.debug: - so_ext = '_d.pyd' + so_ext = _get_c_extension_suffix() + if so_ext is None: + so_ext = get_config_var('SO') # fall-back + # extensions in debug_mode are named 'module_d.pyd' under windows + if os.name == 'nt' and self.debug: + so_ext = '_d.pyd' return os.path.join(*ext_path) + so_ext def get_export_symbols (self, ext): diff --git a/lib-python/2.7/pickle.py b/lib-python/2.7/pickle.py --- a/lib-python/2.7/pickle.py +++ b/lib-python/2.7/pickle.py @@ -1376,6 +1376,7 @@ def decode_long(data): r"""Decode a long from a two's complement little-endian binary string. + This is overriden on PyPy by a RPython version that has linear complexity. 
>>> decode_long('') 0L @@ -1402,6 +1403,11 @@ n -= 1L << (nbytes * 8) return n +try: + from __pypy__ import decode_long +except ImportError: + pass + # Shorthands try: diff --git a/lib-python/2.7/sysconfig.py b/lib-python/2.7/sysconfig.py --- a/lib-python/2.7/sysconfig.py +++ b/lib-python/2.7/sysconfig.py @@ -524,6 +524,13 @@ import _osx_support _osx_support.customize_config_vars(_CONFIG_VARS) + # PyPy: + import imp + for suffix, mode, type_ in imp.get_suffixes(): + if type_ == imp.C_EXTENSION: + _CONFIG_VARS['SOABI'] = suffix.split('.')[1] + break + if args: vals = [] for name in args: diff --git a/lib-python/2.7/test/capath/0e4015b9.0 b/lib-python/2.7/test/capath/0e4015b9.0 new file mode 100644 --- /dev/null +++ b/lib-python/2.7/test/capath/0e4015b9.0 @@ -0,0 +1,16 @@ +-----BEGIN CERTIFICATE----- +MIIClTCCAf6gAwIBAgIJAKGU95wKR8pTMA0GCSqGSIb3DQEBBQUAMHAxCzAJBgNV +BAYTAlhZMRcwFQYDVQQHDA5DYXN0bGUgQW50aHJheDEjMCEGA1UECgwaUHl0aG9u +IFNvZnR3YXJlIEZvdW5kYXRpb24xIzAhBgNVBAMMGnNlbGYtc2lnbmVkLnB5dGhv +bnRlc3QubmV0MB4XDTE0MTEwMjE4MDkyOVoXDTI0MTAzMDE4MDkyOVowcDELMAkG +A1UEBhMCWFkxFzAVBgNVBAcMDkNhc3RsZSBBbnRocmF4MSMwIQYDVQQKDBpQeXRo +b24gU29mdHdhcmUgRm91bmRhdGlvbjEjMCEGA1UEAwwac2VsZi1zaWduZWQucHl0 +aG9udGVzdC5uZXQwgZ8wDQYJKoZIhvcNAQEBBQADgY0AMIGJAoGBANDXQXW9tjyZ +Xt0Iv2tLL1+jinr4wGg36ioLDLFkMf+2Y1GL0v0BnKYG4N1OKlAU15LXGeGer8vm +Sv/yIvmdrELvhAbbo3w4a9TMYQA4XkIVLdvu3mvNOAet+8PMJxn26dbDhG809ALv +EHY57lQsBS3G59RZyBPVqAqmImWNJnVzAgMBAAGjNzA1MCUGA1UdEQQeMByCGnNl +bGYtc2lnbmVkLnB5dGhvbnRlc3QubmV0MAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcN +AQEFBQADgYEAIuzAhgMouJpNdf3URCHIineyoSt6WK/9+eyUcjlKOrDoXNZaD72h +TXMeKYoWvJyVcSLKL8ckPtDobgP2OTt0UkyAaj0n+ZHaqq1lH2yVfGUA1ILJv515 +C8BqbvVZuqm3i7ygmw3bqE/lYMgOrYtXXnqOrz6nvsE6Yc9V9rFflOM= +-----END CERTIFICATE----- diff --git a/lib-python/2.7/test/capath/ce7b8643.0 b/lib-python/2.7/test/capath/ce7b8643.0 new file mode 100644 --- /dev/null +++ b/lib-python/2.7/test/capath/ce7b8643.0 @@ -0,0 +1,16 @@ +-----BEGIN CERTIFICATE----- 
+MIIClTCCAf6gAwIBAgIJAKGU95wKR8pTMA0GCSqGSIb3DQEBBQUAMHAxCzAJBgNV +BAYTAlhZMRcwFQYDVQQHDA5DYXN0bGUgQW50aHJheDEjMCEGA1UECgwaUHl0aG9u +IFNvZnR3YXJlIEZvdW5kYXRpb24xIzAhBgNVBAMMGnNlbGYtc2lnbmVkLnB5dGhv +bnRlc3QubmV0MB4XDTE0MTEwMjE4MDkyOVoXDTI0MTAzMDE4MDkyOVowcDELMAkG +A1UEBhMCWFkxFzAVBgNVBAcMDkNhc3RsZSBBbnRocmF4MSMwIQYDVQQKDBpQeXRo +b24gU29mdHdhcmUgRm91bmRhdGlvbjEjMCEGA1UEAwwac2VsZi1zaWduZWQucHl0 +aG9udGVzdC5uZXQwgZ8wDQYJKoZIhvcNAQEBBQADgY0AMIGJAoGBANDXQXW9tjyZ +Xt0Iv2tLL1+jinr4wGg36ioLDLFkMf+2Y1GL0v0BnKYG4N1OKlAU15LXGeGer8vm +Sv/yIvmdrELvhAbbo3w4a9TMYQA4XkIVLdvu3mvNOAet+8PMJxn26dbDhG809ALv +EHY57lQsBS3G59RZyBPVqAqmImWNJnVzAgMBAAGjNzA1MCUGA1UdEQQeMByCGnNl +bGYtc2lnbmVkLnB5dGhvbnRlc3QubmV0MAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcN +AQEFBQADgYEAIuzAhgMouJpNdf3URCHIineyoSt6WK/9+eyUcjlKOrDoXNZaD72h +TXMeKYoWvJyVcSLKL8ckPtDobgP2OTt0UkyAaj0n+ZHaqq1lH2yVfGUA1ILJv515 +C8BqbvVZuqm3i7ygmw3bqE/lYMgOrYtXXnqOrz6nvsE6Yc9V9rFflOM= +-----END CERTIFICATE----- diff --git a/lib-python/2.7/test/https_svn_python_org_root.pem b/lib-python/2.7/test/https_svn_python_org_root.pem deleted file mode 100644 --- a/lib-python/2.7/test/https_svn_python_org_root.pem +++ /dev/null @@ -1,41 +0,0 @@ ------BEGIN CERTIFICATE----- -MIIHPTCCBSWgAwIBAgIBADANBgkqhkiG9w0BAQQFADB5MRAwDgYDVQQKEwdSb290 -IENBMR4wHAYDVQQLExVodHRwOi8vd3d3LmNhY2VydC5vcmcxIjAgBgNVBAMTGUNB -IENlcnQgU2lnbmluZyBBdXRob3JpdHkxITAfBgkqhkiG9w0BCQEWEnN1cHBvcnRA -Y2FjZXJ0Lm9yZzAeFw0wMzAzMzAxMjI5NDlaFw0zMzAzMjkxMjI5NDlaMHkxEDAO -BgNVBAoTB1Jvb3QgQ0ExHjAcBgNVBAsTFWh0dHA6Ly93d3cuY2FjZXJ0Lm9yZzEi -MCAGA1UEAxMZQ0EgQ2VydCBTaWduaW5nIEF1dGhvcml0eTEhMB8GCSqGSIb3DQEJ -ARYSc3VwcG9ydEBjYWNlcnQub3JnMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIIC -CgKCAgEAziLA4kZ97DYoB1CW8qAzQIxL8TtmPzHlawI229Z89vGIj053NgVBlfkJ -8BLPRoZzYLdufujAWGSuzbCtRRcMY/pnCujW0r8+55jE8Ez64AO7NV1sId6eINm6 -zWYyN3L69wj1x81YyY7nDl7qPv4coRQKFWyGhFtkZip6qUtTefWIonvuLwphK42y -fk1WpRPs6tqSnqxEQR5YYGUFZvjARL3LlPdCfgv3ZWiYUQXw8wWRBB0bF4LsyFe7 -w2t6iPGwcswlWyCR7BYCEo8y6RcYSNDHBS4CMEK4JZwFaz+qOqfrU0j36NK2B5jc 
-G8Y0f3/JHIJ6BVgrCFvzOKKrF11myZjXnhCLotLddJr3cQxyYN/Nb5gznZY0dj4k -epKwDpUeb+agRThHqtdB7Uq3EvbXG4OKDy7YCbZZ16oE/9KTfWgu3YtLq1i6L43q -laegw1SJpfvbi1EinbLDvhG+LJGGi5Z4rSDTii8aP8bQUWWHIbEZAWV/RRyH9XzQ -QUxPKZgh/TMfdQwEUfoZd9vUFBzugcMd9Zi3aQaRIt0AUMyBMawSB3s42mhb5ivU -fslfrejrckzzAeVLIL+aplfKkQABi6F1ITe1Yw1nPkZPcCBnzsXWWdsC4PDSy826 -YreQQejdIOQpvGQpQsgi3Hia/0PsmBsJUUtaWsJx8cTLc6nloQsCAwEAAaOCAc4w -ggHKMB0GA1UdDgQWBBQWtTIb1Mfz4OaO873SsDrusjkY0TCBowYDVR0jBIGbMIGY -gBQWtTIb1Mfz4OaO873SsDrusjkY0aF9pHsweTEQMA4GA1UEChMHUm9vdCBDQTEe -MBwGA1UECxMVaHR0cDovL3d3dy5jYWNlcnQub3JnMSIwIAYDVQQDExlDQSBDZXJ0 -IFNpZ25pbmcgQXV0aG9yaXR5MSEwHwYJKoZIhvcNAQkBFhJzdXBwb3J0QGNhY2Vy -dC5vcmeCAQAwDwYDVR0TAQH/BAUwAwEB/zAyBgNVHR8EKzApMCegJaAjhiFodHRw -czovL3d3dy5jYWNlcnQub3JnL3Jldm9rZS5jcmwwMAYJYIZIAYb4QgEEBCMWIWh0 -dHBzOi8vd3d3LmNhY2VydC5vcmcvcmV2b2tlLmNybDA0BglghkgBhvhCAQgEJxYl -aHR0cDovL3d3dy5jYWNlcnQub3JnL2luZGV4LnBocD9pZD0xMDBWBglghkgBhvhC -AQ0ESRZHVG8gZ2V0IHlvdXIgb3duIGNlcnRpZmljYXRlIGZvciBGUkVFIGhlYWQg -b3ZlciB0byBodHRwOi8vd3d3LmNhY2VydC5vcmcwDQYJKoZIhvcNAQEEBQADggIB -ACjH7pyCArpcgBLKNQodgW+JapnM8mgPf6fhjViVPr3yBsOQWqy1YPaZQwGjiHCc -nWKdpIevZ1gNMDY75q1I08t0AoZxPuIrA2jxNGJARjtT6ij0rPtmlVOKTV39O9lg -18p5aTuxZZKmxoGCXJzN600BiqXfEVWqFcofN8CCmHBh22p8lqOOLlQ+TyGpkO/c -gr/c6EWtTZBzCDyUZbAEmXZ/4rzCahWqlwQ3JNgelE5tDlG+1sSPypZt90Pf6DBl -Jzt7u0NDY8RD97LsaMzhGY4i+5jhe1o+ATc7iwiwovOVThrLm82asduycPAtStvY -sONvRUgzEv/+PDIqVPfE94rwiCPCR/5kenHA0R6mY7AHfqQv0wGP3J8rtsYIqQ+T -SCX8Ev2fQtzzxD72V7DX3WnRBnc0CkvSyqD/HMaMyRa+xMwyN2hzXwj7UfdJUzYF -CpUCTPJ5GhD22Dp1nPMd8aINcGeGG7MW9S/lpOt5hvk9C8JzC6WZrG/8Z7jlLwum -GCSNe9FINSkYQKyTYOGWhlC0elnYjyELn8+CkcY7v2vcB5G5l1YjqrZslMZIBjzk -zk6q5PYvCdxTby78dOs6Y5nCpqyJvKeyRKANihDjbPIky/qbn3BHLt4Ui9SyIAmW -omTxJBzcoTWcFbLUvFUufQb1nA5V9FrWk9p2rSVzTMVD ------END CERTIFICATE----- diff --git a/lib-python/2.7/test/selfsigned_pythontestdotnet.pem b/lib-python/2.7/test/selfsigned_pythontestdotnet.pem --- a/lib-python/2.7/test/selfsigned_pythontestdotnet.pem +++ 
b/lib-python/2.7/test/selfsigned_pythontestdotnet.pem @@ -1,5 +1,5 @@ -----BEGIN CERTIFICATE----- -MIIChzCCAfCgAwIBAgIJAKGU95wKR8pSMA0GCSqGSIb3DQEBBQUAMHAxCzAJBgNV +MIIClTCCAf6gAwIBAgIJAKGU95wKR8pTMA0GCSqGSIb3DQEBBQUAMHAxCzAJBgNV BAYTAlhZMRcwFQYDVQQHDA5DYXN0bGUgQW50aHJheDEjMCEGA1UECgwaUHl0aG9u IFNvZnR3YXJlIEZvdW5kYXRpb24xIzAhBgNVBAMMGnNlbGYtc2lnbmVkLnB5dGhv bnRlc3QubmV0MB4XDTE0MTEwMjE4MDkyOVoXDTI0MTAzMDE4MDkyOVowcDELMAkG @@ -8,9 +8,9 @@ aG9udGVzdC5uZXQwgZ8wDQYJKoZIhvcNAQEBBQADgY0AMIGJAoGBANDXQXW9tjyZ Xt0Iv2tLL1+jinr4wGg36ioLDLFkMf+2Y1GL0v0BnKYG4N1OKlAU15LXGeGer8vm Sv/yIvmdrELvhAbbo3w4a9TMYQA4XkIVLdvu3mvNOAet+8PMJxn26dbDhG809ALv -EHY57lQsBS3G59RZyBPVqAqmImWNJnVzAgMBAAGjKTAnMCUGA1UdEQQeMByCGnNl -bGYtc2lnbmVkLnB5dGhvbnRlc3QubmV0MA0GCSqGSIb3DQEBBQUAA4GBAIOXmdtM -eG9qzP9TiXW/Gc/zI4cBfdCpC+Y4gOfC9bQUC7hefix4iO3+iZjgy3X/FaRxUUoV -HKiXcXIaWqTSUWp45cSh0MbwZXudp6JIAptzdAhvvCrPKeC9i9GvxsPD4LtDAL97 -vSaxQBezA7hdxZd90/EeyMgVZgAnTCnvAWX9 +EHY57lQsBS3G59RZyBPVqAqmImWNJnVzAgMBAAGjNzA1MCUGA1UdEQQeMByCGnNl +bGYtc2lnbmVkLnB5dGhvbnRlc3QubmV0MAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcN +AQEFBQADgYEAIuzAhgMouJpNdf3URCHIineyoSt6WK/9+eyUcjlKOrDoXNZaD72h +TXMeKYoWvJyVcSLKL8ckPtDobgP2OTt0UkyAaj0n+ZHaqq1lH2yVfGUA1ILJv515 +C8BqbvVZuqm3i7ygmw3bqE/lYMgOrYtXXnqOrz6nvsE6Yc9V9rFflOM= -----END CERTIFICATE----- diff --git a/lib-python/2.7/test/test_ssl.py b/lib-python/2.7/test/test_ssl.py --- a/lib-python/2.7/test/test_ssl.py +++ b/lib-python/2.7/test/test_ssl.py @@ -57,7 +57,8 @@ SIGNED_CERTFILE2 = data_file("keycert4.pem") SIGNING_CA = data_file("pycacert.pem") -SVN_PYTHON_ORG_ROOT_CERT = data_file("https_svn_python_org_root.pem") +REMOTE_HOST = "self-signed.pythontest.net" +REMOTE_ROOT_CERT = data_file("selfsigned_pythontestdotnet.pem") EMPTYCERT = data_file("nullcert.pem") BADCERT = data_file("badcert.pem") @@ -244,7 +245,7 @@ self.assertEqual(p['subjectAltName'], san) def test_DER_to_PEM(self): - with open(SVN_PYTHON_ORG_ROOT_CERT, 'r') as f: + with open(CAFILE_CACERT, 'r') as f: pem = f.read() d1 = 
ssl.PEM_cert_to_DER_cert(pem) p2 = ssl.DER_cert_to_PEM_cert(d1) @@ -792,7 +793,7 @@ # Mismatching key and cert ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1) with self.assertRaisesRegexp(ssl.SSLError, "key values mismatch"): - ctx.load_cert_chain(SVN_PYTHON_ORG_ROOT_CERT, ONLYKEY) + ctx.load_cert_chain(CAFILE_CACERT, ONLYKEY) # Password protected key and cert ctx.load_cert_chain(CERTFILE_PROTECTED, password=KEY_PASSWORD) ctx.load_cert_chain(CERTFILE_PROTECTED, password=KEY_PASSWORD.encode()) @@ -1013,7 +1014,7 @@ ctx.load_verify_locations(CERTFILE) self.assertEqual(ctx.cert_store_stats(), {'x509_ca': 0, 'crl': 0, 'x509': 1}) - ctx.load_verify_locations(SVN_PYTHON_ORG_ROOT_CERT) + ctx.load_verify_locations(CAFILE_CACERT) self.assertEqual(ctx.cert_store_stats(), {'x509_ca': 1, 'crl': 0, 'x509': 2}) @@ -1023,8 +1024,8 @@ # CERTFILE is not flagged as X509v3 Basic Constraints: CA:TRUE ctx.load_verify_locations(CERTFILE) self.assertEqual(ctx.get_ca_certs(), []) - # but SVN_PYTHON_ORG_ROOT_CERT is a CA cert - ctx.load_verify_locations(SVN_PYTHON_ORG_ROOT_CERT) + # but CAFILE_CACERT is a CA cert + ctx.load_verify_locations(CAFILE_CACERT) self.assertEqual(ctx.get_ca_certs(), [{'issuer': ((('organizationName', 'Root CA'),), (('organizationalUnitName', 'http://www.cacert.org'),), @@ -1040,7 +1041,7 @@ (('emailAddress', 'support at cacert.org'),)), 'version': 3}]) - with open(SVN_PYTHON_ORG_ROOT_CERT) as f: + with open(CAFILE_CACERT) as f: pem = f.read() der = ssl.PEM_cert_to_DER_cert(pem) self.assertEqual(ctx.get_ca_certs(True), [der]) @@ -1215,11 +1216,11 @@ class NetworkedTests(unittest.TestCase): def test_connect(self): - with support.transient_internet("svn.python.org"): + with support.transient_internet(REMOTE_HOST): s = ssl.wrap_socket(socket.socket(socket.AF_INET), cert_reqs=ssl.CERT_NONE) try: - s.connect(("svn.python.org", 443)) + s.connect((REMOTE_HOST, 443)) self.assertEqual({}, s.getpeercert()) finally: s.close() @@ -1228,27 +1229,27 @@ s = 
ssl.wrap_socket(socket.socket(socket.AF_INET), cert_reqs=ssl.CERT_REQUIRED) self.assertRaisesRegexp(ssl.SSLError, "certificate verify failed", - s.connect, ("svn.python.org", 443)) + s.connect, (REMOTE_HOST, 443)) s.close() # this should succeed because we specify the root cert s = ssl.wrap_socket(socket.socket(socket.AF_INET), cert_reqs=ssl.CERT_REQUIRED, - ca_certs=SVN_PYTHON_ORG_ROOT_CERT) + ca_certs=REMOTE_ROOT_CERT) try: - s.connect(("svn.python.org", 443)) + s.connect((REMOTE_HOST, 443)) self.assertTrue(s.getpeercert()) finally: s.close() def test_connect_ex(self): # Issue #11326: check connect_ex() implementation - with support.transient_internet("svn.python.org"): + with support.transient_internet(REMOTE_HOST): s = ssl.wrap_socket(socket.socket(socket.AF_INET), cert_reqs=ssl.CERT_REQUIRED, - ca_certs=SVN_PYTHON_ORG_ROOT_CERT) + ca_certs=REMOTE_ROOT_CERT) try: - self.assertEqual(0, s.connect_ex(("svn.python.org", 443))) + self.assertEqual(0, s.connect_ex((REMOTE_HOST, 443))) self.assertTrue(s.getpeercert()) finally: s.close() @@ -1256,14 +1257,14 @@ def test_non_blocking_connect_ex(self): # Issue #11326: non-blocking connect_ex() should allow handshake # to proceed after the socket gets ready. - with support.transient_internet("svn.python.org"): + with support.transient_internet(REMOTE_HOST): s = ssl.wrap_socket(socket.socket(socket.AF_INET), cert_reqs=ssl.CERT_REQUIRED, - ca_certs=SVN_PYTHON_ORG_ROOT_CERT, + ca_certs=REMOTE_ROOT_CERT, do_handshake_on_connect=False) try: s.setblocking(False) - rc = s.connect_ex(('svn.python.org', 443)) + rc = s.connect_ex((REMOTE_HOST, 443)) # EWOULDBLOCK under Windows, EINPROGRESS elsewhere self.assertIn(rc, (0, errno.EINPROGRESS, errno.EWOULDBLOCK)) # Wait for connect to finish @@ -1285,58 +1286,62 @@ def test_timeout_connect_ex(self): # Issue #12065: on a timeout, connect_ex() should return the original # errno (mimicking the behaviour of non-SSL sockets). 
- with support.transient_internet("svn.python.org"): + with support.transient_internet(REMOTE_HOST): s = ssl.wrap_socket(socket.socket(socket.AF_INET), cert_reqs=ssl.CERT_REQUIRED, - ca_certs=SVN_PYTHON_ORG_ROOT_CERT, + ca_certs=REMOTE_ROOT_CERT, do_handshake_on_connect=False) try: s.settimeout(0.0000001) - rc = s.connect_ex(('svn.python.org', 443)) + rc = s.connect_ex((REMOTE_HOST, 443)) if rc == 0: - self.skipTest("svn.python.org responded too quickly") + self.skipTest("REMOTE_HOST responded too quickly") self.assertIn(rc, (errno.EAGAIN, errno.EWOULDBLOCK)) finally: s.close() def test_connect_ex_error(self): - with support.transient_internet("svn.python.org"): + with support.transient_internet(REMOTE_HOST): s = ssl.wrap_socket(socket.socket(socket.AF_INET), cert_reqs=ssl.CERT_REQUIRED, - ca_certs=SVN_PYTHON_ORG_ROOT_CERT) + ca_certs=REMOTE_ROOT_CERT) try: - rc = s.connect_ex(("svn.python.org", 444)) + rc = s.connect_ex((REMOTE_HOST, 444)) # Issue #19919: Windows machines or VMs hosted on Windows # machines sometimes return EWOULDBLOCK. 
- self.assertIn(rc, (errno.ECONNREFUSED, errno.EWOULDBLOCK)) + errors = ( + errno.ECONNREFUSED, errno.EHOSTUNREACH, errno.ETIMEDOUT, + errno.EWOULDBLOCK, + ) + self.assertIn(rc, errors) finally: s.close() def test_connect_with_context(self): - with support.transient_internet("svn.python.org"): + with support.transient_internet(REMOTE_HOST): # Same as test_connect, but with a separately created context ctx = ssl.SSLContext(ssl.PROTOCOL_SSLv23) s = ctx.wrap_socket(socket.socket(socket.AF_INET)) - s.connect(("svn.python.org", 443)) + s.connect((REMOTE_HOST, 443)) try: self.assertEqual({}, s.getpeercert()) finally: s.close() # Same with a server hostname s = ctx.wrap_socket(socket.socket(socket.AF_INET), - server_hostname="svn.python.org") - s.connect(("svn.python.org", 443)) + server_hostname=REMOTE_HOST) + s.connect((REMOTE_HOST, 443)) s.close() # This should fail because we have no verification certs ctx.verify_mode = ssl.CERT_REQUIRED s = ctx.wrap_socket(socket.socket(socket.AF_INET)) self.assertRaisesRegexp(ssl.SSLError, "certificate verify failed", - s.connect, ("svn.python.org", 443)) + s.connect, (REMOTE_HOST, 443)) s.close() # This should succeed because we specify the root cert - ctx.load_verify_locations(SVN_PYTHON_ORG_ROOT_CERT) + ctx.load_verify_locations(REMOTE_ROOT_CERT) s = ctx.wrap_socket(socket.socket(socket.AF_INET)) - s.connect(("svn.python.org", 443)) + s.connect((REMOTE_HOST, 443)) try: cert = s.getpeercert() self.assertTrue(cert) @@ -1349,12 +1354,12 @@ # OpenSSL 0.9.8n and 1.0.0, as a result the capath directory must # contain both versions of each certificate (same content, different # filename) for this test to be portable across OpenSSL releases. 
- with support.transient_internet("svn.python.org"): + with support.transient_internet(REMOTE_HOST): ctx = ssl.SSLContext(ssl.PROTOCOL_SSLv23) ctx.verify_mode = ssl.CERT_REQUIRED ctx.load_verify_locations(capath=CAPATH) s = ctx.wrap_socket(socket.socket(socket.AF_INET)) - s.connect(("svn.python.org", 443)) + s.connect((REMOTE_HOST, 443)) try: cert = s.getpeercert() self.assertTrue(cert) @@ -1365,7 +1370,7 @@ ctx.verify_mode = ssl.CERT_REQUIRED ctx.load_verify_locations(capath=BYTES_CAPATH) s = ctx.wrap_socket(socket.socket(socket.AF_INET)) - s.connect(("svn.python.org", 443)) + s.connect((REMOTE_HOST, 443)) try: cert = s.getpeercert() self.assertTrue(cert) @@ -1373,15 +1378,15 @@ s.close() def test_connect_cadata(self): - with open(CAFILE_CACERT) as f: + with open(REMOTE_ROOT_CERT) as f: pem = f.read().decode('ascii') der = ssl.PEM_cert_to_DER_cert(pem) - with support.transient_internet("svn.python.org"): + with support.transient_internet(REMOTE_HOST): ctx = ssl.SSLContext(ssl.PROTOCOL_SSLv23) ctx.verify_mode = ssl.CERT_REQUIRED ctx.load_verify_locations(cadata=pem) with closing(ctx.wrap_socket(socket.socket(socket.AF_INET))) as s: - s.connect(("svn.python.org", 443)) + s.connect((REMOTE_HOST, 443)) cert = s.getpeercert() self.assertTrue(cert) @@ -1390,7 +1395,7 @@ ctx.verify_mode = ssl.CERT_REQUIRED ctx.load_verify_locations(cadata=der) with closing(ctx.wrap_socket(socket.socket(socket.AF_INET))) as s: - s.connect(("svn.python.org", 443)) + s.connect((REMOTE_HOST, 443)) cert = s.getpeercert() self.assertTrue(cert) @@ -1399,9 +1404,9 @@ # Issue #5238: creating a file-like object with makefile() shouldn't # delay closing the underlying "real socket" (here tested with its # file descriptor, hence skipping the test under Windows). 
- with support.transient_internet("svn.python.org"): + with support.transient_internet(REMOTE_HOST): ss = ssl.wrap_socket(socket.socket(socket.AF_INET)) - ss.connect(("svn.python.org", 443)) + ss.connect((REMOTE_HOST, 443)) fd = ss.fileno() f = ss.makefile() f.close() @@ -1415,9 +1420,9 @@ self.assertEqual(e.exception.errno, errno.EBADF) def test_non_blocking_handshake(self): - with support.transient_internet("svn.python.org"): + with support.transient_internet(REMOTE_HOST): s = socket.socket(socket.AF_INET) - s.connect(("svn.python.org", 443)) + s.connect((REMOTE_HOST, 443)) s.setblocking(False) s = ssl.wrap_socket(s, cert_reqs=ssl.CERT_NONE, @@ -1460,12 +1465,12 @@ if support.verbose: sys.stdout.write("\nVerified certificate for %s:%s is\n%s\n" % (host, port ,pem)) - _test_get_server_certificate('svn.python.org', 443, SVN_PYTHON_ORG_ROOT_CERT) + _test_get_server_certificate(REMOTE_HOST, 443, REMOTE_ROOT_CERT) if support.IPV6_ENABLED: _test_get_server_certificate('ipv6.google.com', 443) def test_ciphers(self): - remote = ("svn.python.org", 443) + remote = (REMOTE_HOST, 443) with support.transient_internet(remote[0]): with closing(ssl.wrap_socket(socket.socket(socket.AF_INET), cert_reqs=ssl.CERT_NONE, ciphers="ALL")) as s: @@ -1510,13 +1515,13 @@ def test_get_ca_certs_capath(self): # capath certs are loaded on request - with support.transient_internet("svn.python.org"): + with support.transient_internet(REMOTE_HOST): ctx = ssl.SSLContext(ssl.PROTOCOL_SSLv23) ctx.verify_mode = ssl.CERT_REQUIRED ctx.load_verify_locations(capath=CAPATH) self.assertEqual(ctx.get_ca_certs(), []) s = ctx.wrap_socket(socket.socket(socket.AF_INET)) - s.connect(("svn.python.org", 443)) + s.connect((REMOTE_HOST, 443)) try: cert = s.getpeercert() self.assertTrue(cert) @@ -1527,12 +1532,12 @@ @needs_sni def test_context_setget(self): # Check that the context of a connected socket can be replaced. 
- with support.transient_internet("svn.python.org"): + with support.transient_internet(REMOTE_HOST): ctx1 = ssl.SSLContext(ssl.PROTOCOL_TLSv1) ctx2 = ssl.SSLContext(ssl.PROTOCOL_SSLv23) s = socket.socket(socket.AF_INET) with closing(ctx1.wrap_socket(s)) as ss: - ss.connect(("svn.python.org", 443)) + ss.connect((REMOTE_HOST, 443)) self.assertIs(ss.context, ctx1) self.assertIs(ss._sslobj.context, ctx1) ss.context = ctx2 @@ -3026,7 +3031,7 @@ pass for filename in [ - CERTFILE, SVN_PYTHON_ORG_ROOT_CERT, BYTES_CERTFILE, + CERTFILE, REMOTE_ROOT_CERT, BYTES_CERTFILE, ONLYCERT, ONLYKEY, BYTES_ONLYCERT, BYTES_ONLYKEY, SIGNED_CERTFILE, SIGNED_CERTFILE2, SIGNING_CA, BADCERT, BADKEY, EMPTYCERT]: diff --git a/lib_pypy/cffi.egg-info/PKG-INFO b/lib_pypy/cffi.egg-info/PKG-INFO --- a/lib_pypy/cffi.egg-info/PKG-INFO +++ b/lib_pypy/cffi.egg-info/PKG-INFO @@ -1,6 +1,6 @@ Metadata-Version: 1.1 Name: cffi -Version: 1.4.0 +Version: 1.5.0 Summary: Foreign Function Interface for Python calling C code. Home-page: http://cffi.readthedocs.org Author: Armin Rigo, Maciej Fijalkowski diff --git a/lib_pypy/cffi/__init__.py b/lib_pypy/cffi/__init__.py --- a/lib_pypy/cffi/__init__.py +++ b/lib_pypy/cffi/__init__.py @@ -4,8 +4,8 @@ from .api import FFI, CDefError, FFIError from .ffiplatform import VerificationError, VerificationMissing -__version__ = "1.4.0" -__version_info__ = (1, 4, 0) +__version__ = "1.5.0" +__version_info__ = (1, 5, 0) # The verifier module file names are based on the CRC32 of a string that # contains the following version number. 
It may be older than __version__ diff --git a/lib_pypy/cffi/_cffi_include.h b/lib_pypy/cffi/_cffi_include.h --- a/lib_pypy/cffi/_cffi_include.h +++ b/lib_pypy/cffi/_cffi_include.h @@ -146,8 +146,9 @@ ((Py_ssize_t(*)(CTypeDescrObject *, PyObject *, char **))_cffi_exports[23]) #define _cffi_convert_array_from_object \ ((int(*)(char *, CTypeDescrObject *, PyObject *))_cffi_exports[24]) +#define _CFFI_CPIDX 25 #define _cffi_call_python \ - ((void(*)(struct _cffi_externpy_s *, char *))_cffi_exports[25]) + ((void(*)(struct _cffi_externpy_s *, char *))_cffi_exports[_CFFI_CPIDX]) #define _CFFI_NUM_EXPORTS 26 typedef struct _ctypedescr CTypeDescrObject; @@ -206,7 +207,8 @@ /********** end CPython-specific section **********/ #else _CFFI_UNUSED_FN -static void (*_cffi_call_python)(struct _cffi_externpy_s *, char *); +static void (*_cffi_call_python_org)(struct _cffi_externpy_s *, char *); +# define _cffi_call_python _cffi_call_python_org #endif diff --git a/lib_pypy/cffi/api.py b/lib_pypy/cffi/api.py --- a/lib_pypy/cffi/api.py +++ b/lib_pypy/cffi/api.py @@ -74,6 +74,7 @@ self._windows_unicode = None self._init_once_cache = {} self._cdef_version = None + self._embedding = None if hasattr(backend, 'set_ffi'): backend.set_ffi(self) for name in backend.__dict__: @@ -101,13 +102,21 @@ If 'packed' is specified as True, all structs declared inside this cdef are packed, i.e. laid out without any field alignment at all. 
""" + self._cdef(csource, override=override, packed=packed) + + def embedding_api(self, csource, packed=False): + self._cdef(csource, packed=packed, dllexport=True) + if self._embedding is None: + self._embedding = '' + + def _cdef(self, csource, override=False, **options): if not isinstance(csource, str): # unicode, on Python 2 if not isinstance(csource, basestring): raise TypeError("cdef() argument must be a string") csource = csource.encode('ascii') with self._lock: self._cdef_version = object() - self._parser.parse(csource, override=override, packed=packed) + self._parser.parse(csource, override=override, **options) self._cdefsources.append(csource) if override: for cache in self._function_caches: @@ -533,6 +542,31 @@ ('_UNICODE', '1')] kwds['define_macros'] = defmacros + def _apply_embedding_fix(self, kwds): + # must include an argument like "-lpython2.7" for the compiler + if '__pypy__' in sys.builtin_module_names: + if hasattr(sys, 'prefix'): + import os + libdir = os.path.join(sys.prefix, 'bin') + dirs = kwds.setdefault('library_dirs', []) + if libdir not in dirs: + dirs.append(libdir) + pythonlib = "pypy-c" + else: + if sys.platform == "win32": + template = "python%d%d" + if sys.flags.debug: + template = template + '_d' + else: + template = "python%d.%d" + pythonlib = (template % + (sys.hexversion >> 24, (sys.hexversion >> 16) & 0xff)) + if hasattr(sys, 'abiflags'): + pythonlib += sys.abiflags + libraries = kwds.setdefault('libraries', []) + if pythonlib not in libraries: + libraries.append(pythonlib) + def set_source(self, module_name, source, source_extension='.c', **kwds): if hasattr(self, '_assigned_source'): raise ValueError("set_source() cannot be called several times " @@ -592,14 +626,23 @@ recompile(self, module_name, source, c_file=filename, call_c_compiler=False, **kwds) - def compile(self, tmpdir='.', verbose=0): + def compile(self, tmpdir='.', verbose=0, target=None): + """The 'target' argument gives the final file name of the + compiled DLL. 
Use '*' to force distutils' choice, suitable for + regular CPython C API modules. Use a file name ending in '.*' + to ask for the system's default extension for dynamic libraries + (.so/.dll). + + The default is '*' when building a non-embedded C API extension, + and (module_name + '.*') when building an embedded library. + """ from .recompiler import recompile # if not hasattr(self, '_assigned_source'): raise ValueError("set_source() must be called before compile()") module_name, source, source_extension, kwds = self._assigned_source return recompile(self, module_name, source, tmpdir=tmpdir, - source_extension=source_extension, + target=target, source_extension=source_extension, compiler_verbose=verbose, **kwds) def init_once(self, func, tag): @@ -626,6 +669,32 @@ self._init_once_cache[tag] = (True, result) return result + def embedding_init_code(self, pysource): + if self._embedding: + raise ValueError("embedding_init_code() can only be called once") + # fix 'pysource' before it gets dumped into the C file: + # - remove empty lines at the beginning, so it starts at "line 1" + # - dedent, if all non-empty lines are indented + # - check for SyntaxErrors + import re + match = re.match(r'\s*\n', pysource) + if match: + pysource = pysource[match.end():] + lines = pysource.splitlines() or [''] + prefix = re.match(r'\s*', lines[0]).group() + for i in range(1, len(lines)): + line = lines[i] + if line.rstrip(): + while not line.startswith(prefix): + prefix = prefix[:-1] + i = len(prefix) + lines = [line[i:]+'\n' for line in lines] + pysource = ''.join(lines) + # + compile(pysource, "cffi_init", "exec") + # + self._embedding = pysource + def _load_backend_lib(backend, name, flags): if name is None: diff --git a/lib_pypy/cffi/cparser.py b/lib_pypy/cffi/cparser.py --- a/lib_pypy/cffi/cparser.py +++ b/lib_pypy/cffi/cparser.py @@ -220,8 +220,7 @@ self._included_declarations = set() self._anonymous_counter = 0 self._structnode2type = weakref.WeakKeyDictionary() - self._override 
= False - self._packed = False + self._options = None self._int_constants = {} self._recomplete = [] self._uses_new_feature = None @@ -281,16 +280,15 @@ msg = 'parse error\n%s' % (msg,) raise api.CDefError(msg) - def parse(self, csource, override=False, packed=False): - prev_override = self._override - prev_packed = self._packed + def parse(self, csource, override=False, packed=False, dllexport=False): + prev_options = self._options try: - self._override = override - self._packed = packed + self._options = {'override': override, + 'packed': packed, + 'dllexport': dllexport} self._internal_parse(csource) finally: - self._override = prev_override - self._packed = prev_packed + self._options = prev_options def _internal_parse(self, csource): ast, macros, csource = self._parse(csource) @@ -376,10 +374,13 @@ def _declare_function(self, tp, quals, decl): tp = self._get_type_pointer(tp, quals) - if self._inside_extern_python: - self._declare('extern_python ' + decl.name, tp) + if self._options['dllexport']: + tag = 'dllexport_python ' + elif self._inside_extern_python: + tag = 'extern_python ' else: - self._declare('function ' + decl.name, tp) + tag = 'function ' + self._declare(tag + decl.name, tp) def _parse_decl(self, decl): node = decl.type @@ -449,7 +450,7 @@ prevobj, prevquals = self._declarations[name] if prevobj is obj and prevquals == quals: return - if not self._override: + if not self._options['override']: raise api.FFIError( "multiple declarations of %s (for interactive usage, " "try cdef(xx, override=True))" % (name,)) @@ -728,7 +729,7 @@ if isinstance(tp, model.StructType) and tp.partial: raise NotImplementedError("%s: using both bitfields and '...;'" % (tp,)) - tp.packed = self._packed + tp.packed = self._options['packed'] if tp.completed: # must be re-completed: it is not opaque any more tp.completed = 0 self._recomplete.append(tp) diff --git a/lib_pypy/cffi/ffiplatform.py b/lib_pypy/cffi/ffiplatform.py --- a/lib_pypy/cffi/ffiplatform.py +++ 
b/lib_pypy/cffi/ffiplatform.py @@ -21,12 +21,14 @@ allsources.append(os.path.normpath(src)) return Extension(name=modname, sources=allsources, **kwds) -def compile(tmpdir, ext, compiler_verbose=0): +def compile(tmpdir, ext, compiler_verbose=0, target_extension=None, + embedding=False): """Compile a C extension module using distutils.""" saved_environ = os.environ.copy() try: - outputfilename = _build(tmpdir, ext, compiler_verbose) + outputfilename = _build(tmpdir, ext, compiler_verbose, + target_extension, embedding) outputfilename = os.path.abspath(outputfilename) finally: # workaround for a distutils bugs where some env vars can @@ -36,7 +38,32 @@ os.environ[key] = value return outputfilename -def _build(tmpdir, ext, compiler_verbose=0): +def _save_val(name): + import distutils.sysconfig + config_vars = distutils.sysconfig.get_config_vars() + return config_vars.get(name, Ellipsis) + +def _restore_val(name, value): + import distutils.sysconfig + config_vars = distutils.sysconfig.get_config_vars() + config_vars[name] = value + if value is Ellipsis: + del config_vars[name] + +def _win32_hack_for_embedding(): + from distutils.msvc9compiler import MSVCCompiler + if not hasattr(MSVCCompiler, '_remove_visual_c_ref_CFFI_BAK'): + MSVCCompiler._remove_visual_c_ref_CFFI_BAK = \ + MSVCCompiler._remove_visual_c_ref + MSVCCompiler._remove_visual_c_ref = lambda self,manifest_file: manifest_file + +def _win32_unhack_for_embedding(): + from distutils.msvc9compiler import MSVCCompiler + MSVCCompiler._remove_visual_c_ref = \ + MSVCCompiler._remove_visual_c_ref_CFFI_BAK + +def _build(tmpdir, ext, compiler_verbose=0, target_extension=None, + embedding=False): # XXX compact but horrible :-( from distutils.core import Distribution import distutils.errors, distutils.log @@ -49,18 +76,29 @@ options['build_temp'] = ('ffiplatform', tmpdir) # try: + if sys.platform == 'win32' and embedding: + _win32_hack_for_embedding() old_level = distutils.log.set_threshold(0) or 0 + old_SO = 
_save_val('SO') + old_EXT_SUFFIX = _save_val('EXT_SUFFIX') try: + if target_extension is not None: + _restore_val('SO', target_extension) + _restore_val('EXT_SUFFIX', target_extension) distutils.log.set_verbosity(compiler_verbose) dist.run_command('build_ext') + cmd_obj = dist.get_command_obj('build_ext') + [soname] = cmd_obj.get_outputs() finally: distutils.log.set_threshold(old_level) + _restore_val('SO', old_SO) + _restore_val('EXT_SUFFIX', old_EXT_SUFFIX) + if sys.platform == 'win32' and embedding: + _win32_unhack_for_embedding() except (distutils.errors.CompileError, distutils.errors.LinkError) as e: raise VerificationError('%s: %s' % (e.__class__.__name__, e)) # - cmd_obj = dist.get_command_obj('build_ext') - [soname] = cmd_obj.get_outputs() return soname try: diff --git a/lib_pypy/cffi/recompiler.py b/lib_pypy/cffi/recompiler.py --- a/lib_pypy/cffi/recompiler.py +++ b/lib_pypy/cffi/recompiler.py @@ -3,6 +3,7 @@ from .cffi_opcode import * VERSION = "0x2601" +VERSION_EMBEDDED = "0x2701" class GlobalExpr: @@ -281,6 +282,29 @@ lines[i:i+1] = self._rel_readlines('parse_c_type.h') prnt(''.join(lines)) # + # if we have ffi._embedding != None, we give it here as a macro + # and include an extra file + base_module_name = self.module_name.split('.')[-1] + if self.ffi._embedding is not None: + prnt('#define _CFFI_MODULE_NAME "%s"' % (self.module_name,)) + prnt('#define _CFFI_PYTHON_STARTUP_CODE %s' % + (self._string_literal(self.ffi._embedding),)) + prnt('#ifdef PYPY_VERSION') + prnt('# define _CFFI_PYTHON_STARTUP_FUNC _cffi_pypyinit_%s' % ( + base_module_name,)) + prnt('#elif PY_MAJOR_VERSION >= 3') + prnt('# define _CFFI_PYTHON_STARTUP_FUNC PyInit_%s' % ( + base_module_name,)) + prnt('#else') + prnt('# define _CFFI_PYTHON_STARTUP_FUNC init%s' % ( + base_module_name,)) + prnt('#endif') + lines = self._rel_readlines('_embedding.h') + prnt(''.join(lines)) + version = VERSION_EMBEDDED + else: + version = VERSION + # # then paste the C source given by the user, verbatim. 
prnt('/************************************************************/') prnt() @@ -365,17 +389,16 @@ prnt() # # the init function - base_module_name = self.module_name.split('.')[-1] prnt('#ifdef PYPY_VERSION') prnt('PyMODINIT_FUNC') prnt('_cffi_pypyinit_%s(const void *p[])' % (base_module_name,)) prnt('{') if self._num_externpy: prnt(' if (((intptr_t)p[0]) >= 0x0A03) {') - prnt(' _cffi_call_python = ' + prnt(' _cffi_call_python_org = ' '(void(*)(struct _cffi_externpy_s *, char *))p[1];') prnt(' }') - prnt(' p[0] = (const void *)%s;' % VERSION) + prnt(' p[0] = (const void *)%s;' % version) prnt(' p[1] = &_cffi_type_context;') prnt('}') # on Windows, distutils insists on putting init_cffi_xyz in @@ -394,14 +417,14 @@ prnt('PyInit_%s(void)' % (base_module_name,)) prnt('{') prnt(' return _cffi_init("%s", %s, &_cffi_type_context);' % ( - self.module_name, VERSION)) + self.module_name, version)) prnt('}') prnt('#else') prnt('PyMODINIT_FUNC') prnt('init%s(void)' % (base_module_name,)) prnt('{') prnt(' _cffi_init("%s", %s, &_cffi_type_context);' % ( - self.module_name, VERSION)) + self.module_name, version)) prnt('}') prnt('#endif') @@ -1123,7 +1146,10 @@ assert isinstance(tp, model.FunctionPtrType) self._do_collect_type(tp) - def _generate_cpy_extern_python_decl(self, tp, name): + def _generate_cpy_dllexport_python_collecttype(self, tp, name): + self._generate_cpy_extern_python_collecttype(tp, name) + + def _generate_cpy_extern_python_decl(self, tp, name, dllexport=False): prnt = self._prnt if isinstance(tp.result, model.VoidType): size_of_result = '0' @@ -1156,7 +1182,11 @@ size_of_a = 'sizeof(%s) > %d ? 
sizeof(%s) : %d' % ( tp.result.get_c_name(''), size_of_a, tp.result.get_c_name(''), size_of_a) - prnt('static %s' % tp.result.get_c_name(name_and_arguments)) + if dllexport: + tag = 'CFFI_DLLEXPORT' + else: + tag = 'static' + prnt('%s %s' % (tag, tp.result.get_c_name(name_and_arguments))) prnt('{') prnt(' char a[%s];' % size_of_a) prnt(' char *p = a;') @@ -1174,6 +1204,9 @@ prnt() self._num_externpy += 1 + def _generate_cpy_dllexport_python_decl(self, tp, name): + self._generate_cpy_extern_python_decl(tp, name, dllexport=True) + def _generate_cpy_extern_python_ctx(self, tp, name): if self.target_is_python: raise ffiplatform.VerificationError( @@ -1185,6 +1218,21 @@ self._lsts["global"].append( GlobalExpr(name, '&_cffi_externpy__%s' % name, type_op, name)) + def _generate_cpy_dllexport_python_ctx(self, tp, name): + self._generate_cpy_extern_python_ctx(tp, name) + + def _string_literal(self, s): + def _char_repr(c): + # escape with a '\' the characters '\', '"' or (for trigraphs) '?' + if c in '\\"?': return '\\' + c + if ' ' <= c < '\x7F': return c + if c == '\n': return '\\n' + return '\\%03o' % ord(c) + lines = [] + for line in s.splitlines(True): + lines.append('"%s"' % ''.join([_char_repr(c) for c in line])) + return ' \\\n'.join(lines) + # ---------- # emitting the opcodes for individual types @@ -1311,12 +1359,15 @@ def recompile(ffi, module_name, preamble, tmpdir='.', call_c_compiler=True, c_file=None, source_extension='.c', extradir=None, - compiler_verbose=1, **kwds): + compiler_verbose=1, target=None, **kwds): if not isinstance(module_name, str): module_name = module_name.encode('ascii') if ffi._windows_unicode: ffi._apply_windows_unicode(kwds) if preamble is not None: + embedding = (ffi._embedding is not None) + if embedding: + ffi._apply_embedding_fix(kwds) if c_file is None: c_file, parts = _modname_to_file(tmpdir, module_name, source_extension) @@ -1325,13 +1376,40 @@ ext_c_file = os.path.join(*parts) else: ext_c_file = c_file - ext = 
ffiplatform.get_extension(ext_c_file, module_name, **kwds) + # + if target is None: + if embedding: + target = '%s.*' % module_name + else: + target = '*' + if target == '*': + target_module_name = module_name + target_extension = None # use default + else: + if target.endswith('.*'): + target = target[:-2] + if sys.platform == 'win32': + target += '.dll' + else: + target += '.so' + # split along the first '.' (not the last one, otherwise the + # preceeding dots are interpreted as splitting package names) + index = target.find('.') + if index < 0: + raise ValueError("target argument %r should be a file name " + "containing a '.'" % (target,)) + target_module_name = target[:index] + target_extension = target[index:] + # + ext = ffiplatform.get_extension(ext_c_file, target_module_name, **kwds) updated = make_c_source(ffi, module_name, preamble, c_file) if call_c_compiler: cwd = os.getcwd() try: os.chdir(tmpdir) - outputfilename = ffiplatform.compile('.', ext, compiler_verbose) + outputfilename = ffiplatform.compile('.', ext, compiler_verbose, + target_extension, + embedding=embedding) finally: os.chdir(cwd) return outputfilename diff --git a/lib_pypy/greenlet.py b/lib_pypy/greenlet.py --- a/lib_pypy/greenlet.py +++ b/lib_pypy/greenlet.py @@ -203,7 +203,7 @@ try: if hasattr(_tls, 'trace'): _run_trace_callback('throw') - raise exc, value, tb + raise __pypy__.normalize_exc(exc, value, tb) except GreenletExit as e: res = e finally: diff --git a/pypy/doc/embedding.rst b/pypy/doc/embedding.rst --- a/pypy/doc/embedding.rst +++ b/pypy/doc/embedding.rst @@ -10,6 +10,15 @@ with a ``libpypy-c.so`` or ``pypy-c.dll`` file. This is the default in recent versions of PyPy. +.. note:: + + The interface described in this page is kept for backward compatibility. + From PyPy 4.1, it is recommended to use instead CFFI's `native embedding + support,`__ which gives a simpler approach that works on CPython as well + as PyPy. + +.. 
__: http://cffi.readthedocs.org/en/latest/embedding.html + The resulting shared library exports very few functions, however they are enough to accomplish everything you need, provided you follow a few principles. The API is: @@ -130,8 +139,13 @@ More complete example --------------------- -.. note:: This example depends on pypy_execute_source_ptr which is not available - in PyPy <= 2.2.1. +.. note:: Note that we do not make use of ``extern "Python"``, the new + way to do callbacks in CFFI 1.4: this is because these examples use + the ABI mode, not the API mode, and with the ABI mode you still have + to use ``ffi.callback()``. It is work in progress to integrate + ``extern "Python"`` with the idea of embedding (and it is expected + to ultimately lead to a better way to do embedding than the one + described here, and that would work equally well on CPython and PyPy). Typically we need something more to do than simply execute source. The following is a fully fledged example, please consult cffi documentation for details. diff --git a/pypy/doc/faq.rst b/pypy/doc/faq.rst --- a/pypy/doc/faq.rst +++ b/pypy/doc/faq.rst @@ -54,7 +54,8 @@ It is quite common nowadays that xyz is available on PyPI_ and installable with ``pip install xyz``. The simplest solution is to `use virtualenv (as documented here)`_. Then enter (activate) the virtualenv -and type: ``pip install xyz``. +and type: ``pip install xyz``. If you don't know or don't want virtualenv, +you can also install ``pip`` globally by saying ``pypy -m ensurepip``. If you get errors from the C compiler, the module is a CPython C Extension module using unsupported features. `See below.`_ diff --git a/pypy/doc/getting-started-dev.rst b/pypy/doc/getting-started-dev.rst --- a/pypy/doc/getting-started-dev.rst +++ b/pypy/doc/getting-started-dev.rst @@ -19,7 +19,9 @@ * Clone this new repo (i.e. the fork) to your local machine with the command ``hg clone ssh://hg at bitbucket.org/yourname/pypy``. 
It is a very slow - operation but only ever needs to be done once. If you already cloned + operation but only ever needs to be done once. See also + http://pypy.org/download.html#building-from-source . + If you already cloned ``https://bitbucket.org/pypy/pypy`` before, even if some time ago, then you can reuse the same clone by editing the file ``.hg/hgrc`` in your clone to contain the line ``default = diff --git a/pypy/doc/how-to-contribute.rst b/pypy/doc/how-to-contribute.rst --- a/pypy/doc/how-to-contribute.rst +++ b/pypy/doc/how-to-contribute.rst @@ -67,8 +67,8 @@ **module** directory contains extension modules written in RPython * **rpython compiler** that resides in ``rpython/annotator`` and - ``rpython/rtyper`` directories. Consult :doc:`introduction to RPython ` for - further reading + ``rpython/rtyper`` directories. Consult `Getting Started with RPython`_ + for further reading * **JIT generator** lives in ``rpython/jit`` directory. optimizations live in ``rpython/jit/metainterp/optimizeopt``, the main JIT in @@ -80,3 +80,14 @@ The rest of directories serve specific niche goal and are unlikely a good entry point. + + +More documentation +------------------ + +* `Getting Started Developing With PyPy`_ + +* `Getting Started with RPython`_ + +.. _`Getting Started Developing With PyPy`: getting-started-dev.html +.. _`Getting started with RPython`: http://rpython.readthedocs.org/en/latest/getting-started.html diff --git a/pypy/doc/stm.rst b/pypy/doc/stm.rst --- a/pypy/doc/stm.rst +++ b/pypy/doc/stm.rst @@ -83,30 +83,27 @@ **pypy-stm requires 64-bit Linux for now.** -Development is done in the branch `stmgc-c7`_. If you are only -interested in trying it out, you can download a Ubuntu binary here__ -(``pypy-stm-2.*.tar.bz2``, for Ubuntu 12.04-14.04). The current version -supports four "segments", which means that it will run up to four -threads in parallel. (Development recently switched to `stmgc-c8`_, -but that is not ready for trying out yet.) 
+Development is done in the branch `stmgc-c8`_. If you are only +interested in trying it out, please pester us until we upload a recent +prebuilt binary. The current version supports four "segments", which +means that it will run up to four threads in parallel. To build a version from sources, you first need to compile a custom -version of clang(!); we recommend downloading `llvm and clang like -described here`__, but at revision 201645 (use ``svn co -r 201645 `` -for all checkouts). Then apply all the patches in `this directory`__: -they are fixes for a clang-only feature that hasn't been used so heavily -in the past (without the patches, you get crashes of clang). Then get -the branch `stmgc-c7`_ of PyPy and run:: +version of gcc(!). See the instructions here: +https://bitbucket.org/pypy/stmgc/src/default/gcc-seg-gs/ +(Note that these patches are being incorporated into gcc. It is likely +that future versions of gcc will not need to be patched any more.) + +Then get the branch `stmgc-c8`_ of PyPy and run:: cd pypy/goal ../../rpython/bin/rpython -Ojit --stm - PYTHONPATH=. ./pypy-c pypy/tool/build_cffi_imports.py -.. _`stmgc-c7`: https://bitbucket.org/pypy/pypy/src/stmgc-c7/ +At the end, this will try to compile the generated C code by calling +``gcc-seg-gs``, which must be the script you installed in the +instructions above. + .. _`stmgc-c8`: https://bitbucket.org/pypy/pypy/src/stmgc-c8/ -.. __: https://bitbucket.org/pypy/pypy/downloads/ -.. __: http://clang.llvm.org/get_started.html -.. __: https://bitbucket.org/pypy/stmgc/src/default/c7/llvmfix/ .. _caveats: @@ -114,6 +111,12 @@ Current status (stmgc-c7) ------------------------- +.. warning:: + + THIS PAGE IS OLD, THE REST IS ABOUT STMGC-C7 WHEREAS THE CURRENT + DEVELOPMENT WORK IS DONE ON STMGC-C8 + + * **NEW:** It seems to work fine, without crashing any more. Please `report any crash`_ you find (or other bugs). 
diff --git a/pypy/doc/whatsnew-head.rst b/pypy/doc/whatsnew-head.rst --- a/pypy/doc/whatsnew-head.rst +++ b/pypy/doc/whatsnew-head.rst @@ -5,6 +5,8 @@ .. this is a revision shortly after release-4.0.1 .. startrev: 4b5c840d0da2 +Fixed ``_PyLong_FromByteArray()``, which was buggy. + .. branch: numpy-1.10 Fix tests to run cleanly with -A and start to fix micronumpy for upstream numpy @@ -44,6 +46,9 @@ .. branch: fix-setslice-can-resize +Make rlist's ll_listsetslice() able to resize the target list to help +simplify objspace/std/listobject.py. Was issue #2196. + .. branch: anntype2 A somewhat random bunch of changes and fixes following up on branch 'anntype'. Highlights: @@ -73,3 +78,64 @@ Move wrappers for OS functions from `rpython/rtyper` to `rpython/rlib` and turn them into regular RPython functions. Most RPython-compatible `os.*` functions are now directly accessible as `rpython.rposix.*`. + +.. branch: always-enable-gil + +Simplify a bit the GIL handling in non-jitted code. Fixes issue #2205. + +.. branch: flowspace-cleanups + +Trivial cleanups in flowspace.operation : fix comment & duplicated method + +.. branch: test-AF_NETLINK + +Add a test for pre-existing AF_NETLINK support. Was part of issue #1942. + +.. branch: small-cleanups-misc + +Trivial misc cleanups: typo, whitespace, obsolete comments + +.. branch: cpyext-slotdefs +.. branch: fix-missing-canraise +.. branch: whatsnew + +.. branch: fix-2211 + +Fix the cryptic exception message when attempting to use extended slicing +in rpython. Was issue #2211. + +.. branch: ec-keepalive + +Optimize the case where, in a new C-created thread, we keep invoking +short-running Python callbacks. (CFFI on CPython has a hack to achieve +the same result.) This can also be seen as a bug fix: previously, +thread-local objects would be reset between two such calls. + +.. branch: globals-quasiimmut + +Optimize global lookups. + +.. 
branch: cffi-static-callback-embedding + +Updated to CFFI 1.5, which supports a new way to do embedding. +Deprecates http://pypy.readthedocs.org/en/latest/embedding.html. + +.. branch: fix-cpython-ssl-tests-2.7 + +Fix SSL tests by importing cpython's patch + +.. branch: remove-getfield-pure + +Remove pure variants of ``getfield_gc_*`` operations from the JIT. Relevant +optimizations instead consult the field descriptor to determine the purity of +the operation. Additionally, pure ``getfield`` operations are now handled +entirely by `rpython/jit/metainterp/optimizeopt/heap.py` rather than +`rpython/jit/metainterp/optimizeopt/pure.py`, which can result in better codegen +for traces containing a large number of pure getfield operations. + +.. branch: exctrans + +Try to ensure that no new functions get annotated during the 'source_c' phase. +Refactor sandboxing to operate at a higher level. + +.. branch: cpyext-bootstrap diff --git a/pypy/goal/targetpypystandalone.py b/pypy/goal/targetpypystandalone.py --- a/pypy/goal/targetpypystandalone.py +++ b/pypy/goal/targetpypystandalone.py @@ -91,13 +91,6 @@ from rpython.rlib.entrypoint import entrypoint_highlevel from rpython.rtyper.lltypesystem import rffi, lltype - w_pathsetter = space.appexec([], """(): - def f(path): - import sys - sys.path[:] = path - return f - """) - @entrypoint_highlevel('main', [rffi.CCHARP, rffi.INT], c_name='pypy_setup_home') def pypy_setup_home(ll_home, verbose): @@ -116,7 +109,10 @@ " not found in '%s' or in any parent directory" % home1) return rffi.cast(rffi.INT, 1) space.startup() - space.call_function(w_pathsetter, w_path) + space.appexec([w_path], """(path): + import sys + sys.path[:] = path + """) # import site try: space.setattr(space.getbuiltinmodule('sys'), @@ -156,6 +152,9 @@ return os_thread.setup_threads(space) os_thread.bootstrapper.acquire(space, None, None) + # XXX this doesn't really work. 
Don't use os.fork(), and + # if your embedder program uses fork(), don't use any PyPy + # code in the fork rthread.gc_thread_start() os_thread.bootstrapper.nbthreads += 1 os_thread.bootstrapper.release() diff --git a/pypy/interpreter/astcompiler/test/test_compiler.py b/pypy/interpreter/astcompiler/test/test_compiler.py --- a/pypy/interpreter/astcompiler/test/test_compiler.py +++ b/pypy/interpreter/astcompiler/test/test_compiler.py @@ -1019,6 +1019,8 @@ def test_dont_fold_equal_code_objects(self): yield self.st, "f=lambda:1;g=lambda:1.0;x=g()", 'type(x)', float + yield (self.st, "x=(lambda: (-0.0, 0.0), lambda: (0.0, -0.0))[1]()", + 'repr(x)', '(0.0, -0.0)') def test_raise_from(self): test = """if 1: diff --git a/pypy/interpreter/eval.py b/pypy/interpreter/eval.py --- a/pypy/interpreter/eval.py +++ b/pypy/interpreter/eval.py @@ -9,8 +9,8 @@ class Code(W_Root): """A code is a compiled version of some source code. Abstract base class.""" - _immutable_ = True hidden_applevel = False + _immutable_fields_ = ['co_name', 'fast_natural_arity', 'hidden_applevel'] # n >= 0 : arity # FLATPYCALL = 0x100 diff --git a/pypy/interpreter/pycode.py b/pypy/interpreter/pycode.py --- a/pypy/interpreter/pycode.py +++ b/pypy/interpreter/pycode.py @@ -76,10 +76,12 @@ class PyCode(eval.Code): "CPython-style code objects." 
- _immutable_ = True - _immutable_fields_ = ["co_consts_w[*]", "co_names_w[*]", "co_varnames[*]", - "co_freevars[*]", "co_cellvars[*]", - "_args_as_cellvars[*]"] + _immutable_fields_ = ["_signature", "co_argcount", "co_kwonlyargcount", "co_cellvars[*]", + "co_code", "co_consts_w[*]", "co_filename", + "co_firstlineno", "co_flags", "co_freevars[*]", + "co_lnotab", "co_names_w[*]", "co_nlocals", + "co_stacksize", "co_varnames[*]", + "_args_as_cellvars[*]", "w_globals?"] def __init__(self, space, argcount, kwonlyargcount, nlocals, stacksize, flags, code, consts, names, varnames, filename, @@ -108,6 +110,10 @@ self.co_name = name self.co_firstlineno = firstlineno self.co_lnotab = lnotab + # store the first globals object that the code object is run in in + # here. if a frame is run in that globals object, it does not need to + # store it at all + self.w_globals = None self.hidden_applevel = hidden_applevel self.magic = magic self._signature = cpython_code_signature(self) @@ -115,6 +121,14 @@ self._init_ready() self.new_code_hook() + def frame_stores_global(self, w_globals): + if self.w_globals is None: + self.w_globals = w_globals + return False + if self.w_globals is w_globals: + return False + return True + def new_code_hook(self): code_hook = self.space.fromcache(CodeHookCache)._code_hook if code_hook is not None: diff --git a/pypy/interpreter/pyframe.py b/pypy/interpreter/pyframe.py --- a/pypy/interpreter/pyframe.py +++ b/pypy/interpreter/pyframe.py @@ -36,6 +36,7 @@ def __init__(self, pycode): self.f_lineno = pycode.co_firstlineno + self.w_globals = pycode.w_globals class PyFrame(W_Root): """Represents a frame for a regular Python function @@ -67,7 +68,6 @@ escaped = False # see mark_as_escaped() debugdata = None - w_globals = None pycode = None # code object executed by that frame locals_cells_stack_w = None # the list of all locals, cells and the valuestack valuestackdepth = 0 # number of items on valuestack @@ -90,8 +90,9 @@ self = hint(self, 
access_directly=True, fresh_virtualizable=True) assert isinstance(code, pycode.PyCode) self.space = space - self.w_globals = w_globals self.pycode = code + if code.frame_stores_global(w_globals): + self.getorcreatedebug().w_globals = w_globals ncellvars = len(code.co_cellvars) nfreevars = len(code.co_freevars) size = code.co_nlocals + ncellvars + nfreevars + code.co_stacksize @@ -116,6 +117,12 @@ self.debugdata = FrameDebugData(self.pycode) return self.debugdata + def get_w_globals(self): + debugdata = self.getdebug() + if debugdata is not None: + return debugdata.w_globals + return jit.promote(self.pycode).w_globals + def get_w_f_trace(self): d = self.getdebug() if d is None: @@ -201,8 +208,9 @@ if flags & pycode.CO_NEWLOCALS: self.getorcreatedebug().w_locals = self.space.newdict(module=True) else: - assert self.w_globals is not None - self.getorcreatedebug().w_locals = self.w_globals + w_globals = self.get_w_globals() + assert w_globals is not None + self.getorcreatedebug().w_locals = w_globals ncellvars = len(code.co_cellvars) nfreevars = len(code.co_freevars) @@ -449,7 +457,7 @@ w_blockstack, w_exc_value, # last_exception w_tb, # - self.w_globals, + self.get_w_globals(), w(self.last_instr), w(self.frame_finished_execution), w(f_lineno), @@ -658,6 +666,11 @@ def fget_getdictscope(self, space): return self.getdictscope() + def fget_w_globals(self, space): + # bit silly, but GetSetProperty passes a space + return self.get_w_globals() + + ### line numbers ### def fget_f_lineno(self, space): diff --git a/pypy/interpreter/pyopcode.py b/pypy/interpreter/pyopcode.py --- a/pypy/interpreter/pyopcode.py +++ b/pypy/interpreter/pyopcode.py @@ -832,16 +832,16 @@ def STORE_GLOBAL(self, nameindex, next_instr): varname = self.getname_u(nameindex) w_newvalue = self.popvalue() - self.space.setitem_str(self.w_globals, varname, w_newvalue) + self.space.setitem_str(self.get_w_globals(), varname, w_newvalue) def DELETE_GLOBAL(self, nameindex, next_instr): w_varname = 
self.getname_w(nameindex) - self.space.delitem(self.w_globals, w_varname) + self.space.delitem(self.get_w_globals(), w_varname) def LOAD_NAME(self, nameindex, next_instr): w_varname = self.getname_w(nameindex) varname = self.space.identifier_w(w_varname) - if self.getorcreatedebug().w_locals is not self.w_globals: + if self.getorcreatedebug().w_locals is not self.get_w_globals(): w_value = self.space.finditem_str(self.getorcreatedebug().w_locals, varname) if w_value is not None: @@ -855,7 +855,7 @@ self.pushvalue(w_value) def _load_global(self, varname): - w_value = self.space.finditem_str(self.w_globals, varname) + w_value = self.space.finditem_str(self.get_w_globals(), varname) if w_value is None: # not in the globals, now look in the built-ins w_value = self.get_builtin().getdictvalue(self.space, varname) @@ -986,7 +986,7 @@ w_locals = d.w_locals if w_locals is None: # CPython does this w_locals = space.w_None - w_globals = self.w_globals + w_globals = self.get_w_globals() if w_flag is None: w_obj = space.call_function(w_import, w_modulename, w_globals, w_locals, w_fromlist) @@ -1251,7 +1251,7 @@ w_name = self.popvalue() w_def = self.popvalue() space.setitem(w_kw_defs, w_def, w_name) - fn = function.Function(space, codeobj, self.w_globals, defaultarguments, + fn = function.Function(space, codeobj, self.get_w_globals(), defaultarguments, w_kw_defs, freevars, w_ann, qualname=qualname) self.pushvalue(space.wrap(fn)) @@ -1638,7 +1638,7 @@ if space.is_none(w_locals): w_locals = w_globals else: - w_globals = caller.w_globals + w_globals = caller.get_w_globals() if space.is_none(w_locals): w_locals = caller.getdictscope() elif space.is_none(w_locals): diff --git a/pypy/interpreter/test/test_pyframe.py b/pypy/interpreter/test/test_pyframe.py --- a/pypy/interpreter/test/test_pyframe.py +++ b/pypy/interpreter/test/test_pyframe.py @@ -34,6 +34,7 @@ import sys f = sys._getframe() assert f.f_globals is globals() + raises(AttributeError, "f.f_globals = globals()") def 
test_f_builtins(self): import sys, builtins diff --git a/pypy/interpreter/typedef.py b/pypy/interpreter/typedef.py --- a/pypy/interpreter/typedef.py +++ b/pypy/interpreter/typedef.py @@ -790,7 +790,7 @@ f_restricted = GetSetProperty(PyFrame.fget_f_restricted), f_code = GetSetProperty(PyFrame.fget_code), f_locals = GetSetProperty(PyFrame.fget_getdictscope), - f_globals = interp_attrproperty_w('w_globals', cls=PyFrame), + f_globals = GetSetProperty(PyFrame.fget_w_globals), ) assert not PyFrame.typedef.acceptable_as_base_class # no __new__ diff --git a/pypy/module/__builtin__/interp_inspect.py b/pypy/module/__builtin__/interp_inspect.py --- a/pypy/module/__builtin__/interp_inspect.py +++ b/pypy/module/__builtin__/interp_inspect.py @@ -2,7 +2,7 @@ def globals(space): "Return the dictionary containing the current scope's global variables." ec = space.getexecutioncontext() - return ec.gettopframe_nohidden().w_globals + return ec.gettopframe_nohidden().get_w_globals() def locals(space): """Return a dictionary containing the current scope's local variables. 
diff --git a/pypy/module/__pypy__/__init__.py b/pypy/module/__pypy__/__init__.py --- a/pypy/module/__pypy__/__init__.py +++ b/pypy/module/__pypy__/__init__.py @@ -87,6 +87,7 @@ 'set_code_callback' : 'interp_magic.set_code_callback', 'save_module_content_for_future_reload': 'interp_magic.save_module_content_for_future_reload', + 'decode_long' : 'interp_magic.decode_long', 'normalize_exc' : 'interp_magic.normalize_exc', 'StdErrPrinter' : 'interp_stderrprinter.W_StdErrPrinter', } diff --git a/pypy/module/__pypy__/interp_magic.py b/pypy/module/__pypy__/interp_magic.py --- a/pypy/module/__pypy__/interp_magic.py +++ b/pypy/module/__pypy__/interp_magic.py @@ -1,4 +1,4 @@ -from pypy.interpreter.error import OperationError, wrap_oserror +from pypy.interpreter.error import OperationError, oefmt, wrap_oserror from pypy.interpreter.gateway import WrappedDefault, unwrap_spec from pypy.interpreter.pycode import CodeHookCache from pypy.interpreter.pyframe import PyFrame @@ -83,7 +83,7 @@ Return the underlying strategy currently used by a dict, list or set object """ if isinstance(w_obj, W_DictMultiObject): - name = w_obj.strategy.__class__.__name__ + name = w_obj.get_strategy().__class__.__name__ elif isinstance(w_obj, W_ListObject): name = w_obj.strategy.__class__.__name__ elif isinstance(w_obj, W_BaseSetObject): @@ -139,6 +139,15 @@ else: cache._code_hook = w_callable + at unwrap_spec(string=str, byteorder=str, signed=int) +def decode_long(space, string, byteorder='little', signed=1): + from rpython.rlib.rbigint import rbigint, InvalidEndiannessError + try: + result = rbigint.frombytes(string, byteorder, bool(signed)) + except InvalidEndiannessError: + raise oefmt(space.w_ValueError, "invalid byteorder argument") + return space.newlong_from_rbigint(result) + @unwrap_spec(w_value=WrappedDefault(None), w_tb=WrappedDefault(None)) def normalize_exc(space, w_type, w_value=None, w_tb=None): operr = OperationError(w_type, w_value, w_tb) diff --git 
a/pypy/module/__pypy__/test/test_magic.py b/pypy/module/__pypy__/test/test_magic.py --- a/pypy/module/__pypy__/test/test_magic.py +++ b/pypy/module/__pypy__/test/test_magic.py @@ -24,10 +24,26 @@ __pypy__.set_code_callback(callable) d = {} try: - exec """ + exec(""" def f(): pass -""" in d +""", d) finally: __pypy__.set_code_callback(None) - assert d['f'].__code__ in l \ No newline at end of file + assert d['f'].__code__ in l + + def test_decode_long(self): + from __pypy__ import decode_long + assert decode_long(b'') == 0 + assert decode_long(b'\xff\x00') == 255 + assert decode_long(b'\xff\x7f') == 32767 + assert decode_long(b'\x00\xff') == -256 + assert decode_long(b'\x00\x80') == -32768 + assert decode_long(b'\x80') == -128 + assert decode_long(b'\x7f') == 127 + assert decode_long(b'\x55' * 97) == (1 << (97 * 8)) // 3 + assert decode_long(b'\x00\x80', 'big') == 128 + assert decode_long(b'\xff\x7f', 'little', False) == 32767 + assert decode_long(b'\x00\x80', 'little', False) == 32768 + assert decode_long(b'\x00\x80', 'little', True) == -32768 + raises(ValueError, decode_long, '', 'foo') diff --git a/pypy/module/_cffi_backend/__init__.py b/pypy/module/_cffi_backend/__init__.py --- a/pypy/module/_cffi_backend/__init__.py +++ b/pypy/module/_cffi_backend/__init__.py @@ -1,8 +1,9 @@ import sys from pypy.interpreter.mixedmodule import MixedModule -from rpython.rlib import rdynload, clibffi +from rpython.rlib import rdynload, clibffi, entrypoint +from rpython.rtyper.lltypesystem import rffi -VERSION = "1.4.0" +VERSION = "1.5.0" FFI_DEFAULT_ABI = clibffi.FFI_DEFAULT_ABI try: @@ -65,6 +66,10 @@ if has_stdcall: interpleveldefs['FFI_STDCALL'] = 'space.wrap(%d)' % FFI_STDCALL + def startup(self, space): + from pypy.module._cffi_backend import embedding + embedding.glob.space = space + def get_dict_rtld_constants(): found = {} @@ -78,3 +83,11 @@ for _name, _value in get_dict_rtld_constants().items(): Module.interpleveldefs[_name] = 'space.wrap(%d)' % _value + + +# write this 
entrypoint() here, to make sure it is registered early enough + at entrypoint.entrypoint_highlevel('main', [rffi.INT, rffi.VOIDP], + c_name='pypy_init_embedded_cffi_module') +def pypy_init_embedded_cffi_module(version, init_struct): + from pypy.module._cffi_backend import embedding + return embedding.pypy_init_embedded_cffi_module(version, init_struct) diff --git a/pypy/module/_cffi_backend/call_python.py b/pypy/module/_cffi_backend/call_python.py --- a/pypy/module/_cffi_backend/call_python.py +++ b/pypy/module/_cffi_backend/call_python.py @@ -40,10 +40,9 @@ at least 8 bytes in size. """ from pypy.module._cffi_backend.ccallback import reveal_callback + from rpython.rlib import rgil - after = rffi.aroundstate.after - if after: - after() + rgil.acquire() rffi.stackcounter.stacks_counter += 1 llop.gc_stack_bottom(lltype.Void) # marker for trackgcroot.py @@ -71,9 +70,7 @@ cerrno._errno_before(rffi.RFFI_ERR_ALL | rffi.RFFI_ALT_ERRNO) rffi.stackcounter.stacks_counter -= 1 - before = rffi.aroundstate.before - if before: - before() + rgil.release() def get_ll_cffi_call_python(): diff --git a/pypy/module/_cffi_backend/cffi1_module.py b/pypy/module/_cffi_backend/cffi1_module.py --- a/pypy/module/_cffi_backend/cffi1_module.py +++ b/pypy/module/_cffi_backend/cffi1_module.py @@ -2,24 +2,25 @@ from pypy.interpreter.error import oefmt from pypy.interpreter.module import Module +from pypy.module import _cffi_backend from pypy.module._cffi_backend import parse_c_type from pypy.module._cffi_backend.ffi_obj import W_FFIObject from pypy.module._cffi_backend.lib_obj import W_LibObject VERSION_MIN = 0x2601 -VERSION_MAX = 0x26FF +VERSION_MAX = 0x27FF VERSION_EXPORT = 0x0A03 -initfunctype = lltype.Ptr(lltype.FuncType([rffi.VOIDPP], lltype.Void)) +INITFUNCPTR = lltype.Ptr(lltype.FuncType([rffi.VOIDPP], lltype.Void)) def load_cffi1_module(space, name, path, initptr): # This is called from pypy.module.cpyext.api.load_extension_module() from pypy.module._cffi_backend.call_python import 
get_ll_cffi_call_python - initfunc = rffi.cast(initfunctype, initptr) + initfunc = rffi.cast(INITFUNCPTR, initptr) with lltype.scoped_alloc(rffi.VOIDPP.TO, 16, zero=True) as p: p[0] = rffi.cast(rffi.VOIDP, VERSION_EXPORT) p[1] = rffi.cast(rffi.VOIDP, get_ll_cffi_call_python()) @@ -27,8 +28,10 @@ version = rffi.cast(lltype.Signed, p[0]) if not (VERSION_MIN <= version <= VERSION_MAX): raise oefmt(space.w_ImportError, - "cffi extension module '%s' has unknown version %s", - name, hex(version)) + "cffi extension module '%s' uses an unknown version tag %s. " + "This module might need a more recent version of PyPy. " + "The current PyPy provides CFFI %s.", + name, hex(version), _cffi_backend.VERSION) src_ctx = rffi.cast(parse_c_type.PCTX, p[1]) ffi = W_FFIObject(space, src_ctx) @@ -38,7 +41,8 @@ w_name = space.wrap(name) module = Module(space, w_name) - module.setdictvalue(space, '__file__', space.wrap(path)) + if path is not None: + module.setdictvalue(space, '__file__', space.wrap(path)) module.setdictvalue(space, 'ffi', space.wrap(ffi)) module.setdictvalue(space, 'lib', space.wrap(lib)) w_modules_dict = space.sys.get('modules') diff --git a/pypy/module/_cffi_backend/cglob.py b/pypy/module/_cffi_backend/cglob.py --- a/pypy/module/_cffi_backend/cglob.py +++ b/pypy/module/_cffi_backend/cglob.py @@ -3,6 +3,7 @@ from pypy.interpreter.typedef import TypeDef from pypy.module._cffi_backend.cdataobj import W_CData from pypy.module._cffi_backend import newtype +from rpython.rlib import rgil from rpython.rlib.objectmodel import we_are_translated from rpython.rtyper.lltypesystem import lltype, rffi from rpython.translator.tool.cbuild import ExternalCompilationInfo @@ -26,7 +27,9 @@ if not we_are_translated(): FNPTR = rffi.CCallback([], rffi.VOIDP) fetch_addr = rffi.cast(FNPTR, self.fetch_addr) + rgil.release() result = fetch_addr() + rgil.acquire() else: # careful in translated versions: we need to call fetch_addr, # but in a GIL-releasing way. 
The easiest is to invoke a diff --git a/pypy/module/_cffi_backend/ctypefunc.py b/pypy/module/_cffi_backend/ctypefunc.py --- a/pypy/module/_cffi_backend/ctypefunc.py +++ b/pypy/module/_cffi_backend/ctypefunc.py @@ -423,7 +423,9 @@ exchange_offset += rffi.getintfield(self.atypes[i], 'c_size') # store the exchange data size - cif_descr.exchange_size = exchange_offset + # we also align it to the next multiple of 8, in an attempt to + # work around bugs(?) of libffi (see cffi issue #241) + cif_descr.exchange_size = self.align_arg(exchange_offset) def fb_extra_fields(self, cif_descr): cif_descr.abi = self.fabi diff --git a/pypy/module/_cffi_backend/embedding.py b/pypy/module/_cffi_backend/embedding.py new file mode 100644 --- /dev/null +++ b/pypy/module/_cffi_backend/embedding.py @@ -0,0 +1,146 @@ +import os +from rpython.rtyper.lltypesystem import lltype, rffi +from rpython.translator.tool.cbuild import ExternalCompilationInfo + +from pypy.interpreter.error import OperationError, oefmt + +# ____________________________________________________________ + + +EMBED_VERSION_MIN = 0xB011 +EMBED_VERSION_MAX = 0xB0FF + +STDERR = 2 +INITSTRUCTPTR = lltype.Ptr(lltype.Struct('CFFI_INIT', + ('name', rffi.CCHARP), + ('func', rffi.VOIDP), + ('code', rffi.CCHARP))) + +def load_embedded_cffi_module(space, version, init_struct): + from pypy.module._cffi_backend.cffi1_module import load_cffi1_module + declare_c_function() # translation-time hint only: + # declare _cffi_carefully_make_gil() + # + version = rffi.cast(lltype.Signed, version) + if not (EMBED_VERSION_MIN <= version <= EMBED_VERSION_MAX): + raise oefmt(space.w_ImportError, + "cffi embedded module has got unknown version tag %s", + hex(version)) + # + if space.config.objspace.usemodules.thread: + from pypy.module.thread import os_thread + os_thread.setup_threads(space) + # + name = rffi.charp2str(init_struct.name) + load_cffi1_module(space, name, None, init_struct.func) + code = rffi.charp2str(init_struct.code) + compiler = 
space.createcompiler() + pycode = compiler.compile(code, "" % name, 'exec', 0) + w_globals = space.newdict(module=True) + space.setitem_str(w_globals, "__builtins__", space.wrap(space.builtin)) + pycode.exec_code(space, w_globals, w_globals) + + +class Global: + pass +glob = Global() + +def pypy_init_embedded_cffi_module(version, init_struct): + # called from __init__.py + name = "?" + try: + init_struct = rffi.cast(INITSTRUCTPTR, init_struct) + name = rffi.charp2str(init_struct.name) + # + space = glob.space + must_leave = False + try: + must_leave = space.threadlocals.try_enter_thread(space) + load_embedded_cffi_module(space, version, init_struct) + res = 0 + except OperationError, operr: + operr.write_unraisable(space, "initialization of '%s'" % name, + with_traceback=True) + space.appexec([], r"""(): + import sys + sys.stderr.write('pypy version: %s.%s.%s\n' % + sys.pypy_version_info[:3]) + sys.stderr.write('sys.path: %r\n' % (sys.path,)) + """) + res = -1 + if must_leave: + space.threadlocals.leave_thread(space) + except Exception, e: + # oups! last-level attempt to recover. 
+ try: + os.write(STDERR, "From initialization of '") + os.write(STDERR, name) + os.write(STDERR, "':\n") + os.write(STDERR, str(e)) + os.write(STDERR, "\n") + except: + pass + res = -1 + return rffi.cast(rffi.INT, res) + +# ____________________________________________________________ + + +eci = ExternalCompilationInfo(separate_module_sources=[ +r""" +/* XXX Windows missing */ +#include +#include +#include + +RPY_EXPORTED void rpython_startup_code(void); +RPY_EXPORTED int pypy_setup_home(char *, int); + +static unsigned char _cffi_ready = 0; +static const char *volatile _cffi_module_name; + +static void _cffi_init_error(const char *msg, const char *extra) +{ + fprintf(stderr, + "\nPyPy initialization failure when loading module '%s':\n%s%s\n", + _cffi_module_name, msg, extra); +} + +static void _cffi_init(void) +{ + Dl_info info; + char *home; + + rpython_startup_code(); + RPyGilAllocate(); + + if (dladdr(&_cffi_init, &info) == 0) { + _cffi_init_error("dladdr() failed: ", dlerror()); + return; + } + home = realpath(info.dli_fname, NULL); + if (pypy_setup_home(home, 1) != 0) { + _cffi_init_error("pypy_setup_home() failed", ""); + return; + } + _cffi_ready = 1; +} + +RPY_EXPORTED +int pypy_carefully_make_gil(const char *name) +{ + /* For CFFI: this initializes the GIL and loads the home path. + It can be called completely concurrently from unrelated threads. + It assumes that we don't hold the GIL before (if it exists), and we + don't hold it afterwards. 
+ */ + static pthread_once_t once_control = PTHREAD_ONCE_INIT; + + _cffi_module_name = name; /* not really thread-safe, but better than + nothing */ + pthread_once(&once_control, _cffi_init); + return (int)_cffi_ready - 1; +} +"""]) + +declare_c_function = rffi.llexternal_use_eci(eci) diff --git a/pypy/module/_cffi_backend/test/_backend_test_c.py b/pypy/module/_cffi_backend/test/_backend_test_c.py --- a/pypy/module/_cffi_backend/test/_backend_test_c.py +++ b/pypy/module/_cffi_backend/test/_backend_test_c.py @@ -1,7 +1,7 @@ # ____________________________________________________________ import sys -assert __version__ == "1.4.0", ("This test_c.py file is for testing a version" +assert __version__ == "1.5.0", ("This test_c.py file is for testing a version" " of cffi that differs from the one that we" " get from 'import _cffi_backend'") if sys.version_info < (3,): From pypy.commits at gmail.com Tue Feb 2 14:38:50 2016 From: pypy.commits at gmail.com (rlamy) Date: Tue, 02 Feb 2016 11:38:50 -0800 (PST) Subject: [pypy-commit] pypy default: Make apptests more 3-friendly Message-ID: <56b105ca.41df1c0a.a0854.5414@mx.google.com> Author: Ronan Lamy Branch: Changeset: r82049:babeead98057 Date: 2016-02-02 17:10 +0000 http://bitbucket.org/pypy/pypy/changeset/babeead98057/ Log: Make apptests more 3-friendly diff --git a/pypy/module/_socket/test/test_sock_app.py b/pypy/module/_socket/test/test_sock_app.py --- a/pypy/module/_socket/test/test_sock_app.py +++ b/pypy/module/_socket/test/test_sock_app.py @@ -102,7 +102,7 @@ fd = space.appexec([w_socket, space.wrap(orig_fd.fileno()), space.wrap(socket.AF_INET), space.wrap(socket.SOCK_STREAM), space.wrap(0)], - """(_socket, fd, family, type, proto): + """(_socket, fd, family, type, proto): return _socket.fromfd(fd, family, type, proto)""") assert space.unwrap(space.call_method(fd, 'fileno')) @@ -326,7 +326,7 @@ def test_ntoa_exception(self): import _socket - raises(_socket.error, _socket.inet_ntoa, "ab") + raises(_socket.error, 
_socket.inet_ntoa, b"ab") def test_aton_exceptions(self): import _socket @@ -418,7 +418,7 @@ # it if there is no connection. try: s.connect(("www.python.org", 80)) - except _socket.gaierror, ex: + except _socket.gaierror as ex: skip("GAIError - probably no connection: %s" % str(ex.args)) name = s.getpeername() # Will raise socket.error if not connected assert name[1] == 80 @@ -465,7 +465,7 @@ sizes = {socket.htonl: 32, socket.ntohl: 32, socket.htons: 16, socket.ntohs: 16} for func, size in sizes.items(): - mask = (1L< Author: Richard Plangger Branch: s390x-backend Changeset: r82050:067d1f873658 Date: 2016-02-03 08:24 +0100 http://bitbucket.org/pypy/pypy/changeset/067d1f873658/ Log: replaced tab with 8 spaces diff --git a/rpython/memory/gc/test/test_env.py b/rpython/memory/gc/test/test_env.py --- a/rpython/memory/gc/test/test_env.py +++ b/rpython/memory/gc/test/test_env.py @@ -168,7 +168,7 @@ vendor_id : IBM/S390 # processors : 2 bogomips per cpu: 20325.00 -features : esan3 zarch stfle msa ldisp eimm dfp etf3eh highgprs +features : esan3 zarch stfle msa ldisp eimm dfp etf3eh highgprs cache0 : level=1 type=Data scope=Private size=128K line_size=256 associativity=8 cache1 : level=1 type=Instruction scope=Private size=96K line_size=256 associativity=6 cache2 : level=2 type=Data scope=Private size=2048K line_size=256 associativity=8 From pypy.commits at gmail.com Wed Feb 3 02:35:55 2016 From: pypy.commits at gmail.com (plan_rich) Date: Tue, 02 Feb 2016 23:35:55 -0800 (PST) Subject: [pypy-commit] pypy s390x-backend: removed checkpoint/serialization points, zarch is sequentially consistent. some small test fixes Message-ID: <56b1addb.46bb1c0a.fb9dd.fffff2e8@mx.google.com> Author: Richard Plangger Branch: s390x-backend Changeset: r82051:01a7007c80c5 Date: 2016-02-03 08:35 +0100 http://bitbucket.org/pypy/pypy/changeset/01a7007c80c5/ Log: removed checkpoint/serialization points, zarch is sequentially consistent. 
some small test fixes diff --git a/pypy/module/_rawffi/test/test_struct.py b/pypy/module/_rawffi/test/test_struct.py --- a/pypy/module/_rawffi/test/test_struct.py +++ b/pypy/module/_rawffi/test/test_struct.py @@ -1,4 +1,4 @@ - +import sys from pypy.module._rawffi.structure import size_alignment_pos from pypy.module._rawffi.interp_rawffi import TYPEMAP, letter2tp @@ -63,4 +63,7 @@ for (name, t, size) in fields]) assert size == 8 assert pos == [0, 0, 0] - assert bitsizes == [0x10000, 0x3e0001, 0x1003f] + if sys.byteorder == 'little': + assert bitsizes == [0x10000, 0x3e0001, 0x1003f] + else: + assert bitsizes == [0x1003f, 0x3e0001, 0x10000] diff --git a/rpython/jit/backend/zarch/callbuilder.py b/rpython/jit/backend/zarch/callbuilder.py --- a/rpython/jit/backend/zarch/callbuilder.py +++ b/rpython/jit/backend/zarch/callbuilder.py @@ -200,7 +200,7 @@ # change 'rpy_fastgil' to 0 (it should be non-zero right now) self.mc.load_imm(RFASTGILPTR, fastgil) self.mc.XGR(r.SCRATCH, r.SCRATCH) - self.mc.sync() + # zarch is sequentially consistent self.mc.STG(r.SCRATCH, l.addr(0, RFASTGILPTR)) @@ -221,8 +221,7 @@ self.mc.BRC(c.NE, l.imm(retry_label - self.mc.currpos())) # retry if failed # CSG performs a serialization - # but be sure (testing) - self.mc.sync() + # zarch is sequential consistent! self.mc.CGHI(r.r13, l.imm0) b1_location = self.mc.currpos() diff --git a/rpython/jit/backend/zarch/test/test_runner.py b/rpython/jit/backend/zarch/test/test_runner.py --- a/rpython/jit/backend/zarch/test/test_runner.py +++ b/rpython/jit/backend/zarch/test/test_runner.py @@ -27,4 +27,4 @@ add_loop_instructions = "lg; lgr; larl; agr; cgfi; je; j;$" # realloc frame takes the most space (from just after larl, to lay) bridge_loop_instructions = "larl; lg; cgfi; jhe; lghi; " \ - "lgfi;( iihf;)? lgfi;( iihf;)? basr; lg; br;$" + "iilf;( iihf;)? iilf;( iihf;)? 
basr; lg; br;$" From pypy.commits at gmail.com Wed Feb 3 04:53:05 2016 From: pypy.commits at gmail.com (plan_rich) Date: Wed, 03 Feb 2016 01:53:05 -0800 (PST) Subject: [pypy-commit] pypy s390x-backend: modified test to see if an empty print differs from a print with newline Message-ID: <56b1ce01.29cec20a.6265d.ffffff2d@mx.google.com> Author: Richard Plangger Branch: s390x-backend Changeset: r82052:18bd2d236f85 Date: 2016-02-03 10:52 +0100 http://bitbucket.org/pypy/pypy/changeset/18bd2d236f85/ Log: modified test to see if an empty print differs from a print with newline diff --git a/pypy/module/select/test/test_select.py b/pypy/module/select/test/test_select.py --- a/pypy/module/select/test/test_select.py +++ b/pypy/module/select/test/test_select.py @@ -287,7 +287,8 @@ t = thread.start_new_thread(pollster.poll, ()) try: time.sleep(0.3) - for i in range(5): print '', # to release GIL untranslated + # TODO restore print '', if this is not the reason + for i in range(5): print 'release gil select' # to release GIL untranslated # trigger ufds array reallocation for fd in rfds: pollster.unregister(fd) From pypy.commits at gmail.com Wed Feb 3 05:03:01 2016 From: pypy.commits at gmail.com (plan_rich) Date: Wed, 03 Feb 2016 02:03:01 -0800 (PST) Subject: [pypy-commit] pypy s390x-backend: disabled blocking tests Message-ID: <56b1d055.520e1c0a.ee5ff.7338@mx.google.com> Author: Richard Plangger Branch: s390x-backend Changeset: r82053:074033f70453 Date: 2016-02-03 11:01 +0100 http://bitbucket.org/pypy/pypy/changeset/074033f70453/ Log: disabled blocking tests diff --git a/pypy/module/__pypy__/test/test_signal.py b/pypy/module/__pypy__/test/test_signal.py --- a/pypy/module/__pypy__/test/test_signal.py +++ b/pypy/module/__pypy__/test/test_signal.py @@ -71,6 +71,8 @@ def test_thread_fork_signals(self): import __pypy__ import os, thread, signal + if os.uname()[4] == 's390x': + skip("skip for now s390x") if not hasattr(os, 'fork'): skip("No fork on this platform") diff --git 
a/pypy/module/select/test/test_select.py b/pypy/module/select/test/test_select.py --- a/pypy/module/select/test/test_select.py +++ b/pypy/module/select/test/test_select.py @@ -329,6 +329,10 @@ "usemodules": ["select", "_socket", "time", "thread"], } + import os + if os.uname()[4] == 's390x': + py.test.skip("build bot for s390x cannot open sockets") + def w_make_server(self): import socket if hasattr(self, 'sock'): From pypy.commits at gmail.com Wed Feb 3 07:56:21 2016 From: pypy.commits at gmail.com (plan_rich) Date: Wed, 03 Feb 2016 04:56:21 -0800 (PST) Subject: [pypy-commit] pypy s390x-backend: disabled more in test signal (bbot seems to still timeout...), simplifications in the malloc assembly operations Message-ID: <56b1f8f5.84c9c20a.3e383.485e@mx.google.com> Author: Richard Plangger Branch: s390x-backend Changeset: r82054:b2ed3adc63e7 Date: 2016-02-03 13:55 +0100 http://bitbucket.org/pypy/pypy/changeset/b2ed3adc63e7/ Log: disabled more in test signal (bbot seems to still timeout...), simplifications in the malloc assembly operations diff --git a/pypy/module/__pypy__/test/test_signal.py b/pypy/module/__pypy__/test/test_signal.py --- a/pypy/module/__pypy__/test/test_signal.py +++ b/pypy/module/__pypy__/test/test_signal.py @@ -2,6 +2,11 @@ from pypy.module.thread.test.support import GenericTestThread +import os +if os.uname()[4] == 's390x': + # TMP!!! 
+ import py + py.test.skip("skip for now s390x") class AppTestMinimal: spaceconfig = dict(usemodules=['__pypy__']) @@ -71,8 +76,6 @@ def test_thread_fork_signals(self): import __pypy__ import os, thread, signal - if os.uname()[4] == 's390x': - skip("skip for now s390x") if not hasattr(os, 'fork'): skip("No fork on this platform") diff --git a/rpython/jit/backend/zarch/assembler.py b/rpython/jit/backend/zarch/assembler.py --- a/rpython/jit/backend/zarch/assembler.py +++ b/rpython/jit/backend/zarch/assembler.py @@ -1258,16 +1258,15 @@ mc.load_imm(r.r1, nursery_free_adr) mc.load(r.RES, r.r1, 0) # load nursery_free + mc.load(r.r14, r.r1, diff) # load nursery_top - mc.LGR(r.RSZ, r.RES) if check_imm_value(size): mc.AGHI(r.RSZ, l.imm(size)) else: - mc.load_imm(r.SCRATCH2, size) - mc.AGR(r.RSZ, r.SCRATCH2) + mc.load_imm(r.RSZ, size) + mc.AGRK(r.RSZ, r.RES, r.RSZ) - mc.load(r.SCRATCH2, r.r1, diff) # load nursery_top - mc.cmp_op(r.RSZ, r.SCRATCH2, signed=False) + mc.cmp_op(r.RSZ, r.r14, signed=False) fast_jmp_pos = mc.currpos() mc.reserve_cond_jump(short=True) # conditional jump, patched later @@ -1276,7 +1275,6 @@ # new value of nursery_free_adr in RSZ and the adr of the new object # in RES. 
self.load_gcmap(mc, r.r1, gcmap) - # no frame needed, r14 is saved on the jitframe mc.branch_absolute(self.malloc_slowpath) # here r1 holds nursery_free_addr @@ -1301,14 +1299,11 @@ sizeloc = r.RSZ mc.load(r.RES, r.r1, 0) # load nursery_free + mc.load(r.r0, r.r1, diff) # load nursery_top - mc.LGR(r.SCRATCH2, r.RES) - mc.AGR(r.SCRATCH2, sizeloc) # sizeloc can be RSZ - mc.LGR(r.RSZ, r.SCRATCH2) + mc.AGRK(RSZ, r.RES, sizeloc) - mc.load(r.SCRATCH2, r.r1, diff) # load nursery_top - - mc.cmp_op(r.RSZ, r.SCRATCH2, signed=False) + mc.cmp_op(r.RSZ, r.r0, signed=False) fast_jmp_pos = mc.currpos() mc.reserve_cond_jump(short=True) # conditional jump, patched later @@ -1354,6 +1349,7 @@ # item size mc.load(r.RES, r.r1, 0) # load nursery_free + mc.load(r.SCRATCH2, r.r1, diff) # load nursery_top assert arraydescr.basesize >= self.gc_minimal_size_in_nursery constsize = arraydescr.basesize + self.gc_size_of_header @@ -1363,14 +1359,12 @@ mc.AGHIK(r.RSZ, lengthloc, l.imm(constsize)) if force_realignment: # "& ~(WORD-1)" - mc.LGHI(r.SCRATCH2, l.imm(~(WORD-1))) - mc.NGR(r.RSZ, r.SCRATCH2) + mc.RISBGN(r.RSZ, r.RSZ, loc.imm(0), loc.imm(0x80 | 60), loc.imm(0)) mc.AGRK(r.RSZ, r.RES, r.RSZ) # now RSZ contains the total size in bytes, rounded up to a multiple # of WORD, plus nursery_free_adr - mc.load(r.SCRATCH2, r.r1, diff) # load nursery_top mc.cmp_op(r.RSZ, r.SCRATCH2, signed=False) jmp_adr1 = mc.currpos() diff --git a/rpython/jit/backend/zarch/instruction_builder.py b/rpython/jit/backend/zarch/instruction_builder.py --- a/rpython/jit/backend/zarch/instruction_builder.py +++ b/rpython/jit/backend/zarch/instruction_builder.py @@ -479,6 +479,7 @@ bt = argtypes[1] if len(argtypes) >= 2 else '-' ct = argtypes[2] if len(argtypes) >= 3 else '-' dt = argtypes[3] if len(argtypes) >= 4 else '-' + et = argtypes[4] if len(argtypes) >= 5 else '-' def function0(self): return func(self) def function1(self, a): @@ -511,6 +512,13 @@ f = unpack_arg(b, bt) g = unpack_arg(c, ct) return func(self, e, f, g, 
0) + def function5(self, a, b, c, d, e): + f = unpack_arg(a, at) + g = unpack_arg(b, bt) + h = unpack_arg(c, ct) + i = unpack_arg(d, dt) + j = unpack_arg(e, et) + return func(self, f, g, h, i, j) if len(argtypes) == 0: function = function0 elif len(argtypes) == 1: @@ -530,6 +538,8 @@ if argtypes[3] == '-': # e.g. FIEBR or CGEBR ignore the last element function = function4_last_default + elif len(argtypes) == 5: + function = function5 else: assert 0, "implement function for argtypes %s" % (argtypes,) function.__name__ = mnemonic diff --git a/rpython/jit/backend/zarch/instructions.py b/rpython/jit/backend/zarch/instructions.py --- a/rpython/jit/backend/zarch/instructions.py +++ b/rpython/jit/backend/zarch/instructions.py @@ -47,7 +47,8 @@ # does not seem to be installed # cpu fails at this instruction, and gnu assembler # does not recognize mnemonic - # 'RISBGN': ('rie_f', ['\xEC','\x59']), + 'RISBG': ('rie_f', ['\xEC','\x55']), + 'RISBGN': ('rie_f', ['\xEC','\x59']), # invert & negative & absolute 'LPGR': ('rre', ['\xB9','\x00']), diff --git a/rpython/jit/backend/zarch/test/test_assembler.py b/rpython/jit/backend/zarch/test/test_assembler.py --- a/rpython/jit/backend/zarch/test/test_assembler.py +++ b/rpython/jit/backend/zarch/test/test_assembler.py @@ -157,6 +157,42 @@ print(s64) assert f64[18] == '1' # long displacement facility + def test_load_byte_zero_extend(self): + adr = self.a.datablockwrapper.malloc_aligned(16, 16) + data = rffi.cast(rffi.CArrayPtr(rffi.ULONG), adr) + data[0] = rffi.cast(rffi.ULONG,0xffffFFFFffffFF02) + self.a.mc.load_imm(r.r3, adr+7) + self.a.mc.LLGC(r.r2, loc.addr(0,r.r3)) + self.a.mc.BCR(con.ANY, r.r14) + assert run_asm(self.a) == 2 + + def test_load_byte_and_imm(self): + adr = self.a.datablockwrapper.malloc_aligned(16, 16) + data = rffi.cast(rffi.CArrayPtr(rffi.ULONG), adr) + data[0] = rffi.cast(rffi.ULONG,0xffffFFFFffff0001) + self.a.mc.load_imm(r.r3, adr) + self.a.mc.LG(r.r2, loc.addr(0,r.r3)) + self.a.mc.LLGC(r.r2, loc.addr(7,r.r3)) 
+ self.a.mc.NILL(r.r2, loc.imm(0x0)) + self.a.mc.BCR(con.ANY, r.r14) + assert run_asm(self.a) == 0 + + @py.test.mark.parametrize('p,v', [(0,0),(8,8),(7,0),(4,0),(1,0),(9,8)]) + def test_align(self, p, v): + WORD = 8 + self.a.mc.load_imm(r.r2, p) + self.a.mc.LGHI(r.r0, loc.imm(~(WORD-1))) + self.a.mc.NGR(r.r2, r.r0) + self.a.mc.BCR(con.ANY, r.r14) + assert run_asm(self.a) == v + + @py.test.mark.parametrize('p', [2**32,2**32+1,2**63-1,2**63-2,0,1,2,3,4,5,6,7,8,10001]) + def test_align_withroll(self, p): + self.a.mc.load_imm(r.r2, p & 0xffffFFFFffffFFFF) + self.a.mc.RISBGN(r.r2, r.r2, loc.imm(0), loc.imm(0x80 | 60), loc.imm(0)) + self.a.mc.BCR(con.ANY, r.r14) + assert run_asm(self.a) == rffi.cast(rffi.ULONG,p) & ~(7) + def test_load_small_int_to_reg(self): self.a.mc.LGHI(r.r2, loc.imm(123)) self.a.jmpto(r.r14) From pypy.commits at gmail.com Wed Feb 3 09:01:21 2016 From: pypy.commits at gmail.com (plan_rich) Date: Wed, 03 Feb 2016 06:01:21 -0800 (PST) Subject: [pypy-commit] pypy s390x-backend: added zEC12 to assembler command for test Message-ID: <56b20831.2968c20a.215b1.68eb@mx.google.com> Author: Richard Plangger Branch: s390x-backend Changeset: r82055:a32a0a3922a4 Date: 2016-02-03 15:00 +0100 http://bitbucket.org/pypy/pypy/changeset/a32a0a3922a4/ Log: added zEC12 to assembler command for test diff --git a/rpython/jit/backend/zarch/assembler.py b/rpython/jit/backend/zarch/assembler.py --- a/rpython/jit/backend/zarch/assembler.py +++ b/rpython/jit/backend/zarch/assembler.py @@ -1359,7 +1359,7 @@ mc.AGHIK(r.RSZ, lengthloc, l.imm(constsize)) if force_realignment: # "& ~(WORD-1)" - mc.RISBGN(r.RSZ, r.RSZ, loc.imm(0), loc.imm(0x80 | 60), loc.imm(0)) + mc.RISBGN(r.RSZ, r.RSZ, l.imm(0), l.imm(0x80 | 60), l.imm(0)) mc.AGRK(r.RSZ, r.RES, r.RSZ) # now RSZ contains the total size in bytes, rounded up to a multiple diff --git a/rpython/jit/backend/zarch/test/test_auto_encoding.py b/rpython/jit/backend/zarch/test/test_auto_encoding.py --- 
a/rpython/jit/backend/zarch/test/test_auto_encoding.py +++ b/rpython/jit/backend/zarch/test/test_auto_encoding.py @@ -205,7 +205,7 @@ g.write('%s\n' % op) oplist.append(op) g.write('\t.string "%s"\n' % END_TAG) - proc = subprocess.Popen(['as', '-m64', '-mzarch', + proc = subprocess.Popen(['as', '-m64', '-mzarch', '-march=zEC12', inputname, '-o', filename], stdout=subprocess.PIPE, stderr=subprocess.PIPE) diff --git a/rpython/translator/platform/linux.py b/rpython/translator/platform/linux.py --- a/rpython/translator/platform/linux.py +++ b/rpython/translator/platform/linux.py @@ -23,7 +23,7 @@ if platform.machine() == 's390x': # force the right target arch for s390x - cflags = ('-march=zEC12',) + cflags + cflags = ('-march=zEC12','-m64','-mzarch') + cflags def _args_for_shared(self, args): return ['-shared'] + args From pypy.commits at gmail.com Wed Feb 3 09:09:28 2016 From: pypy.commits at gmail.com (plan_rich) Date: Wed, 03 Feb 2016 06:09:28 -0800 (PST) Subject: [pypy-commit] pypy s390x-backend: do not execute auto assembler on 5 args + (takes far too long) Message-ID: <56b20a18.8ab71c0a.358d7.ffffc323@mx.google.com> Author: Richard Plangger Branch: s390x-backend Changeset: r82056:e9fe4f2225ca Date: 2016-02-03 15:08 +0100 http://bitbucket.org/pypy/pypy/changeset/e9fe4f2225ca/ Log: do not execute auto assembler on 5 args + (takes far too long) diff --git a/rpython/jit/backend/zarch/test/test_auto_encoding.py b/rpython/jit/backend/zarch/test/test_auto_encoding.py --- a/rpython/jit/backend/zarch/test/test_auto_encoding.py +++ b/rpython/jit/backend/zarch/test/test_auto_encoding.py @@ -237,6 +237,8 @@ return mode def make_all_tests(self, methname, modes, args=[]): + if methname.startswith("RIS"): + return [] arg_types = self.get_func_arg_types(methname) combinations = [] for i,m in enumerate(arg_types): From pypy.commits at gmail.com Wed Feb 3 09:20:41 2016 From: pypy.commits at gmail.com (plan_rich) Date: Wed, 03 Feb 2016 06:20:41 -0800 (PST) Subject: [pypy-commit] 
pypy s390x-backend: big endian issue in cppyy test case Message-ID: <56b20cb9.c8921c0a.27486.ffff9e3f@mx.google.com> Author: Richard Plangger Branch: s390x-backend Changeset: r82057:f49815ceed3a Date: 2016-02-03 15:19 +0100 http://bitbucket.org/pypy/pypy/changeset/f49815ceed3a/ Log: big endian issue in cppyy test case diff --git a/pypy/module/cppyy/src/dummy_backend.cxx b/pypy/module/cppyy/src/dummy_backend.cxx --- a/pypy/module/cppyy/src/dummy_backend.cxx +++ b/pypy/module/cppyy/src/dummy_backend.cxx @@ -390,7 +390,7 @@ ((dummy::cppyy_test_data*)self)->destroy_arrays(); } else if (idx == s_methods["cppyy_test_data::set_bool"]) { assert(self && nargs == 1); - ((dummy::cppyy_test_data*)self)->set_bool((bool)((CPPYY_G__value*)args)[0].obj.in); + ((dummy::cppyy_test_data*)self)->set_bool((bool)((CPPYY_G__value*)args)[0].obj.i); } else if (idx == s_methods["cppyy_test_data::set_char"]) { assert(self && nargs == 1); ((dummy::cppyy_test_data*)self)->set_char(((CPPYY_G__value*)args)[0].obj.ch); From pypy.commits at gmail.com Wed Feb 3 10:17:27 2016 From: pypy.commits at gmail.com (plan_rich) Date: Wed, 03 Feb 2016 07:17:27 -0800 (PST) Subject: [pypy-commit] pypy s390x-backend: fixed translation issue. Message-ID: <56b21a07.88c8c20a.e8ecd.ffff8361@mx.google.com> Author: Richard Plangger Branch: s390x-backend Changeset: r82058:9fabf4399c18 Date: 2016-02-03 16:13 +0100 http://bitbucket.org/pypy/pypy/changeset/9fabf4399c18/ Log: fixed translation issue. 
diff --git a/rpython/jit/backend/zarch/assembler.py b/rpython/jit/backend/zarch/assembler.py --- a/rpython/jit/backend/zarch/assembler.py +++ b/rpython/jit/backend/zarch/assembler.py @@ -1301,7 +1301,7 @@ mc.load(r.RES, r.r1, 0) # load nursery_free mc.load(r.r0, r.r1, diff) # load nursery_top - mc.AGRK(RSZ, r.RES, sizeloc) + mc.AGRK(r.RSZ, r.RES, sizeloc) mc.cmp_op(r.RSZ, r.r0, signed=False) From pypy.commits at gmail.com Wed Feb 3 11:44:36 2016 From: pypy.commits at gmail.com (plan_rich) Date: Wed, 03 Feb 2016 08:44:36 -0800 (PST) Subject: [pypy-commit] pypy s390x-backend: critical bugfix in gc writebarrier fastpath, simplifications and a test Message-ID: <56b22e74.6507c20a.91457.ffffadcb@mx.google.com> Author: Richard Plangger Branch: s390x-backend Changeset: r82059:822ac80ddd23 Date: 2016-02-03 17:43 +0100 http://bitbucket.org/pypy/pypy/changeset/822ac80ddd23/ Log: critical bugfix in gc writebarrier fastpath, simplifications and a test diff --git a/rpython/jit/backend/zarch/assembler.py b/rpython/jit/backend/zarch/assembler.py --- a/rpython/jit/backend/zarch/assembler.py +++ b/rpython/jit/backend/zarch/assembler.py @@ -205,7 +205,7 @@ saved_regs = None saved_fp_regs = None else: - # push all volatile registers, push RCS1, and sometimes push RCS2 + # push all volatile registers, sometimes push RCS2 if withcards: saved_regs = r.VOLATILES + [RCS2] else: @@ -223,8 +223,9 @@ # since the call to write barrier can't collect # (and this is assumed a bit left and right here, like lack # of _reload_frame_if_necessary) - # This trashes r0 and r2, which is fine in this case + # This trashes r0 and r1, which is fine in this case assert argument_loc is not r.r0 + assert argument_loc is not r.r1 self._store_and_reset_exception(mc, RCS2, RCS3) if withcards: diff --git a/rpython/jit/backend/zarch/instructions.py b/rpython/jit/backend/zarch/instructions.py --- a/rpython/jit/backend/zarch/instructions.py +++ b/rpython/jit/backend/zarch/instructions.py @@ -100,6 +100,7 @@ # OR 
operations 'OGR': ('rre', ['\xB9','\x81']), + 'OGRK': ('rrf_a', ['\xB9','\xE6']), 'OG': ('rxy', ['\xE3','\x81']), # or one byte and store it back at the op2 position 'OI': ('si', ['\x96']), diff --git a/rpython/jit/backend/zarch/opassembler.py b/rpython/jit/backend/zarch/opassembler.py --- a/rpython/jit/backend/zarch/opassembler.py +++ b/rpython/jit/backend/zarch/opassembler.py @@ -535,8 +535,7 @@ # So here, we can simply write again a beq, which will be # taken if GCFLAG_CARDS_SET is still not set. jns_location = mc.get_relative_pos() - mc.trap() - mc.write('\x00'*4) + mc.reserve_cond_jump() # # patch the 'NE' above currpos = mc.currpos() @@ -560,25 +559,23 @@ # compute in SCRATCH the index of the bit inside the byte: # (index >> card_page_shift) & 7 - # not supported on the development s390x :(, extension is not installed - # 0x80 sets zero flag. will store 0 into all selected bits - # mc.RISBGN(r.SCRATCH, loc_index, l.imm(3), l.imm(0x80 | 63), l.imm(61)) - mc.SRAG(r.SCRATCH, loc_index, l.addr(n)) - mc.NILL(r.SCRATCH, l.imm(0x7)) + # 0x80 sets zero flag. 
will store 0 into all not selected bits + mc.RISBGN(r.SCRATCH, loc_index, l.imm(61), l.imm(0x80 | 63), l.imm(64-n)) # invert the bits of tmp_loc - mc.XIHF(tmp_loc, l.imm(0xffffFFFF)) - mc.XILF(tmp_loc, l.imm(0xffffFFFF)) + mc.LCGR(tmp_loc, tmp_loc) + #mc.XIHF(tmp_loc, l.imm(0xffffFFFF)) + #mc.XILF(tmp_loc, l.imm(0xffffFFFF)) - # set SCRATCH to 1 << r2 + # set SCRATCH to 1 << r1 mc.LGHI(r.SCRATCH2, l.imm(1)) - mc.SLAG(r.SCRATCH2, r.SCRATCH2, l.addr(0,r.SCRATCH)) + mc.SLAG(r.SCRATCH, r.SCRATCH2, l.addr(0,r.SCRATCH)) # set this bit inside the byte of interest addr = l.addr(0, loc_base, tmp_loc) mc.LLGC(r.SCRATCH, addr) - mc.OGR(r.SCRATCH, r.SCRATCH2) - mc.STCY(r.SCRATCH, addr) + mc.OGRK(r.SCRATCH, r.SCRATCH, r.SCRATCH2) + mc.STC(r.SCRATCH, addr) # done else: byte_index = loc_index.value >> descr.jit_wb_card_page_shift @@ -589,7 +586,7 @@ addr = l.addr(byte_ofs, loc_base) mc.LLGC(r.SCRATCH, addr) mc.OILL(r.SCRATCH, l.imm(byte_val)) - mc.STCY(r.SCRATCH, addr) + mc.STC(r.SCRATCH, addr) # # patch the beq just above currpos = mc.currpos() diff --git a/rpython/jit/backend/zarch/test/test_assembler.py b/rpython/jit/backend/zarch/test/test_assembler.py --- a/rpython/jit/backend/zarch/test/test_assembler.py +++ b/rpython/jit/backend/zarch/test/test_assembler.py @@ -193,6 +193,16 @@ self.a.mc.BCR(con.ANY, r.r14) assert run_asm(self.a) == rffi.cast(rffi.ULONG,p) & ~(7) + def test_nill(self): + self.a.mc.load_imm(r.r2, 1) + self.a.mc.load_imm(r.r3, 0x010001) + self.a.mc.NILL(r.r3, loc.imm(0xFFFF)) + self.a.mc.BCR(con.EQ, r.r14) # should not branch + self.a.mc.load_imm(r.r2, 0) # should return here + self.a.mc.BCR(con.ANY, r.r14) + assert run_asm(self.a) == 0 + + def test_load_small_int_to_reg(self): self.a.mc.LGHI(r.r2, loc.imm(123)) self.a.jmpto(r.r14) From pypy.commits at gmail.com Wed Feb 3 11:58:00 2016 From: pypy.commits at gmail.com (rlamy) Date: Wed, 03 Feb 2016 08:58:00 -0800 (PST) Subject: [pypy-commit] pypy default: Kill randomly vendored obsolete version of pytest_cov 
Message-ID: <56b23198.6953c20a.c061.ffffac85@mx.google.com> Author: Ronan Lamy Branch: Changeset: r82060:643912e63967 Date: 2016-02-03 16:56 +0000 http://bitbucket.org/pypy/pypy/changeset/643912e63967/ Log: Kill randomly vendored obsolete version of pytest_cov diff --git a/pypy/test_all.py b/pypy/test_all.py --- a/pypy/test_all.py +++ b/pypy/test_all.py @@ -26,11 +26,10 @@ #Add toplevel repository dir to sys.path sys.path.insert(0,os.path.dirname(os.path.dirname(os.path.realpath(__file__)))) import pytest - import pytest_cov if sys.platform == 'win32': #Try to avoid opeing a dialog box if one of the tests causes a system error # We do this in runner.py, but buildbots run twisted which ruins inheritance - # in windows subprocesses. + # in windows subprocesses. import ctypes winapi = ctypes.windll.kernel32 SetErrorMode = winapi.SetErrorMode @@ -44,4 +43,4 @@ old_mode = SetErrorMode(flags) SetErrorMode(old_mode | flags) - sys.exit(pytest.main(plugins=[pytest_cov])) + sys.exit(pytest.main()) diff --git a/pytest_cov.py b/pytest_cov.py deleted file mode 100644 --- a/pytest_cov.py +++ /dev/null @@ -1,353 +0,0 @@ -"""produce code coverage reports using the 'coverage' package, including support for distributed testing. - -This plugin produces coverage reports. It supports centralised testing and distributed testing in -both load and each modes. It also supports coverage of subprocesses. - -All features offered by the coverage package should be available, either through pytest-cov or -through coverage's config file. - - -Installation ------------- - -The `pytest-cov`_ package may be installed with pip or easy_install:: - - pip install pytest-cov - easy_install pytest-cov - -.. _`pytest-cov`: http://pypi.python.org/pypi/pytest-cov/ - - -Uninstallation --------------- - -Uninstalling packages is supported by pip:: - - pip uninstall pytest-cov - -However easy_install does not provide an uninstall facility. - -.. 
IMPORTANT:: - - Ensure that you manually delete the init_cov_core.pth file in your site-packages directory. - - This file starts coverage collection of subprocesses if appropriate during site initialisation - at python startup. - - -Usage ------ - -Centralised Testing -~~~~~~~~~~~~~~~~~~~ - -Centralised testing will report on the combined coverage of the main process and all of it's -subprocesses. - -Running centralised testing:: - - py.test --cov myproj tests/ - -Shows a terminal report:: - - -------------------- coverage: platform linux2, python 2.6.4-final-0 --------------------- - Name Stmts Miss Cover - ---------------------------------------- - myproj/__init__ 2 0 100% - myproj/myproj 257 13 94% - myproj/feature4286 94 7 92% - ---------------------------------------- - TOTAL 353 20 94% - - -Distributed Testing: Load -~~~~~~~~~~~~~~~~~~~~~~~~~ - -Distributed testing with dist mode set to load will report on the combined coverage of all slaves. -The slaves may be spread out over any number of hosts and each slave may be located anywhere on the -file system. Each slave will have it's subprocesses measured. 
- -Running distributed testing with dist mode set to load:: - - py.test --cov myproj -n 2 tests/ - -Shows a terminal report:: - - -------------------- coverage: platform linux2, python 2.6.4-final-0 --------------------- - Name Stmts Miss Cover - ---------------------------------------- - myproj/__init__ 2 0 100% - myproj/myproj 257 13 94% - myproj/feature4286 94 7 92% - ---------------------------------------- - TOTAL 353 20 94% - - -Again but spread over different hosts and different directories:: - - py.test --cov myproj --dist load - --tx ssh=memedough at host1//chdir=testenv1 - --tx ssh=memedough at host2//chdir=/tmp/testenv2//python=/tmp/env1/bin/python - --rsyncdir myproj --rsyncdir tests --rsync examples - tests/ - -Shows a terminal report:: - - -------------------- coverage: platform linux2, python 2.6.4-final-0 --------------------- - Name Stmts Miss Cover - ---------------------------------------- - myproj/__init__ 2 0 100% - myproj/myproj 257 13 94% - myproj/feature4286 94 7 92% - ---------------------------------------- - TOTAL 353 20 94% - - -Distributed Testing: Each -~~~~~~~~~~~~~~~~~~~~~~~~~ - -Distributed testing with dist mode set to each will report on the combined coverage of all slaves. -Since each slave is running all tests this allows generating a combined coverage report for multiple -environments. 
- -Running distributed testing with dist mode set to each:: - - py.test --cov myproj --dist each - --tx popen//chdir=/tmp/testenv3//python=/usr/local/python27/bin/python - --tx ssh=memedough at host2//chdir=/tmp/testenv4//python=/tmp/env2/bin/python - --rsyncdir myproj --rsyncdir tests --rsync examples - tests/ - -Shows a terminal report:: - - ---------------------------------------- coverage ---------------------------------------- - platform linux2, python 2.6.5-final-0 - platform linux2, python 2.7.0-final-0 - Name Stmts Miss Cover - ---------------------------------------- - myproj/__init__ 2 0 100% - myproj/myproj 257 13 94% - myproj/feature4286 94 7 92% - ---------------------------------------- - TOTAL 353 20 94% - - -Reporting ---------- - -It is possible to generate any combination of the reports for a single test run. - -The available reports are terminal (with or without missing line numbers shown), HTML, XML and -annotated source code. - -The terminal report without line numbers (default):: - - py.test --cov-report term --cov myproj tests/ - - -------------------- coverage: platform linux2, python 2.6.4-final-0 --------------------- - Name Stmts Miss Cover - ---------------------------------------- - myproj/__init__ 2 0 100% - myproj/myproj 257 13 94% - myproj/feature4286 94 7 92% - ---------------------------------------- - TOTAL 353 20 94% - - -The terminal report with line numbers:: - - py.test --cov-report term-missing --cov myproj tests/ - - -------------------- coverage: platform linux2, python 2.6.4-final-0 --------------------- - Name Stmts Miss Cover Missing - -------------------------------------------------- - myproj/__init__ 2 0 100% - myproj/myproj 257 13 94% 24-26, 99, 149, 233-236, 297-298, 369-370 - myproj/feature4286 94 7 92% 183-188, 197 - -------------------------------------------------- - TOTAL 353 20 94% - - -The remaining three reports output to files without showing anything on the terminal (useful for -when the output is going 
to a continuous integration server):: - - py.test --cov-report html - --cov-report xml - --cov-report annotate - --cov myproj tests/ - - -Coverage Data File ------------------- - -The data file is erased at the beginning of testing to ensure clean data for each test run. - -The data file is left at the end of testing so that it is possible to use normal coverage tools to -examine it. - - -Coverage Config File --------------------- - -This plugin provides a clean minimal set of command line options that are added to pytest. For -further control of coverage use a coverage config file. - -For example if tests are contained within the directory tree being measured the tests may be -excluded if desired by using a .coveragerc file with the omit option set:: - - py.test --cov-config .coveragerc - --cov myproj - myproj/tests/ - -Where the .coveragerc file contains file globs:: - - [run] - omit = tests/* - -For full details refer to the `coverage config file`_ documentation. - -.. _`coverage config file`: http://nedbatchelder.com/code/coverage/config.html - -Note that this plugin controls some options and setting the option in the config file will have no -effect. These include specifying source to be measured (source option) and all data file handling -(data_file and parallel options). - - -Limitations ------------ - -For distributed testing the slaves must have the pytest-cov package installed. This is needed since -the plugin must be registered through setuptools / distribute for pytest to start the plugin on the -slave. - -For subprocess measurement environment variables must make it from the main process to the -subprocess. The python used by the subprocess must have pytest-cov installed. The subprocess must -do normal site initialisation so that the environment variables can be detected and coverage -started. 
- - -Acknowledgements ----------------- - -Whilst this plugin has been built fresh from the ground up it has been influenced by the work done -on pytest-coverage (Ross Lawley, James Mills, Holger Krekel) and nose-cover (Jason Pellerin) which are -other coverage plugins. - -Ned Batchelder for coverage and its ability to combine the coverage results of parallel runs. - -Holger Krekel for pytest with its distributed testing support. - -Jason Pellerin for nose. - -Michael Foord for unittest2. - -No doubt others have contributed to these tools as well. -""" - - -def pytest_addoption(parser): - """Add options to control coverage.""" - - group = parser.getgroup('coverage reporting with distributed testing support') - group.addoption('--cov', action='append', default=[], metavar='path', - dest='cov_source', - help='measure coverage for filesystem path (multi-allowed)') - group.addoption('--cov-report', action='append', default=[], metavar='type', - choices=['term', 'term-missing', 'annotate', 'html', 'xml'], - dest='cov_report', - help='type of report to generate: term, term-missing, annotate, html, xml (multi-allowed)') - group.addoption('--cov-config', action='store', default='.coveragerc', metavar='path', - dest='cov_config', - help='config file for coverage, default: .coveragerc') - - -def pytest_configure(config): - """Activate coverage plugin if appropriate.""" - - if config.getvalue('cov_source'): - config.pluginmanager.register(CovPlugin(), '_cov') - - -class CovPlugin(object): - """Use coverage package to produce code coverage reports. - - Delegates all work to a particular implementation based on whether - this test process is centralised, a distributed master or a - distributed slave. - """ - - def __init__(self): - """Creates a coverage pytest plugin. - - We read the rc file that coverage uses to get the data file - name. This is needed since we give coverage through it's API - the data file name. - """ - - # Our implementation is unknown at this time. 
- self.cov_controller = None - - def pytest_sessionstart(self, session): - """At session start determine our implementation and delegate to it.""" - - import cov_core - - cov_source = session.config.getvalue('cov_source') - cov_report = session.config.getvalue('cov_report') or ['term'] - cov_config = session.config.getvalue('cov_config') - - session_name = session.__class__.__name__ - is_master = (session.config.pluginmanager.hasplugin('dsession') or - session_name == 'DSession') - is_slave = (hasattr(session.config, 'slaveinput') or - session_name == 'SlaveSession') - nodeid = None - - if is_master: - controller_cls = cov_core.DistMaster - elif is_slave: - controller_cls = cov_core.DistSlave - nodeid = session.config.slaveinput.get('slaveid', getattr(session, 'nodeid')) - else: - controller_cls = cov_core.Central - - self.cov_controller = controller_cls(cov_source, - cov_report, - cov_config, - session.config, - nodeid) - - self.cov_controller.start() - - def pytest_configure_node(self, node): - """Delegate to our implementation.""" - - self.cov_controller.configure_node(node) - pytest_configure_node.optionalhook = True - - def pytest_testnodedown(self, node, error): - """Delegate to our implementation.""" - - self.cov_controller.testnodedown(node, error) - pytest_testnodedown.optionalhook = True - - def pytest_sessionfinish(self, session, exitstatus): - """Delegate to our implementation.""" - - self.cov_controller.finish() - - def pytest_terminal_summary(self, terminalreporter): - """Delegate to our implementation.""" - - self.cov_controller.summary(terminalreporter._tw) - - -def pytest_funcarg__cov(request): - """A pytest funcarg that provides access to the underlying coverage object.""" - - # Check with hasplugin to avoid getplugin exception in older pytest. 
- if request.config.pluginmanager.hasplugin('_cov'): - plugin = request.config.pluginmanager.getplugin('_cov') - if plugin.cov_controller: - return plugin.cov_controller.cov - return None From pypy.commits at gmail.com Wed Feb 3 12:34:01 2016 From: pypy.commits at gmail.com (mattip) Date: Wed, 03 Feb 2016 09:34:01 -0800 (PST) Subject: [pypy-commit] pypy default: first stab at fixing embedding for win32 Message-ID: <56b23a09.8f7e1c0a.85f70.29f8@mx.google.com> Author: mattip Branch: Changeset: r82061:338d32e89981 Date: 2016-01-29 16:06 +0200 http://bitbucket.org/pypy/pypy/changeset/338d32e89981/ Log: first stab at fixing embedding for win32 (grafted from 237a8c5c0202731866a958c900235d68ff739912) diff --git a/pypy/module/_cffi_backend/embedding.py b/pypy/module/_cffi_backend/embedding.py --- a/pypy/module/_cffi_backend/embedding.py +++ b/pypy/module/_cffi_backend/embedding.py @@ -84,11 +84,68 @@ return rffi.cast(rffi.INT, res) # ____________________________________________________________ +if os.name == 'nt': + do_startup = r''' +#include +#define WIN32_LEAN_AND_MEAN +#include +RPY_EXPORTED void rpython_startup_code(void); +RPY_EXPORTED int pypy_setup_home(char *, int); +static unsigned char _cffi_ready = 0; +static const char *volatile _cffi_module_name; -eci = ExternalCompilationInfo(separate_module_sources=[ -r""" -/* XXX Windows missing */ +static void _cffi_init_error(const char *msg, const char *extra) +{ + fprintf(stderr, + "\nPyPy initialization failure when loading module '%s':\n%s%s\n", + _cffi_module_name, msg, extra); +} + +BOOL CALLBACK _cffi_init(PINIT_ONCE InitOnce, PVOID Parameter, PVOID *lpContex) +{ + + HMODULE hModule; + TCHAR home[_MAX_PATH]; + rpython_startup_code(); + RPyGilAllocate(); + + GetModuleHandleEx(GET_MODULE_HANDLE_EX_FLAG_FROM_ADDRESS | + GET_MODULE_HANDLE_EX_FLAG_UNCHANGED_REFCOUNT, + (LPCTSTR)&_cffi_init, &hModule); + if (hModule == 0 ) { + /* TODO turn the int into a string with FormatMessage */ + + _cffi_init_error("dladdr() 
failed: ", ""); + return TRUE; + } + GetModuleFileName(hModule, home, _MAX_PATH); + if (pypy_setup_home(home, 1) != 0) { + _cffi_init_error("pypy_setup_home() failed", ""); + return TRUE; + } + _cffi_ready = 1; + fprintf(stderr, "startup succeeded, home %s\n", home); + return TRUE; +} + +RPY_EXPORTED +int pypy_carefully_make_gil(const char *name) +{ + /* For CFFI: this initializes the GIL and loads the home path. + It can be called completely concurrently from unrelated threads. + It assumes that we don't hold the GIL before (if it exists), and we + don't hold it afterwards. + */ + static INIT_ONCE s_init_once; + + _cffi_module_name = name; /* not really thread-safe, but better than + nothing */ + InitOnceExecuteOnce(&s_init_once, _cffi_init, NULL, NULL); + return (int)_cffi_ready - 1; +}''' +else: + do_startup = r""" #include #include #include @@ -141,6 +198,7 @@ pthread_once(&once_control, _cffi_init); return (int)_cffi_ready - 1; } -"""]) +""" +eci = ExternalCompilationInfo(separate_module_sources=[do_startup]) declare_c_function = rffi.llexternal_use_eci(eci) From pypy.commits at gmail.com Wed Feb 3 12:50:36 2016 From: pypy.commits at gmail.com (plan_rich) Date: Wed, 03 Feb 2016 09:50:36 -0800 (PST) Subject: [pypy-commit] pypy s390x-backend: give the saved registers r10, r11, r12, r2, f0 some space on the stack. this prevents overwriting of values if they are set in the calling function Message-ID: <56b23dec.8378c20a.b89d4.ffffcc0a@mx.google.com> Author: Richard Plangger Branch: s390x-backend Changeset: r82062:38a946dc43e3 Date: 2016-02-03 18:49 +0100 http://bitbucket.org/pypy/pypy/changeset/38a946dc43e3/ Log: give the saved registers r10,r11,r12,r2,f0 some space on the stack. 
this prevents overwriting of values if they are set in the calling function diff --git a/rpython/jit/backend/zarch/assembler.py b/rpython/jit/backend/zarch/assembler.py --- a/rpython/jit/backend/zarch/assembler.py +++ b/rpython/jit/backend/zarch/assembler.py @@ -182,9 +182,8 @@ RCS2 = r.r10 RCS3 = r.r12 - LOCAL_VARS_OFFSET = 0 - extra_stack_size = LOCAL_VARS_OFFSET + 4 * WORD + 8 - extra_stack_size = (extra_stack_size + 15) & ~15 + # r10,r11,r12,r2,f0 -> makes exactly 4 words + 8 byte + extra_stack_size = 4 * WORD + 8 if for_frame: # NOTE: don't save registers on the jitframe here! It might # override already-saved values that will be restored @@ -199,9 +198,10 @@ # the RPython exception that occurred in the CALL, if any). # off = STD_FRAME_SIZE_IN_BYTES - mc.STMG(r.r10, r.r12, l.addr(off+10*WORD, r.SP)) - mc.STG(r.r2, l.addr(off+2*WORD, r.SP)) - mc.STD(r.f0, l.addr(off+16*WORD, r.SP)) + mc.LAY(r.SP, l.addr(-extra_stack_size, r.SP)) + mc.STMG(r.r10, r.r12, l.addr(off, r.SP)) + mc.STG(r.r2, l.addr(off+3*WORD, r.SP)) + mc.STD(r.f0, l.addr(off+4*WORD, r.SP)) saved_regs = None saved_fp_regs = None else: @@ -250,9 +250,10 @@ if for_frame: off = STD_FRAME_SIZE_IN_BYTES - mc.LMG(r.r10, r.r12, l.addr(off+10*WORD, r.SP)) - mc.LG(r.r2, l.addr(off+2*WORD, r.SP)) - mc.LD(r.f0, l.addr(off+16*WORD, r.SP)) + mc.LMG(r.r10, r.r12, l.addr(off, r.SP)) + mc.LG(r.r2, l.addr(off+3*WORD, r.SP)) + mc.LD(r.f0, l.addr(off+4*WORD, r.SP)) + mc.LAY(r.SP, l.addr(extra_stack_size, r.SP)) else: self._pop_core_regs_from_jitframe(mc, saved_regs) self._pop_fp_regs_from_jitframe(mc, saved_fp_regs) @@ -1259,15 +1260,15 @@ mc.load_imm(r.r1, nursery_free_adr) mc.load(r.RES, r.r1, 0) # load nursery_free - mc.load(r.r14, r.r1, diff) # load nursery_top + mc.load(r.r0, r.r1, diff) # load nursery_top if check_imm_value(size): - mc.AGHI(r.RSZ, l.imm(size)) + mc.AGHIK(r.RSZ, r.RES, l.imm(size)) else: mc.load_imm(r.RSZ, size) mc.AGRK(r.RSZ, r.RES, r.RSZ) - mc.cmp_op(r.RSZ, r.r14, signed=False) + 
mc.cmp_op(r.RSZ, r.r0, signed=False) fast_jmp_pos = mc.currpos() mc.reserve_cond_jump(short=True) # conditional jump, patched later diff --git a/rpython/jit/backend/zarch/instructions.py b/rpython/jit/backend/zarch/instructions.py --- a/rpython/jit/backend/zarch/instructions.py +++ b/rpython/jit/backend/zarch/instructions.py @@ -175,6 +175,7 @@ 'STG': ('rxy', ['\xE3','\x24']), 'STY': ('rxy', ['\xE3','\x50']), 'STHY': ('rxy', ['\xE3','\x70']), + 'STC': ('rx', ['\x42']), 'STCY': ('rxy', ['\xE3','\x72']), # store float diff --git a/rpython/jit/backend/zarch/opassembler.py b/rpython/jit/backend/zarch/opassembler.py --- a/rpython/jit/backend/zarch/opassembler.py +++ b/rpython/jit/backend/zarch/opassembler.py @@ -567,9 +567,9 @@ #mc.XIHF(tmp_loc, l.imm(0xffffFFFF)) #mc.XILF(tmp_loc, l.imm(0xffffFFFF)) - # set SCRATCH to 1 << r1 + # set SCRATCH2 to 1 << r1 mc.LGHI(r.SCRATCH2, l.imm(1)) - mc.SLAG(r.SCRATCH, r.SCRATCH2, l.addr(0,r.SCRATCH)) + mc.SLAG(r.SCRATCH2, r.SCRATCH2, l.addr(0,r.SCRATCH)) # set this bit inside the byte of interest addr = l.addr(0, loc_base, tmp_loc) From pypy.commits at gmail.com Wed Feb 3 14:20:10 2016 From: pypy.commits at gmail.com (plan_rich) Date: Wed, 03 Feb 2016 11:20:10 -0800 (PST) Subject: [pypy-commit] pypy s390x-backend: cond_call_gc_wb_array can now not trash a volatile register. wrong allocation now takes a non volatile register! Message-ID: <56b252ea.2457c20a.b9997.ffffede2@mx.google.com> Author: Richard Plangger Branch: s390x-backend Changeset: r82063:621a42ebea23 Date: 2016-02-03 20:19 +0100 http://bitbucket.org/pypy/pypy/changeset/621a42ebea23/ Log: cond_call_gc_wb_array can now not trash a volatile register. wrong allocation now takes a non volatile register! 
diff --git a/rpython/jit/backend/zarch/assembler.py b/rpython/jit/backend/zarch/assembler.py --- a/rpython/jit/backend/zarch/assembler.py +++ b/rpython/jit/backend/zarch/assembler.py @@ -241,7 +241,7 @@ self._restore_exception(mc, RCS2, RCS3) if withcards: - # A final andix before the blr, for the caller. Careful to + # A final NILL before the return to the caller. Careful to # not follow this instruction with another one that changes # the status of the condition code card_marking_mask = descr.jit_wb_cards_set_singlebyte diff --git a/rpython/jit/backend/zarch/opassembler.py b/rpython/jit/backend/zarch/opassembler.py --- a/rpython/jit/backend/zarch/opassembler.py +++ b/rpython/jit/backend/zarch/opassembler.py @@ -495,7 +495,7 @@ mc.NILL(r.SCRATCH, l.imm(mask & 0xFF)) jz_location = mc.get_relative_pos() - mc.reserve_cond_jump() # patched later with 'EQ' + mc.reserve_cond_jump(short=True) # patched later with 'EQ' # for cond_call_gc_wb_array, also add another fast path: # if GCFLAG_CARDS_SET, then we can just set one bit and be done @@ -535,7 +535,7 @@ # So here, we can simply write again a beq, which will be # taken if GCFLAG_CARDS_SET is still not set. jns_location = mc.get_relative_pos() - mc.reserve_cond_jump() + mc.reserve_cond_jump(short=True) # # patch the 'NE' above currpos = mc.currpos() @@ -547,6 +547,8 @@ # directly the card flag setting loc_index = arglocs[1] if loc_index.is_reg(): + # must a register that is preserved across function calls + assert loc_index.value >= 6 tmp_loc = arglocs[2] n = descr.jit_wb_card_page_shift @@ -562,15 +564,16 @@ # 0x80 sets zero flag. 
will store 0 into all not selected bits mc.RISBGN(r.SCRATCH, loc_index, l.imm(61), l.imm(0x80 | 63), l.imm(64-n)) + # set SCRATCH2 to 1 << r1 # invert the bits of tmp_loc - mc.LCGR(tmp_loc, tmp_loc) #mc.XIHF(tmp_loc, l.imm(0xffffFFFF)) #mc.XILF(tmp_loc, l.imm(0xffffFFFF)) - - # set SCRATCH2 to 1 << r1 + mc.LG(r.SCRATCH2, l.pool(self.pool.constant_64_ones)) + mc.XGR(tmp_loc, r.SCRATCH2) mc.LGHI(r.SCRATCH2, l.imm(1)) mc.SLAG(r.SCRATCH2, r.SCRATCH2, l.addr(0,r.SCRATCH)) + # set this bit inside the byte of interest addr = l.addr(0, loc_base, tmp_loc) mc.LLGC(r.SCRATCH, addr) @@ -591,13 +594,13 @@ # patch the beq just above currpos = mc.currpos() pmc = OverwritingBuilder(mc, jns_location, 1) - pmc.BRCL(c.EQ, l.imm(currpos - jns_location)) + pmc.BRC(c.EQ, l.imm(currpos - jns_location)) pmc.overwrite() # patch the JZ above currpos = mc.currpos() pmc = OverwritingBuilder(mc, jz_location, 1) - pmc.BRCL(c.EQ, l.imm(currpos - jz_location)) + pmc.BRC(c.EQ, l.imm(currpos - jz_location)) pmc.overwrite() def emit_cond_call_gc_wb(self, op, arglocs, regalloc): diff --git a/rpython/jit/backend/zarch/regalloc.py b/rpython/jit/backend/zarch/regalloc.py --- a/rpython/jit/backend/zarch/regalloc.py +++ b/rpython/jit/backend/zarch/regalloc.py @@ -99,9 +99,9 @@ forbidden_vars=self.temp_boxes) return loc - def get_scratch_reg(self,): + def get_scratch_reg(self, selected_reg=None): box = TempFloat() - reg = self.force_allocate_reg(box, forbidden_vars=self.temp_boxes) + reg = self.force_allocate_reg(box, forbidden_vars=self.temp_boxes, selected_reg=selected_reg) self.temp_boxes.append(box) return reg @@ -151,9 +151,9 @@ selected_reg=selected_reg) return loc - def get_scratch_reg(self): + def get_scratch_reg(self, selected_reg=None): box = TempInt() - reg = self.force_allocate_reg(box, forbidden_vars=self.temp_boxes) + reg = self.force_allocate_reg(box, forbidden_vars=self.temp_boxes, selected_reg=selected_reg) self.temp_boxes.append(box) return reg @@ -583,13 +583,13 @@ else: return 
self.rm.ensure_reg(box, force_in_reg) - def ensure_reg_or_16bit_imm(self, box): + def ensure_reg_or_16bit_imm(self, box, selected_reg=None): if box.type == FLOAT: return self.fprm.ensure_reg(box, True) else: if helper.check_imm(box): return imm(box.getint()) - return self.rm.ensure_reg(box, force_in_reg=True) + return self.rm.ensure_reg(box, force_in_reg=True, selected_reg=selected_reg) def ensure_reg_or_any_imm(self, box): if box.type == FLOAT: @@ -599,11 +599,11 @@ return imm(box.getint()) return self.rm.ensure_reg(box, force_in_reg=True) - def get_scratch_reg(self, type): + def get_scratch_reg(self, type, selected_reg=None): if type == FLOAT: return self.fprm.get_scratch_reg() else: - return self.rm.get_scratch_reg() + return self.rm.get_scratch_reg(selected_reg=selected_reg) def free_op_vars(self): # free the boxes in the 'temp_boxes' lists, which contain both @@ -984,8 +984,11 @@ return arglocs def prepare_cond_call_gc_wb_array(self, op): + # just calling ensure_reg may return a register r2->r6. + # but in the assembly a sub routine is called that trashes r2->r6. 
+ # thus select two registers that are preserved arglocs = [self.ensure_reg(op.getarg(0), force_in_reg=True), - self.ensure_reg_or_16bit_imm(op.getarg(1)), + self.ensure_reg_or_16bit_imm(op.getarg(1), selected_reg=r.r7), None] if arglocs[1].is_reg(): arglocs[2] = self.get_scratch_reg(INT) diff --git a/rpython/jit/backend/zarch/test/test_assembler.py b/rpython/jit/backend/zarch/test/test_assembler.py --- a/rpython/jit/backend/zarch/test/test_assembler.py +++ b/rpython/jit/backend/zarch/test/test_assembler.py @@ -202,6 +202,13 @@ self.a.mc.BCR(con.ANY, r.r14) assert run_asm(self.a) == 0 + def test_complement(self): + self.a.mc.load_imm(r.r2, 0) + #self.a.mc.LCGR(r.r2, r.r2) + self.a.mc.XIHF(r.r2, loc.imm(0xffffFFFF)) + self.a.mc.XILF(r.r2, loc.imm(0xffffFFFF)) + self.a.mc.BCR(con.ANY, r.r14) + assert run_asm(self.a) == -1 def test_load_small_int_to_reg(self): self.a.mc.LGHI(r.r2, loc.imm(123)) From pypy.commits at gmail.com Wed Feb 3 16:22:36 2016 From: pypy.commits at gmail.com (plan_rich) Date: Wed, 03 Feb 2016 13:22:36 -0800 (PST) Subject: [pypy-commit] pypy s390x-backend: minor fix. loading from pool does not work Message-ID: <56b26f9c.162f1c0a.9c500.7afe@mx.google.com> Author: Richard Plangger Branch: s390x-backend Changeset: r82064:0ef7606ff55f Date: 2016-02-03 22:21 +0100 http://bitbucket.org/pypy/pypy/changeset/0ef7606ff55f/ Log: minor fix. 
loading from pool does not work diff --git a/rpython/jit/backend/zarch/opassembler.py b/rpython/jit/backend/zarch/opassembler.py --- a/rpython/jit/backend/zarch/opassembler.py +++ b/rpython/jit/backend/zarch/opassembler.py @@ -566,10 +566,10 @@ # set SCRATCH2 to 1 << r1 # invert the bits of tmp_loc - #mc.XIHF(tmp_loc, l.imm(0xffffFFFF)) - #mc.XILF(tmp_loc, l.imm(0xffffFFFF)) - mc.LG(r.SCRATCH2, l.pool(self.pool.constant_64_ones)) - mc.XGR(tmp_loc, r.SCRATCH2) + mc.XIHF(tmp_loc, l.imm(0xffffFFFF)) + mc.XILF(tmp_loc, l.imm(0xffffFFFF)) + #mc.LG(r.SCRATCH2, l.pool(self.pool.constant_64_ones)) + #mc.XGR(tmp_loc, r.SCRATCH2) mc.LGHI(r.SCRATCH2, l.imm(1)) mc.SLAG(r.SCRATCH2, r.SCRATCH2, l.addr(0,r.SCRATCH)) From pypy.commits at gmail.com Wed Feb 3 16:46:11 2016 From: pypy.commits at gmail.com (plan_rich) Date: Wed, 03 Feb 2016 13:46:11 -0800 (PST) Subject: [pypy-commit] pypy s390x-backend: STC can only tak 12 bits imm, this is wrong (use STCY again), fixed pool issue Message-ID: <56b27523.c177c20a.3c771.14e8@mx.google.com> Author: Richard Plangger Branch: s390x-backend Changeset: r82065:785c6853c253 Date: 2016-02-03 22:44 +0100 http://bitbucket.org/pypy/pypy/changeset/785c6853c253/ Log: STC can only tak 12 bits imm, this is wrong (use STCY again), fixed pool issue diff --git a/rpython/jit/backend/zarch/opassembler.py b/rpython/jit/backend/zarch/opassembler.py --- a/rpython/jit/backend/zarch/opassembler.py +++ b/rpython/jit/backend/zarch/opassembler.py @@ -563,13 +563,14 @@ # (index >> card_page_shift) & 7 # 0x80 sets zero flag. 
will store 0 into all not selected bits mc.RISBGN(r.SCRATCH, loc_index, l.imm(61), l.imm(0x80 | 63), l.imm(64-n)) + #mc.SRAG(r.SCRATCH, loc_index, l.addr(n)) + #mc.NILL(r.SCRATCH, l.imm(0x7)) # set SCRATCH2 to 1 << r1 # invert the bits of tmp_loc - mc.XIHF(tmp_loc, l.imm(0xffffFFFF)) - mc.XILF(tmp_loc, l.imm(0xffffFFFF)) - #mc.LG(r.SCRATCH2, l.pool(self.pool.constant_64_ones)) - #mc.XGR(tmp_loc, r.SCRATCH2) + #mc.XIHF(tmp_loc, l.imm(0xffffFFFF)) + #mc.XILF(tmp_loc, l.imm(0xffffFFFF)) + mc.XG(tmp_loc, l.pool(self.pool.constant_64_ones)) mc.LGHI(r.SCRATCH2, l.imm(1)) mc.SLAG(r.SCRATCH2, r.SCRATCH2, l.addr(0,r.SCRATCH)) @@ -589,7 +590,7 @@ addr = l.addr(byte_ofs, loc_base) mc.LLGC(r.SCRATCH, addr) mc.OILL(r.SCRATCH, l.imm(byte_val)) - mc.STC(r.SCRATCH, addr) + mc.STCY(r.SCRATCH, addr) # # patch the beq just above currpos = mc.currpos() diff --git a/rpython/jit/backend/zarch/pool.py b/rpython/jit/backend/zarch/pool.py --- a/rpython/jit/backend/zarch/pool.py +++ b/rpython/jit/backend/zarch/pool.py @@ -88,6 +88,8 @@ if arg.is_constant(): self.reserve_literal(8, arg) return + elif opnum == rop.COND_CALL_GC_WB_ARRAY: + self.constant_64_ones = 1 # we need constant ones!!! 
for arg in op.getarglist(): if arg.is_constant(): self.reserve_literal(8, arg) From pypy.commits at gmail.com Wed Feb 3 16:59:05 2016 From: pypy.commits at gmail.com (mattip) Date: Wed, 03 Feb 2016 13:59:05 -0800 (PST) Subject: [pypy-commit] pypy default: importing clibffi has side effects on win32, causing nested external function calls if not imported early Message-ID: <56b27829.41dfc20a.5b267.1fa6@mx.google.com> Author: mattip Branch: Changeset: r82066:cbfc796b3af3 Date: 2016-02-03 23:58 +0200 http://bitbucket.org/pypy/pypy/changeset/cbfc796b3af3/ Log: importing clibffi has side effects on win32, causing nested external function calls if not imported early a deeper solution would be to replace clibffi with cffi diff --git a/rpython/rlib/test/test_rzipfile.py b/rpython/rlib/test/test_rzipfile.py --- a/rpython/rlib/test/test_rzipfile.py +++ b/rpython/rlib/test/test_rzipfile.py @@ -4,6 +4,7 @@ from rpython.tool.udir import udir from zipfile import ZIP_STORED, ZIP_DEFLATED, ZipInfo, ZipFile from rpython.rtyper.test.tool import BaseRtypingTest +from rpython.rlib import clibffi # for side effect of testing lib_c_name on win32 import os import time diff --git a/rpython/rlib/test/test_rzlib.py b/rpython/rlib/test/test_rzlib.py --- a/rpython/rlib/test/test_rzlib.py +++ b/rpython/rlib/test/test_rzlib.py @@ -6,6 +6,7 @@ import py from rpython.rlib import rzlib from rpython.rlib.rarithmetic import r_uint +from rpython.rlib import clibffi # for side effect of testing lib_c_name on win32 import zlib expanded = 'some bytes which will be compressed' From pypy.commits at gmail.com Thu Feb 4 02:02:03 2016 From: pypy.commits at gmail.com (arigo) Date: Wed, 03 Feb 2016 23:02:03 -0800 (PST) Subject: [pypy-commit] cffi default: Issue #245: __stdcall not generated correctly on extern "Python" functions Message-ID: <56b2f76b.4577c20a.2f806.ffff9de8@mx.google.com> Author: Armin Rigo Branch: Changeset: r2618:465ce534fc8a Date: 2016-02-04 07:40 +0100 
http://bitbucket.org/cffi/cffi/changeset/465ce534fc8a/ Log: Issue #245: __stdcall not generated correctly on extern "Python" functions diff --git a/cffi/_cffi_include.h b/cffi/_cffi_include.h --- a/cffi/_cffi_include.h +++ b/cffi/_cffi_include.h @@ -231,6 +231,12 @@ ((got_nonpos) == (expected <= 0) && \ (got) == (unsigned long long)expected) +#ifdef MS_WIN32 +# define _cffi_stdcall __stdcall +#else +# define _cffi_stdcall /* nothing */ +#endif + #ifdef __cplusplus } #endif diff --git a/cffi/recompiler.py b/cffi/recompiler.py --- a/cffi/recompiler.py +++ b/cffi/recompiler.py @@ -1170,6 +1170,8 @@ repr_arguments = ', '.join(arguments) repr_arguments = repr_arguments or 'void' name_and_arguments = '%s(%s)' % (name, repr_arguments) + if tp.abi == "__stdcall": + name_and_arguments = '_cffi_stdcall ' + name_and_arguments # def may_need_128_bits(tp): return (isinstance(tp, model.PrimitiveType) and diff --git a/testing/cffi1/test_recompiler.py b/testing/cffi1/test_recompiler.py --- a/testing/cffi1/test_recompiler.py +++ b/testing/cffi1/test_recompiler.py @@ -1713,3 +1713,33 @@ # a case where 'onerror' is not callable py.test.raises(TypeError, ffi.def_extern(name='bar', onerror=42), lambda x: x) + +def test_extern_python_stdcall(): + ffi = FFI() + ffi.cdef(""" + extern "Python" int __stdcall foo(int); + extern "Python" int WINAPI bar(int); + int (__stdcall * mycb1)(int); + int indirect_call(int); + """) + lib = verify(ffi, 'test_extern_python_stdcall', """ + #ifndef _MSC_VER + # define __stdcall + #endif + static int (__stdcall * mycb1)(int); + static int indirect_call(int x) { + return mycb1(x); + } + """) + # + @ffi.def_extern() + def foo(x): + return x + 42 + @ffi.def_extern() + def bar(x): + return x + 43 + assert lib.foo(100) == 142 + assert lib.bar(100) == 143 + lib.mycb1 = lib.foo + assert lib.mycb1(200) == 242 + assert lib.indirect_call(300) == 342 From pypy.commits at gmail.com Thu Feb 4 03:41:20 2016 From: pypy.commits at gmail.com (plan_rich) Date: Thu, 04 Feb 
2016 00:41:20 -0800 (PST) Subject: [pypy-commit] pypy s390x-backend: proper fix for the volatile reg. provided to gc write barrier array Message-ID: <56b30eb0.8ab71c0a.358d7.0a32@mx.google.com> Author: Richard Plangger Branch: s390x-backend Changeset: r82067:71aa29c678e8 Date: 2016-02-04 08:44 +0100 http://bitbucket.org/pypy/pypy/changeset/71aa29c678e8/ Log: proper fix for the volatile reg. provided to gc write barrier array diff --git a/rpython/jit/backend/zarch/opassembler.py b/rpython/jit/backend/zarch/opassembler.py --- a/rpython/jit/backend/zarch/opassembler.py +++ b/rpython/jit/backend/zarch/opassembler.py @@ -527,9 +527,21 @@ if not is_frame: mc.LGR(r.r0, loc_base) # unusual argument location + loc_index = arglocs[1] + # loc_index may be in r2 to r5. + # the wb_slow_path may trash these registers + + if loc_index.is_reg() and loc_index.value < 6: + mc.LAY(r.SP, l.addr(-WORD, r.SP)) + mc.STG(loc_index, l.addr(0, r.SP)) + mc.load_imm(r.r14, self.wb_slowpath[helper_num]) mc.BASR(r.r14, r.r14) + if loc_index.is_reg() and loc_index.value < 6: + mc.LG(loc_index, l.addr(0, r.SP)) + mc.LAY(r.SP, l.addr(WORD, r.SP)) + if card_marking_mask: # The helper ends again with a check of the flag in the object. # So here, we can simply write again a beq, which will be @@ -545,10 +557,7 @@ # # case GCFLAG_CARDS_SET: emit a few instructions to do # directly the card flag setting - loc_index = arglocs[1] if loc_index.is_reg(): - # must a register that is preserved across function calls - assert loc_index.value >= 6 tmp_loc = arglocs[2] n = descr.jit_wb_card_page_shift diff --git a/rpython/jit/backend/zarch/regalloc.py b/rpython/jit/backend/zarch/regalloc.py --- a/rpython/jit/backend/zarch/regalloc.py +++ b/rpython/jit/backend/zarch/regalloc.py @@ -984,11 +984,8 @@ return arglocs def prepare_cond_call_gc_wb_array(self, op): - # just calling ensure_reg may return a register r2->r6. - # but in the assembly a sub routine is called that trashes r2->r6. 
- # thus select two registers that are preserved arglocs = [self.ensure_reg(op.getarg(0), force_in_reg=True), - self.ensure_reg_or_16bit_imm(op.getarg(1), selected_reg=r.r7), + self.ensure_reg_or_16bit_imm(op.getarg(1)), None] if arglocs[1].is_reg(): arglocs[2] = self.get_scratch_reg(INT) From pypy.commits at gmail.com Thu Feb 4 03:41:22 2016 From: pypy.commits at gmail.com (plan_rich) Date: Thu, 04 Feb 2016 00:41:22 -0800 (PST) Subject: [pypy-commit] pypy s390x-backend: added two more tests to be sure the assembler behaves correctly Message-ID: <56b30eb2.046f1c0a.b98f.fffff0f4@mx.google.com> Author: Richard Plangger Branch: s390x-backend Changeset: r82068:ba08e7241de3 Date: 2016-02-04 09:25 +0100 http://bitbucket.org/pypy/pypy/changeset/ba08e7241de3/ Log: added two more tests to be sure the assembler behaves correctly diff --git a/rpython/jit/backend/zarch/opassembler.py b/rpython/jit/backend/zarch/opassembler.py --- a/rpython/jit/backend/zarch/opassembler.py +++ b/rpython/jit/backend/zarch/opassembler.py @@ -561,25 +561,22 @@ tmp_loc = arglocs[2] n = descr.jit_wb_card_page_shift - assert tmp_loc is not r.SCRATCH - assert tmp_loc is not r.SCRATCH2 + assert tmp_loc is not loc_index # compute in tmp_loc the byte offset: - # ~(index >> (card_page_shift + 3)) + # tmp_loc = ~(index >> (card_page_shift + 3)) mc.SRAG(tmp_loc, loc_index, l.addr(n+3)) + # invert the bits of tmp_loc + mc.XG(tmp_loc, l.pool(self.pool.constant_64_ones)) # compute in SCRATCH the index of the bit inside the byte: - # (index >> card_page_shift) & 7 + # scratch = (index >> card_page_shift) & 7 # 0x80 sets zero flag. 
will store 0 into all not selected bits mc.RISBGN(r.SCRATCH, loc_index, l.imm(61), l.imm(0x80 | 63), l.imm(64-n)) #mc.SRAG(r.SCRATCH, loc_index, l.addr(n)) #mc.NILL(r.SCRATCH, l.imm(0x7)) # set SCRATCH2 to 1 << r1 - # invert the bits of tmp_loc - #mc.XIHF(tmp_loc, l.imm(0xffffFFFF)) - #mc.XILF(tmp_loc, l.imm(0xffffFFFF)) - mc.XG(tmp_loc, l.pool(self.pool.constant_64_ones)) mc.LGHI(r.SCRATCH2, l.imm(1)) mc.SLAG(r.SCRATCH2, r.SCRATCH2, l.addr(0,r.SCRATCH)) diff --git a/rpython/jit/backend/zarch/test/test_assembler.py b/rpython/jit/backend/zarch/test/test_assembler.py --- a/rpython/jit/backend/zarch/test/test_assembler.py +++ b/rpython/jit/backend/zarch/test/test_assembler.py @@ -210,6 +210,23 @@ self.a.mc.BCR(con.ANY, r.r14) assert run_asm(self.a) == -1 + def test_and_7_with_risbgn(self): + n = 13 + l = loc + self.a.mc.load_imm(r.r2, 7< Author: Richard Plangger Branch: s390x-backend Changeset: r82069:f5ccdddddea0 Date: 2016-02-04 10:04 +0100 http://bitbucket.org/pypy/pypy/changeset/f5ccdddddea0/ Log: an edge case in call release gil that could overwrite values for registers r8-r13, because the stack is not decremented accordingly diff --git a/rpython/jit/backend/zarch/callbuilder.py b/rpython/jit/backend/zarch/callbuilder.py --- a/rpython/jit/backend/zarch/callbuilder.py +++ b/rpython/jit/backend/zarch/callbuilder.py @@ -263,8 +263,13 @@ self.mc.LGR(RSAVEDRES, reg) elif reg.is_fp_reg(): self.mc.STD(reg, l.addr(16*WORD, r.SP)) + # r8-r13 live on the stack and must NOT be overwritten, + # restore_stack_pointer already moved SP + subtracted_to_sp, + self.mc.LAY(r.SP, l.addr(-self.subtracted_to_sp, r.SP)) self.mc.load_imm(self.mc.RAW_CALL_REG, self.asm.reacqgil_addr) self.mc.raw_call() + self.mc.LAY(r.SP, l.addr(self.subtracted_to_sp, r.SP)) + if reg is not None: if reg.is_core_reg(): self.mc.LGR(reg, RSAVEDRES) From pypy.commits at gmail.com Thu Feb 4 04:20:39 2016 From: pypy.commits at gmail.com (plan_rich) Date: Thu, 04 Feb 2016 01:20:39 -0800 (PST) Subject: 
[pypy-commit] pypy s390x-backend: place the saved register (loc_index) at a valid stack position! Message-ID: <56b317e7.d62d1c0a.47663.1f97@mx.google.com> Author: Richard Plangger Branch: s390x-backend Changeset: r82070:8bc88d808cbf Date: 2016-02-04 10:19 +0100 http://bitbucket.org/pypy/pypy/changeset/8bc88d808cbf/ Log: place the saved register (loc_index) at a valid stack position! diff --git a/rpython/jit/backend/zarch/opassembler.py b/rpython/jit/backend/zarch/opassembler.py --- a/rpython/jit/backend/zarch/opassembler.py +++ b/rpython/jit/backend/zarch/opassembler.py @@ -533,13 +533,13 @@ if loc_index.is_reg() and loc_index.value < 6: mc.LAY(r.SP, l.addr(-WORD, r.SP)) - mc.STG(loc_index, l.addr(0, r.SP)) + mc.STG(loc_index, l.addr(STD_FRAME_SIZE_IN_BYTES, r.SP)) mc.load_imm(r.r14, self.wb_slowpath[helper_num]) mc.BASR(r.r14, r.r14) if loc_index.is_reg() and loc_index.value < 6: - mc.LG(loc_index, l.addr(0, r.SP)) + mc.LG(loc_index, l.addr(STD_FRAME_SIZE_IN_BYTES, r.SP)) mc.LAY(r.SP, l.addr(WORD, r.SP)) if card_marking_mask: @@ -567,12 +567,12 @@ # tmp_loc = ~(index >> (card_page_shift + 3)) mc.SRAG(tmp_loc, loc_index, l.addr(n+3)) # invert the bits of tmp_loc - mc.XG(tmp_loc, l.pool(self.pool.constant_64_ones)) # compute in SCRATCH the index of the bit inside the byte: # scratch = (index >> card_page_shift) & 7 # 0x80 sets zero flag. will store 0 into all not selected bits mc.RISBGN(r.SCRATCH, loc_index, l.imm(61), l.imm(0x80 | 63), l.imm(64-n)) + mc.XG(tmp_loc, l.pool(self.pool.constant_64_ones)) #mc.SRAG(r.SCRATCH, loc_index, l.addr(n)) #mc.NILL(r.SCRATCH, l.imm(0x7)) From pypy.commits at gmail.com Thu Feb 4 04:27:00 2016 From: pypy.commits at gmail.com (plan_rich) Date: Thu, 04 Feb 2016 01:27:00 -0800 (PST) Subject: [pypy-commit] pypy s390x-backend: did not consider normal gc write barrier, crashes with index out of bounds! 
Message-ID: <56b31964.878e1c0a.e3f02.30fa@mx.google.com> Author: Richard Plangger Branch: s390x-backend Changeset: r82071:7694772ae0c5 Date: 2016-02-04 10:26 +0100 http://bitbucket.org/pypy/pypy/changeset/7694772ae0c5/ Log: did not consider normal gc write barrier, crashes with index out of bounds! diff --git a/rpython/jit/backend/zarch/opassembler.py b/rpython/jit/backend/zarch/opassembler.py --- a/rpython/jit/backend/zarch/opassembler.py +++ b/rpython/jit/backend/zarch/opassembler.py @@ -527,20 +527,22 @@ if not is_frame: mc.LGR(r.r0, loc_base) # unusual argument location - loc_index = arglocs[1] - # loc_index may be in r2 to r5. - # the wb_slow_path may trash these registers + if len(arglocs) > 1: + loc_index = arglocs[1] + # loc_index may be in r2 to r5. + # the wb_slow_path may trash these registers - if loc_index.is_reg() and loc_index.value < 6: - mc.LAY(r.SP, l.addr(-WORD, r.SP)) - mc.STG(loc_index, l.addr(STD_FRAME_SIZE_IN_BYTES, r.SP)) + if loc_index.is_reg() and loc_index.value < 6: + mc.LAY(r.SP, l.addr(-WORD, r.SP)) + mc.STG(loc_index, l.addr(STD_FRAME_SIZE_IN_BYTES, r.SP)) mc.load_imm(r.r14, self.wb_slowpath[helper_num]) mc.BASR(r.r14, r.r14) - if loc_index.is_reg() and loc_index.value < 6: - mc.LG(loc_index, l.addr(STD_FRAME_SIZE_IN_BYTES, r.SP)) - mc.LAY(r.SP, l.addr(WORD, r.SP)) + if len(arglocs) > 1: + if loc_index.is_reg() and loc_index.value < 6: + mc.LG(loc_index, l.addr(STD_FRAME_SIZE_IN_BYTES, r.SP)) + mc.LAY(r.SP, l.addr(WORD, r.SP)) if card_marking_mask: # The helper ends again with a check of the flag in the object. 
From pypy.commits at gmail.com Thu Feb 4 05:13:31 2016 From: pypy.commits at gmail.com (plan_rich) Date: Thu, 04 Feb 2016 02:13:31 -0800 (PST) Subject: [pypy-commit] pypy s390x-backend: translation issue Message-ID: <56b3244b.077bc20a.18c83.ffffd864@mx.google.com> Author: Richard Plangger Branch: s390x-backend Changeset: r82072:8475d0a75240 Date: 2016-02-04 11:12 +0100 http://bitbucket.org/pypy/pypy/changeset/8475d0a75240/ Log: translation issue diff --git a/rpython/jit/backend/zarch/opassembler.py b/rpython/jit/backend/zarch/opassembler.py --- a/rpython/jit/backend/zarch/opassembler.py +++ b/rpython/jit/backend/zarch/opassembler.py @@ -540,6 +540,7 @@ mc.BASR(r.r14, r.r14) if len(arglocs) > 1: + loc_index = arglocs[1] if loc_index.is_reg() and loc_index.value < 6: mc.LG(loc_index, l.addr(STD_FRAME_SIZE_IN_BYTES, r.SP)) mc.LAY(r.SP, l.addr(WORD, r.SP)) @@ -559,6 +560,7 @@ # # case GCFLAG_CARDS_SET: emit a few instructions to do # directly the card flag setting + loc_index = arglocs[1] if loc_index.is_reg(): tmp_loc = arglocs[2] n = descr.jit_wb_card_page_shift From pypy.commits at gmail.com Thu Feb 4 11:30:09 2016 From: pypy.commits at gmail.com (fijal) Date: Thu, 04 Feb 2016 08:30:09 -0800 (PST) Subject: [pypy-commit] pypy vmprof-newstack: a bit of shotgun debugging on asmgcc Message-ID: <56b37c91.460f1c0a.a6e42.ffffa1bc@mx.google.com> Author: fijal Branch: vmprof-newstack Changeset: r82073:c2e205bfae0d Date: 2016-02-04 17:29 +0100 http://bitbucket.org/pypy/pypy/changeset/c2e205bfae0d/ Log: a bit of shotgun debugging on asmgcc diff --git a/rpython/jit/backend/x86/assembler.py b/rpython/jit/backend/x86/assembler.py --- a/rpython/jit/backend/x86/assembler.py +++ b/rpython/jit/backend/x86/assembler.py @@ -872,6 +872,7 @@ from rpython.rlib.rvmprof.rvmprof import cintf # edx = address of pypy_threadlocal_s self.mc.MOV_rs(edx.value, THREADLOCAL_OFS) + self.mc.AND_ri(edx.value, ~1) # eax = (our local vmprof_tl_stack).next self.mc.MOV_rs(eax.value, (FRAME_FIXED_SIZE - 4 + 
0) * WORD) # save in vmprof_tl_stack the value eax From pypy.commits at gmail.com Thu Feb 4 11:33:54 2016 From: pypy.commits at gmail.com (rlamy) Date: Thu, 04 Feb 2016 08:33:54 -0800 (PST) Subject: [pypy-commit] pypy default: Handle pdb.set_trace() in rpython/annotator/builtin.py Message-ID: <56b37d72.0357c20a.730f.6f8d@mx.google.com> Author: Ronan Lamy Branch: Changeset: r82074:e5eb901ac085 Date: 2016-02-04 16:33 +0000 http://bitbucket.org/pypy/pypy/changeset/e5eb901ac085/ Log: Handle pdb.set_trace() in rpython/annotator/builtin.py diff --git a/rpython/annotator/builtin.py b/rpython/annotator/builtin.py --- a/rpython/annotator/builtin.py +++ b/rpython/annotator/builtin.py @@ -311,3 +311,14 @@ @analyzer_for(rpython.rlib.objectmodel.free_non_gc_object) def robjmodel_free_non_gc_object(obj): pass + +#________________________________ +# pdb + +import pdb + + at analyzer_for(pdb.set_trace) +def pdb_set_trace(*args_s): + raise AnnotatorError( + "you left pdb.set_trace() in your interpreter! " + "If you want to attach a gdb instead, call rlib.debug.attach_gdb()") diff --git a/rpython/rtyper/extfuncregistry.py b/rpython/rtyper/extfuncregistry.py --- a/rpython/rtyper/extfuncregistry.py +++ b/rpython/rtyper/extfuncregistry.py @@ -11,7 +11,6 @@ import math from rpython.rtyper.lltypesystem.module import ll_math -from rpython.rtyper.module import ll_pdb from rpython.rlib import rfloat # the following functions all take one float, return one float diff --git a/rpython/rtyper/module/ll_pdb.py b/rpython/rtyper/module/ll_pdb.py deleted file mode 100644 --- a/rpython/rtyper/module/ll_pdb.py +++ /dev/null @@ -1,13 +0,0 @@ -""" -Complain if you leave in pdb.set_trace() in the code -""" - -import pdb -from rpython.rtyper.extfunc import ExtFuncEntry - - -class FunEntry(ExtFuncEntry): - _about_ = pdb.set_trace - def compute_result_annotation(self, *args_s): - raise Exception("you left pdb.set_trace() in your interpreter!" 
- "If you want to attach a gdb instead, call rlib.debug.attach_gdb()") From pypy.commits at gmail.com Thu Feb 4 11:47:44 2016 From: pypy.commits at gmail.com (rlamy) Date: Thu, 04 Feb 2016 08:47:44 -0800 (PST) Subject: [pypy-commit] pypy default: Use the correct exceptions in rpython/annotator/builtin.py Message-ID: <56b380b0.a3f6c20a.ef3cc.69c9@mx.google.com> Author: Ronan Lamy Branch: Changeset: r82075:b8933a584083 Date: 2016-02-04 16:46 +0000 http://bitbucket.org/pypy/pypy/changeset/b8933a584083/ Log: Use the correct exceptions in rpython/annotator/builtin.py diff --git a/rpython/annotator/builtin.py b/rpython/annotator/builtin.py --- a/rpython/annotator/builtin.py +++ b/rpython/annotator/builtin.py @@ -39,8 +39,9 @@ return s_result s_realresult = immutablevalue(realresult) if not s_result.contains(s_realresult): - raise Exception("%s%r returned %r, which is not contained in %s" % ( - func, args, realresult, s_result)) + raise AnnotatorError( + "%s%r returned %r, which is not contained in %s" % ( + func, args, realresult, s_result)) return s_realresult # ____________________________________________________________ @@ -56,14 +57,14 @@ s_start, s_stop = args[:2] s_step = args[2] else: - raise Exception("range() takes 1 to 3 arguments") + raise AnnotatorError("range() takes 1 to 3 arguments") empty = False # so far if not s_step.is_constant(): step = 0 # this case signals a variable step else: step = s_step.const if step == 0: - raise Exception("range() with step zero") + raise AnnotatorError("range() with step zero") if s_start.is_constant() and s_stop.is_constant(): try: if len(xrange(s_start.const, s_stop.const, step)) == 0: @@ -285,7 +286,8 @@ else: @analyzer_for(unicodedata.decimal) def unicodedata_decimal(s_uchr): - raise TypeError("unicodedate.decimal() calls should not happen at interp-level") + raise AnnotatorError( + "unicodedate.decimal() calls should not happen at interp-level") @analyzer_for(OrderedDict) def analyze(): @@ -299,9 +301,9 @@ 
@analyzer_for(weakref.ref) def weakref_ref(s_obj): if not isinstance(s_obj, SomeInstance): - raise Exception("cannot take a weakref to %r" % (s_obj,)) + raise AnnotatorError("cannot take a weakref to %r" % (s_obj,)) if s_obj.can_be_None: - raise Exception("should assert that the instance we take " + raise AnnotatorError("should assert that the instance we take " "a weakref to cannot be None") return SomeWeakRef(s_obj.classdef) From pypy.commits at gmail.com Thu Feb 4 12:17:13 2016 From: pypy.commits at gmail.com (fijal) Date: Thu, 04 Feb 2016 09:17:13 -0800 (PST) Subject: [pypy-commit] pypy vmprof-newstack: bump the version one level Message-ID: <56b38799.05bd1c0a.d660.ffffe929@mx.google.com> Author: fijal Branch: vmprof-newstack Changeset: r82076:35f680ad059c Date: 2016-02-04 18:15 +0100 http://bitbucket.org/pypy/pypy/changeset/35f680ad059c/ Log: bump the version one level diff --git a/rpython/rlib/rvmprof/src/vmprof_common.h b/rpython/rlib/rvmprof/src/vmprof_common.h --- a/rpython/rlib/rvmprof/src/vmprof_common.h +++ b/rpython/rlib/rvmprof/src/vmprof_common.h @@ -18,6 +18,7 @@ #define VERSION_BASE '\x00' #define VERSION_THREAD_ID '\x01' +#define VERSION_TAG '\x02' typedef struct prof_stacktrace_s { char padding[sizeof(long) - 1]; @@ -64,7 +65,7 @@ header.hdr[4] = 0; header.interp_name[0] = MARKER_HEADER; header.interp_name[1] = '\x00'; - header.interp_name[2] = VERSION_THREAD_ID; + header.interp_name[2] = VERSION_TAG; header.interp_name[3] = namelen; memcpy(&header.interp_name[4], interp_name, namelen); return _write_all((char*)&header, 5 * sizeof(long) + 4 + namelen); From pypy.commits at gmail.com Thu Feb 4 14:45:29 2016 From: pypy.commits at gmail.com (fijal) Date: Thu, 04 Feb 2016 11:45:29 -0800 (PST) Subject: [pypy-commit] pypy default: merge vmprof-newstack Message-ID: <56b3aa59.4e0e1c0a.558e4.ffffe289@mx.google.com> Author: fijal Branch: Changeset: r82078:cc28605e84eb Date: 2016-02-04 20:44 +0100 http://bitbucket.org/pypy/pypy/changeset/cc28605e84eb/ Log: 
merge vmprof-newstack diff --git a/pypy/module/_vmprof/__init__.py b/pypy/module/_vmprof/__init__.py --- a/pypy/module/_vmprof/__init__.py +++ b/pypy/module/_vmprof/__init__.py @@ -11,6 +11,7 @@ interpleveldefs = { 'enable': 'interp_vmprof.enable', 'disable': 'interp_vmprof.disable', + 'write_all_code_objects': 'interp_vmprof.write_all_code_objects', 'VMProfError': 'space.fromcache(interp_vmprof.Cache).w_VMProfError', } diff --git a/pypy/module/_vmprof/interp_vmprof.py b/pypy/module/_vmprof/interp_vmprof.py --- a/pypy/module/_vmprof/interp_vmprof.py +++ b/pypy/module/_vmprof/interp_vmprof.py @@ -59,11 +59,21 @@ 'interval' is a float representing the sampling interval, in seconds. Must be smaller than 1.0 """ + w_modules = space.sys.get('modules') + if space.is_true(space.contains(w_modules, space.wrap('_continuation'))): + space.warn(space.wrap("Using _continuation/greenlet/stacklet together " + "with vmprof will crash"), + space.w_RuntimeWarning) try: rvmprof.enable(fileno, period) except rvmprof.VMProfError, e: raise VMProfError(space, e) +def write_all_code_objects(space): + """ Needed on cpython, just empty function here + """ + pass + def disable(space): """Disable vmprof. Remember to close the file descriptor afterwards if necessary. 
diff --git a/rpython/jit/backend/llsupport/test/zrpy_vmprof_test.py b/rpython/jit/backend/llsupport/test/zrpy_vmprof_test.py new file mode 100644 --- /dev/null +++ b/rpython/jit/backend/llsupport/test/zrpy_vmprof_test.py @@ -0,0 +1,86 @@ + +import os, py +from rpython.jit.backend.test.support import CCompiledMixin +from rpython.rlib.jit import JitDriver +from rpython.tool.udir import udir +from rpython.translator.translator import TranslationContext +from rpython.jit.backend.detect_cpu import getcpuclass + +class CompiledVmprofTest(CCompiledMixin): + CPUClass = getcpuclass() + + def _get_TranslationContext(self): + t = TranslationContext() + t.config.translation.gc = 'incminimark' + t.config.translation.list_comprehension_operations = True + return t + + def test_vmprof(self): + from rpython.rlib import rvmprof + + class MyCode: + _vmprof_unique_id = 0 + def __init__(self, name): + self.name = name + + def get_name(code): + return code.name + + code2 = MyCode("py:y:foo:4") + rvmprof.register_code(code2, get_name) + + try: + rvmprof.register_code_object_class(MyCode, get_name) + except rvmprof.VMProfPlatformUnsupported, e: + py.test.skip(str(e)) + + def get_unique_id(code): + return rvmprof.get_unique_id(code) + + driver = JitDriver(greens = ['code'], reds = ['i', 's', 'num'], + is_recursive=True, get_unique_id=get_unique_id) + + @rvmprof.vmprof_execute_code("xcode13", lambda code, num: code) + def main(code, num): + return main_jitted(code, num) + + def main_jitted(code, num): + s = 0 + i = 0 + while i < num: + driver.jit_merge_point(code=code, i=i, s=s, num=num) + s += (i << 1) + if i % 3 == 0 and code is not code2: + main(code2, 100) + i += 1 + return s + + tmpfilename = str(udir.join('test_rvmprof')) + + def f(num): + code = MyCode("py:x:foo:3") + rvmprof.register_code(code, get_name) + fd = os.open(tmpfilename, os.O_WRONLY | os.O_CREAT, 0666) + period = 0.0001 + rvmprof.enable(fd, period) + res = main(code, num) + #assert res == 499999500000 + rvmprof.disable() 
+ os.close(fd) + return 0 + + def check_vmprof_output(): + from vmprof import read_profile + tmpfile = str(udir.join('test_rvmprof')) + stats = read_profile(tmpfile) + t = stats.get_tree() + assert t.name == 'py:x:foo:3' + assert len(t.children) == 1 # jit + + self.meta_interp(f, [1000000], inline=True) + try: + import vmprof + except ImportError: + pass + else: + check_vmprof_output() \ No newline at end of file diff --git a/rpython/jit/backend/test/test_rvmprof.py b/rpython/jit/backend/test/test_rvmprof.py new file mode 100644 --- /dev/null +++ b/rpython/jit/backend/test/test_rvmprof.py @@ -0,0 +1,49 @@ +import py +from rpython.rlib import jit +from rpython.rtyper.annlowlevel import llhelper +from rpython.rtyper.lltypesystem import lltype, rffi +from rpython.rlib.rvmprof import cintf +from rpython.jit.backend.x86.arch import WORD +from rpython.jit.codewriter.policy import JitPolicy + +class BaseRVMProfTest(object): + def test_one(self): + py.test.skip("needs thread-locals in the JIT, which is only available " + "after translation") + visited = [] + + def helper(): + stack = cintf.vmprof_tl_stack.getraw() + if stack: + # not during tracing + visited.append(stack.c_value) + else: + visited.append(0) + + llfn = llhelper(lltype.Ptr(lltype.FuncType([], lltype.Void)), helper) + + driver = jit.JitDriver(greens=[], reds='auto') + + def f(n): + i = 0 + while i < n: + driver.jit_merge_point() + i += 1 + llfn() + + class Hooks(jit.JitHookInterface): + def after_compile(self, debug_info): + self.raw_start = debug_info.asminfo.rawstart + + hooks = Hooks() + + null = lltype.nullptr(cintf.VMPROFSTACK) + cintf.vmprof_tl_stack.setraw(null) # make it empty + self.meta_interp(f, [10], policy=JitPolicy(hooks)) + v = set(visited) + assert 0 in v + v.remove(0) + assert len(v) == 1 + assert 0 <= list(v)[0] - hooks.raw_start <= 10*1024 + assert cintf.vmprof_tl_stack.getraw() == null + # ^^^ make sure we didn't leave anything dangling diff --git a/rpython/jit/backend/x86/arch.py 
b/rpython/jit/backend/x86/arch.py --- a/rpython/jit/backend/x86/arch.py +++ b/rpython/jit/backend/x86/arch.py @@ -31,7 +31,7 @@ if WORD == 4: # ebp + ebx + esi + edi + 15 extra words = 19 words - FRAME_FIXED_SIZE = 19 + FRAME_FIXED_SIZE = 19 + 4 # 4 for vmprof, XXX make more compact! PASS_ON_MY_FRAME = 15 JITFRAME_FIXED_SIZE = 6 + 8 * 2 # 6 GPR + 8 XMM * 2 WORDS/float # 'threadlocal_addr' is passed as 2nd argument on the stack, @@ -41,7 +41,7 @@ THREADLOCAL_OFS = (FRAME_FIXED_SIZE + 2) * WORD else: # rbp + rbx + r12 + r13 + r14 + r15 + threadlocal + 12 extra words = 19 - FRAME_FIXED_SIZE = 19 + FRAME_FIXED_SIZE = 19 + 4 # 4 for vmprof, XXX make more compact! PASS_ON_MY_FRAME = 12 JITFRAME_FIXED_SIZE = 28 # 13 GPR + 15 XMM # 'threadlocal_addr' is passed as 2nd argument in %esi, diff --git a/rpython/jit/backend/x86/assembler.py b/rpython/jit/backend/x86/assembler.py --- a/rpython/jit/backend/x86/assembler.py +++ b/rpython/jit/backend/x86/assembler.py @@ -12,7 +12,7 @@ from rpython.jit.metainterp.compile import ResumeGuardDescr from rpython.rtyper.lltypesystem import lltype, rffi, rstr, llmemory from rpython.rtyper.lltypesystem.lloperation import llop -from rpython.rtyper.annlowlevel import llhelper, cast_instance_to_gcref +from rpython.rtyper.annlowlevel import cast_instance_to_gcref from rpython.rtyper import rclass from rpython.rlib.jit import AsmInfo from rpython.jit.backend.model import CompiledLoopToken @@ -837,11 +837,56 @@ frame_depth = max(frame_depth, target_frame_depth) return frame_depth + def _call_header_vmprof(self): + from rpython.rlib.rvmprof.rvmprof import cintf, VMPROF_JITTED_TAG + + # tloc = address of pypy_threadlocal_s + if IS_X86_32: + # Can't use esi here, its old value is not saved yet. + # But we can use eax and ecx. + self.mc.MOV_rs(edx.value, THREADLOCAL_OFS) + tloc = edx + old = ecx + else: + # The thread-local value is already in esi. 
+ # We should avoid if possible to use ecx or edx because they + # would be used to pass arguments #3 and #4 (even though, so + # far, the assembler only receives two arguments). + tloc = esi + old = r11 + # eax = address in the stack of a 3-words struct vmprof_stack_s + self.mc.LEA_rs(eax.value, (FRAME_FIXED_SIZE - 4) * WORD) + # old = current value of vmprof_tl_stack + offset = cintf.vmprof_tl_stack.getoffset() + self.mc.MOV_rm(old.value, (tloc.value, offset)) + # eax->next = old + self.mc.MOV_mr((eax.value, 0), old.value) + # eax->value = my esp + self.mc.MOV_mr((eax.value, WORD), esp.value) + # eax->kind = VMPROF_JITTED_TAG + self.mc.MOV_mi((eax.value, WORD * 2), VMPROF_JITTED_TAG) + # save in vmprof_tl_stack the new eax + self.mc.MOV_mr((tloc.value, offset), eax.value) + + def _call_footer_vmprof(self): + from rpython.rlib.rvmprof.rvmprof import cintf + # edx = address of pypy_threadlocal_s + self.mc.MOV_rs(edx.value, THREADLOCAL_OFS) + self.mc.AND_ri(edx.value, ~1) + # eax = (our local vmprof_tl_stack).next + self.mc.MOV_rs(eax.value, (FRAME_FIXED_SIZE - 4 + 0) * WORD) + # save in vmprof_tl_stack the value eax + offset = cintf.vmprof_tl_stack.getoffset() + self.mc.MOV_mr((edx.value, offset), eax.value) + def _call_header(self): self.mc.SUB_ri(esp.value, FRAME_FIXED_SIZE * WORD) self.mc.MOV_sr(PASS_ON_MY_FRAME * WORD, ebp.value) if IS_X86_64: self.mc.MOV_sr(THREADLOCAL_OFS, esi.value) + if self.cpu.translate_support_code: + self._call_header_vmprof() # on X86_64, this uses esi + if IS_X86_64: self.mc.MOV_rr(ebp.value, edi.value) else: self.mc.MOV_rs(ebp.value, (FRAME_FIXED_SIZE + 1) * WORD) @@ -873,6 +918,8 @@ def _call_footer(self): # the return value is the jitframe + if self.cpu.translate_support_code: + self._call_footer_vmprof() self.mc.MOV_rr(eax.value, ebp.value) gcrootmap = self.cpu.gc_ll_descr.gcrootmap diff --git a/rpython/jit/backend/x86/test/test_rvmprof.py b/rpython/jit/backend/x86/test/test_rvmprof.py new file mode 100644 --- /dev/null +++ 
b/rpython/jit/backend/x86/test/test_rvmprof.py @@ -0,0 +1,7 @@ + +import py +from rpython.jit.backend.test.test_rvmprof import BaseRVMProfTest +from rpython.jit.backend.x86.test.test_basic import Jit386Mixin + +class TestFfiCall(Jit386Mixin, BaseRVMProfTest): + pass \ No newline at end of file diff --git a/rpython/jit/backend/x86/test/test_zrpy_vmprof.py b/rpython/jit/backend/x86/test/test_zrpy_vmprof.py new file mode 100644 --- /dev/null +++ b/rpython/jit/backend/x86/test/test_zrpy_vmprof.py @@ -0,0 +1,7 @@ + +from rpython.jit.backend.llsupport.test.zrpy_vmprof_test import CompiledVmprofTest + +class TestZVMprof(CompiledVmprofTest): + + gcrootfinder = "shadowstack" + gc = "incminimark" \ No newline at end of file diff --git a/rpython/jit/backend/x86/test/test_zvmprof.py b/rpython/jit/backend/x86/test/test_zvmprof.py new file mode 100644 --- /dev/null +++ b/rpython/jit/backend/x86/test/test_zvmprof.py @@ -0,0 +1,7 @@ + +from rpython.jit.backend.llsupport.test.zrpy_vmprof_test import CompiledVmprofTest + +class TestZVMprof(CompiledVmprofTest): + + gcrootfinder = "shadowstack" + gc = "incminimark" \ No newline at end of file diff --git a/rpython/jit/codewriter/test/test_jtransform.py b/rpython/jit/codewriter/test/test_jtransform.py --- a/rpython/jit/codewriter/test/test_jtransform.py +++ b/rpython/jit/codewriter/test/test_jtransform.py @@ -1332,7 +1332,7 @@ tlfield = ThreadLocalField(lltype.Signed, 'foobar_test_', loop_invariant=loop_inv) OS_THREADLOCALREF_GET = effectinfo.EffectInfo.OS_THREADLOCALREF_GET - c = const(tlfield.offset) + c = const(tlfield.getoffset()) v = varoftype(lltype.Signed) op = SpaceOperation('threadlocalref_get', [c], v) cc = FakeBuiltinCallControl() diff --git a/rpython/jit/metainterp/quasiimmut.py b/rpython/jit/metainterp/quasiimmut.py --- a/rpython/jit/metainterp/quasiimmut.py +++ b/rpython/jit/metainterp/quasiimmut.py @@ -51,6 +51,7 @@ class QuasiImmut(object): llopaque = True compress_limit = 30 + looptokens_wrefs = None def __init__(self, 
cpu): self.cpu = cpu @@ -75,7 +76,7 @@ def compress_looptokens_list(self): self.looptokens_wrefs = [wref for wref in self.looptokens_wrefs if wref() is not None] - # NB. we must keep around the looptoken_wrefs that are + # NB. we must keep around the looptokens_wrefs that are # already invalidated; see below self.compress_limit = (len(self.looptokens_wrefs) + 15) * 2 @@ -83,6 +84,9 @@ # When this is called, all the loops that we record become # invalid: all GUARD_NOT_INVALIDATED in these loops (and # in attached bridges) must now fail. + if self.looptokens_wrefs is None: + # can't happen, but helps compiled tests + return wrefs = self.looptokens_wrefs self.looptokens_wrefs = [] for wref in wrefs: diff --git a/rpython/jit/metainterp/test/test_jitdriver.py b/rpython/jit/metainterp/test/test_jitdriver.py --- a/rpython/jit/metainterp/test/test_jitdriver.py +++ b/rpython/jit/metainterp/test/test_jitdriver.py @@ -193,7 +193,7 @@ return pc + 1 driver = JitDriver(greens=["pc"], reds='auto', - get_unique_id=get_unique_id) + get_unique_id=get_unique_id, is_recursive=True) def f(arg): i = 0 diff --git a/rpython/jit/metainterp/test/test_recursive.py b/rpython/jit/metainterp/test/test_recursive.py --- a/rpython/jit/metainterp/test/test_recursive.py +++ b/rpython/jit/metainterp/test/test_recursive.py @@ -1312,7 +1312,7 @@ return (code + 1) * 2 driver = JitDriver(greens=["pc", "code"], reds='auto', - get_unique_id=get_unique_id) + get_unique_id=get_unique_id, is_recursive=True) def f(pc, code): i = 0 diff --git a/rpython/rlib/jit.py b/rpython/rlib/jit.py --- a/rpython/rlib/jit.py +++ b/rpython/rlib/jit.py @@ -623,6 +623,8 @@ raise AttributeError("no 'greens' or 'reds' supplied") if virtualizables is not None: self.virtualizables = virtualizables + if get_unique_id is not None: + assert is_recursive, "get_unique_id and is_recursive must be specified at the same time" for v in self.virtualizables: assert v in self.reds # if reds are automatic, they won't be passed to 
jit_merge_point, so diff --git a/rpython/rlib/rthread.py b/rpython/rlib/rthread.py --- a/rpython/rlib/rthread.py +++ b/rpython/rlib/rthread.py @@ -308,7 +308,7 @@ offset = CDefinedIntSymbolic('RPY_TLOFS_%s' % self.fieldname, default='?') offset.loop_invariant = loop_invariant - self.offset = offset + self._offset = offset def getraw(): if we_are_translated(): @@ -364,7 +364,7 @@ ThreadLocalField.__init__(self, lltype.Signed, 'tlref%d' % unique_id, loop_invariant=loop_invariant) setraw = self.setraw - offset = self.offset + offset = self._offset def get(): if we_are_translated(): diff --git a/rpython/rlib/rvmprof/cintf.py b/rpython/rlib/rvmprof/cintf.py --- a/rpython/rlib/rvmprof/cintf.py +++ b/rpython/rlib/rvmprof/cintf.py @@ -5,41 +5,41 @@ from rpython.rtyper.lltypesystem import lltype, llmemory, rffi from rpython.translator.tool.cbuild import ExternalCompilationInfo from rpython.rtyper.tool import rffi_platform as platform +from rpython.rlib import rthread from rpython.jit.backend import detect_cpu class VMProfPlatformUnsupported(Exception): pass +ROOT = py.path.local(rpythonroot).join('rpython', 'rlib', 'rvmprof') +SRC = ROOT.join('src') + +if sys.platform.startswith('linux'): + _libs = ['dl'] +else: + _libs = [] +eci_kwds = dict( + include_dirs = [SRC], + includes = ['rvmprof.h'], + libraries = _libs, + separate_module_files = [SRC.join('rvmprof.c')], + post_include_bits=['#define RPYTHON_VMPROF\n'], + ) +global_eci = ExternalCompilationInfo(**eci_kwds) + + def setup(): if not detect_cpu.autodetect().startswith(detect_cpu.MODEL_X86_64): raise VMProfPlatformUnsupported("rvmprof only supports" " x86-64 CPUs for now") - - ROOT = py.path.local(rpythonroot).join('rpython', 'rlib', 'rvmprof') - SRC = ROOT.join('src') - - - if sys.platform.startswith('linux'): - libs = ['dl'] - else: - libs = [] - - eci_kwds = dict( - include_dirs = [SRC], - includes = ['rvmprof.h'], - libraries = libs, - separate_module_files = [SRC.join('rvmprof.c')], - post_include_bits=['#define 
RPYTHON_VMPROF\n'], - ) - eci = ExternalCompilationInfo(**eci_kwds) - platform.verify_eci(ExternalCompilationInfo( compile_extra=['-DRPYTHON_LL2CTYPES'], **eci_kwds)) + eci = global_eci vmprof_init = rffi.llexternal("vmprof_init", [rffi.INT, rffi.DOUBLE, rffi.CCHARP], rffi.CCHARP, compilation_info=eci) @@ -55,7 +55,8 @@ rffi.INT, compilation_info=eci) vmprof_ignore_signals = rffi.llexternal("vmprof_ignore_signals", [rffi.INT], lltype.Void, - compilation_info=eci) + compilation_info=eci, + _nowrapper=True) return CInterface(locals()) @@ -67,112 +68,34 @@ def _freeze_(self): return True -def token2lltype(tok): - if tok == 'i': - return lltype.Signed - if tok == 'r': - return llmemory.GCREF - raise NotImplementedError(repr(tok)) -def make_trampoline_function(name, func, token, restok): - from rpython.jit.backend import detect_cpu +# --- copy a few declarations from src/vmprof_stack.h --- - cont_name = 'rpyvmprof_f_%s_%s' % (name, token) - tramp_name = 'rpyvmprof_t_%s_%s' % (name, token) - orig_tramp_name = tramp_name +VMPROF_CODE_TAG = 1 - func.c_name = cont_name - func._dont_inline_ = True +VMPROFSTACK = lltype.ForwardReference() +PVMPROFSTACK = lltype.Ptr(VMPROFSTACK) +VMPROFSTACK.become(rffi.CStruct("vmprof_stack_s", + ('next', PVMPROFSTACK), + ('value', lltype.Signed), + ('kind', lltype.Signed))) +# ---------- - if sys.platform == 'darwin': - # according to internet "At the time UNIX was written in 1974...." - # "... 
all C functions are prefixed with _" - cont_name = '_' + cont_name - tramp_name = '_' + tramp_name - PLT = "" - size_decl = "" - type_decl = "" - extra_align = "" - else: - PLT = "@PLT" - type_decl = "\t.type\t%s, @function" % (tramp_name,) - size_decl = "\t.size\t%s, .-%s" % ( - tramp_name, tramp_name) - extra_align = "\t.cfi_def_cfa_offset 8" - assert detect_cpu.autodetect().startswith(detect_cpu.MODEL_X86_64), ( - "rvmprof only supports x86-64 CPUs for now") +vmprof_tl_stack = rthread.ThreadLocalField(PVMPROFSTACK, "vmprof_tl_stack") +do_use_eci = rffi.llexternal_use_eci( + ExternalCompilationInfo(includes=['vmprof_stack.h'], + include_dirs = [SRC])) - # mapping of argument count (not counting the final uid argument) to - # the register that holds this uid argument - reg = {0: '%rdi', - 1: '%rsi', - 2: '%rdx', - 3: '%rcx', - 4: '%r8', - 5: '%r9', - } - try: - reg = reg[len(token)] - except KeyError: - raise NotImplementedError( - "not supported: %r takes more than 5 arguments" % (func,)) +def enter_code(unique_id): + do_use_eci() + s = lltype.malloc(VMPROFSTACK, flavor='raw') + s.c_next = vmprof_tl_stack.get_or_make_raw() + s.c_value = unique_id + s.c_kind = VMPROF_CODE_TAG + vmprof_tl_stack.setraw(s) + return s - target = udir.join('module_cache') - target.ensure(dir=1) - target = target.join('trampoline_%s_%s.vmprof.s' % (name, token)) - # NOTE! the tabs in this file are absolutely essential, things - # that don't start with \t are silently ignored (: WAT!?) 
- target.write("""\ -\t.text -\t.globl\t%(tramp_name)s -%(type_decl)s -%(tramp_name)s: -\t.cfi_startproc -\tpushq\t%(reg)s -\t.cfi_def_cfa_offset 16 -\tcall %(cont_name)s%(PLT)s -\taddq\t$8, %%rsp -%(extra_align)s -\tret -\t.cfi_endproc -%(size_decl)s -""" % locals()) - - def tok2cname(tok): - if tok == 'i': - return 'long' - if tok == 'r': - return 'void *' - raise NotImplementedError(repr(tok)) - - header = 'RPY_EXTERN %s %s(%s);\n' % ( - tok2cname(restok), - orig_tramp_name, - ', '.join([tok2cname(tok) for tok in token] + ['long'])) - - header += """\ -static int cmp_%s(void *addr) { - if (addr == %s) return 1; -#ifdef VMPROF_ADDR_OF_TRAMPOLINE - return VMPROF_ADDR_OF_TRAMPOLINE(addr); -#undef VMPROF_ADDR_OF_TRAMPOLINE -#else - return 0; -#endif -#define VMPROF_ADDR_OF_TRAMPOLINE cmp_%s -} -""" % (tramp_name, orig_tramp_name, tramp_name) - - eci = ExternalCompilationInfo( - post_include_bits = [header], - separate_module_files = [str(target)], - ) - - return rffi.llexternal( - orig_tramp_name, - [token2lltype(tok) for tok in token] + [lltype.Signed], - token2lltype(restok), - compilation_info=eci, - _nowrapper=True, sandboxsafe=True, - random_effects_on_gcobjs=True) +def leave_code(s): + vmprof_tl_stack.setraw(s.c_next) + lltype.free(s, flavor='raw') diff --git a/rpython/rlib/rvmprof/rvmprof.py b/rpython/rlib/rvmprof/rvmprof.py --- a/rpython/rlib/rvmprof/rvmprof.py +++ b/rpython/rlib/rvmprof/rvmprof.py @@ -4,12 +4,19 @@ from rpython.rlib.rvmprof import cintf from rpython.rtyper.annlowlevel import cast_instance_to_gcref from rpython.rtyper.annlowlevel import cast_base_ptr_to_instance -from rpython.rtyper.lltypesystem import rffi +from rpython.rtyper.lltypesystem import rffi, llmemory +from rpython.rtyper.lltypesystem.lloperation import llop MAX_FUNC_NAME = 1023 # ____________________________________________________________ +# keep in sync with vmprof_stack.h +VMPROF_CODE_TAG = 1 +VMPROF_BLACKHOLE_TAG = 2 +VMPROF_JITTED_TAG = 3 +VMPROF_JITTING_TAG = 4 
+VMPROF_GC_TAG = 5 class VMProfError(Exception): def __init__(self, msg): @@ -19,17 +26,16 @@ class VMProf(object): + _immutable_fields_ = ['is_enabled?'] + def __init__(self): "NOT_RPYTHON: use _get_vmprof()" self._code_classes = set() self._gather_all_code_objs = lambda: None self._cleanup_() - if sys.maxint == 2147483647: - self._code_unique_id = 0 # XXX this is wrong, it won't work on 32bit - else: - self._code_unique_id = 0x7000000000000000 + self._code_unique_id = 4 self.cintf = cintf.setup() - + def _cleanup_(self): self.is_enabled = False @@ -127,7 +133,6 @@ if self.cintf.vmprof_register_virtual_function(name, uid, 500000) < 0: raise VMProfError("vmprof buffers full! disk full or too slow") - def vmprof_execute_code(name, get_code_fn, result_class=None): """Decorator to be used on the function that interprets a code object. @@ -136,12 +141,7 @@ 'get_code_fn(*args)' is called to extract the code object from the arguments given to the decorated function. - The original function can return None, an integer, or an instance. - In the latter case (only), 'result_class' must be set. - - NOTE: for now, this assumes that the decorated functions only takes - instances or plain integer arguments, and at most 5 of them - (including 'self' if applicable). + 'result_class' is ignored (backward compatibility). 
""" def decorate(func): try: @@ -149,52 +149,19 @@ except cintf.VMProfPlatformUnsupported: return func - if hasattr(func, 'im_self'): - assert func.im_self is None - func = func.im_func - - def lower(*args): - if len(args) == 0: - return (), "" - ll_args, token = lower(*args[1:]) - ll_arg = args[0] - if isinstance(ll_arg, int): - tok = "i" - else: - tok = "r" - ll_arg = cast_instance_to_gcref(ll_arg) - return (ll_arg,) + ll_args, tok + token - - @specialize.memo() - def get_ll_trampoline(token): - if result_class is None: - restok = "i" - else: - restok = "r" - return cintf.make_trampoline_function(name, func, token, restok) - def decorated_function(*args): - # go through the asm trampoline ONLY if we are translated but not - # being JITted. - # - # If we are not translated, we obviously don't want to go through - # the trampoline because there is no C function it can call. - # # If we are being JITted, we want to skip the trampoline, else the # JIT cannot see through it. - # - if we_are_translated() and not jit.we_are_jitted(): - # if we are translated, call the trampoline + if not jit.we_are_jitted(): unique_id = get_code_fn(*args)._vmprof_unique_id - ll_args, token = lower(*args) - ll_trampoline = get_ll_trampoline(token) - ll_result = ll_trampoline(*ll_args + (unique_id,)) - if result_class is not None: - return cast_base_ptr_to_instance(result_class, ll_result) - else: - return ll_result + x = cintf.enter_code(unique_id) + try: + return func(*args) + finally: + cintf.leave_code(x) else: return func(*args) + decorated_function.__name__ = func.__name__ + '_rvmprof' return decorated_function diff --git a/rpython/rlib/rvmprof/src/rvmprof.c b/rpython/rlib/rvmprof/src/rvmprof.c --- a/rpython/rlib/rvmprof/src/rvmprof.c +++ b/rpython/rlib/rvmprof/src/rvmprof.c @@ -12,10 +12,12 @@ #else # include "common_header.h" +# include "structdef.h" +# include "src/threadlocal.h" # include "rvmprof.h" -# ifndef VMPROF_ADDR_OF_TRAMPOLINE +/*# ifndef VMPROF_ADDR_OF_TRAMPOLINE # 
error "RPython program using rvmprof, but not calling vmprof_execute_code()" -# endif +# endif*/ #endif diff --git a/rpython/rlib/rvmprof/src/rvmprof.h b/rpython/rlib/rvmprof/src/rvmprof.h --- a/rpython/rlib/rvmprof/src/rvmprof.h +++ b/rpython/rlib/rvmprof/src/rvmprof.h @@ -4,3 +4,7 @@ RPY_EXTERN int vmprof_enable(void); RPY_EXTERN int vmprof_disable(void); RPY_EXTERN int vmprof_register_virtual_function(char *, long, int); +RPY_EXTERN void* vmprof_stack_new(void); +RPY_EXTERN int vmprof_stack_append(void*, long); +RPY_EXTERN long vmprof_stack_pop(void*); +RPY_EXTERN void vmprof_stack_free(void*); diff --git a/rpython/rlib/rvmprof/src/vmprof_common.h b/rpython/rlib/rvmprof/src/vmprof_common.h new file mode 100644 --- /dev/null +++ b/rpython/rlib/rvmprof/src/vmprof_common.h @@ -0,0 +1,72 @@ +#include + +#define MAX_FUNC_NAME 1024 + +static int profile_file = -1; +static long prepare_interval_usec = 0; +static long profile_interval_usec = 0; +static int opened_profile(char *interp_name); + +#define MAX_STACK_DEPTH \ + ((SINGLE_BUF_SIZE - sizeof(struct prof_stacktrace_s)) / sizeof(void *)) + +#define MARKER_STACKTRACE '\x01' +#define MARKER_VIRTUAL_IP '\x02' +#define MARKER_TRAILER '\x03' +#define MARKER_INTERP_NAME '\x04' /* deprecated */ +#define MARKER_HEADER '\x05' + +#define VERSION_BASE '\x00' +#define VERSION_THREAD_ID '\x01' +#define VERSION_TAG '\x02' + +typedef struct prof_stacktrace_s { + char padding[sizeof(long) - 1]; + char marker; + long count, depth; + void *stack[]; +} prof_stacktrace_s; + + +RPY_EXTERN +char *vmprof_init(int fd, double interval, char *interp_name) +{ + if (interval < 1e-6 || interval >= 1.0) + return "bad value for 'interval'"; + prepare_interval_usec = (int)(interval * 1000000.0); + + if (prepare_concurrent_bufs() < 0) + return "out of memory"; + + assert(fd >= 0); + profile_file = fd; + if (opened_profile(interp_name) < 0) { + profile_file = -1; + return strerror(errno); + } + return NULL; +} + +static int _write_all(const char 
*buf, size_t bufsize); + +static int opened_profile(char *interp_name) +{ + struct { + long hdr[5]; + char interp_name[259]; + } header; + + size_t namelen = strnlen(interp_name, 255); + + header.hdr[0] = 0; + header.hdr[1] = 3; + header.hdr[2] = 0; + header.hdr[3] = prepare_interval_usec; + header.hdr[4] = 0; + header.interp_name[0] = MARKER_HEADER; + header.interp_name[1] = '\x00'; + header.interp_name[2] = VERSION_TAG; + header.interp_name[3] = namelen; + memcpy(&header.interp_name[4], interp_name, namelen); + return _write_all((char*)&header, 5 * sizeof(long) + 4 + namelen); +} diff --git a/rpython/rlib/rvmprof/src/vmprof_get_custom_offset.h b/rpython/rlib/rvmprof/src/vmprof_get_custom_offset.h --- a/rpython/rlib/rvmprof/src/vmprof_get_custom_offset.h +++ b/rpython/rlib/rvmprof/src/vmprof_get_custom_offset.h @@ -1,119 +1,49 @@ -#ifdef PYPY_JIT_CODEMAP void *pypy_find_codemap_at_addr(long addr, long *start_addr); long pypy_yield_codemap_at_addr(void *codemap_raw, long addr, long *current_pos_addr); long pypy_jit_stack_depth_at_loc(long loc); -#endif -#ifdef CPYTHON_GET_CUSTOM_OFFSET -static void *tramp_start, *tramp_end; -#endif - - -static ptrdiff_t vmprof_unw_get_custom_offset(void* ip, void *cp) { - -#if defined(PYPY_JIT_CODEMAP) - - intptr_t ip_l = (intptr_t)ip; - return pypy_jit_stack_depth_at_loc(ip_l); - -#elif defined(CPYTHON_GET_CUSTOM_OFFSET) - - if (ip >= tramp_start && ip <= tramp_end) { - // XXX the return value is wrong for all the places before push and - // after pop, fix - void *bp; - void *sp; - - /* This is a stage2 trampoline created by hotpatch: - - push %rbx - push %rbp - mov %rsp,%rbp - and $0xfffffffffffffff0,%rsp // make sure the stack is aligned - movabs $0x7ffff687bb10,%rbx - callq *%rbx - leaveq - pop %rbx - retq - - the stack layout is like this: - - +-----------+ high addresses - | ret addr | - +-----------+ - | saved rbx | start of the function frame - +-----------+ - | saved rbp | - +-----------+ - | ........ 
| <-- rbp - +-----------+ low addresses - - So, the trampoline frame starts at rbp+16, and the return address, - is at rbp+24. The vmprof API requires us to return the offset of - the frame relative to sp, hence we have this weird computation. - - XXX (antocuni): I think we could change the API to return directly - the frame address instead of the offset; however, this require a - change in the PyPy code too - */ - - unw_get_reg (cp, UNW_REG_SP, (unw_word_t*)&sp); - unw_get_reg (cp, UNW_X86_64_RBP, (unw_word_t*)&bp); - return bp+16+8-sp; - } - return -1; - -#else - - return -1; - -#endif -} - -static long vmprof_write_header_for_jit_addr(void **result, long n, - void *ip, int max_depth) +static long vmprof_write_header_for_jit_addr(intptr_t *result, long n, + intptr_t ip, int max_depth) { #ifdef PYPY_JIT_CODEMAP void *codemap; long current_pos = 0; - intptr_t id; + intptr_t ident; long start_addr = 0; intptr_t addr = (intptr_t)ip; int start, k; - void *tmp; + intptr_t tmp; codemap = pypy_find_codemap_at_addr(addr, &start_addr); - if (codemap == NULL) - // not a jit code at all + if (codemap == NULL || n >= max_depth - 2) + // not a jit code at all or almost max depth return n; // modify the last entry to point to start address and not the random one // in the middle - result[n - 1] = (void*)start_addr; - result[n] = (void*)2; - n++; + result[n++] = VMPROF_ASSEMBLER_TAG; + result[n++] = start_addr; start = n; while (n < max_depth) { - id = pypy_yield_codemap_at_addr(codemap, addr, ¤t_pos); - if (id == -1) + ident = pypy_yield_codemap_at_addr(codemap, addr, ¤t_pos); + if (ident == -1) // finish break; - if (id == 0) + if (ident == 0) continue; // not main codemap - result[n++] = (void *)id; + result[n++] = VMPROF_JITTED_TAG; + result[n++] = ident; } - k = 0; + k = 1; + while (k < (n - start) / 2) { tmp = result[start + k]; - result[start + k] = result[n - k - 1]; - result[n - k - 1] = tmp; - k++; - } - if (n < max_depth) { - result[n++] = (void*)3; + result[start + 
k] = result[n - k]; + result[n - k] = tmp; + k += 2; } #endif return n; diff --git a/rpython/rlib/rvmprof/src/vmprof_getpc.h b/rpython/rlib/rvmprof/src/vmprof_getpc.h --- a/rpython/rlib/rvmprof/src/vmprof_getpc.h +++ b/rpython/rlib/rvmprof/src/vmprof_getpc.h @@ -134,7 +134,7 @@ } }; -void* GetPC(ucontext_t *signal_ucontext) { +intptr_t GetPC(ucontext_t *signal_ucontext) { // See comment above struct CallUnrollInfo. Only try instruction // flow matching if both eip and esp looks reasonable. const int eip = signal_ucontext->uc_mcontext.gregs[REG_EIP]; @@ -146,12 +146,12 @@ if (!memcmp(eip_char + callunrollinfo[i].pc_offset, callunrollinfo[i].ins, callunrollinfo[i].ins_size)) { // We have a match. - void **retaddr = (void**)(esp + callunrollinfo[i].return_sp_offset); + intptr_t *retaddr = (intptr_t*)(esp + callunrollinfo[i].return_sp_offset); return *retaddr; } } } - return (void*)eip; + return eip; } // Special case #2: Windows, which has to do something totally different. @@ -170,7 +170,7 @@ typedef int ucontext_t; #endif -void* GetPC(ucontext_t *signal_ucontext) { +intptr_t GetPC(ucontext_t *signal_ucontext) { RAW_LOG(ERROR, "GetPC is not yet implemented on Windows\n"); return NULL; } @@ -180,11 +180,11 @@ // the right value for your system, and add it to the list in // configure.ac (or set it manually in your config.h). 
#else -void* GetPC(ucontext_t *signal_ucontext) { +intptr_t GetPC(ucontext_t *signal_ucontext) { #ifdef __APPLE__ - return (void*)(signal_ucontext->uc_mcontext->__ss.__rip); + return (signal_ucontext->uc_mcontext->__ss.__rip); #else - return (void*)signal_ucontext->PC_FROM_UCONTEXT; // defined in config.h + return signal_ucontext->PC_FROM_UCONTEXT; // defined in config.h #endif } diff --git a/rpython/rlib/rvmprof/src/vmprof_main.h b/rpython/rlib/rvmprof/src/vmprof_main.h --- a/rpython/rlib/rvmprof/src/vmprof_main.h +++ b/rpython/rlib/rvmprof/src/vmprof_main.h @@ -25,84 +25,28 @@ #include #include #include +#include #include #include #include #include +#include #include #include "vmprof_getpc.h" -#ifdef __APPLE__ -#include "libunwind.h" -#else -#include "vmprof_unwind.h" -#endif #include "vmprof_mt.h" - +#include "vmprof_stack.h" +#include "vmprof_common.h" /************************************************************/ -// functions copied from libunwind using dlopen - -#ifndef __APPLE__ // should be linux only probably -static int (*unw_get_reg)(unw_cursor_t*, int, unw_word_t*) = NULL; -static int (*unw_step)(unw_cursor_t*) = NULL; -static int (*unw_init_local)(unw_cursor_t *, unw_context_t *) = NULL; -static int (*unw_get_proc_info)(unw_cursor_t *, unw_proc_info_t *) = NULL; -#endif - -static int profile_file = -1; static long prepare_interval_usec; +static long saved_profile_file; static struct profbuf_s *volatile current_codes; static void *(*mainloop_get_virtual_ip)(char *) = 0; static int opened_profile(char *interp_name); static void flush_codes(void); -#ifdef __APPLE__ -#define UNWIND_NAME "/usr/lib/system/libunwind.dylib" -#define UNW_PREFIX "unw" -#else -#define UNWIND_NAME "libunwind.so" -#define UNW_PREFIX "_ULx86_64" -#endif - -RPY_EXTERN -char *vmprof_init(int fd, double interval, char *interp_name) -{ - if (interval < 1e-6 || interval >= 1.0) - return "bad value for 'interval'"; - prepare_interval_usec = (int)(interval * 1000000.0); - -#ifndef 
__APPLE__ - if (!unw_get_reg) { - void *libhandle; - - if (!(libhandle = dlopen(UNWIND_NAME, RTLD_LAZY | RTLD_LOCAL))) - goto error; - if (!(unw_get_reg = dlsym(libhandle, UNW_PREFIX "_get_reg"))) - goto error; - if (!(unw_get_proc_info = dlsym(libhandle, UNW_PREFIX "_get_proc_info"))) - goto error; - if (!(unw_init_local = dlsym(libhandle, UNW_PREFIX "_init_local"))) - goto error; - if (!(unw_step = dlsym(libhandle, UNW_PREFIX "_step"))) - goto error; - } -#endif - if (prepare_concurrent_bufs() < 0) - return "out of memory"; - - assert(fd >= 0); - profile_file = fd; - if (opened_profile(interp_name) < 0) { - profile_file = -1; - return strerror(errno); - } - return NULL; - - error: - return dlerror(); -} /************************************************************/ @@ -131,131 +75,62 @@ * ************************************************************* */ -#define MAX_FUNC_NAME 128 -#define MAX_STACK_DEPTH \ - ((SINGLE_BUF_SIZE - sizeof(struct prof_stacktrace_s)) / sizeof(void *)) - -#define MARKER_STACKTRACE '\x01' -#define MARKER_VIRTUAL_IP '\x02' -#define MARKER_TRAILER '\x03' -#define MARKER_INTERP_NAME '\x04' /* deprecated */ -#define MARKER_HEADER '\x05' - -#define VERSION_BASE '\x00' -#define VERSION_THREAD_ID '\x01' - -struct prof_stacktrace_s { - char padding[sizeof(long) - 1]; - char marker; - long count, depth; - void *stack[]; -}; - -static long profile_interval_usec = 0; static char atfork_hook_installed = 0; -/* ****************************************************** - * libunwind workaround for process JIT frames correctly - * ****************************************************** - */ - #include "vmprof_get_custom_offset.h" -typedef struct { - void* _unused1; - void* _unused2; - void* sp; - void* ip; - void* _unused3[sizeof(unw_cursor_t)/sizeof(void*) - 4]; -} vmprof_hacked_unw_cursor_t; - -static int vmprof_unw_step(unw_cursor_t *cp, int first_run) -{ - void* ip; - void* sp; - ptrdiff_t sp_offset; - unw_get_reg (cp, UNW_REG_IP, (unw_word_t*)&ip); - 
unw_get_reg (cp, UNW_REG_SP, (unw_word_t*)&sp); - if (!first_run) { - // make sure we're pointing to the CALL and not to the first - // instruction after. If the callee adjusts the stack for us - // it's not safe to be at the instruction after - ip -= 1; - } - sp_offset = vmprof_unw_get_custom_offset(ip, cp); - - if (sp_offset == -1) { - // it means that the ip is NOT in JITted code, so we can use the - // stardard unw_step - return unw_step(cp); - } - else { - // this is a horrible hack to manually walk the stack frame, by - // setting the IP and SP in the cursor - vmprof_hacked_unw_cursor_t *cp2 = (vmprof_hacked_unw_cursor_t*)cp; - void* bp = (void*)sp + sp_offset; - cp2->sp = bp; - bp -= sizeof(void*); - cp2->ip = ((void**)bp)[0]; - // the ret is on the top of the stack minus WORD - return 1; - } -} - - /* ************************************************************* * functions to dump the stack trace * ************************************************************* */ -static int get_stack_trace(void** result, int max_depth, ucontext_t *ucontext) + +#ifndef RPYTHON_LL2CTYPES +static vmprof_stack_t *get_vmprof_stack(void) { - void *ip; - int n = 0; - unw_cursor_t cursor; -#ifdef __APPLE__ - unw_context_t uc; - unw_getcontext(&uc); + return RPY_THREADLOCALREF_GET(vmprof_tl_stack); +} #else - unw_context_t uc = *ucontext; +static vmprof_stack_t *get_vmprof_stack(void) +{ + return 0; +} #endif - int ret = unw_init_local(&cursor, &uc); - assert(ret >= 0); - (void)ret; - - while (n < max_depth) { - if (unw_get_reg(&cursor, UNW_REG_IP, (unw_word_t *) &ip) < 0) { - break; +static int get_stack_trace(intptr_t *result, int max_depth, intptr_t pc, ucontext_t *ucontext) +{ + vmprof_stack_t* stack = get_vmprof_stack(); + int n = 0; + intptr_t addr = 0; + int bottom_jitted = 0; + // check if the pc is in JIT +#ifdef PYPY_JIT_CODEMAP + if (pypy_find_codemap_at_addr((intptr_t)pc, &addr)) { + // the bottom part is jitted, means we can fill up the first part + // from the JIT + n 
= vmprof_write_header_for_jit_addr(result, n, pc, max_depth); + stack = stack->next; // skip the first item as it contains garbage + } +#endif + while (n < max_depth - 1 && stack) { + if (stack->kind == VMPROF_CODE_TAG) { + result[n] = stack->kind; + result[n + 1] = stack->value; + n += 2; } - - unw_proc_info_t pip; - unw_get_proc_info(&cursor, &pip); - - /* if n==0, it means that the signal handler interrupted us while we - were in the trampoline, so we are not executing (yet) the real main - loop function; just skip it */ - if (VMPROF_ADDR_OF_TRAMPOLINE((void*)pip.start_ip) && n > 0) { - // found main loop stack frame - void* sp; - unw_get_reg(&cursor, UNW_REG_SP, (unw_word_t *) &sp); - if (mainloop_get_virtual_ip) - ip = mainloop_get_virtual_ip((char *)sp); - else - ip = *(void **)sp; +#ifdef PYPY_JIT_CODEMAP + else if (stack->kind == VMPROF_JITTED_TAG) { + pc = ((intptr_t*)(stack->value - sizeof(intptr_t)))[0]; + n = vmprof_write_header_for_jit_addr(result, n, pc, max_depth); } - - int first_run = (n == 0); - result[n++] = ip; - n = vmprof_write_header_for_jit_addr(result, n, ip, max_depth); - if (vmprof_unw_step(&cursor, first_run) <= 0) - break; +#endif + stack = stack->next; } return n; } -static void *get_current_thread_id(void) +static intptr_t get_current_thread_id(void) { /* xxx This function is a hack on two fronts: @@ -269,7 +144,7 @@ An alternative would be to try to look if the information is available in the ucontext_t in the caller. 
*/ - return (void *)pthread_self(); + return (intptr_t)pthread_self(); } @@ -278,8 +153,43 @@ * ************************************************************* */ +#include + +volatile int spinlock; +jmp_buf restore_point; + +static void segfault_handler(int arg) +{ + longjmp(restore_point, SIGSEGV); +} + static void sigprof_handler(int sig_nr, siginfo_t* info, void *ucontext) { +#ifdef __APPLE__ + // TERRIBLE HACK AHEAD + // on OS X, the thread local storage is sometimes uninitialized + // when the signal handler runs - it means it's impossible to read errno + // or call any syscall or read PyThread_Current or pthread_self. Additionally, + // it seems impossible to read the register gs. + // here we register segfault handler (all guarded by a spinlock) and call + // longjmp in case segfault happens while reading a thread local + while (__sync_lock_test_and_set(&spinlock, 1)) { + } + signal(SIGSEGV, &segfault_handler); + int fault_code = setjmp(restore_point); + if (fault_code == 0) { + pthread_self(); + get_current_thread_id(); + } else { + signal(SIGSEGV, SIG_DFL); + __sync_synchronize(); + spinlock = 0; + return; + } + signal(SIGSEGV, SIG_DFL); + __sync_synchronize(); + spinlock = 0; +#endif long val = __sync_fetch_and_add(&signal_handler_value, 2L); if ((val & 1) == 0) { @@ -296,9 +206,8 @@ struct prof_stacktrace_s *st = (struct prof_stacktrace_s *)p->data; st->marker = MARKER_STACKTRACE; st->count = 1; - st->stack[0] = GetPC((ucontext_t*)ucontext); - depth = get_stack_trace(st->stack+1, MAX_STACK_DEPTH-2, ucontext); - depth++; // To account for pc value in stack[0]; + depth = get_stack_trace(st->stack, + MAX_STACK_DEPTH-2, GetPC((ucontext_t*)ucontext), ucontext); st->depth = depth; st->stack[depth++] = get_current_thread_id(); p->data_offset = offsetof(struct prof_stacktrace_s, marker); @@ -363,12 +272,15 @@ static void atfork_disable_timer(void) { if (profile_interval_usec > 0) { + saved_profile_file = profile_file; + profile_file = -1; remove_sigprof_timer(); 
} } static void atfork_enable_timer(void) { if (profile_interval_usec > 0) { + profile_file = saved_profile_file; install_sigprof_timer(); } } @@ -415,7 +327,7 @@ return -1; } -static int _write_all(const void *buf, size_t bufsize) +static int _write_all(const char *buf, size_t bufsize) { while (bufsize > 0) { ssize_t count = write(profile_file, buf, bufsize); @@ -427,71 +339,13 @@ return 0; } -static int opened_profile(char *interp_name) -{ - struct { - long hdr[5]; - char interp_name[259]; - } header; - - size_t namelen = strnlen(interp_name, 255); - current_codes = NULL; - - header.hdr[0] = 0; - header.hdr[1] = 3; - header.hdr[2] = 0; - header.hdr[3] = prepare_interval_usec; - header.hdr[4] = 0; - header.interp_name[0] = MARKER_HEADER; - header.interp_name[1] = '\x00'; - header.interp_name[2] = VERSION_THREAD_ID; - header.interp_name[3] = namelen; - memcpy(&header.interp_name[4], interp_name, namelen); - return _write_all(&header, 5 * sizeof(long) + 4 + namelen); -} - static int close_profile(void) { - char buf[4096]; - ssize_t size; unsigned char marker = MARKER_TRAILER; if (_write_all(&marker, 1) < 0) return -1; -#ifdef __linux__ - // copy /proc/self/maps to the end of the profile file - int srcfd = open("/proc/self/maps", O_RDONLY); - if (srcfd < 0) - return -1; - - while ((size = read(srcfd, buf, sizeof buf)) > 0) { - if (_write_all(buf, size) < 0) { - close(srcfd); - return -1; - } - } - close(srcfd); -#else - // freebsd and mac -#if defined(__APPLE__) - sprintf(buf, "vmmap %d", getpid()); -#else - sprintf(buf, "procstat -v %d", getpid()); -#endif - FILE *srcf = popen(buf, "r"); - if (!srcf) - return -1; - - while ((size = fread(buf, 1, sizeof buf, srcf))) { - if (_write_all(buf, size) < 0) { - pclose(srcf); - return -1; - } - } - pclose(srcf); -#endif - /* don't close() the file descriptor from here */ profile_file = -1; return 0; @@ -522,6 +376,9 @@ struct profbuf_s *p; char *t; + if (profile_file == -1) + return 0; // silently don't write it + retry: p = 
current_codes; if (p != NULL) { @@ -529,7 +386,7 @@ /* grabbed 'current_codes': we will append the current block to it if it contains enough room */ size_t freesize = SINGLE_BUF_SIZE - p->data_size; - if (freesize < blocklen) { + if (freesize < (size_t)blocklen) { /* full: flush it */ commit_buffer(profile_file, p); p = NULL; diff --git a/rpython/rlib/rvmprof/src/vmprof_stack.h b/rpython/rlib/rvmprof/src/vmprof_stack.h new file mode 100644 --- /dev/null +++ b/rpython/rlib/rvmprof/src/vmprof_stack.h @@ -0,0 +1,25 @@ +#ifndef _VMPROF_STACK_H_ +#define _VMPROF_STACK_H_ + +#include + +#define VMPROF_CODE_TAG 1 /* <- also in cintf.py */ +#define VMPROF_BLACKHOLE_TAG 2 +#define VMPROF_JITTED_TAG 3 +#define VMPROF_JITTING_TAG 4 +#define VMPROF_GC_TAG 5 +#define VMPROF_ASSEMBLER_TAG 6 +// whatever we want here + +typedef struct vmprof_stack_s { + struct vmprof_stack_s* next; + intptr_t value; + intptr_t kind; +} vmprof_stack_t; + +// the kind is WORD so we consume exactly 3 WORDs and we don't have +// to worry too much. There is a potential for squeezing it with bit +// patterns into one WORD, but I don't want to care RIGHT NOW, potential +// for future optimization potential + +#endif diff --git a/rpython/rlib/rvmprof/test/test_ztranslation.py b/rpython/rlib/rvmprof/test/test_ztranslation.py --- a/rpython/rlib/rvmprof/test/test_ztranslation.py +++ b/rpython/rlib/rvmprof/test/test_ztranslation.py @@ -64,8 +64,14 @@ def test_interpreted(): # takes forever if the Python process is already big... 
import subprocess - subprocess.check_call([sys.executable, os.path.basename(__file__)], - cwd=(os.path.dirname(__file__) or '.')) + me = os.path.basename(__file__) + if me.endswith('pyc') or me.endswith('pyo'): + me = me[:-1] + env = os.environ.copy() + env['PYTHONPATH'] = '' + subprocess.check_call([sys.executable, me], + cwd=(os.path.dirname(__file__) or '.'), + env=env) def test_compiled(): fn = compile(main, [], gcpolicy="minimark") From pypy.commits at gmail.com Thu Feb 4 14:45:26 2016 From: pypy.commits at gmail.com (fijal) Date: Thu, 04 Feb 2016 11:45:26 -0800 (PST) Subject: [pypy-commit] pypy vmprof-newstack: closed to be merged branch Message-ID: <56b3aa56.6953c20a.c061.ffffae52@mx.google.com> Author: fijal Branch: vmprof-newstack Changeset: r82077:26a47a71a22d Date: 2016-02-04 20:44 +0100 http://bitbucket.org/pypy/pypy/changeset/26a47a71a22d/ Log: closed to be merged branch From pypy.commits at gmail.com Thu Feb 4 14:46:18 2016 From: pypy.commits at gmail.com (fijal) Date: Thu, 04 Feb 2016 11:46:18 -0800 (PST) Subject: [pypy-commit] pypy default: update whatsnew Message-ID: <56b3aa8a.4c0c1c0a.79fc7.fffff2ae@mx.google.com> Author: fijal Branch: Changeset: r82079:70b375dd3dff Date: 2016-02-04 20:45 +0100 http://bitbucket.org/pypy/pypy/changeset/70b375dd3dff/ Log: update whatsnew diff --git a/pypy/doc/whatsnew-head.rst b/pypy/doc/whatsnew-head.rst --- a/pypy/doc/whatsnew-head.rst +++ b/pypy/doc/whatsnew-head.rst @@ -139,3 +139,7 @@ Refactor sandboxing to operate at a higher level. .. branch: cpyext-bootstrap + +.. branch: vmprof-newstack + +Refactor vmprof to work cross-operating-system. 
\ No newline at end of file From pypy.commits at gmail.com Thu Feb 4 16:09:22 2016 From: pypy.commits at gmail.com (cfbolz) Date: Thu, 04 Feb 2016 13:09:22 -0800 (PST) Subject: [pypy-commit] pypy default: the ll_assert has the condition wrong :-( Message-ID: <56b3be02.4c0c1c0a.79fc7.0b8b@mx.google.com> Author: Carl Friedrich Bolz Branch: Changeset: r82080:246d32e32de9 Date: 2016-02-04 14:57 +0100 http://bitbucket.org/pypy/pypy/changeset/246d32e32de9/ Log: the ll_assert has the condition wrong :-( diff --git a/rpython/rlib/jit.py b/rpython/rlib/jit.py --- a/rpython/rlib/jit.py +++ b/rpython/rlib/jit.py @@ -1117,7 +1117,7 @@ from rpython.rtyper.lltypesystem.lloperation import llop from rpython.rtyper.lltypesystem import lltype from rpython.rtyper.rclass import ll_type - ll_assert(ll_value == lltype.nullptr(lltype.typeOf(ll_value).TO), "record_exact_class called with None argument") + ll_assert(ll_value != lltype.nullptr(lltype.typeOf(ll_value).TO), "record_exact_class called with None argument") ll_assert(ll_type(ll_value) is ll_cls, "record_exact_class called with invalid arguments") llop.jit_record_exact_class(lltype.Void, ll_value, ll_cls) From pypy.commits at gmail.com Thu Feb 4 16:09:23 2016 From: pypy.commits at gmail.com (cfbolz) Date: Thu, 04 Feb 2016 13:09:23 -0800 (PST) Subject: [pypy-commit] pypy default: just support exactly the same colors as graphviz Message-ID: <56b3be03.cb571c0a.3d885.10f8@mx.google.com> Author: Carl Friedrich Bolz Branch: Changeset: r82081:b1f915c79644 Date: 2016-02-04 16:22 +0100 http://bitbucket.org/pypy/pypy/changeset/b1f915c79644/ Log: just support exactly the same colors as graphviz diff --git a/dotviewer/drawgraph.py b/dotviewer/drawgraph.py --- a/dotviewer/drawgraph.py +++ b/dotviewer/drawgraph.py @@ -14,12 +14,661 @@ FONT = os.path.join(this_dir, 'font', 'DroidSans.ttf') FIXEDFONT = os.path.join(this_dir, 'font', 'DroidSansMono.ttf') COLOR = { - 'black': (0,0,0), - 'white': (255,255,255), - 'red': (255,0,0), - 'green': 
(0,255,0), - 'blue': (0,0,255), - 'yellow': (255,255,0), + 'aliceblue': (240, 248, 255), + 'antiquewhite': (250, 235, 215), + 'antiquewhite1': (255, 239, 219), + 'antiquewhite2': (238, 223, 204), + 'antiquewhite3': (205, 192, 176), + 'antiquewhite4': (139, 131, 120), + 'aquamarine': (127, 255, 212), + 'aquamarine1': (127, 255, 212), + 'aquamarine2': (118, 238, 198), + 'aquamarine3': (102, 205, 170), + 'aquamarine4': (69, 139, 116), + 'azure': (240, 255, 255), + 'azure1': (240, 255, 255), + 'azure2': (224, 238, 238), + 'azure3': (193, 205, 205), + 'azure4': (131, 139, 139), + 'beige': (245, 245, 220), + 'bisque': (255, 228, 196), + 'bisque1': (255, 228, 196), + 'bisque2': (238, 213, 183), + 'bisque3': (205, 183, 158), + 'bisque4': (139, 125, 107), + 'black': (0, 0, 0), + 'blanchedalmond': (255, 235, 205), + 'blue': (0, 0, 255), + 'blue1': (0, 0, 255), + 'blue2': (0, 0, 238), + 'blue3': (0, 0, 205), + 'blue4': (0, 0, 139), + 'blueviolet': (138, 43, 226), + 'brown': (165, 42, 42), + 'brown1': (255, 64, 64), + 'brown2': (238, 59, 59), + 'brown3': (205, 51, 51), + 'brown4': (139, 35, 35), + 'burlywood': (222, 184, 135), + 'burlywood1': (255, 211, 155), + 'burlywood2': (238, 197, 145), + 'burlywood3': (205, 170, 125), + 'burlywood4': (139, 115, 85), + 'cadetblue': (95, 158, 160), + 'cadetblue1': (152, 245, 255), + 'cadetblue2': (142, 229, 238), + 'cadetblue3': (122, 197, 205), + 'cadetblue4': (83, 134, 139), + 'chartreuse': (127, 255, 0), + 'chartreuse1': (127, 255, 0), + 'chartreuse2': (118, 238, 0), + 'chartreuse3': (102, 205, 0), + 'chartreuse4': (69, 139, 0), + 'chocolate': (210, 105, 30), + 'chocolate1': (255, 127, 36), + 'chocolate2': (238, 118, 33), + 'chocolate3': (205, 102, 29), + 'chocolate4': (139, 69, 19), + 'coral': (255, 127, 80), + 'coral1': (255, 114, 86), + 'coral2': (238, 106, 80), + 'coral3': (205, 91, 69), + 'coral4': (139, 62, 47), + 'cornflowerblue': (100, 149, 237), + 'cornsilk': (255, 248, 220), + 'cornsilk1': (255, 248, 220), + 'cornsilk2': (238, 
232, 205), + 'cornsilk3': (205, 200, 177), + 'cornsilk4': (139, 136, 120), + 'crimson': (220, 20, 60), + 'cyan': (0, 255, 255), + 'cyan1': (0, 255, 255), + 'cyan2': (0, 238, 238), + 'cyan3': (0, 205, 205), + 'cyan4': (0, 139, 139), + 'darkgoldenrod': (184, 134, 11), + 'darkgoldenrod1': (255, 185, 15), + 'darkgoldenrod2': (238, 173, 14), + 'darkgoldenrod3': (205, 149, 12), + 'darkgoldenrod4': (139, 101, 8), + 'darkgreen': (0, 100, 0), + 'darkkhaki': (189, 183, 107), + 'darkolivegreen': (85, 107, 47), + 'darkolivegreen1': (202, 255, 112), + 'darkolivegreen2': (188, 238, 104), + 'darkolivegreen3': (162, 205, 90), + 'darkolivegreen4': (110, 139, 61), + 'darkorange': (255, 140, 0), + 'darkorange1': (255, 127, 0), + 'darkorange2': (238, 118, 0), + 'darkorange3': (205, 102, 0), + 'darkorange4': (139, 69, 0), + 'darkorchid': (153, 50, 204), + 'darkorchid1': (191, 62, 255), + 'darkorchid2': (178, 58, 238), + 'darkorchid3': (154, 50, 205), + 'darkorchid4': (104, 34, 139), + 'darksalmon': (233, 150, 122), + 'darkseagreen': (143, 188, 143), + 'darkseagreen1': (193, 255, 193), + 'darkseagreen2': (180, 238, 180), + 'darkseagreen3': (155, 205, 155), + 'darkseagreen4': (105, 139, 105), + 'darkslateblue': (72, 61, 139), + 'darkslategray': (47, 79, 79), + 'darkslategray1': (151, 255, 255), + 'darkslategray2': (141, 238, 238), + 'darkslategray3': (121, 205, 205), + 'darkslategray4': (82, 139, 139), + 'darkslategrey': (47, 79, 79), + 'darkturquoise': (0, 206, 209), + 'darkviolet': (148, 0, 211), + 'deeppink': (255, 20, 147), + 'deeppink1': (255, 20, 147), + 'deeppink2': (238, 18, 137), + 'deeppink3': (205, 16, 118), + 'deeppink4': (139, 10, 80), + 'deepskyblue': (0, 191, 255), + 'deepskyblue1': (0, 191, 255), + 'deepskyblue2': (0, 178, 238), + 'deepskyblue3': (0, 154, 205), + 'deepskyblue4': (0, 104, 139), + 'dimgray': (105, 105, 105), + 'dimgrey': (105, 105, 105), + 'dodgerblue': (30, 144, 255), + 'dodgerblue1': (30, 144, 255), + 'dodgerblue2': (28, 134, 238), + 'dodgerblue3': (24, 
116, 205), + 'dodgerblue4': (16, 78, 139), + 'firebrick': (178, 34, 34), + 'firebrick1': (255, 48, 48), + 'firebrick2': (238, 44, 44), + 'firebrick3': (205, 38, 38), + 'firebrick4': (139, 26, 26), + 'floralwhite': (255, 250, 240), + 'forestgreen': (34, 139, 34), + 'gainsboro': (220, 220, 220), + 'ghostwhite': (248, 248, 255), + 'gold': (255, 215, 0), + 'gold1': (255, 215, 0), + 'gold2': (238, 201, 0), + 'gold3': (205, 173, 0), + 'gold4': (139, 117, 0), + 'goldenrod': (218, 165, 32), + 'goldenrod1': (255, 193, 37), + 'goldenrod2': (238, 180, 34), + 'goldenrod3': (205, 155, 29), + 'goldenrod4': (139, 105, 20), + 'gray': (192, 192, 192), + 'gray0': (0, 0, 0), + 'gray1': (3, 3, 3), + 'gray10': (26, 26, 26), + 'gray100': (255, 255, 255), + 'gray11': (28, 28, 28), + 'gray12': (31, 31, 31), + 'gray13': (33, 33, 33), + 'gray14': (36, 36, 36), + 'gray15': (38, 38, 38), + 'gray16': (41, 41, 41), + 'gray17': (43, 43, 43), + 'gray18': (46, 46, 46), + 'gray19': (48, 48, 48), + 'gray2': (5, 5, 5), + 'gray20': (51, 51, 51), + 'gray21': (54, 54, 54), + 'gray22': (56, 56, 56), + 'gray23': (59, 59, 59), + 'gray24': (61, 61, 61), + 'gray25': (64, 64, 64), + 'gray26': (66, 66, 66), + 'gray27': (69, 69, 69), + 'gray28': (71, 71, 71), + 'gray29': (74, 74, 74), + 'gray3': (8, 8, 8), + 'gray30': (77, 77, 77), + 'gray31': (79, 79, 79), + 'gray32': (82, 82, 82), + 'gray33': (84, 84, 84), + 'gray34': (87, 87, 87), + 'gray35': (89, 89, 89), + 'gray36': (92, 92, 92), + 'gray37': (94, 94, 94), + 'gray38': (97, 97, 97), + 'gray39': (99, 99, 99), + 'gray4': (10, 10, 10), + 'gray40': (102, 102, 102), + 'gray41': (105, 105, 105), + 'gray42': (107, 107, 107), + 'gray43': (110, 110, 110), + 'gray44': (112, 112, 112), + 'gray45': (115, 115, 115), + 'gray46': (117, 117, 117), + 'gray47': (120, 120, 120), + 'gray48': (122, 122, 122), + 'gray49': (125, 125, 125), + 'gray5': (13, 13, 13), + 'gray50': (127, 127, 127), + 'gray51': (130, 130, 130), + 'gray52': (133, 133, 133), + 'gray53': (135, 135, 135), + 
'gray54': (138, 138, 138), + 'gray55': (140, 140, 140), + 'gray56': (143, 143, 143), + 'gray57': (145, 145, 145), + 'gray58': (148, 148, 148), + 'gray59': (150, 150, 150), + 'gray6': (15, 15, 15), + 'gray60': (153, 153, 153), + 'gray61': (156, 156, 156), + 'gray62': (158, 158, 158), + 'gray63': (161, 161, 161), + 'gray64': (163, 163, 163), + 'gray65': (166, 166, 166), + 'gray66': (168, 168, 168), + 'gray67': (171, 171, 171), + 'gray68': (173, 173, 173), + 'gray69': (176, 176, 176), + 'gray7': (18, 18, 18), + 'gray70': (179, 179, 179), + 'gray71': (181, 181, 181), + 'gray72': (184, 184, 184), + 'gray73': (186, 186, 186), + 'gray74': (189, 189, 189), + 'gray75': (191, 191, 191), + 'gray76': (194, 194, 194), + 'gray77': (196, 196, 196), + 'gray78': (199, 199, 199), + 'gray79': (201, 201, 201), + 'gray8': (20, 20, 20), + 'gray80': (204, 204, 204), + 'gray81': (207, 207, 207), + 'gray82': (209, 209, 209), + 'gray83': (212, 212, 212), + 'gray84': (214, 214, 214), + 'gray85': (217, 217, 217), + 'gray86': (219, 219, 219), + 'gray87': (222, 222, 222), + 'gray88': (224, 224, 224), + 'gray89': (227, 227, 227), + 'gray9': (23, 23, 23), + 'gray90': (229, 229, 229), + 'gray91': (232, 232, 232), + 'gray92': (235, 235, 235), + 'gray93': (237, 237, 237), + 'gray94': (240, 240, 240), + 'gray95': (242, 242, 242), + 'gray96': (245, 245, 245), + 'gray97': (247, 247, 247), + 'gray98': (250, 250, 250), + 'gray99': (252, 252, 252), + 'green': (0, 255, 0), + 'green1': (0, 255, 0), + 'green2': (0, 238, 0), + 'green3': (0, 205, 0), + 'green4': (0, 139, 0), + 'greenyellow': (173, 255, 47), + 'grey': (192, 192, 192), + 'grey0': (0, 0, 0), + 'grey1': (3, 3, 3), + 'grey10': (26, 26, 26), + 'grey100': (255, 255, 255), + 'grey11': (28, 28, 28), + 'grey12': (31, 31, 31), + 'grey13': (33, 33, 33), + 'grey14': (36, 36, 36), + 'grey15': (38, 38, 38), + 'grey16': (41, 41, 41), + 'grey17': (43, 43, 43), + 'grey18': (46, 46, 46), + 'grey19': (48, 48, 48), + 'grey2': (5, 5, 5), + 'grey20': (51, 51, 51), + 
'grey21': (54, 54, 54), + 'grey22': (56, 56, 56), + 'grey23': (59, 59, 59), + 'grey24': (61, 61, 61), + 'grey25': (64, 64, 64), + 'grey26': (66, 66, 66), + 'grey27': (69, 69, 69), + 'grey28': (71, 71, 71), + 'grey29': (74, 74, 74), + 'grey3': (8, 8, 8), + 'grey30': (77, 77, 77), + 'grey31': (79, 79, 79), + 'grey32': (82, 82, 82), + 'grey33': (84, 84, 84), + 'grey34': (87, 87, 87), + 'grey35': (89, 89, 89), + 'grey36': (92, 92, 92), + 'grey37': (94, 94, 94), + 'grey38': (97, 97, 97), + 'grey39': (99, 99, 99), + 'grey4': (10, 10, 10), + 'grey40': (102, 102, 102), + 'grey41': (105, 105, 105), + 'grey42': (107, 107, 107), + 'grey43': (110, 110, 110), + 'grey44': (112, 112, 112), + 'grey45': (115, 115, 115), + 'grey46': (117, 117, 117), + 'grey47': (120, 120, 120), + 'grey48': (122, 122, 122), + 'grey49': (125, 125, 125), + 'grey5': (13, 13, 13), + 'grey50': (127, 127, 127), + 'grey51': (130, 130, 130), + 'grey52': (133, 133, 133), + 'grey53': (135, 135, 135), + 'grey54': (138, 138, 138), + 'grey55': (140, 140, 140), + 'grey56': (143, 143, 143), + 'grey57': (145, 145, 145), + 'grey58': (148, 148, 148), + 'grey59': (150, 150, 150), + 'grey6': (15, 15, 15), + 'grey60': (153, 153, 153), + 'grey61': (156, 156, 156), + 'grey62': (158, 158, 158), + 'grey63': (161, 161, 161), + 'grey64': (163, 163, 163), + 'grey65': (166, 166, 166), + 'grey66': (168, 168, 168), + 'grey67': (171, 171, 171), + 'grey68': (173, 173, 173), + 'grey69': (176, 176, 176), + 'grey7': (18, 18, 18), + 'grey70': (179, 179, 179), + 'grey71': (181, 181, 181), + 'grey72': (184, 184, 184), + 'grey73': (186, 186, 186), + 'grey74': (189, 189, 189), + 'grey75': (191, 191, 191), + 'grey76': (194, 194, 194), + 'grey77': (196, 196, 196), + 'grey78': (199, 199, 199), + 'grey79': (201, 201, 201), + 'grey8': (20, 20, 20), + 'grey80': (204, 204, 204), + 'grey81': (207, 207, 207), + 'grey82': (209, 209, 209), + 'grey83': (212, 212, 212), + 'grey84': (214, 214, 214), + 'grey85': (217, 217, 217), + 'grey86': (219, 219, 
219), + 'grey87': (222, 222, 222), + 'grey88': (224, 224, 224), + 'grey89': (227, 227, 227), + 'grey9': (23, 23, 23), + 'grey90': (229, 229, 229), + 'grey91': (232, 232, 232), + 'grey92': (235, 235, 235), + 'grey93': (237, 237, 237), + 'grey94': (240, 240, 240), + 'grey95': (242, 242, 242), + 'grey96': (245, 245, 245), + 'grey97': (247, 247, 247), + 'grey98': (250, 250, 250), + 'grey99': (252, 252, 252), + 'honeydew': (240, 255, 240), + 'honeydew1': (240, 255, 240), + 'honeydew2': (224, 238, 224), + 'honeydew3': (193, 205, 193), + 'honeydew4': (131, 139, 131), + 'hotpink': (255, 105, 180), + 'hotpink1': (255, 110, 180), + 'hotpink2': (238, 106, 167), + 'hotpink3': (205, 96, 144), + 'hotpink4': (139, 58, 98), + 'indianred': (205, 92, 92), + 'indianred1': (255, 106, 106), + 'indianred2': (238, 99, 99), + 'indianred3': (205, 85, 85), + 'indianred4': (139, 58, 58), + 'indigo': (75, 0, 130), + 'invis': (255, 255, 254), + 'ivory': (255, 255, 240), + 'ivory1': (255, 255, 240), + 'ivory2': (238, 238, 224), + 'ivory3': (205, 205, 193), + 'ivory4': (139, 139, 131), + 'khaki': (240, 230, 140), + 'khaki1': (255, 246, 143), + 'khaki2': (238, 230, 133), + 'khaki3': (205, 198, 115), + 'khaki4': (139, 134, 78), + 'lavender': (230, 230, 250), + 'lavenderblush': (255, 240, 245), + 'lavenderblush1': (255, 240, 245), + 'lavenderblush2': (238, 224, 229), + 'lavenderblush3': (205, 193, 197), + 'lavenderblush4': (139, 131, 134), + 'lawngreen': (124, 252, 0), + 'lemonchiffon': (255, 250, 205), + 'lemonchiffon1': (255, 250, 205), + 'lemonchiffon2': (238, 233, 191), + 'lemonchiffon3': (205, 201, 165), + 'lemonchiffon4': (139, 137, 112), + 'lightblue': (173, 216, 230), + 'lightblue1': (191, 239, 255), + 'lightblue2': (178, 223, 238), + 'lightblue3': (154, 192, 205), + 'lightblue4': (104, 131, 139), + 'lightcoral': (240, 128, 128), + 'lightcyan': (224, 255, 255), + 'lightcyan1': (224, 255, 255), + 'lightcyan2': (209, 238, 238), + 'lightcyan3': (180, 205, 205), + 'lightcyan4': (122, 139, 139), 
+ 'lightgoldenrod': (238, 221, 130), + 'lightgoldenrod1': (255, 236, 139), + 'lightgoldenrod2': (238, 220, 130), + 'lightgoldenrod3': (205, 190, 112), + 'lightgoldenrod4': (139, 129, 76), + 'lightgoldenrodyellow': (250, 250, 210), + 'lightgray': (211, 211, 211), + 'lightgrey': (211, 211, 211), + 'lightpink': (255, 182, 193), + 'lightpink1': (255, 174, 185), + 'lightpink2': (238, 162, 173), + 'lightpink3': (205, 140, 149), + 'lightpink4': (139, 95, 101), + 'lightsalmon': (255, 160, 122), + 'lightsalmon1': (255, 160, 122), + 'lightsalmon2': (238, 149, 114), + 'lightsalmon3': (205, 129, 98), + 'lightsalmon4': (139, 87, 66), + 'lightseagreen': (32, 178, 170), + 'lightskyblue': (135, 206, 250), + 'lightskyblue1': (176, 226, 255), + 'lightskyblue2': (164, 211, 238), + 'lightskyblue3': (141, 182, 205), + 'lightskyblue4': (96, 123, 139), + 'lightslateblue': (132, 112, 255), + 'lightslategray': (119, 136, 153), + 'lightslategrey': (119, 136, 153), + 'lightsteelblue': (176, 196, 222), + 'lightsteelblue1': (202, 225, 255), + 'lightsteelblue2': (188, 210, 238), + 'lightsteelblue3': (162, 181, 205), + 'lightsteelblue4': (110, 123, 139), + 'lightyellow': (255, 255, 224), + 'lightyellow1': (255, 255, 224), + 'lightyellow2': (238, 238, 209), + 'lightyellow3': (205, 205, 180), + 'lightyellow4': (139, 139, 122), + 'limegreen': (50, 205, 50), + 'linen': (250, 240, 230), + 'magenta': (255, 0, 255), + 'magenta1': (255, 0, 255), + 'magenta2': (238, 0, 238), + 'magenta3': (205, 0, 205), + 'magenta4': (139, 0, 139), + 'maroon': (176, 48, 96), + 'maroon1': (255, 52, 179), + 'maroon2': (238, 48, 167), + 'maroon3': (205, 41, 144), + 'maroon4': (139, 28, 98), + 'mediumaquamarine': (102, 205, 170), + 'mediumblue': (0, 0, 205), + 'mediumorchid': (186, 85, 211), + 'mediumorchid1': (224, 102, 255), + 'mediumorchid2': (209, 95, 238), + 'mediumorchid3': (180, 82, 205), + 'mediumorchid4': (122, 55, 139), + 'mediumpurple': (147, 112, 219), + 'mediumpurple1': (171, 130, 255), + 'mediumpurple2': (159, 
121, 238), + 'mediumpurple3': (137, 104, 205), + 'mediumpurple4': (93, 71, 139), + 'mediumseagreen': (60, 179, 113), + 'mediumslateblue': (123, 104, 238), + 'mediumspringgreen': (0, 250, 154), + 'mediumturquoise': (72, 209, 204), + 'mediumvioletred': (199, 21, 133), + 'midnightblue': (25, 25, 112), + 'mintcream': (245, 255, 250), + 'mistyrose': (255, 228, 225), + 'mistyrose1': (255, 228, 225), + 'mistyrose2': (238, 213, 210), + 'mistyrose3': (205, 183, 181), + 'mistyrose4': (139, 125, 123), + 'moccasin': (255, 228, 181), + 'navajowhite': (255, 222, 173), + 'navajowhite1': (255, 222, 173), + 'navajowhite2': (238, 207, 161), + 'navajowhite3': (205, 179, 139), + 'navajowhite4': (139, 121, 94), + 'navy': (0, 0, 128), + 'navyblue': (0, 0, 128), + 'none': (255, 255, 254), + 'oldlace': (253, 245, 230), + 'olivedrab': (107, 142, 35), + 'olivedrab1': (192, 255, 62), + 'olivedrab2': (179, 238, 58), + 'olivedrab3': (154, 205, 50), + 'olivedrab4': (105, 139, 34), + 'orange': (255, 165, 0), + 'orange1': (255, 165, 0), + 'orange2': (238, 154, 0), + 'orange3': (205, 133, 0), + 'orange4': (139, 90, 0), + 'orangered': (255, 69, 0), + 'orangered1': (255, 69, 0), + 'orangered2': (238, 64, 0), + 'orangered3': (205, 55, 0), + 'orangered4': (139, 37, 0), + 'orchid': (218, 112, 214), + 'orchid1': (255, 131, 250), + 'orchid2': (238, 122, 233), + 'orchid3': (205, 105, 201), + 'orchid4': (139, 71, 137), + 'palegoldenrod': (238, 232, 170), + 'palegreen': (152, 251, 152), + 'palegreen1': (154, 255, 154), + 'palegreen2': (144, 238, 144), + 'palegreen3': (124, 205, 124), + 'palegreen4': (84, 139, 84), + 'paleturquoise': (175, 238, 238), + 'paleturquoise1': (187, 255, 255), + 'paleturquoise2': (174, 238, 238), + 'paleturquoise3': (150, 205, 205), + 'paleturquoise4': (102, 139, 139), + 'palevioletred': (219, 112, 147), + 'palevioletred1': (255, 130, 171), + 'palevioletred2': (238, 121, 159), + 'palevioletred3': (205, 104, 137), + 'palevioletred4': (139, 71, 93), + 'papayawhip': (255, 239, 213), + 
'peachpuff': (255, 218, 185), + 'peachpuff1': (255, 218, 185), + 'peachpuff2': (238, 203, 173), + 'peachpuff3': (205, 175, 149), + 'peachpuff4': (139, 119, 101), + 'peru': (205, 133, 63), + 'pink': (255, 192, 203), + 'pink1': (255, 181, 197), + 'pink2': (238, 169, 184), + 'pink3': (205, 145, 158), + 'pink4': (139, 99, 108), + 'plum': (221, 160, 221), + 'plum1': (255, 187, 255), + 'plum2': (238, 174, 238), + 'plum3': (205, 150, 205), + 'plum4': (139, 102, 139), + 'powderblue': (176, 224, 230), + 'purple': (160, 32, 240), + 'purple1': (155, 48, 255), + 'purple2': (145, 44, 238), + 'purple3': (125, 38, 205), + 'purple4': (85, 26, 139), + 'red': (255, 0, 0), + 'red1': (255, 0, 0), + 'red2': (238, 0, 0), + 'red3': (205, 0, 0), + 'red4': (139, 0, 0), + 'rosybrown': (188, 143, 143), + 'rosybrown1': (255, 193, 193), + 'rosybrown2': (238, 180, 180), + 'rosybrown3': (205, 155, 155), + 'rosybrown4': (139, 105, 105), + 'royalblue': (65, 105, 225), + 'royalblue1': (72, 118, 255), + 'royalblue2': (67, 110, 238), + 'royalblue3': (58, 95, 205), + 'royalblue4': (39, 64, 139), + 'saddlebrown': (139, 69, 19), + 'salmon': (250, 128, 114), + 'salmon1': (255, 140, 105), + 'salmon2': (238, 130, 98), + 'salmon3': (205, 112, 84), + 'salmon4': (139, 76, 57), + 'sandybrown': (244, 164, 96), + 'seagreen': (46, 139, 87), + 'seagreen1': (84, 255, 159), + 'seagreen2': (78, 238, 148), + 'seagreen3': (67, 205, 128), + 'seagreen4': (46, 139, 87), + 'seashell': (255, 245, 238), + 'seashell1': (255, 245, 238), + 'seashell2': (238, 229, 222), + 'seashell3': (205, 197, 191), + 'seashell4': (139, 134, 130), + 'sienna': (160, 82, 45), + 'sienna1': (255, 130, 71), + 'sienna2': (238, 121, 66), + 'sienna3': (205, 104, 57), + 'sienna4': (139, 71, 38), + 'skyblue': (135, 206, 235), + 'skyblue1': (135, 206, 255), + 'skyblue2': (126, 192, 238), + 'skyblue3': (108, 166, 205), + 'skyblue4': (74, 112, 139), + 'slateblue': (106, 90, 205), + 'slateblue1': (131, 111, 255), + 'slateblue2': (122, 103, 238), + 
'slateblue3': (105, 89, 205), + 'slateblue4': (71, 60, 139), + 'slategray': (112, 128, 144), + 'slategray1': (198, 226, 255), + 'slategray2': (185, 211, 238), + 'slategray3': (159, 182, 205), + 'slategray4': (108, 123, 139), + 'slategrey': (112, 128, 144), + 'snow': (255, 250, 250), + 'snow1': (255, 250, 250), + 'snow2': (238, 233, 233), + 'snow3': (205, 201, 201), + 'snow4': (139, 137, 137), + 'springgreen': (0, 255, 127), + 'springgreen1': (0, 255, 127), + 'springgreen2': (0, 238, 118), + 'springgreen3': (0, 205, 102), + 'springgreen4': (0, 139, 69), + 'steelblue': (70, 130, 180), + 'steelblue1': (99, 184, 255), + 'steelblue2': (92, 172, 238), + 'steelblue3': (79, 148, 205), + 'steelblue4': (54, 100, 139), + 'tan': (210, 180, 140), + 'tan1': (255, 165, 79), + 'tan2': (238, 154, 73), + 'tan3': (205, 133, 63), + 'tan4': (139, 90, 43), + 'thistle': (216, 191, 216), + 'thistle1': (255, 225, 255), + 'thistle2': (238, 210, 238), + 'thistle3': (205, 181, 205), + 'thistle4': (139, 123, 139), + 'tomato': (255, 99, 71), + 'tomato1': (255, 99, 71), + 'tomato2': (238, 92, 66), + 'tomato3': (205, 79, 57), + 'tomato4': (139, 54, 38), + 'transparent': (255, 255, 254), + 'turquoise': (64, 224, 208), + 'turquoise1': (0, 245, 255), + 'turquoise2': (0, 229, 238), + 'turquoise3': (0, 197, 205), + 'turquoise4': (0, 134, 139), + 'violet': (238, 130, 238), + 'violetred': (208, 32, 144), + 'violetred1': (255, 62, 150), + 'violetred2': (238, 58, 140), + 'violetred3': (205, 50, 120), + 'violetred4': (139, 34, 82), + 'wheat': (245, 222, 179), + 'wheat1': (255, 231, 186), + 'wheat2': (238, 216, 174), + 'wheat3': (205, 186, 150), + 'wheat4': (139, 126, 102), + 'white': (255, 255, 255), + 'whitesmoke': (245, 245, 245), + 'yellow': (255, 255, 0), + 'yellow1': (255, 255, 0), + 'yellow2': (238, 238, 0), + 'yellow3': (205, 205, 0), + 'yellow4': (139, 139, 0), + 'yellowgreen': (154, 205, 50), } re_nonword=re.compile(r'([^0-9a-zA-Z_.]+)') 
re_linewidth=re.compile(r'setlinewidth\((\d+(\.\d*)?|\.\d+)\)') From pypy.commits at gmail.com Fri Feb 5 04:48:34 2016 From: pypy.commits at gmail.com (fijal) Date: Fri, 05 Feb 2016 01:48:34 -0800 (PST) Subject: [pypy-commit] pypy default: remove the conftest that skipped all tests Message-ID: <56b46ff2.4c0c1c0a.79fc7.ffffb924@mx.google.com> Author: fijal Branch: Changeset: r82083:c9632a51530c Date: 2016-02-05 10:47 +0100 http://bitbucket.org/pypy/pypy/changeset/c9632a51530c/ Log: remove the conftest that skipped all tests diff --git a/pypy/module/_vmprof/test/conftest.py b/pypy/module/_vmprof/test/conftest.py deleted file mode 100644 --- a/pypy/module/_vmprof/test/conftest.py +++ /dev/null @@ -1,7 +0,0 @@ -import py -from rpython.jit.backend import detect_cpu - -cpu = detect_cpu.autodetect() -def pytest_runtest_setup(item): - if cpu != detect_cpu.MODEL_X86_64: - py.test.skip("x86_64 tests only") From pypy.commits at gmail.com Fri Feb 5 04:48:32 2016 From: pypy.commits at gmail.com (fijal) Date: Fri, 05 Feb 2016 01:48:32 -0800 (PST) Subject: [pypy-commit] pypy default: run actually tests and make them pass Message-ID: <56b46ff0.11301c0a.43dba.ffffe797@mx.google.com> Author: fijal Branch: Changeset: r82082:a56a14e631f0 Date: 2016-02-05 10:46 +0100 http://bitbucket.org/pypy/pypy/changeset/a56a14e631f0/ Log: run actually tests and make them pass diff --git a/pypy/module/_vmprof/test/test__vmprof.py b/pypy/module/_vmprof/test/test__vmprof.py --- a/pypy/module/_vmprof/test/test__vmprof.py +++ b/pypy/module/_vmprof/test/test__vmprof.py @@ -23,7 +23,7 @@ i += 5 * WORD # header assert s[i ] == '\x05' # MARKER_HEADER assert s[i + 1] == '\x00' # 0 - assert s[i + 2] == '\x01' # VERSION_THREAD_ID + assert s[i + 2] == '\x02' # VERSION_THREAD_ID assert s[i + 3] == chr(4) # len('pypy') assert s[i + 4: i + 8] == 'pypy' i += 8 diff --git a/pypy/module/_vmprof/test/test_direct.py b/pypy/module/_vmprof/test/test_direct.py --- a/pypy/module/_vmprof/test/test_direct.py +++ 
b/pypy/module/_vmprof/test/test_direct.py @@ -19,6 +19,7 @@ lib = ffi.verify(""" #define PYPY_JIT_CODEMAP +#include "vmprof_stack.h" volatile int pypy_codemap_currently_invalid = 0; @@ -42,7 +43,7 @@ } -""" + open(str(srcdir.join("vmprof_get_custom_offset.h"))).read()) +""" + open(str(srcdir.join("vmprof_get_custom_offset.h"))).read(), include_dirs=[str(srcdir)]) class TestDirect(object): def test_infrastructure(self): @@ -67,8 +68,5 @@ buf = ffi.new("long[10]", [0] * 10) result = ffi.cast("void**", buf) res = lib.vmprof_write_header_for_jit_addr(result, 0, ffi.NULL, 100) - assert res == 6 - assert buf[0] == 2 - assert buf[1] == 16 - assert buf[2] == 12 - assert buf[3] == 8 + assert res == 10 + assert [x for x in buf] == [6, 0, 3, 16, 3, 12, 3, 8, 3, 4 diff --git a/rpython/rlib/rvmprof/cintf.py b/rpython/rlib/rvmprof/cintf.py --- a/rpython/rlib/rvmprof/cintf.py +++ b/rpython/rlib/rvmprof/cintf.py @@ -21,7 +21,7 @@ _libs = [] eci_kwds = dict( include_dirs = [SRC], - includes = ['rvmprof.h'], + includes = ['rvmprof.h', 'vmprof_stack.h'], libraries = _libs, separate_module_files = [SRC.join('rvmprof.c')], post_include_bits=['#define RPYTHON_VMPROF\n'], @@ -30,10 +30,6 @@ def setup(): - if not detect_cpu.autodetect().startswith(detect_cpu.MODEL_X86_64): - raise VMProfPlatformUnsupported("rvmprof only supports" - " x86-64 CPUs for now") - platform.verify_eci(ExternalCompilationInfo( compile_extra=['-DRPYTHON_LL2CTYPES'], **eci_kwds)) diff --git a/rpython/rlib/rvmprof/src/vmprof_main.h b/rpython/rlib/rvmprof/src/vmprof_main.h --- a/rpython/rlib/rvmprof/src/vmprof_main.h +++ b/rpython/rlib/rvmprof/src/vmprof_main.h @@ -32,9 +32,9 @@ #include #include #include +#include "vmprof_stack.h" #include "vmprof_getpc.h" #include "vmprof_mt.h" -#include "vmprof_stack.h" #include "vmprof_common.h" /************************************************************/ From pypy.commits at gmail.com Fri Feb 5 04:48:36 2016 From: pypy.commits at gmail.com (fijal) Date: Fri, 05 Feb 2016 
01:48:36 -0800 (PST) Subject: [pypy-commit] pypy default: merge Message-ID: <56b46ff4.c7d8c20a.15468.ffff88e8@mx.google.com> Author: fijal Branch: Changeset: r82084:7c2815cadc38 Date: 2016-02-05 10:47 +0100 http://bitbucket.org/pypy/pypy/changeset/7c2815cadc38/ Log: merge diff --git a/dotviewer/drawgraph.py b/dotviewer/drawgraph.py --- a/dotviewer/drawgraph.py +++ b/dotviewer/drawgraph.py @@ -14,12 +14,661 @@ FONT = os.path.join(this_dir, 'font', 'DroidSans.ttf') FIXEDFONT = os.path.join(this_dir, 'font', 'DroidSansMono.ttf') COLOR = { - 'black': (0,0,0), - 'white': (255,255,255), - 'red': (255,0,0), - 'green': (0,255,0), - 'blue': (0,0,255), - 'yellow': (255,255,0), + 'aliceblue': (240, 248, 255), + 'antiquewhite': (250, 235, 215), + 'antiquewhite1': (255, 239, 219), + 'antiquewhite2': (238, 223, 204), + 'antiquewhite3': (205, 192, 176), + 'antiquewhite4': (139, 131, 120), + 'aquamarine': (127, 255, 212), + 'aquamarine1': (127, 255, 212), + 'aquamarine2': (118, 238, 198), + 'aquamarine3': (102, 205, 170), + 'aquamarine4': (69, 139, 116), + 'azure': (240, 255, 255), + 'azure1': (240, 255, 255), + 'azure2': (224, 238, 238), + 'azure3': (193, 205, 205), + 'azure4': (131, 139, 139), + 'beige': (245, 245, 220), + 'bisque': (255, 228, 196), + 'bisque1': (255, 228, 196), + 'bisque2': (238, 213, 183), + 'bisque3': (205, 183, 158), + 'bisque4': (139, 125, 107), + 'black': (0, 0, 0), + 'blanchedalmond': (255, 235, 205), + 'blue': (0, 0, 255), + 'blue1': (0, 0, 255), + 'blue2': (0, 0, 238), + 'blue3': (0, 0, 205), + 'blue4': (0, 0, 139), + 'blueviolet': (138, 43, 226), + 'brown': (165, 42, 42), + 'brown1': (255, 64, 64), + 'brown2': (238, 59, 59), + 'brown3': (205, 51, 51), + 'brown4': (139, 35, 35), + 'burlywood': (222, 184, 135), + 'burlywood1': (255, 211, 155), + 'burlywood2': (238, 197, 145), + 'burlywood3': (205, 170, 125), + 'burlywood4': (139, 115, 85), + 'cadetblue': (95, 158, 160), + 'cadetblue1': (152, 245, 255), + 'cadetblue2': (142, 229, 238), + 'cadetblue3': 
(122, 197, 205), + 'cadetblue4': (83, 134, 139), + 'chartreuse': (127, 255, 0), + 'chartreuse1': (127, 255, 0), + 'chartreuse2': (118, 238, 0), + 'chartreuse3': (102, 205, 0), + 'chartreuse4': (69, 139, 0), + 'chocolate': (210, 105, 30), + 'chocolate1': (255, 127, 36), + 'chocolate2': (238, 118, 33), + 'chocolate3': (205, 102, 29), + 'chocolate4': (139, 69, 19), + 'coral': (255, 127, 80), + 'coral1': (255, 114, 86), + 'coral2': (238, 106, 80), + 'coral3': (205, 91, 69), + 'coral4': (139, 62, 47), + 'cornflowerblue': (100, 149, 237), + 'cornsilk': (255, 248, 220), + 'cornsilk1': (255, 248, 220), + 'cornsilk2': (238, 232, 205), + 'cornsilk3': (205, 200, 177), + 'cornsilk4': (139, 136, 120), + 'crimson': (220, 20, 60), + 'cyan': (0, 255, 255), + 'cyan1': (0, 255, 255), + 'cyan2': (0, 238, 238), + 'cyan3': (0, 205, 205), + 'cyan4': (0, 139, 139), + 'darkgoldenrod': (184, 134, 11), + 'darkgoldenrod1': (255, 185, 15), + 'darkgoldenrod2': (238, 173, 14), + 'darkgoldenrod3': (205, 149, 12), + 'darkgoldenrod4': (139, 101, 8), + 'darkgreen': (0, 100, 0), + 'darkkhaki': (189, 183, 107), + 'darkolivegreen': (85, 107, 47), + 'darkolivegreen1': (202, 255, 112), + 'darkolivegreen2': (188, 238, 104), + 'darkolivegreen3': (162, 205, 90), + 'darkolivegreen4': (110, 139, 61), + 'darkorange': (255, 140, 0), + 'darkorange1': (255, 127, 0), + 'darkorange2': (238, 118, 0), + 'darkorange3': (205, 102, 0), + 'darkorange4': (139, 69, 0), + 'darkorchid': (153, 50, 204), + 'darkorchid1': (191, 62, 255), + 'darkorchid2': (178, 58, 238), + 'darkorchid3': (154, 50, 205), + 'darkorchid4': (104, 34, 139), + 'darksalmon': (233, 150, 122), + 'darkseagreen': (143, 188, 143), + 'darkseagreen1': (193, 255, 193), + 'darkseagreen2': (180, 238, 180), + 'darkseagreen3': (155, 205, 155), + 'darkseagreen4': (105, 139, 105), + 'darkslateblue': (72, 61, 139), + 'darkslategray': (47, 79, 79), + 'darkslategray1': (151, 255, 255), + 'darkslategray2': (141, 238, 238), + 'darkslategray3': (121, 205, 205), + 
'darkslategray4': (82, 139, 139), + 'darkslategrey': (47, 79, 79), + 'darkturquoise': (0, 206, 209), + 'darkviolet': (148, 0, 211), + 'deeppink': (255, 20, 147), + 'deeppink1': (255, 20, 147), + 'deeppink2': (238, 18, 137), + 'deeppink3': (205, 16, 118), + 'deeppink4': (139, 10, 80), + 'deepskyblue': (0, 191, 255), + 'deepskyblue1': (0, 191, 255), + 'deepskyblue2': (0, 178, 238), + 'deepskyblue3': (0, 154, 205), + 'deepskyblue4': (0, 104, 139), + 'dimgray': (105, 105, 105), + 'dimgrey': (105, 105, 105), + 'dodgerblue': (30, 144, 255), + 'dodgerblue1': (30, 144, 255), + 'dodgerblue2': (28, 134, 238), + 'dodgerblue3': (24, 116, 205), + 'dodgerblue4': (16, 78, 139), + 'firebrick': (178, 34, 34), + 'firebrick1': (255, 48, 48), + 'firebrick2': (238, 44, 44), + 'firebrick3': (205, 38, 38), + 'firebrick4': (139, 26, 26), + 'floralwhite': (255, 250, 240), + 'forestgreen': (34, 139, 34), + 'gainsboro': (220, 220, 220), + 'ghostwhite': (248, 248, 255), + 'gold': (255, 215, 0), + 'gold1': (255, 215, 0), + 'gold2': (238, 201, 0), + 'gold3': (205, 173, 0), + 'gold4': (139, 117, 0), + 'goldenrod': (218, 165, 32), + 'goldenrod1': (255, 193, 37), + 'goldenrod2': (238, 180, 34), + 'goldenrod3': (205, 155, 29), + 'goldenrod4': (139, 105, 20), + 'gray': (192, 192, 192), + 'gray0': (0, 0, 0), + 'gray1': (3, 3, 3), + 'gray10': (26, 26, 26), + 'gray100': (255, 255, 255), + 'gray11': (28, 28, 28), + 'gray12': (31, 31, 31), + 'gray13': (33, 33, 33), + 'gray14': (36, 36, 36), + 'gray15': (38, 38, 38), + 'gray16': (41, 41, 41), + 'gray17': (43, 43, 43), + 'gray18': (46, 46, 46), + 'gray19': (48, 48, 48), + 'gray2': (5, 5, 5), + 'gray20': (51, 51, 51), + 'gray21': (54, 54, 54), + 'gray22': (56, 56, 56), + 'gray23': (59, 59, 59), + 'gray24': (61, 61, 61), + 'gray25': (64, 64, 64), + 'gray26': (66, 66, 66), + 'gray27': (69, 69, 69), + 'gray28': (71, 71, 71), + 'gray29': (74, 74, 74), + 'gray3': (8, 8, 8), + 'gray30': (77, 77, 77), + 'gray31': (79, 79, 79), + 'gray32': (82, 82, 82), + 'gray33': 
(84, 84, 84), + 'gray34': (87, 87, 87), + 'gray35': (89, 89, 89), + 'gray36': (92, 92, 92), + 'gray37': (94, 94, 94), + 'gray38': (97, 97, 97), + 'gray39': (99, 99, 99), + 'gray4': (10, 10, 10), + 'gray40': (102, 102, 102), + 'gray41': (105, 105, 105), + 'gray42': (107, 107, 107), + 'gray43': (110, 110, 110), + 'gray44': (112, 112, 112), + 'gray45': (115, 115, 115), + 'gray46': (117, 117, 117), + 'gray47': (120, 120, 120), + 'gray48': (122, 122, 122), + 'gray49': (125, 125, 125), + 'gray5': (13, 13, 13), + 'gray50': (127, 127, 127), + 'gray51': (130, 130, 130), + 'gray52': (133, 133, 133), + 'gray53': (135, 135, 135), + 'gray54': (138, 138, 138), + 'gray55': (140, 140, 140), + 'gray56': (143, 143, 143), + 'gray57': (145, 145, 145), + 'gray58': (148, 148, 148), + 'gray59': (150, 150, 150), + 'gray6': (15, 15, 15), + 'gray60': (153, 153, 153), + 'gray61': (156, 156, 156), + 'gray62': (158, 158, 158), + 'gray63': (161, 161, 161), + 'gray64': (163, 163, 163), + 'gray65': (166, 166, 166), + 'gray66': (168, 168, 168), + 'gray67': (171, 171, 171), + 'gray68': (173, 173, 173), + 'gray69': (176, 176, 176), + 'gray7': (18, 18, 18), + 'gray70': (179, 179, 179), + 'gray71': (181, 181, 181), + 'gray72': (184, 184, 184), + 'gray73': (186, 186, 186), + 'gray74': (189, 189, 189), + 'gray75': (191, 191, 191), + 'gray76': (194, 194, 194), + 'gray77': (196, 196, 196), + 'gray78': (199, 199, 199), + 'gray79': (201, 201, 201), + 'gray8': (20, 20, 20), + 'gray80': (204, 204, 204), + 'gray81': (207, 207, 207), + 'gray82': (209, 209, 209), + 'gray83': (212, 212, 212), + 'gray84': (214, 214, 214), + 'gray85': (217, 217, 217), + 'gray86': (219, 219, 219), + 'gray87': (222, 222, 222), + 'gray88': (224, 224, 224), + 'gray89': (227, 227, 227), + 'gray9': (23, 23, 23), + 'gray90': (229, 229, 229), + 'gray91': (232, 232, 232), + 'gray92': (235, 235, 235), + 'gray93': (237, 237, 237), + 'gray94': (240, 240, 240), + 'gray95': (242, 242, 242), + 'gray96': (245, 245, 245), + 'gray97': (247, 247, 
247), + 'gray98': (250, 250, 250), + 'gray99': (252, 252, 252), + 'green': (0, 255, 0), + 'green1': (0, 255, 0), + 'green2': (0, 238, 0), + 'green3': (0, 205, 0), + 'green4': (0, 139, 0), + 'greenyellow': (173, 255, 47), + 'grey': (192, 192, 192), + 'grey0': (0, 0, 0), + 'grey1': (3, 3, 3), + 'grey10': (26, 26, 26), + 'grey100': (255, 255, 255), + 'grey11': (28, 28, 28), + 'grey12': (31, 31, 31), + 'grey13': (33, 33, 33), + 'grey14': (36, 36, 36), + 'grey15': (38, 38, 38), + 'grey16': (41, 41, 41), + 'grey17': (43, 43, 43), + 'grey18': (46, 46, 46), + 'grey19': (48, 48, 48), + 'grey2': (5, 5, 5), + 'grey20': (51, 51, 51), + 'grey21': (54, 54, 54), + 'grey22': (56, 56, 56), + 'grey23': (59, 59, 59), + 'grey24': (61, 61, 61), + 'grey25': (64, 64, 64), + 'grey26': (66, 66, 66), + 'grey27': (69, 69, 69), + 'grey28': (71, 71, 71), + 'grey29': (74, 74, 74), + 'grey3': (8, 8, 8), + 'grey30': (77, 77, 77), + 'grey31': (79, 79, 79), + 'grey32': (82, 82, 82), + 'grey33': (84, 84, 84), + 'grey34': (87, 87, 87), + 'grey35': (89, 89, 89), + 'grey36': (92, 92, 92), + 'grey37': (94, 94, 94), + 'grey38': (97, 97, 97), + 'grey39': (99, 99, 99), + 'grey4': (10, 10, 10), + 'grey40': (102, 102, 102), + 'grey41': (105, 105, 105), + 'grey42': (107, 107, 107), + 'grey43': (110, 110, 110), + 'grey44': (112, 112, 112), + 'grey45': (115, 115, 115), + 'grey46': (117, 117, 117), + 'grey47': (120, 120, 120), + 'grey48': (122, 122, 122), + 'grey49': (125, 125, 125), + 'grey5': (13, 13, 13), + 'grey50': (127, 127, 127), + 'grey51': (130, 130, 130), + 'grey52': (133, 133, 133), + 'grey53': (135, 135, 135), + 'grey54': (138, 138, 138), + 'grey55': (140, 140, 140), + 'grey56': (143, 143, 143), + 'grey57': (145, 145, 145), + 'grey58': (148, 148, 148), + 'grey59': (150, 150, 150), + 'grey6': (15, 15, 15), + 'grey60': (153, 153, 153), + 'grey61': (156, 156, 156), + 'grey62': (158, 158, 158), + 'grey63': (161, 161, 161), + 'grey64': (163, 163, 163), + 'grey65': (166, 166, 166), + 'grey66': (168, 168, 
168), + 'grey67': (171, 171, 171), + 'grey68': (173, 173, 173), + 'grey69': (176, 176, 176), + 'grey7': (18, 18, 18), + 'grey70': (179, 179, 179), + 'grey71': (181, 181, 181), + 'grey72': (184, 184, 184), + 'grey73': (186, 186, 186), + 'grey74': (189, 189, 189), + 'grey75': (191, 191, 191), + 'grey76': (194, 194, 194), + 'grey77': (196, 196, 196), + 'grey78': (199, 199, 199), + 'grey79': (201, 201, 201), + 'grey8': (20, 20, 20), + 'grey80': (204, 204, 204), + 'grey81': (207, 207, 207), + 'grey82': (209, 209, 209), + 'grey83': (212, 212, 212), + 'grey84': (214, 214, 214), + 'grey85': (217, 217, 217), + 'grey86': (219, 219, 219), + 'grey87': (222, 222, 222), + 'grey88': (224, 224, 224), + 'grey89': (227, 227, 227), + 'grey9': (23, 23, 23), + 'grey90': (229, 229, 229), + 'grey91': (232, 232, 232), + 'grey92': (235, 235, 235), + 'grey93': (237, 237, 237), + 'grey94': (240, 240, 240), + 'grey95': (242, 242, 242), + 'grey96': (245, 245, 245), + 'grey97': (247, 247, 247), + 'grey98': (250, 250, 250), + 'grey99': (252, 252, 252), + 'honeydew': (240, 255, 240), + 'honeydew1': (240, 255, 240), + 'honeydew2': (224, 238, 224), + 'honeydew3': (193, 205, 193), + 'honeydew4': (131, 139, 131), + 'hotpink': (255, 105, 180), + 'hotpink1': (255, 110, 180), + 'hotpink2': (238, 106, 167), + 'hotpink3': (205, 96, 144), + 'hotpink4': (139, 58, 98), + 'indianred': (205, 92, 92), + 'indianred1': (255, 106, 106), + 'indianred2': (238, 99, 99), + 'indianred3': (205, 85, 85), + 'indianred4': (139, 58, 58), + 'indigo': (75, 0, 130), + 'invis': (255, 255, 254), + 'ivory': (255, 255, 240), + 'ivory1': (255, 255, 240), + 'ivory2': (238, 238, 224), + 'ivory3': (205, 205, 193), + 'ivory4': (139, 139, 131), + 'khaki': (240, 230, 140), + 'khaki1': (255, 246, 143), + 'khaki2': (238, 230, 133), + 'khaki3': (205, 198, 115), + 'khaki4': (139, 134, 78), + 'lavender': (230, 230, 250), + 'lavenderblush': (255, 240, 245), + 'lavenderblush1': (255, 240, 245), + 'lavenderblush2': (238, 224, 229), + 
'lavenderblush3': (205, 193, 197), + 'lavenderblush4': (139, 131, 134), + 'lawngreen': (124, 252, 0), + 'lemonchiffon': (255, 250, 205), + 'lemonchiffon1': (255, 250, 205), + 'lemonchiffon2': (238, 233, 191), + 'lemonchiffon3': (205, 201, 165), + 'lemonchiffon4': (139, 137, 112), + 'lightblue': (173, 216, 230), + 'lightblue1': (191, 239, 255), + 'lightblue2': (178, 223, 238), + 'lightblue3': (154, 192, 205), + 'lightblue4': (104, 131, 139), + 'lightcoral': (240, 128, 128), + 'lightcyan': (224, 255, 255), + 'lightcyan1': (224, 255, 255), + 'lightcyan2': (209, 238, 238), + 'lightcyan3': (180, 205, 205), + 'lightcyan4': (122, 139, 139), + 'lightgoldenrod': (238, 221, 130), + 'lightgoldenrod1': (255, 236, 139), + 'lightgoldenrod2': (238, 220, 130), + 'lightgoldenrod3': (205, 190, 112), + 'lightgoldenrod4': (139, 129, 76), + 'lightgoldenrodyellow': (250, 250, 210), + 'lightgray': (211, 211, 211), + 'lightgrey': (211, 211, 211), + 'lightpink': (255, 182, 193), + 'lightpink1': (255, 174, 185), + 'lightpink2': (238, 162, 173), + 'lightpink3': (205, 140, 149), + 'lightpink4': (139, 95, 101), + 'lightsalmon': (255, 160, 122), + 'lightsalmon1': (255, 160, 122), + 'lightsalmon2': (238, 149, 114), + 'lightsalmon3': (205, 129, 98), + 'lightsalmon4': (139, 87, 66), + 'lightseagreen': (32, 178, 170), + 'lightskyblue': (135, 206, 250), + 'lightskyblue1': (176, 226, 255), + 'lightskyblue2': (164, 211, 238), + 'lightskyblue3': (141, 182, 205), + 'lightskyblue4': (96, 123, 139), + 'lightslateblue': (132, 112, 255), + 'lightslategray': (119, 136, 153), + 'lightslategrey': (119, 136, 153), + 'lightsteelblue': (176, 196, 222), + 'lightsteelblue1': (202, 225, 255), + 'lightsteelblue2': (188, 210, 238), + 'lightsteelblue3': (162, 181, 205), + 'lightsteelblue4': (110, 123, 139), + 'lightyellow': (255, 255, 224), + 'lightyellow1': (255, 255, 224), + 'lightyellow2': (238, 238, 209), + 'lightyellow3': (205, 205, 180), + 'lightyellow4': (139, 139, 122), + 'limegreen': (50, 205, 50), + 'linen': 
(250, 240, 230), + 'magenta': (255, 0, 255), + 'magenta1': (255, 0, 255), + 'magenta2': (238, 0, 238), + 'magenta3': (205, 0, 205), + 'magenta4': (139, 0, 139), + 'maroon': (176, 48, 96), + 'maroon1': (255, 52, 179), + 'maroon2': (238, 48, 167), + 'maroon3': (205, 41, 144), + 'maroon4': (139, 28, 98), + 'mediumaquamarine': (102, 205, 170), + 'mediumblue': (0, 0, 205), + 'mediumorchid': (186, 85, 211), + 'mediumorchid1': (224, 102, 255), + 'mediumorchid2': (209, 95, 238), + 'mediumorchid3': (180, 82, 205), + 'mediumorchid4': (122, 55, 139), + 'mediumpurple': (147, 112, 219), + 'mediumpurple1': (171, 130, 255), + 'mediumpurple2': (159, 121, 238), + 'mediumpurple3': (137, 104, 205), + 'mediumpurple4': (93, 71, 139), + 'mediumseagreen': (60, 179, 113), + 'mediumslateblue': (123, 104, 238), + 'mediumspringgreen': (0, 250, 154), + 'mediumturquoise': (72, 209, 204), + 'mediumvioletred': (199, 21, 133), + 'midnightblue': (25, 25, 112), + 'mintcream': (245, 255, 250), + 'mistyrose': (255, 228, 225), + 'mistyrose1': (255, 228, 225), + 'mistyrose2': (238, 213, 210), + 'mistyrose3': (205, 183, 181), + 'mistyrose4': (139, 125, 123), + 'moccasin': (255, 228, 181), + 'navajowhite': (255, 222, 173), + 'navajowhite1': (255, 222, 173), + 'navajowhite2': (238, 207, 161), + 'navajowhite3': (205, 179, 139), + 'navajowhite4': (139, 121, 94), + 'navy': (0, 0, 128), + 'navyblue': (0, 0, 128), + 'none': (255, 255, 254), + 'oldlace': (253, 245, 230), + 'olivedrab': (107, 142, 35), + 'olivedrab1': (192, 255, 62), + 'olivedrab2': (179, 238, 58), + 'olivedrab3': (154, 205, 50), + 'olivedrab4': (105, 139, 34), + 'orange': (255, 165, 0), + 'orange1': (255, 165, 0), + 'orange2': (238, 154, 0), + 'orange3': (205, 133, 0), + 'orange4': (139, 90, 0), + 'orangered': (255, 69, 0), + 'orangered1': (255, 69, 0), + 'orangered2': (238, 64, 0), + 'orangered3': (205, 55, 0), + 'orangered4': (139, 37, 0), + 'orchid': (218, 112, 214), + 'orchid1': (255, 131, 250), + 'orchid2': (238, 122, 233), + 'orchid3': 
(205, 105, 201), + 'orchid4': (139, 71, 137), + 'palegoldenrod': (238, 232, 170), + 'palegreen': (152, 251, 152), + 'palegreen1': (154, 255, 154), + 'palegreen2': (144, 238, 144), + 'palegreen3': (124, 205, 124), + 'palegreen4': (84, 139, 84), + 'paleturquoise': (175, 238, 238), + 'paleturquoise1': (187, 255, 255), + 'paleturquoise2': (174, 238, 238), + 'paleturquoise3': (150, 205, 205), + 'paleturquoise4': (102, 139, 139), + 'palevioletred': (219, 112, 147), + 'palevioletred1': (255, 130, 171), + 'palevioletred2': (238, 121, 159), + 'palevioletred3': (205, 104, 137), + 'palevioletred4': (139, 71, 93), + 'papayawhip': (255, 239, 213), + 'peachpuff': (255, 218, 185), + 'peachpuff1': (255, 218, 185), + 'peachpuff2': (238, 203, 173), + 'peachpuff3': (205, 175, 149), + 'peachpuff4': (139, 119, 101), + 'peru': (205, 133, 63), + 'pink': (255, 192, 203), + 'pink1': (255, 181, 197), + 'pink2': (238, 169, 184), + 'pink3': (205, 145, 158), + 'pink4': (139, 99, 108), + 'plum': (221, 160, 221), + 'plum1': (255, 187, 255), + 'plum2': (238, 174, 238), + 'plum3': (205, 150, 205), + 'plum4': (139, 102, 139), + 'powderblue': (176, 224, 230), + 'purple': (160, 32, 240), + 'purple1': (155, 48, 255), + 'purple2': (145, 44, 238), + 'purple3': (125, 38, 205), + 'purple4': (85, 26, 139), + 'red': (255, 0, 0), + 'red1': (255, 0, 0), + 'red2': (238, 0, 0), + 'red3': (205, 0, 0), + 'red4': (139, 0, 0), + 'rosybrown': (188, 143, 143), + 'rosybrown1': (255, 193, 193), + 'rosybrown2': (238, 180, 180), + 'rosybrown3': (205, 155, 155), + 'rosybrown4': (139, 105, 105), + 'royalblue': (65, 105, 225), + 'royalblue1': (72, 118, 255), + 'royalblue2': (67, 110, 238), + 'royalblue3': (58, 95, 205), + 'royalblue4': (39, 64, 139), + 'saddlebrown': (139, 69, 19), + 'salmon': (250, 128, 114), + 'salmon1': (255, 140, 105), + 'salmon2': (238, 130, 98), + 'salmon3': (205, 112, 84), + 'salmon4': (139, 76, 57), + 'sandybrown': (244, 164, 96), + 'seagreen': (46, 139, 87), + 'seagreen1': (84, 255, 159), + 
'seagreen2': (78, 238, 148), + 'seagreen3': (67, 205, 128), + 'seagreen4': (46, 139, 87), + 'seashell': (255, 245, 238), + 'seashell1': (255, 245, 238), + 'seashell2': (238, 229, 222), + 'seashell3': (205, 197, 191), + 'seashell4': (139, 134, 130), + 'sienna': (160, 82, 45), + 'sienna1': (255, 130, 71), + 'sienna2': (238, 121, 66), + 'sienna3': (205, 104, 57), + 'sienna4': (139, 71, 38), + 'skyblue': (135, 206, 235), + 'skyblue1': (135, 206, 255), + 'skyblue2': (126, 192, 238), + 'skyblue3': (108, 166, 205), + 'skyblue4': (74, 112, 139), + 'slateblue': (106, 90, 205), + 'slateblue1': (131, 111, 255), + 'slateblue2': (122, 103, 238), + 'slateblue3': (105, 89, 205), + 'slateblue4': (71, 60, 139), + 'slategray': (112, 128, 144), + 'slategray1': (198, 226, 255), + 'slategray2': (185, 211, 238), + 'slategray3': (159, 182, 205), + 'slategray4': (108, 123, 139), + 'slategrey': (112, 128, 144), + 'snow': (255, 250, 250), + 'snow1': (255, 250, 250), + 'snow2': (238, 233, 233), + 'snow3': (205, 201, 201), + 'snow4': (139, 137, 137), + 'springgreen': (0, 255, 127), + 'springgreen1': (0, 255, 127), + 'springgreen2': (0, 238, 118), + 'springgreen3': (0, 205, 102), + 'springgreen4': (0, 139, 69), + 'steelblue': (70, 130, 180), + 'steelblue1': (99, 184, 255), + 'steelblue2': (92, 172, 238), + 'steelblue3': (79, 148, 205), + 'steelblue4': (54, 100, 139), + 'tan': (210, 180, 140), + 'tan1': (255, 165, 79), + 'tan2': (238, 154, 73), + 'tan3': (205, 133, 63), + 'tan4': (139, 90, 43), + 'thistle': (216, 191, 216), + 'thistle1': (255, 225, 255), + 'thistle2': (238, 210, 238), + 'thistle3': (205, 181, 205), + 'thistle4': (139, 123, 139), + 'tomato': (255, 99, 71), + 'tomato1': (255, 99, 71), + 'tomato2': (238, 92, 66), + 'tomato3': (205, 79, 57), + 'tomato4': (139, 54, 38), + 'transparent': (255, 255, 254), + 'turquoise': (64, 224, 208), + 'turquoise1': (0, 245, 255), + 'turquoise2': (0, 229, 238), + 'turquoise3': (0, 197, 205), + 'turquoise4': (0, 134, 139), + 'violet': (238, 130, 
238), + 'violetred': (208, 32, 144), + 'violetred1': (255, 62, 150), + 'violetred2': (238, 58, 140), + 'violetred3': (205, 50, 120), + 'violetred4': (139, 34, 82), + 'wheat': (245, 222, 179), + 'wheat1': (255, 231, 186), + 'wheat2': (238, 216, 174), + 'wheat3': (205, 186, 150), + 'wheat4': (139, 126, 102), + 'white': (255, 255, 255), + 'whitesmoke': (245, 245, 245), + 'yellow': (255, 255, 0), + 'yellow1': (255, 255, 0), + 'yellow2': (238, 238, 0), + 'yellow3': (205, 205, 0), + 'yellow4': (139, 139, 0), + 'yellowgreen': (154, 205, 50), } re_nonword=re.compile(r'([^0-9a-zA-Z_.]+)') re_linewidth=re.compile(r'setlinewidth\((\d+(\.\d*)?|\.\d+)\)') diff --git a/rpython/rlib/jit.py b/rpython/rlib/jit.py --- a/rpython/rlib/jit.py +++ b/rpython/rlib/jit.py @@ -1117,7 +1117,7 @@ from rpython.rtyper.lltypesystem.lloperation import llop from rpython.rtyper.lltypesystem import lltype from rpython.rtyper.rclass import ll_type - ll_assert(ll_value == lltype.nullptr(lltype.typeOf(ll_value).TO), "record_exact_class called with None argument") + ll_assert(ll_value != lltype.nullptr(lltype.typeOf(ll_value).TO), "record_exact_class called with None argument") ll_assert(ll_type(ll_value) is ll_cls, "record_exact_class called with invalid arguments") llop.jit_record_exact_class(lltype.Void, ll_value, ll_cls) From pypy.commits at gmail.com Fri Feb 5 07:31:26 2016 From: pypy.commits at gmail.com (plan_rich) Date: Fri, 05 Feb 2016 04:31:26 -0800 (PST) Subject: [pypy-commit] pypy s390x-backend: allocating the back chain correctly, one problem when hitting a memory error, stack size would have been incorrect! Message-ID: <56b4961e.89bd1c0a.202b4.0c7a@mx.google.com> Author: Richard Plangger Branch: s390x-backend Changeset: r82085:719886e6865e Date: 2016-02-05 13:30 +0100 http://bitbucket.org/pypy/pypy/changeset/719886e6865e/ Log: allocating the back chain correctly, one problem when hitting a memory error, stack size would have been incorrect! 
diff --git a/rpython/jit/backend/zarch/assembler.py b/rpython/jit/backend/zarch/assembler.py --- a/rpython/jit/backend/zarch/assembler.py +++ b/rpython/jit/backend/zarch/assembler.py @@ -198,6 +198,8 @@ # the RPython exception that occurred in the CALL, if any). # off = STD_FRAME_SIZE_IN_BYTES + mc.LG(r.SCRATCH, l.addr(0, r.SP)) + mc.STG(r.SCRATCH, l.addr(-extra_stack_size, r.SP)) mc.LAY(r.SP, l.addr(-extra_stack_size, r.SP)) mc.STMG(r.r10, r.r12, l.addr(off, r.SP)) mc.STG(r.r2, l.addr(off+3*WORD, r.SP)) @@ -347,7 +349,7 @@ mc.load(r.r5, r.r5, diff) mc.store(r.r2, r.r5, -WORD) - self._pop_core_regs_from_jitframe(mc) + self._pop_core_regs_from_jitframe(mc, r.MANAGED_REGS) self._pop_fp_regs_from_jitframe(mc) mc.restore_link() @@ -490,7 +492,7 @@ # Check that we don't get NULL; if we do, we always interrupt the # current loop, as a "good enough" approximation (same as # emit_call_malloc_gc()). - self.propagate_memoryerror_if_r2_is_null() + self.propagate_memoryerror_if_r2_is_null(True) self._pop_core_regs_from_jitframe(mc, saved_regs) self._pop_fp_regs_from_jitframe(mc) @@ -599,7 +601,7 @@ raise JitFrameTooDeep # XXX for traps_pos, jmp_target in self.frame_depth_to_patch: pmc = OverwritingBuilder(self.mc, traps_pos, 3) - # three traps, so exactly three instructions to patch here + # patch 3 instructions as shown above pmc.CGFI(r.r1, l.imm(frame_depth)) pmc.BRC(c.GE, l.imm(jmp_target - (traps_pos + 6))) pmc.LGHI(r.r0, l.imm(frame_depth)) @@ -756,18 +758,28 @@ # sadly we cannot use LOCGHI # it is included in some extension that seem to be NOT installed # by default. 
- self.mc.LGHI(result_loc, l.imm(1)) + self.mc.LGHI(result_loc, l.imm(-1)) off = self.mc.XGR_byte_count + self.mc.BRC_byte_count - self.mc.BRC(condition, l.imm(off)) # branch over LGHI + self.mc.BRC(condition, l.imm(off)) # branch over XGR self.mc.XGR(result_loc, result_loc) - def propagate_memoryerror_if_r2_is_null(self): + def propagate_memoryerror_if_r2_is_null(self, pop_one_stackframe=False): # if self.propagate_exception_path == 0 (tests), this may jump to 0 # and segfaults. too bad. the alternative is to continue anyway # with r2==0, but that will segfault too. + jmp_pos = self.mc.get_relative_pos() + # bails to propagate exception path if r2 != 0 + self.mc.reserve_cond_jump() + self.mc.load_imm(r.RETURN, self.propagate_exception_path) - self.mc.cmp_op(r.r2, l.imm(0), imm=True) - self.mc.BCR(c.EQ, r.RETURN) + if pop_one_stackframe: + self.mc.LAY(r.SP, l.addr(STD_FRAME_SIZE_IN_BYTES, r.SP)) + self.mc.BCR(c.ANY, r.RETURN) + + currpos = self.mc.currpos() + pmc = OverwritingBuilder(self.mc, jmp_pos, 1) + pmc.CGIJ(r.r2, l.imm(0), c.EQ, l.imm(curpos - jmp_pos)) + pmc.overwrite() def regalloc_push(self, loc, already_pushed): """Pushes the value stored in loc to the stack diff --git a/rpython/jit/backend/zarch/callbuilder.py b/rpython/jit/backend/zarch/callbuilder.py --- a/rpython/jit/backend/zarch/callbuilder.py +++ b/rpython/jit/backend/zarch/callbuilder.py @@ -143,15 +143,16 @@ def emit_raw_call(self): # always allocate a stack frame for the new function # save the SP back chain - self.mc.STG(r.SP, l.addr(-self.subtracted_to_sp, r.SP)) # move the frame pointer if self.subtracted_to_sp != 0: + # rewrite the back chain + self.mc.LG(r.SCRATCH, l.addr(0, r.SP)) + self.mc.STG(r.SCRATCH, l.addr(-self.subtracted_to_sp, r.SP)) self.mc.LAY(r.SP, l.addr(-self.subtracted_to_sp, r.SP)) self.mc.raw_call() def restore_stack_pointer(self): - # it must at LEAST be 160 bytes if self.subtracted_to_sp != 0: self.mc.LAY(r.SP, l.addr(self.subtracted_to_sp, r.SP)) diff --git 
a/rpython/jit/backend/zarch/codebuilder.py b/rpython/jit/backend/zarch/codebuilder.py --- a/rpython/jit/backend/zarch/codebuilder.py +++ b/rpython/jit/backend/zarch/codebuilder.py @@ -218,8 +218,9 @@ self.LMG(r.r14, r.r15, l.addr(off+14*WORD, r.SP)) def push_std_frame(self, additional_bytes=0): - self.STG(r.SP, l.addr(0, r.SP)) - self.LAY(r.SP, l.addr(-(STD_FRAME_SIZE_IN_BYTES + additional_bytes), r.SP)) + off = (STD_FRAME_SIZE_IN_BYTES + additional_bytes) + self.STG(r.SP, l.addr(off, r.SP)) + self.LAY(r.SP, l.addr(-off, r.SP)) def pop_std_frame(self, additional_bytes=0): self.LAY(r.SP, l.addr(STD_FRAME_SIZE_IN_BYTES + additional_bytes, r.SP)) diff --git a/rpython/jit/backend/zarch/opassembler.py b/rpython/jit/backend/zarch/opassembler.py --- a/rpython/jit/backend/zarch/opassembler.py +++ b/rpython/jit/backend/zarch/opassembler.py @@ -369,8 +369,7 @@ fcond = c.negate(fcond) jmp_adr = self.mc.get_relative_pos() - self.mc.trap() # patched later to a relative branch - self.mc.write('\x00' * 4) + self.mc.reserve_cond_jump() # patched later to a relative branch # save away r2, r3, r4, r5, r12 into the jitframe should_be_saved = [ @@ -378,6 +377,7 @@ if reg in self._COND_CALL_SAVE_REGS] self._push_core_regs_to_jitframe(self.mc, should_be_saved) + # load gc map into unusual location: r0 self.load_gcmap(self.mc, r.SCRATCH2, regalloc.get_gcmap()) # # load the 0-to-4 arguments into these registers, with the address of @@ -751,10 +751,9 @@ self._read_typeid(r.SCRATCH2, loc_object) self.mc.load_imm(r.SCRATCH, base_type_info + infobits_offset) assert shift_by == 0 - self.mc.AGR(r.SCRATCH, r.SCRATCH2) - self.mc.LLGC(r.SCRATCH2, l.addr(0, r.SCRATCH)) - self.mc.LGHI(r.SCRATCH, l.imm(IS_OBJECT_FLAG & 0xff)) - self.mc.NGR(r.SCRATCH2, r.SCRATCH) + self.mc.LGR(r.SCRATCH, r.SCRATCH2) + self.mc.LLGC(r.SCRATCH2, l.addr(0, r.SCRATCH)) # cannot use r.r0 as index reg + self.mc.NILL(r.SCRATCH2, l.imm(IS_OBJECT_FLAG & 0xff)) self.guard_success_cc = c.NE self._emit_guard(op, arglocs[1:]) From 
pypy.commits at gmail.com Fri Feb 5 09:00:29 2016 From: pypy.commits at gmail.com (plan_rich) Date: Fri, 05 Feb 2016 06:00:29 -0800 (PST) Subject: [pypy-commit] pypy s390x-backend: backchain was not correct (now it is, tested it), flush_cc must write 1, it must not be something != 1 Message-ID: <56b4aafd.046f1c0a.b98f.1af7@mx.google.com> Author: Richard Plangger Branch: s390x-backend Changeset: r82086:e108cd4706e3 Date: 2016-02-05 14:59 +0100 http://bitbucket.org/pypy/pypy/changeset/e108cd4706e3/ Log: backchain was not correct (now it is, tested it), flush_cc must write 1, it must not be something != 1 diff --git a/rpython/jit/backend/zarch/assembler.py b/rpython/jit/backend/zarch/assembler.py --- a/rpython/jit/backend/zarch/assembler.py +++ b/rpython/jit/backend/zarch/assembler.py @@ -758,7 +758,7 @@ # sadly we cannot use LOCGHI # it is included in some extension that seem to be NOT installed # by default. - self.mc.LGHI(result_loc, l.imm(-1)) + self.mc.LGHI(result_loc, l.imm(1)) off = self.mc.XGR_byte_count + self.mc.BRC_byte_count self.mc.BRC(condition, l.imm(off)) # branch over XGR self.mc.XGR(result_loc, result_loc) @@ -776,7 +776,7 @@ self.mc.LAY(r.SP, l.addr(STD_FRAME_SIZE_IN_BYTES, r.SP)) self.mc.BCR(c.ANY, r.RETURN) - currpos = self.mc.currpos() + curpos = self.mc.currpos() pmc = OverwritingBuilder(self.mc, jmp_pos, 1) pmc.CGIJ(r.r2, l.imm(0), c.EQ, l.imm(curpos - jmp_pos)) pmc.overwrite() @@ -1025,6 +1025,7 @@ if gcrootmap and gcrootmap.is_shadow_stack: self._call_header_shadowstack(gcrootmap) + def _call_header_shadowstack(self, gcrootmap): # we need to put one word into the shadowstack: the jitframe (SPP) # we saved all registers to the stack diff --git a/rpython/jit/backend/zarch/codebuilder.py b/rpython/jit/backend/zarch/codebuilder.py --- a/rpython/jit/backend/zarch/codebuilder.py +++ b/rpython/jit/backend/zarch/codebuilder.py @@ -219,7 +219,7 @@ def push_std_frame(self, additional_bytes=0): off = (STD_FRAME_SIZE_IN_BYTES + additional_bytes) - 
self.STG(r.SP, l.addr(off, r.SP)) + self.STG(r.SP, l.addr(-off, r.SP)) self.LAY(r.SP, l.addr(-off, r.SP)) def pop_std_frame(self, additional_bytes=0): From pypy.commits at gmail.com Fri Feb 5 09:05:27 2016 From: pypy.commits at gmail.com (plan_rich) Date: Fri, 05 Feb 2016 06:05:27 -0800 (PST) Subject: [pypy-commit] pypy s390x-backend: must branch if not equal (error introduced in the last 2 commits) Message-ID: <56b4ac27.02931c0a.17cfd.4ae7@mx.google.com> Author: Richard Plangger Branch: s390x-backend Changeset: r82087:b95937cb9eb3 Date: 2016-02-05 15:04 +0100 http://bitbucket.org/pypy/pypy/changeset/b95937cb9eb3/ Log: must branch if not equal (error introduced in the last 2 commits) diff --git a/rpython/jit/backend/zarch/assembler.py b/rpython/jit/backend/zarch/assembler.py --- a/rpython/jit/backend/zarch/assembler.py +++ b/rpython/jit/backend/zarch/assembler.py @@ -778,7 +778,7 @@ curpos = self.mc.currpos() pmc = OverwritingBuilder(self.mc, jmp_pos, 1) - pmc.CGIJ(r.r2, l.imm(0), c.EQ, l.imm(curpos - jmp_pos)) + pmc.CGIJ(r.r2, l.imm(0), c.NE, l.imm(curpos - jmp_pos)) pmc.overwrite() def regalloc_push(self, loc, already_pushed): From pypy.commits at gmail.com Fri Feb 5 10:59:41 2016 From: pypy.commits at gmail.com (plan_rich) Date: Fri, 05 Feb 2016 07:59:41 -0800 (PST) Subject: [pypy-commit] pypy s390x-backend: test checks that trace is formed for a little endian read, forcing to read little endian on s390x Message-ID: <56b4c6ed.162f1c0a.9c500.709a@mx.google.com> Author: Richard Plangger Branch: s390x-backend Changeset: r82088:cfb1f88ae10c Date: 2016-02-05 16:58 +0100 http://bitbucket.org/pypy/pypy/changeset/cfb1f88ae10c/ Log: test checks that trace is formed for a little endian read, forcing to read little endian on s390x diff --git a/pypy/module/pypyjit/test_pypy_c/test_buffers.py b/pypy/module/pypyjit/test_pypy_c/test_buffers.py --- a/pypy/module/pypyjit/test_pypy_c/test_buffers.py +++ b/pypy/module/pypyjit/test_pypy_c/test_buffers.py @@ -34,7 +34,7 @@ i = 0 
while i < n: i += 1 - struct.unpack('i', a) # ID: unpack + struct.unpack(' Author: Manuel Jacob Branch: Changeset: r82089:e2685b2b1e87 Date: 2016-02-05 17:06 +0100 http://bitbucket.org/pypy/pypy/changeset/e2685b2b1e87/ Log: Skip vmprof jit tests on non-x86-64 CPUs. diff --git a/rpython/jit/backend/llsupport/test/zrpy_vmprof_test.py b/rpython/jit/backend/llsupport/test/zrpy_vmprof_test.py --- a/rpython/jit/backend/llsupport/test/zrpy_vmprof_test.py +++ b/rpython/jit/backend/llsupport/test/zrpy_vmprof_test.py @@ -9,6 +9,10 @@ class CompiledVmprofTest(CCompiledMixin): CPUClass = getcpuclass() + def setup(self): + if self.CPUClass.backend_name != 'x86_64': + py.test.skip("vmprof only supports x86-64 CPUs at the moment") + def _get_TranslationContext(self): t = TranslationContext() t.config.translation.gc = 'incminimark' @@ -83,4 +87,4 @@ except ImportError: pass else: - check_vmprof_output() \ No newline at end of file + check_vmprof_output() From pypy.commits at gmail.com Fri Feb 5 11:24:00 2016 From: pypy.commits at gmail.com (plan_rich) Date: Fri, 05 Feb 2016 08:24:00 -0800 (PST) Subject: [pypy-commit] pypy s390x-backend: fixed a bug where pool did not allocate space for constant parameter (gc_load_indexed, and store) Message-ID: <56b4cca0.4e8e1c0a.31da2.42ca@mx.google.com> Author: Richard Plangger Branch: s390x-backend Changeset: r82090:ed27d3e4b9d8 Date: 2016-02-05 17:23 +0100 http://bitbucket.org/pypy/pypy/changeset/ed27d3e4b9d8/ Log: fixed a bug where pool did not allocate space for constant parameter (gc_load_indexed, and store) diff --git a/rpython/jit/backend/zarch/pool.py b/rpython/jit/backend/zarch/pool.py --- a/rpython/jit/backend/zarch/pool.py +++ b/rpython/jit/backend/zarch/pool.py @@ -65,6 +65,9 @@ arg = op.getarg(0) if arg.is_constant(): self.reserve_literal(8, arg) + arg = op.getarg(1) + if arg.is_constant(): + self.reserve_literal(8, arg) arg = op.getarg(2) if arg.is_constant(): self.reserve_literal(8, arg) @@ -78,10 +81,9 @@ arg = op.getarg(0) if 
arg.is_constant(): self.reserve_literal(8, arg) - if opnum == rop.GC_LOAD_INDEXED_R: - arg = op.getarg(1) - if arg.is_constant(): - self.reserve_literal(8, arg) + arg = op.getarg(1) + if arg.is_constant(): + self.reserve_literal(8, arg) return elif op.is_call_release_gil(): for arg in op.getarglist()[1:]: From pypy.commits at gmail.com Fri Feb 5 11:52:33 2016 From: pypy.commits at gmail.com (rlamy) Date: Fri, 05 Feb 2016 08:52:33 -0800 (PST) Subject: [pypy-commit] pypy default: Create rpython.rtyper.debug and move ll_assert() and fatalerror() there. Message-ID: <56b4d351.c5321c0a.907b6.7f41@mx.google.com> Author: Ronan Lamy Branch: Changeset: r82091:45cce2525cc9 Date: 2016-02-05 03:45 +0000 http://bitbucket.org/pypy/pypy/changeset/45cce2525cc9/ Log: Create rpython.rtyper.debug and move ll_assert() and fatalerror() there. This reduces the number of imports from rpython/rtyper/ to rpython.rlib and avoids a cyclical import dependency. diff --git a/rpython/rlib/debug.py b/rpython/rlib/debug.py --- a/rpython/rlib/debug.py +++ b/rpython/rlib/debug.py @@ -1,76 +1,41 @@ -import sys, time +import sys +import time + from rpython.rtyper.extregistry import ExtRegistryEntry from rpython.rlib.objectmodel import we_are_translated from rpython.rlib.rarithmetic import is_valid_int from rpython.rtyper.extfunc import ExtFuncEntry from rpython.rtyper.lltypesystem import lltype +from rpython.rtyper.lltypesystem import rffi from rpython.translator.tool.cbuild import ExternalCompilationInfo - -def ll_assert(x, msg): - """After translation to C, this becomes an RPyAssert.""" - assert type(x) is bool, "bad type! 
got %r" % (type(x),) - assert x, msg - -class Entry(ExtRegistryEntry): - _about_ = ll_assert - - def compute_result_annotation(self, s_x, s_msg): - assert s_msg.is_constant(), ("ll_assert(x, msg): " - "the msg must be constant") - return None - - def specialize_call(self, hop): - vlist = hop.inputargs(lltype.Bool, lltype.Void) - hop.exception_cannot_occur() - hop.genop('debug_assert', vlist) - -class FatalError(Exception): - pass - -def fatalerror(msg): - # print the RPython traceback and abort with a fatal error - if not we_are_translated(): - raise FatalError(msg) - from rpython.rtyper.lltypesystem import lltype - from rpython.rtyper.lltypesystem.lloperation import llop - llop.debug_print_traceback(lltype.Void) - llop.debug_fatalerror(lltype.Void, msg) -fatalerror._dont_inline_ = True -fatalerror._jit_look_inside_ = False -fatalerror._annenforceargs_ = [str] - -def fatalerror_notb(msg): - # a variant of fatalerror() that doesn't print the RPython traceback - if not we_are_translated(): - raise FatalError(msg) - from rpython.rtyper.lltypesystem import lltype - from rpython.rtyper.lltypesystem.lloperation import llop - llop.debug_fatalerror(lltype.Void, msg) -fatalerror_notb._dont_inline_ = True -fatalerror_notb._jit_look_inside_ = False -fatalerror_notb._annenforceargs_ = [str] +# Expose these here (public interface) +from rpython.rtyper.debug import ( + ll_assert, FatalError, fatalerror, fatalerror_notb) class DebugLog(list): def debug_print(self, *args): self.append(('debug_print',) + args) + def debug_start(self, category, time=None): self.append(('debug_start', category, time)) + def debug_stop(self, category, time=None): - for i in xrange(len(self)-1, -1, -1): + for i in xrange(len(self) - 1, -1, -1): if self[i][0] == 'debug_start': assert self[i][1] == category, ( "nesting error: starts with %r but stops with %r" % (self[i][1], category)) starttime = self[i][2] if starttime is not None or time is not None: - self[i:] = [(category, starttime, time, 
self[i+1:])] + self[i:] = [(category, starttime, time, self[i + 1:])] else: - self[i:] = [(category, self[i+1:])] + self[i:] = [(category, self[i + 1:])] return assert False, ("nesting error: no start corresponding to stop %r" % (category,)) + def __repr__(self): import pprint return pprint.pformat(list(self)) @@ -161,7 +126,6 @@ return self.bookkeeper.immutablevalue(False) def specialize_call(self, hop): - from rpython.rtyper.lltypesystem import lltype t = hop.rtyper.annotator.translator hop.exception_cannot_occur() if t.config.translation.log: @@ -189,7 +153,6 @@ return annmodel.SomeInteger() def specialize_call(self, hop): - from rpython.rtyper.lltypesystem import lltype hop.exception_cannot_occur() return hop.genop('debug_offset', [], resulttype=lltype.Signed) @@ -223,7 +186,6 @@ return None def specialize_call(self, hop): - from rpython.rtyper.lltypesystem import lltype vlist = hop.inputargs(lltype.Signed) hop.exception_cannot_occur() return hop.genop('debug_forked', vlist) @@ -244,7 +206,6 @@ def compute_result_annotation(self, s_RESTYPE, s_pythonfunction, *args_s): from rpython.annotator import model as annmodel from rpython.rtyper.llannotation import lltype_to_annotation - from rpython.rtyper.lltypesystem import lltype assert s_RESTYPE.is_constant() assert s_pythonfunction.is_constant() s_result = s_RESTYPE.const @@ -255,7 +216,6 @@ def specialize_call(self, hop): from rpython.annotator import model as annmodel - from rpython.rtyper.lltypesystem import lltype RESTYPE = hop.args_s[0].const if not isinstance(RESTYPE, lltype.LowLevelType): assert isinstance(RESTYPE, annmodel.SomeObject) @@ -283,7 +243,8 @@ def compute_result_annotation(self, s_arg, s_checker): if not s_checker.is_constant(): - raise ValueError("Second argument of check_annotation must be constant") + raise ValueError( + "Second argument of check_annotation must be constant") checker = s_checker.const checker(s_arg, self.bookkeeper) return s_arg @@ -308,11 +269,14 @@ assert isinstance(s_arg, 
SomeList) # the logic behind it is that we try not to propagate # make_sure_not_resized, when list comprehension is not on - if self.bookkeeper.annotator.translator.config.translation.list_comprehension_operations: + config = self.bookkeeper.annotator.translator.config + if config.translation.list_comprehension_operations: s_arg.listdef.never_resize() else: from rpython.annotator.annrpython import log - log.WARNING('make_sure_not_resized called, but has no effect since list_comprehension is off') + log.WARNING( + "make_sure_not_resized called, but has no effect since " + "list_comprehension is off") return s_arg def specialize_call(self, hop): @@ -434,9 +398,6 @@ if not sys.platform.startswith('win'): def _make_impl_attach_gdb(): - # circular imports fun :-( - import sys - from rpython.rtyper.lltypesystem import rffi if sys.platform.startswith('linux'): # Only necessary on Linux eci = ExternalCompilationInfo(includes=['string.h', 'assert.h', @@ -482,7 +443,7 @@ os.strerror(e.errno))) raise SystemExit else: - time.sleep(1) # give the GDB time to attach + time.sleep(1) # give the GDB time to attach return impl_attach_gdb else: diff --git a/rpython/rtyper/lltypesystem/rbuilder.py b/rpython/rtyper/lltypesystem/rbuilder.py --- a/rpython/rtyper/lltypesystem/rbuilder.py +++ b/rpython/rtyper/lltypesystem/rbuilder.py @@ -1,7 +1,7 @@ from rpython.rlib import rgc, jit from rpython.rlib.objectmodel import enforceargs from rpython.rlib.rarithmetic import ovfcheck, r_uint, intmask -from rpython.rlib.debug import ll_assert +from rpython.rtyper.debug import ll_assert from rpython.rlib.unroll import unrolling_iterable from rpython.rtyper.rptr import PtrRepr from rpython.rtyper.lltypesystem import lltype, rffi, rstr @@ -11,7 +11,7 @@ from rpython.rtyper.rbuilder import AbstractStringBuilderRepr from rpython.tool.sourcetools import func_with_new_name from rpython.rtyper.annlowlevel import llstr, llunicode - + # ------------------------------------------------------------ diff --git 
a/rpython/rtyper/lltypesystem/rbytearray.py b/rpython/rtyper/lltypesystem/rbytearray.py --- a/rpython/rtyper/lltypesystem/rbytearray.py +++ b/rpython/rtyper/lltypesystem/rbytearray.py @@ -1,7 +1,7 @@ from rpython.rtyper.rbytearray import AbstractByteArrayRepr from rpython.rtyper.lltypesystem import lltype, rstr -from rpython.rlib.debug import ll_assert +from rpython.rtyper.debug import ll_assert BYTEARRAY = lltype.GcForwardReference() diff --git a/rpython/rtyper/lltypesystem/rdict.py b/rpython/rtyper/lltypesystem/rdict.py --- a/rpython/rtyper/lltypesystem/rdict.py +++ b/rpython/rtyper/lltypesystem/rdict.py @@ -4,7 +4,7 @@ from rpython.rtyper.lltypesystem import lltype from rpython.rtyper.lltypesystem.lloperation import llop from rpython.rlib import objectmodel, jit -from rpython.rlib.debug import ll_assert +from rpython.rtyper.debug import ll_assert from rpython.rlib.rarithmetic import r_uint, intmask, LONG_BIT from rpython.rtyper import rmodel from rpython.rtyper.error import TyperError diff --git a/rpython/rtyper/lltypesystem/rlist.py b/rpython/rtyper/lltypesystem/rlist.py --- a/rpython/rtyper/lltypesystem/rlist.py +++ b/rpython/rtyper/lltypesystem/rlist.py @@ -1,5 +1,5 @@ from rpython.rlib import rgc, jit, types -from rpython.rlib.debug import ll_assert +from rpython.rtyper.debug import ll_assert from rpython.rlib.signature import signature from rpython.rtyper.error import TyperError from rpython.rtyper.lltypesystem import rstr diff --git a/rpython/rtyper/lltypesystem/rordereddict.py b/rpython/rtyper/lltypesystem/rordereddict.py --- a/rpython/rtyper/lltypesystem/rordereddict.py +++ b/rpython/rtyper/lltypesystem/rordereddict.py @@ -6,7 +6,7 @@ from rpython.rlib import objectmodel, jit, rgc, types from rpython.rlib.signature import signature from rpython.rlib.objectmodel import specialize, likely -from rpython.rlib.debug import ll_assert +from rpython.rtyper.debug import ll_assert from rpython.rlib.rarithmetic import r_uint, intmask from rpython.rtyper import 
rmodel from rpython.rtyper.error import TyperError diff --git a/rpython/rtyper/lltypesystem/rstr.py b/rpython/rtyper/lltypesystem/rstr.py --- a/rpython/rtyper/lltypesystem/rstr.py +++ b/rpython/rtyper/lltypesystem/rstr.py @@ -2,12 +2,12 @@ from rpython.annotator import model as annmodel from rpython.rlib import jit, types -from rpython.rlib.debug import ll_assert from rpython.rlib.objectmodel import (malloc_zero_filled, we_are_translated, _hash_string, keepalive_until_here, specialize, enforceargs) from rpython.rlib.signature import signature from rpython.rlib.rarithmetic import ovfcheck from rpython.rtyper.error import TyperError +from rpython.rtyper.debug import ll_assert from rpython.rtyper.lltypesystem import ll_str, llmemory from rpython.rtyper.lltypesystem.lltype import (GcStruct, Signed, Array, Char, UniChar, Ptr, malloc, Bool, Void, GcArray, nullptr, cast_primitive, diff --git a/rpython/rtyper/rlist.py b/rpython/rtyper/rlist.py --- a/rpython/rtyper/rlist.py +++ b/rpython/rtyper/rlist.py @@ -1,7 +1,7 @@ from rpython.annotator import model as annmodel from rpython.flowspace.model import Constant from rpython.rlib import rgc, jit, types -from rpython.rlib.debug import ll_assert +from rpython.rtyper.debug import ll_assert from rpython.rlib.objectmodel import malloc_zero_filled, enforceargs, specialize from rpython.rlib.signature import signature from rpython.rlib.rarithmetic import ovfcheck, widen, r_uint, intmask diff --git a/rpython/rtyper/rpbc.py b/rpython/rtyper/rpbc.py --- a/rpython/rtyper/rpbc.py +++ b/rpython/rtyper/rpbc.py @@ -7,7 +7,7 @@ from rpython.annotator.classdesc import ClassDesc from rpython.flowspace.model import Constant from rpython.annotator.argument import simple_args -from rpython.rlib.debug import ll_assert +from rpython.rtyper.debug import ll_assert from rpython.rlib.unroll import unrolling_iterable from rpython.rtyper import rclass, callparse from rpython.rtyper.rclass import CLASSTYPE, OBJECT_VTABLE, OBJECTPTR diff --git 
a/rpython/rtyper/test/test_rlist.py b/rpython/rtyper/test/test_rlist.py --- a/rpython/rtyper/test/test_rlist.py +++ b/rpython/rtyper/test/test_rlist.py @@ -3,7 +3,7 @@ import py -from rpython.rlib.debug import ll_assert +from rpython.rtyper.debug import ll_assert from rpython.rtyper.error import TyperError from rpython.rtyper.llinterp import LLException, LLAssertFailure from rpython.rtyper.lltypesystem import rlist as ll_rlist From pypy.commits at gmail.com Fri Feb 5 12:27:06 2016 From: pypy.commits at gmail.com (rlamy) Date: Fri, 05 Feb 2016 09:27:06 -0800 (PST) Subject: [pypy-commit] pypy default: Use regular register_external() call for attach_gdb() Message-ID: <56b4db6a.25fac20a.7fb8.2139@mx.google.com> Author: Ronan Lamy Branch: Changeset: r82092:e415ef72b422 Date: 2016-02-05 17:26 +0000 http://bitbucket.org/pypy/pypy/changeset/e415ef72b422/ Log: Use regular register_external() call for attach_gdb() diff --git a/rpython/rlib/debug.py b/rpython/rlib/debug.py --- a/rpython/rlib/debug.py +++ b/rpython/rlib/debug.py @@ -4,7 +4,7 @@ from rpython.rtyper.extregistry import ExtRegistryEntry from rpython.rlib.objectmodel import we_are_translated from rpython.rlib.rarithmetic import is_valid_int -from rpython.rtyper.extfunc import ExtFuncEntry +from rpython.rtyper.extfunc import register_external from rpython.rtyper.lltypesystem import lltype from rpython.rtyper.lltypesystem import rffi from rpython.translator.tool.cbuild import ExternalCompilationInfo @@ -397,12 +397,11 @@ import pdb; pdb.set_trace() if not sys.platform.startswith('win'): - def _make_impl_attach_gdb(): - if sys.platform.startswith('linux'): - # Only necessary on Linux - eci = ExternalCompilationInfo(includes=['string.h', 'assert.h', - 'sys/prctl.h'], - post_include_bits=[""" + if sys.platform.startswith('linux'): + # Only necessary on Linux + eci = ExternalCompilationInfo(includes=['string.h', 'assert.h', + 'sys/prctl.h'], + post_include_bits=[""" /* If we have an old Linux kernel (or compile with old 
system headers), the following two macros are not defined. But we would still like a pypy translated on such a system to run on a more modern system. */ @@ -416,55 +415,38 @@ prctl(PR_SET_PTRACER, PR_SET_PTRACER_ANY); } """]) - allow_attach = rffi.llexternal( - "pypy__allow_attach", [], lltype.Void, - compilation_info=eci, _nowrapper=True) + allow_attach = rffi.llexternal( + "pypy__allow_attach", [], lltype.Void, + compilation_info=eci, _nowrapper=True) + else: + # Do nothing, there's no prctl + def allow_attach(): + pass + + def impl_attach_gdb(): + import os + allow_attach() + pid = os.getpid() + gdbpid = os.fork() + if gdbpid == 0: + shell = os.environ.get("SHELL") or "/bin/sh" + sepidx = shell.rfind(os.sep) + 1 + if sepidx > 0: + argv0 = shell[sepidx:] + else: + argv0 = shell + try: + os.execv(shell, [argv0, "-c", "gdb -p %d" % pid]) + except OSError as e: + os.write(2, "Could not start GDB: %s" % ( + os.strerror(e.errno))) + raise SystemExit else: - # Do nothing, there's no prctl - def allow_attach(): - pass + time.sleep(1) # give the GDB time to attach - def impl_attach_gdb(): - import os - allow_attach() - pid = os.getpid() - gdbpid = os.fork() - if gdbpid == 0: - shell = os.environ.get("SHELL") or "/bin/sh" - sepidx = shell.rfind(os.sep) + 1 - if sepidx > 0: - argv0 = shell[sepidx:] - else: - argv0 = shell - try: - os.execv(shell, [argv0, "-c", "gdb -p %d" % pid]) - except OSError as e: - os.write(2, "Could not start GDB: %s" % ( - os.strerror(e.errno))) - raise SystemExit - else: - time.sleep(1) # give the GDB time to attach +else: + def impl_attach_gdb(): + print "Don't know how to attach GDB on Windows" - return impl_attach_gdb -else: - def _make_impl_attach_gdb(): - def impl_attach_gdb(): - print "Don't know how to attach GDB on Windows" - return impl_attach_gdb - - -class FunEntry(ExtFuncEntry): - _about_ = attach_gdb - signature_args = [] - #lltypeimpl = staticmethod(impl_attach_gdb) --- done lazily below - name = "impl_attach_gdb" - - @property - def 
lltypeimpl(self): - if not hasattr(self.__class__, '_lltypeimpl'): - self.__class__._lltypeimpl = staticmethod(_make_impl_attach_gdb()) - return self._lltypeimpl - - def compute_result_annotation(self, *args_s): - from rpython.annotator.model import s_None - return s_None +register_external(attach_gdb, [], result=None, + export_name="impl_attach_gdb", llimpl=impl_attach_gdb) From pypy.commits at gmail.com Fri Feb 5 14:30:50 2016 From: pypy.commits at gmail.com (rlamy) Date: Fri, 05 Feb 2016 11:30:50 -0800 (PST) Subject: [pypy-commit] pypy default: forgot to add rpython/rtyper/debug.py Message-ID: <56b4f86a.8e811c0a.dc917.0aa7@mx.google.com> Author: Ronan Lamy Branch: Changeset: r82093:ca082804de94 Date: 2016-02-05 19:29 +0000 http://bitbucket.org/pypy/pypy/changeset/ca082804de94/ Log: forgot to add rpython/rtyper/debug.py diff --git a/rpython/rtyper/debug.py b/rpython/rtyper/debug.py new file mode 100644 --- /dev/null +++ b/rpython/rtyper/debug.py @@ -0,0 +1,47 @@ +from rpython.rlib.objectmodel import we_are_translated +from rpython.rtyper.extregistry import ExtRegistryEntry +from rpython.rtyper.lltypesystem import lltype + +def ll_assert(x, msg): + """After translation to C, this becomes an RPyAssert.""" + assert type(x) is bool, "bad type! 
got %r" % (type(x),) + assert x, msg + +class Entry(ExtRegistryEntry): + _about_ = ll_assert + + def compute_result_annotation(self, s_x, s_msg): + assert s_msg.is_constant(), ("ll_assert(x, msg): " + "the msg must be constant") + return None + + def specialize_call(self, hop): + vlist = hop.inputargs(lltype.Bool, lltype.Void) + hop.exception_cannot_occur() + hop.genop('debug_assert', vlist) + +class FatalError(Exception): + pass + +def fatalerror(msg): + # print the RPython traceback and abort with a fatal error + if not we_are_translated(): + raise FatalError(msg) + from rpython.rtyper.lltypesystem import lltype + from rpython.rtyper.lltypesystem.lloperation import llop + llop.debug_print_traceback(lltype.Void) + llop.debug_fatalerror(lltype.Void, msg) +fatalerror._dont_inline_ = True +fatalerror._jit_look_inside_ = False +fatalerror._annenforceargs_ = [str] + +def fatalerror_notb(msg): + # a variant of fatalerror() that doesn't print the RPython traceback + if not we_are_translated(): + raise FatalError(msg) + from rpython.rtyper.lltypesystem import lltype + from rpython.rtyper.lltypesystem.lloperation import llop + llop.debug_fatalerror(lltype.Void, msg) +fatalerror_notb._dont_inline_ = True +fatalerror_notb._jit_look_inside_ = False +fatalerror_notb._annenforceargs_ = [str] From pypy.commits at gmail.com Fri Feb 5 14:47:05 2016 From: pypy.commits at gmail.com (plan_rich) Date: Fri, 05 Feb 2016 11:47:05 -0800 (PST) Subject: [pypy-commit] pypy s390x-backend: call int should provide rffi.INT instead of rffi.SIGNED, this works on little endian, but not big Message-ID: <56b4fc39.2a06c20a.98d3f.52b2@mx.google.com> Author: Richard Plangger Branch: s390x-backend Changeset: r82094:ca18f251c6bf Date: 2016-02-05 20:46 +0100 http://bitbucket.org/pypy/pypy/changeset/ca18f251c6bf/ Log: call int should provide rffi.INT instead of rffi.SIGNED, this works on little endian, but not big diff --git a/rpython/jit/backend/zarch/pool.py b/rpython/jit/backend/zarch/pool.py --- 
a/rpython/jit/backend/zarch/pool.py +++ b/rpython/jit/backend/zarch/pool.py @@ -47,7 +47,6 @@ descr = op.getdescr() if descr not in asm.target_tokens_currently_compiling: # this is a 'long' jump instead of a relative jump - # TODO why no reserve literal? self.offset_map[descr] = self.size self.offset_descr[descr] = self.size self.allocate_slot(8) elif op.getopnum() == rop.INT_INVERT: diff --git a/rpython/rlib/libffi.py b/rpython/rlib/libffi.py --- a/rpython/rlib/libffi.py +++ b/rpython/rlib/libffi.py @@ -326,7 +326,7 @@ #@jit.oopspec('libffi_call_int(self, funcsym, ll_args)') @jit.dont_look_inside def _do_call_int(self, funcsym, ll_args): - return self._do_call(funcsym, ll_args, rffi.SIGNED) + return self._do_call(funcsym, ll_args, rffi.INT) #@jit.oopspec('libffi_call_float(self, funcsym, ll_args)') @jit.dont_look_inside From pypy.commits at gmail.com Fri Feb 5 16:38:36 2016 From: pypy.commits at gmail.com (plan_rich) Date: Fri, 05 Feb 2016 13:38:36 -0800 (PST) Subject: [pypy-commit] pypy s390x-backend: ffi call fixed in deprecated api that is still used (fix before that was not sufficient), fixed legacy tests test_libffi Message-ID: <56b5165c.45611c0a.c9696.2e6e@mx.google.com> Author: Richard Plangger Branch: s390x-backend Changeset: r82095:d40d932f8349 Date: 2016-02-05 22:37 +0100 http://bitbucket.org/pypy/pypy/changeset/d40d932f8349/ Log: ffi call fixed in deprecated api that is still used (fix before that was not sufficient), fixed legacy tests test_libffi diff --git a/pypy/module/_rawffi/interp_rawffi.py b/pypy/module/_rawffi/interp_rawffi.py --- a/pypy/module/_rawffi/interp_rawffi.py +++ b/pypy/module/_rawffi/interp_rawffi.py @@ -4,7 +4,6 @@ from pypy.interpreter.gateway import interp2app, unwrap_spec from pypy.interpreter.typedef import TypeDef, GetSetProperty -from rpython.jit.backend.llsupport.symbolic import WORD from rpython.rlib.clibffi import * from rpython.rtyper.lltypesystem import lltype, rffi from rpython.rtyper.tool import rffi_platform @@ -447,6 
+446,9 @@ self.ptr = ptr self.argshapes = argshapes self.resshape = resshape + self.narrow_integer = False + if resshape is not None: + self.narrow_integer = resshape.itemcode.lower() in ('c','h','i') def getbuffer(self, space): return space.wrap(rffi.cast(lltype.Unsigned, self.ptr.funcsym)) @@ -506,9 +508,9 @@ result = self.resshape.allocate(space, 1, autofree=True) # adjust_return_size() was used here on result.ll_buffer self.ptr.call(args_ll, result.ll_buffer) - if BIGENDIAN and result.shape.itemcode in ('c','h','i','C','H','I'): + if BIGENDIAN and self.narrow_integer: # we get a 8 byte value in big endian - n = WORD - result.shape.size + n = rffi.sizeof(lltype.Signed) - result.shape.size result.buffer_advance(n) return space.wrap(result) diff --git a/rpython/rlib/clibffi.py b/rpython/rlib/clibffi.py --- a/rpython/rlib/clibffi.py +++ b/rpython/rlib/clibffi.py @@ -594,10 +594,10 @@ intmask(argtypes[i].c_size), flavor='raw') if restype != ffi_type_void: - size = adjust_return_size(intmask(restype.c_size)) + self.restype_size = intmask(restype.c_size) + size = adjust_return_size(self.restype_size) self.ll_result = lltype.malloc(rffi.VOIDP.TO, size, flavor='raw') - self.restype_size = intmask(restype.c_size) else: self.restype_size = -1 @@ -637,7 +637,7 @@ if RES_TP is not lltype.Void: TP = lltype.Ptr(rffi.CArray(RES_TP)) ptr = self.ll_result - if _BIG_ENDIAN and self.restype_size != -1: + if _BIG_ENDIAN and RES_TP in TYPE_MAP_INT: # we get a 8 byte value in big endian n = rffi.sizeof(lltype.Signed) - self.restype_size ptr = rffi.ptradd(ptr, n) diff --git a/rpython/rlib/libffi.py b/rpython/rlib/libffi.py --- a/rpython/rlib/libffi.py +++ b/rpython/rlib/libffi.py @@ -4,6 +4,7 @@ from __future__ import with_statement from rpython.rtyper.lltypesystem import rffi, lltype +from rpython.rlib.unroll import unrolling_iterable from rpython.rlib.objectmodel import specialize, enforceargs from rpython.rlib.rarithmetic import intmask, r_uint, r_singlefloat, r_longlong from 
rpython.rlib import jit @@ -15,6 +16,9 @@ from rpython.rlib.rdynload import DLLHANDLE import os +import sys + +_BIG_ENDIAN = sys.byteorder == 'big' class types(object): """ @@ -211,6 +215,8 @@ # ====================================================================== +NARROW_INTEGER_TYPES = unrolling_iterable([rffi.CHAR, + rffi.UCHAR, rffi.SHORT, rffi.USHORT, rffi.INT, rffi.UINT]) class Func(AbstractFuncPtr): @@ -263,7 +269,12 @@ res = self._do_call_raw(self.funcsym, ll_args) elif _fits_into_signed(RESULT): assert not types.is_struct(self.restype) - res = self._do_call_int(self.funcsym, ll_args) + for res in NARROW_INTEGER_TYPES: + if RESULT is res: + res = self._do_call_int(self.funcsym, ll_args, rffi.CHAR) + break + else: + res = self._do_call_int(self.funcsym, ll_args, rffi.SIGNED) elif RESULT is rffi.DOUBLE: return self._do_call_float(self.funcsym, ll_args) elif RESULT is rffi.FLOAT: @@ -325,8 +336,9 @@ #@jit.oopspec('libffi_call_int(self, funcsym, ll_args)') @jit.dont_look_inside - def _do_call_int(self, funcsym, ll_args): - return self._do_call(funcsym, ll_args, rffi.INT) + @specialize.arg(3) + def _do_call_int(self, funcsym, ll_args, TP): + return self._do_call(funcsym, ll_args, TP) #@jit.oopspec('libffi_call_float(self, funcsym, ll_args)') @jit.dont_look_inside @@ -368,10 +380,10 @@ @specialize.arg(3) def _do_call(self, funcsym, ll_args, RESULT): # XXX: check len(args)? 
- ll_result = lltype.nullptr(rffi.CCHARP.TO) + ll_result = lltype.nullptr(rffi.VOIDP.TO) if self.restype != types.void: size = adjust_return_size(intmask(self.restype.c_size)) - ll_result = lltype.malloc(rffi.CCHARP.TO, size, + ll_result = lltype.malloc(rffi.VOIDP.TO, size, flavor='raw') ffires = c_ffi_call(self.ll_cif, self.funcsym, @@ -379,14 +391,20 @@ rffi.cast(rffi.VOIDPP, ll_args)) if RESULT is not lltype.Void: TP = lltype.Ptr(rffi.CArray(RESULT)) - buf = rffi.cast(TP, ll_result) if types.is_struct(self.restype): assert RESULT == rffi.SIGNED # for structs, we directly return the buffer and transfer the # ownership + buf = rffi.cast(TP, ll_result) res = rffi.cast(RESULT, buf) else: - res = buf[0] + if _BIG_ENDIAN and types.getkind(self.restype) in ('i','u'): + ptr = ll_result + n = rffi.sizeof(lltype.Signed) - self.restype.c_size + ptr = rffi.ptradd(ptr, n) + res = rffi.cast(TP, ptr)[0] + else: + res = rffi.cast(TP, ll_result)[0] else: res = None self._free_buffers(ll_result, ll_args) diff --git a/rpython/rlib/test/test_libffi.py b/rpython/rlib/test/test_libffi.py --- a/rpython/rlib/test/test_libffi.py +++ b/rpython/rlib/test/test_libffi.py @@ -274,7 +274,7 @@ """ libfoo = self.get_libfoo() func = (libfoo, 'diff_xy', [types.sint, types.signed], types.sint) - res = self.call(func, [50, 8], lltype.Signed) + res = self.call(func, [50, 8], rffi.INT) assert res == 42 def test_simple(self): @@ -287,7 +287,7 @@ """ libfoo = self.get_libfoo() func = (libfoo, 'sum_xy', [types.sint, types.double], types.sint) - res = self.call(func, [38, 4.2], lltype.Signed, jitif=["floats"]) + res = self.call(func, [38, 4.2], rffi.INT, jitif=["floats"]) assert res == 42 def test_float_result(self): @@ -319,7 +319,7 @@ """ libfoo = self.get_libfoo() func = (libfoo, 'many_args', [types.uchar, types.sint], types.sint) - res = self.call(func, [chr(20), 22], rffi.SIGNED) + res = self.call(func, [chr(20), 22], rffi.INT) assert res == 42 def test_char_args(self): @@ -418,12 +418,12 @@ 
set_dummy = (libfoo, 'set_dummy', [types.sint], types.void) get_dummy = (libfoo, 'get_dummy', [], types.sint) # - initval = self.call(get_dummy, [], rffi.SIGNED) + initval = self.call(get_dummy, [], rffi.INT) # res = self.call(set_dummy, [initval+1], lltype.Void) assert res is None # - res = self.call(get_dummy, [], rffi.SIGNED) + res = self.call(get_dummy, [], rffi.INT) assert res == initval+1 def test_single_float_args(self): From pypy.commits at gmail.com Fri Feb 5 16:43:26 2016 From: pypy.commits at gmail.com (plan_rich) Date: Fri, 05 Feb 2016 13:43:26 -0800 (PST) Subject: [pypy-commit] pypy s390x-backend: refactoring issue Message-ID: <56b5177e.0772c20a.50d1d.ffff8125@mx.google.com> Author: Richard Plangger Branch: s390x-backend Changeset: r82096:a84f5c5bcec6 Date: 2016-02-05 22:42 +0100 http://bitbucket.org/pypy/pypy/changeset/a84f5c5bcec6/ Log: refactoring issue diff --git a/rpython/rlib/libffi.py b/rpython/rlib/libffi.py --- a/rpython/rlib/libffi.py +++ b/rpython/rlib/libffi.py @@ -271,7 +271,7 @@ assert not types.is_struct(self.restype) for res in NARROW_INTEGER_TYPES: if RESULT is res: - res = self._do_call_int(self.funcsym, ll_args, rffi.CHAR) + res = self._do_call_int(self.funcsym, ll_args, RESULT) break else: res = self._do_call_int(self.funcsym, ll_args, rffi.SIGNED) From pypy.commits at gmail.com Sat Feb 6 05:29:22 2016 From: pypy.commits at gmail.com (fijal) Date: Sat, 06 Feb 2016 02:29:22 -0800 (PST) Subject: [pypy-commit] pypy default: somehow it ate my bracket Message-ID: <56b5cb02.0cb81c0a.6083f.ffffd7f0@mx.google.com> Author: fijal Branch: Changeset: r82097:32b3efa1db67 Date: 2016-02-06 11:27 +0100 http://bitbucket.org/pypy/pypy/changeset/32b3efa1db67/ Log: somehow it ate my bracket diff --git a/pypy/module/_vmprof/test/test_direct.py b/pypy/module/_vmprof/test/test_direct.py --- a/pypy/module/_vmprof/test/test_direct.py +++ b/pypy/module/_vmprof/test/test_direct.py @@ -69,4 +69,4 @@ result = ffi.cast("void**", buf) res = 
lib.vmprof_write_header_for_jit_addr(result, 0, ffi.NULL, 100) assert res == 10 - assert [x for x in buf] == [6, 0, 3, 16, 3, 12, 3, 8, 3, 4 + assert [x for x in buf] == [6, 0, 3, 16, 3, 12, 3, 8, 3, 4] From pypy.commits at gmail.com Sat Feb 6 05:29:24 2016 From: pypy.commits at gmail.com (fijal) Date: Sat, 06 Feb 2016 02:29:24 -0800 (PST) Subject: [pypy-commit] pypy default: start working on windows Message-ID: <56b5cb04.4e8e1c0a.12513.ffffd459@mx.google.com> Author: fijal Branch: Changeset: r82098:515907d579a2 Date: 2016-02-06 11:28 +0100 http://bitbucket.org/pypy/pypy/changeset/515907d579a2/ Log: start working on windows diff --git a/rpython/rlib/rvmprof/src/rvmprof.c b/rpython/rlib/rvmprof/src/rvmprof.c --- a/rpython/rlib/rvmprof/src/rvmprof.c +++ b/rpython/rlib/rvmprof/src/rvmprof.c @@ -22,4 +22,8 @@ #endif +#if defined(__unix__) || defined(__APPLE__) #include "vmprof_main.h" +#else +#include "vmprof_main_win32.h" +#endif diff --git a/rpython/rlib/rvmprof/src/vmprof_common.h b/rpython/rlib/rvmprof/src/vmprof_common.h --- a/rpython/rlib/rvmprof/src/vmprof_common.h +++ b/rpython/rlib/rvmprof/src/vmprof_common.h @@ -70,3 +70,15 @@ memcpy(&header.interp_name[4], interp_name, namelen); return _write_all((char*)&header, 5 * sizeof(long) + 4 + namelen); } + +#ifndef RPYTHON_LL2CTYPES +static vmprof_stack_t *get_vmprof_stack(void) +{ + return RPY_THREADLOCALREF_GET(vmprof_tl_stack); +} +#else +static vmprof_stack_t *get_vmprof_stack(void) +{ + return 0; +} +#endif diff --git a/rpython/rlib/rvmprof/src/vmprof_main.h b/rpython/rlib/rvmprof/src/vmprof_main.h --- a/rpython/rlib/rvmprof/src/vmprof_main.h +++ b/rpython/rlib/rvmprof/src/vmprof_main.h @@ -86,18 +86,6 @@ */ -#ifndef RPYTHON_LL2CTYPES -static vmprof_stack_t *get_vmprof_stack(void) -{ - return RPY_THREADLOCALREF_GET(vmprof_tl_stack); -} -#else -static vmprof_stack_t *get_vmprof_stack(void) -{ - return 0; -} -#endif - static int get_stack_trace(intptr_t *result, int max_depth, intptr_t pc, ucontext_t *ucontext) { 
vmprof_stack_t* stack = get_vmprof_stack(); diff --git a/rpython/rlib/rvmprof/src/vmprof_main_win32.h b/rpython/rlib/rvmprof/src/vmprof_main_win32.h new file mode 100644 --- /dev/null +++ b/rpython/rlib/rvmprof/src/vmprof_main_win32.h @@ -0,0 +1,134 @@ + +#include "windows.h" + +HANDLE write_mutex; + +int prepare_concurrent_bufs(void) +{ + if (!(write_mutex = CreateMutex(NULL, FALSE, NULL))) + return -1; + return 0; +} + +#include "vmprof_stack.h" +#include "vmprof_common.h" +#include + +// This file has been inspired (but not copied from since the LICENSE +// would not allow it) from verysleepy profiler + +volatile int thread_started = 0; +volatile int enabled = 0; + +static int _write_all(const char *buf, size_t bufsize) +{ + int res; + res = WaitForSingleObject(write_mutex, INFINITE); + if (profile_file == -1) { + ReleaseMutex(write_mutex); + return -1; + } + while (bufsize > 0) { + ssize_t count = write(profile_file, buf, bufsize); + if (count <= 0) { + ReleaseMutex(write_mutex); + return -1; /* failed */ + } + buf += count; + bufsize -= count; + } + ReleaseMutex(write_mutex); + return 0; +} + +RPY_EXTERN +int vmprof_register_virtual_function(char *code_name, long code_uid, + int auto_retry) +{ + char buf[2048]; + int namelen = strnlen(code_name, 1023); + buf[0] = MARKER_VIRTUAL_IP; + *(long*)(buf + 1) = code_uid; + *(long*)(buf + 1 + sizeof(long)) = namelen; + memcpy(buf + 1 + 2 * sizeof(long), code_name, namelen); + _write_all(buf, namelen + 2 * sizeof(long) + 1); + return 0; +} + +int vmprof_snapshot_thread(DWORD thread_id, PyThreadState *tstate, prof_stacktrace_s *stack) +{ + HRESULT result; + HANDLE hThread = OpenThread(THREAD_ALL_ACCESS, FALSE, thread_id); + int depth; + if (!hThread) { + return -1; + } + result = SuspendThread(hThread); + if(result == 0xffffffff) + return -1; // possible, e.g. 
attached debugger or thread alread suspended + // find the correct thread + depth = read_trace_from_cpy_frame(tstate->frame, stack->stack, + MAX_STACK_DEPTH); + stack->depth = depth; + stack->stack[depth++] = (void*)thread_id; + stack->count = 1; + stack->marker = MARKER_STACKTRACE; + ResumeThread(hThread); + return depth; +} + +long __stdcall vmprof_mainloop(void *arg) +{ + prof_stacktrace_s *stack = (prof_stacktrace_s*)malloc(SINGLE_BUF_SIZE); + HANDLE hThreadSnap = INVALID_HANDLE_VALUE; + int depth; + PyThreadState *tstate; + + while (1) { + Sleep(profile_interval_usec * 1000); + if (!enabled) { + continue; + } + tstate = PyInterpreterState_Head()->tstate_head; + while (tstate) { + depth = vmprof_snapshot_thread(tstate->thread_id, tstate, stack); + if (depth > 0) { + _write_all((char*)stack + offsetof(prof_stacktrace_s, marker), + depth * sizeof(void *) + + sizeof(struct prof_stacktrace_s) - + offsetof(struct prof_stacktrace_s, marker)); + } + tstate = tstate->next; + } + } +} + +RPY_EXTERN +int vmprof_enable(void) +{ + if (!thread_started) { + if (!CreateThread(NULL, 0, vmprof_mainloop, NULL, 0, NULL)) { + return -1; + } + thread_started = 1; + } + enabled = 1; + return 0; +} + +RPY_EXTERN +int vmprof_disable(void) +{ + char marker = MARKER_TRAILER; + + enabled = 0; + if (_write_all(&marker, 1) < 0) + return -1; + profile_file = -1; + return 0; +} + +RPY_EXTERN +void vmprof_ignore_signals(int ignored) +{ +} From pypy.commits at gmail.com Sat Feb 6 05:29:26 2016 From: pypy.commits at gmail.com (fijal) Date: Sat, 06 Feb 2016 02:29:26 -0800 (PST) Subject: [pypy-commit] pypy default: merge Message-ID: <56b5cb06.05e41c0a.992b1.ffff9627@mx.google.com> Author: fijal Branch: Changeset: r82099:abe7c26d3bb5 Date: 2016-02-06 11:28 +0100 http://bitbucket.org/pypy/pypy/changeset/abe7c26d3bb5/ Log: merge diff --git a/rpython/jit/backend/llsupport/test/zrpy_vmprof_test.py b/rpython/jit/backend/llsupport/test/zrpy_vmprof_test.py --- 
a/rpython/jit/backend/llsupport/test/zrpy_vmprof_test.py +++ b/rpython/jit/backend/llsupport/test/zrpy_vmprof_test.py @@ -9,6 +9,10 @@ class CompiledVmprofTest(CCompiledMixin): CPUClass = getcpuclass() + def setup(self): + if self.CPUClass.backend_name != 'x86_64': + py.test.skip("vmprof only supports x86-64 CPUs at the moment") + def _get_TranslationContext(self): t = TranslationContext() t.config.translation.gc = 'incminimark' @@ -83,4 +87,4 @@ except ImportError: pass else: - check_vmprof_output() \ No newline at end of file + check_vmprof_output() diff --git a/rpython/rlib/debug.py b/rpython/rlib/debug.py --- a/rpython/rlib/debug.py +++ b/rpython/rlib/debug.py @@ -1,76 +1,41 @@ -import sys, time +import sys +import time + from rpython.rtyper.extregistry import ExtRegistryEntry from rpython.rlib.objectmodel import we_are_translated from rpython.rlib.rarithmetic import is_valid_int -from rpython.rtyper.extfunc import ExtFuncEntry +from rpython.rtyper.extfunc import register_external from rpython.rtyper.lltypesystem import lltype +from rpython.rtyper.lltypesystem import rffi from rpython.translator.tool.cbuild import ExternalCompilationInfo - -def ll_assert(x, msg): - """After translation to C, this becomes an RPyAssert.""" - assert type(x) is bool, "bad type! 
got %r" % (type(x),) - assert x, msg - -class Entry(ExtRegistryEntry): - _about_ = ll_assert - - def compute_result_annotation(self, s_x, s_msg): - assert s_msg.is_constant(), ("ll_assert(x, msg): " - "the msg must be constant") - return None - - def specialize_call(self, hop): - vlist = hop.inputargs(lltype.Bool, lltype.Void) - hop.exception_cannot_occur() - hop.genop('debug_assert', vlist) - -class FatalError(Exception): - pass - -def fatalerror(msg): - # print the RPython traceback and abort with a fatal error - if not we_are_translated(): - raise FatalError(msg) - from rpython.rtyper.lltypesystem import lltype - from rpython.rtyper.lltypesystem.lloperation import llop - llop.debug_print_traceback(lltype.Void) - llop.debug_fatalerror(lltype.Void, msg) -fatalerror._dont_inline_ = True -fatalerror._jit_look_inside_ = False -fatalerror._annenforceargs_ = [str] - -def fatalerror_notb(msg): - # a variant of fatalerror() that doesn't print the RPython traceback - if not we_are_translated(): - raise FatalError(msg) - from rpython.rtyper.lltypesystem import lltype - from rpython.rtyper.lltypesystem.lloperation import llop - llop.debug_fatalerror(lltype.Void, msg) -fatalerror_notb._dont_inline_ = True -fatalerror_notb._jit_look_inside_ = False -fatalerror_notb._annenforceargs_ = [str] +# Expose these here (public interface) +from rpython.rtyper.debug import ( + ll_assert, FatalError, fatalerror, fatalerror_notb) class DebugLog(list): def debug_print(self, *args): self.append(('debug_print',) + args) + def debug_start(self, category, time=None): self.append(('debug_start', category, time)) + def debug_stop(self, category, time=None): - for i in xrange(len(self)-1, -1, -1): + for i in xrange(len(self) - 1, -1, -1): if self[i][0] == 'debug_start': assert self[i][1] == category, ( "nesting error: starts with %r but stops with %r" % (self[i][1], category)) starttime = self[i][2] if starttime is not None or time is not None: - self[i:] = [(category, starttime, time, 
self[i+1:])] + self[i:] = [(category, starttime, time, self[i + 1:])] else: - self[i:] = [(category, self[i+1:])] + self[i:] = [(category, self[i + 1:])] return assert False, ("nesting error: no start corresponding to stop %r" % (category,)) + def __repr__(self): import pprint return pprint.pformat(list(self)) @@ -161,7 +126,6 @@ return self.bookkeeper.immutablevalue(False) def specialize_call(self, hop): - from rpython.rtyper.lltypesystem import lltype t = hop.rtyper.annotator.translator hop.exception_cannot_occur() if t.config.translation.log: @@ -189,7 +153,6 @@ return annmodel.SomeInteger() def specialize_call(self, hop): - from rpython.rtyper.lltypesystem import lltype hop.exception_cannot_occur() return hop.genop('debug_offset', [], resulttype=lltype.Signed) @@ -223,7 +186,6 @@ return None def specialize_call(self, hop): - from rpython.rtyper.lltypesystem import lltype vlist = hop.inputargs(lltype.Signed) hop.exception_cannot_occur() return hop.genop('debug_forked', vlist) @@ -244,7 +206,6 @@ def compute_result_annotation(self, s_RESTYPE, s_pythonfunction, *args_s): from rpython.annotator import model as annmodel from rpython.rtyper.llannotation import lltype_to_annotation - from rpython.rtyper.lltypesystem import lltype assert s_RESTYPE.is_constant() assert s_pythonfunction.is_constant() s_result = s_RESTYPE.const @@ -255,7 +216,6 @@ def specialize_call(self, hop): from rpython.annotator import model as annmodel - from rpython.rtyper.lltypesystem import lltype RESTYPE = hop.args_s[0].const if not isinstance(RESTYPE, lltype.LowLevelType): assert isinstance(RESTYPE, annmodel.SomeObject) @@ -283,7 +243,8 @@ def compute_result_annotation(self, s_arg, s_checker): if not s_checker.is_constant(): - raise ValueError("Second argument of check_annotation must be constant") + raise ValueError( + "Second argument of check_annotation must be constant") checker = s_checker.const checker(s_arg, self.bookkeeper) return s_arg @@ -308,11 +269,14 @@ assert isinstance(s_arg, 
SomeList) # the logic behind it is that we try not to propagate # make_sure_not_resized, when list comprehension is not on - if self.bookkeeper.annotator.translator.config.translation.list_comprehension_operations: + config = self.bookkeeper.annotator.translator.config + if config.translation.list_comprehension_operations: s_arg.listdef.never_resize() else: from rpython.annotator.annrpython import log - log.WARNING('make_sure_not_resized called, but has no effect since list_comprehension is off') + log.WARNING( + "make_sure_not_resized called, but has no effect since " + "list_comprehension is off") return s_arg def specialize_call(self, hop): @@ -433,15 +397,11 @@ import pdb; pdb.set_trace() if not sys.platform.startswith('win'): - def _make_impl_attach_gdb(): - # circular imports fun :-( - import sys - from rpython.rtyper.lltypesystem import rffi - if sys.platform.startswith('linux'): - # Only necessary on Linux - eci = ExternalCompilationInfo(includes=['string.h', 'assert.h', - 'sys/prctl.h'], - post_include_bits=[""" + if sys.platform.startswith('linux'): + # Only necessary on Linux + eci = ExternalCompilationInfo(includes=['string.h', 'assert.h', + 'sys/prctl.h'], + post_include_bits=[""" /* If we have an old Linux kernel (or compile with old system headers), the following two macros are not defined. But we would still like a pypy translated on such a system to run on a more modern system. 
*/ @@ -455,55 +415,38 @@ prctl(PR_SET_PTRACER, PR_SET_PTRACER_ANY); } """]) - allow_attach = rffi.llexternal( - "pypy__allow_attach", [], lltype.Void, - compilation_info=eci, _nowrapper=True) + allow_attach = rffi.llexternal( + "pypy__allow_attach", [], lltype.Void, + compilation_info=eci, _nowrapper=True) + else: + # Do nothing, there's no prctl + def allow_attach(): + pass + + def impl_attach_gdb(): + import os + allow_attach() + pid = os.getpid() + gdbpid = os.fork() + if gdbpid == 0: + shell = os.environ.get("SHELL") or "/bin/sh" + sepidx = shell.rfind(os.sep) + 1 + if sepidx > 0: + argv0 = shell[sepidx:] + else: + argv0 = shell + try: + os.execv(shell, [argv0, "-c", "gdb -p %d" % pid]) + except OSError as e: + os.write(2, "Could not start GDB: %s" % ( + os.strerror(e.errno))) + raise SystemExit else: - # Do nothing, there's no prctl - def allow_attach(): - pass + time.sleep(1) # give the GDB time to attach - def impl_attach_gdb(): - import os - allow_attach() - pid = os.getpid() - gdbpid = os.fork() - if gdbpid == 0: - shell = os.environ.get("SHELL") or "/bin/sh" - sepidx = shell.rfind(os.sep) + 1 - if sepidx > 0: - argv0 = shell[sepidx:] - else: - argv0 = shell - try: - os.execv(shell, [argv0, "-c", "gdb -p %d" % pid]) - except OSError as e: - os.write(2, "Could not start GDB: %s" % ( - os.strerror(e.errno))) - raise SystemExit - else: - time.sleep(1) # give the GDB time to attach +else: + def impl_attach_gdb(): + print "Don't know how to attach GDB on Windows" - return impl_attach_gdb -else: - def _make_impl_attach_gdb(): - def impl_attach_gdb(): - print "Don't know how to attach GDB on Windows" - return impl_attach_gdb - - -class FunEntry(ExtFuncEntry): - _about_ = attach_gdb - signature_args = [] - #lltypeimpl = staticmethod(impl_attach_gdb) --- done lazily below - name = "impl_attach_gdb" - - @property - def lltypeimpl(self): - if not hasattr(self.__class__, '_lltypeimpl'): - self.__class__._lltypeimpl = staticmethod(_make_impl_attach_gdb()) - return 
self._lltypeimpl - - def compute_result_annotation(self, *args_s): - from rpython.annotator.model import s_None - return s_None +register_external(attach_gdb, [], result=None, + export_name="impl_attach_gdb", llimpl=impl_attach_gdb) diff --git a/rpython/rtyper/debug.py b/rpython/rtyper/debug.py new file mode 100644 --- /dev/null +++ b/rpython/rtyper/debug.py @@ -0,0 +1,47 @@ +from rpython.rlib.objectmodel import we_are_translated +from rpython.rtyper.extregistry import ExtRegistryEntry +from rpython.rtyper.lltypesystem import lltype + +def ll_assert(x, msg): + """After translation to C, this becomes an RPyAssert.""" + assert type(x) is bool, "bad type! got %r" % (type(x),) + assert x, msg + +class Entry(ExtRegistryEntry): + _about_ = ll_assert + + def compute_result_annotation(self, s_x, s_msg): + assert s_msg.is_constant(), ("ll_assert(x, msg): " + "the msg must be constant") + return None + + def specialize_call(self, hop): + vlist = hop.inputargs(lltype.Bool, lltype.Void) + hop.exception_cannot_occur() + hop.genop('debug_assert', vlist) + +class FatalError(Exception): + pass + +def fatalerror(msg): + # print the RPython traceback and abort with a fatal error + if not we_are_translated(): + raise FatalError(msg) + from rpython.rtyper.lltypesystem import lltype + from rpython.rtyper.lltypesystem.lloperation import llop + llop.debug_print_traceback(lltype.Void) + llop.debug_fatalerror(lltype.Void, msg) +fatalerror._dont_inline_ = True +fatalerror._jit_look_inside_ = False +fatalerror._annenforceargs_ = [str] + +def fatalerror_notb(msg): + # a variant of fatalerror() that doesn't print the RPython traceback + if not we_are_translated(): + raise FatalError(msg) + from rpython.rtyper.lltypesystem import lltype + from rpython.rtyper.lltypesystem.lloperation import llop + llop.debug_fatalerror(lltype.Void, msg) +fatalerror_notb._dont_inline_ = True +fatalerror_notb._jit_look_inside_ = False +fatalerror_notb._annenforceargs_ = [str] diff --git 
a/rpython/rtyper/lltypesystem/rbuilder.py b/rpython/rtyper/lltypesystem/rbuilder.py --- a/rpython/rtyper/lltypesystem/rbuilder.py +++ b/rpython/rtyper/lltypesystem/rbuilder.py @@ -1,7 +1,7 @@ from rpython.rlib import rgc, jit from rpython.rlib.objectmodel import enforceargs from rpython.rlib.rarithmetic import ovfcheck, r_uint, intmask -from rpython.rlib.debug import ll_assert +from rpython.rtyper.debug import ll_assert from rpython.rlib.unroll import unrolling_iterable from rpython.rtyper.rptr import PtrRepr from rpython.rtyper.lltypesystem import lltype, rffi, rstr @@ -11,7 +11,7 @@ from rpython.rtyper.rbuilder import AbstractStringBuilderRepr from rpython.tool.sourcetools import func_with_new_name from rpython.rtyper.annlowlevel import llstr, llunicode - + # ------------------------------------------------------------ diff --git a/rpython/rtyper/lltypesystem/rbytearray.py b/rpython/rtyper/lltypesystem/rbytearray.py --- a/rpython/rtyper/lltypesystem/rbytearray.py +++ b/rpython/rtyper/lltypesystem/rbytearray.py @@ -1,7 +1,7 @@ from rpython.rtyper.rbytearray import AbstractByteArrayRepr from rpython.rtyper.lltypesystem import lltype, rstr -from rpython.rlib.debug import ll_assert +from rpython.rtyper.debug import ll_assert BYTEARRAY = lltype.GcForwardReference() diff --git a/rpython/rtyper/lltypesystem/rdict.py b/rpython/rtyper/lltypesystem/rdict.py --- a/rpython/rtyper/lltypesystem/rdict.py +++ b/rpython/rtyper/lltypesystem/rdict.py @@ -4,7 +4,7 @@ from rpython.rtyper.lltypesystem import lltype from rpython.rtyper.lltypesystem.lloperation import llop from rpython.rlib import objectmodel, jit -from rpython.rlib.debug import ll_assert +from rpython.rtyper.debug import ll_assert from rpython.rlib.rarithmetic import r_uint, intmask, LONG_BIT from rpython.rtyper import rmodel from rpython.rtyper.error import TyperError diff --git a/rpython/rtyper/lltypesystem/rlist.py b/rpython/rtyper/lltypesystem/rlist.py --- a/rpython/rtyper/lltypesystem/rlist.py +++ 
b/rpython/rtyper/lltypesystem/rlist.py @@ -1,5 +1,5 @@ from rpython.rlib import rgc, jit, types -from rpython.rlib.debug import ll_assert +from rpython.rtyper.debug import ll_assert from rpython.rlib.signature import signature from rpython.rtyper.error import TyperError from rpython.rtyper.lltypesystem import rstr diff --git a/rpython/rtyper/lltypesystem/rordereddict.py b/rpython/rtyper/lltypesystem/rordereddict.py --- a/rpython/rtyper/lltypesystem/rordereddict.py +++ b/rpython/rtyper/lltypesystem/rordereddict.py @@ -6,7 +6,7 @@ from rpython.rlib import objectmodel, jit, rgc, types from rpython.rlib.signature import signature from rpython.rlib.objectmodel import specialize, likely -from rpython.rlib.debug import ll_assert +from rpython.rtyper.debug import ll_assert from rpython.rlib.rarithmetic import r_uint, intmask from rpython.rtyper import rmodel from rpython.rtyper.error import TyperError diff --git a/rpython/rtyper/lltypesystem/rstr.py b/rpython/rtyper/lltypesystem/rstr.py --- a/rpython/rtyper/lltypesystem/rstr.py +++ b/rpython/rtyper/lltypesystem/rstr.py @@ -2,12 +2,12 @@ from rpython.annotator import model as annmodel from rpython.rlib import jit, types -from rpython.rlib.debug import ll_assert from rpython.rlib.objectmodel import (malloc_zero_filled, we_are_translated, _hash_string, keepalive_until_here, specialize, enforceargs) from rpython.rlib.signature import signature from rpython.rlib.rarithmetic import ovfcheck from rpython.rtyper.error import TyperError +from rpython.rtyper.debug import ll_assert from rpython.rtyper.lltypesystem import ll_str, llmemory from rpython.rtyper.lltypesystem.lltype import (GcStruct, Signed, Array, Char, UniChar, Ptr, malloc, Bool, Void, GcArray, nullptr, cast_primitive, diff --git a/rpython/rtyper/rlist.py b/rpython/rtyper/rlist.py --- a/rpython/rtyper/rlist.py +++ b/rpython/rtyper/rlist.py @@ -1,7 +1,7 @@ from rpython.annotator import model as annmodel from rpython.flowspace.model import Constant from rpython.rlib import 
rgc, jit, types -from rpython.rlib.debug import ll_assert +from rpython.rtyper.debug import ll_assert from rpython.rlib.objectmodel import malloc_zero_filled, enforceargs, specialize from rpython.rlib.signature import signature from rpython.rlib.rarithmetic import ovfcheck, widen, r_uint, intmask diff --git a/rpython/rtyper/rpbc.py b/rpython/rtyper/rpbc.py --- a/rpython/rtyper/rpbc.py +++ b/rpython/rtyper/rpbc.py @@ -7,7 +7,7 @@ from rpython.annotator.classdesc import ClassDesc from rpython.flowspace.model import Constant from rpython.annotator.argument import simple_args -from rpython.rlib.debug import ll_assert +from rpython.rtyper.debug import ll_assert from rpython.rlib.unroll import unrolling_iterable from rpython.rtyper import rclass, callparse from rpython.rtyper.rclass import CLASSTYPE, OBJECT_VTABLE, OBJECTPTR diff --git a/rpython/rtyper/test/test_rlist.py b/rpython/rtyper/test/test_rlist.py --- a/rpython/rtyper/test/test_rlist.py +++ b/rpython/rtyper/test/test_rlist.py @@ -3,7 +3,7 @@ import py -from rpython.rlib.debug import ll_assert +from rpython.rtyper.debug import ll_assert from rpython.rtyper.error import TyperError from rpython.rtyper.llinterp import LLException, LLAssertFailure from rpython.rtyper.lltypesystem import rlist as ll_rlist From pypy.commits at gmail.com Sat Feb 6 13:47:26 2016 From: pypy.commits at gmail.com (arigo) Date: Sat, 06 Feb 2016 10:47:26 -0800 (PST) Subject: [pypy-commit] pypy.org extradoc: update the values Message-ID: <56b63fbe.d22e1c0a.4468c.6fcb@mx.google.com> Author: Armin Rigo Branch: extradoc Changeset: r699:e27956d33e01 Date: 2016-02-06 19:47 +0100 http://bitbucket.org/pypy/pypy.org/changeset/e27956d33e01/ Log: update the values diff --git a/don1.html b/don1.html --- a/don1.html +++ b/don1.html @@ -15,7 +15,7 @@ - $62793 of $105000 (59.8%) + $62841 of $105000 (59.8%)
@@ -23,7 +23,7 @@
  • diff --git a/don4.html b/don4.html --- a/don4.html +++ b/don4.html @@ -17,7 +17,7 @@ 2nd call: - $30379 of $80000 (38.0%) + $30383 of $80000 (38.0%)
    @@ -25,7 +25,7 @@
  • From pypy.commits at gmail.com Sat Feb 6 14:26:18 2016 From: pypy.commits at gmail.com (arigo) Date: Sat, 06 Feb 2016 11:26:18 -0800 (PST) Subject: [pypy-commit] cffi default: issue #246: trying to be more robust against CPython's fragile Message-ID: <56b648da.2851c20a.765b9.ffffcd30@mx.google.com> Author: Armin Rigo Branch: Changeset: r2619:bdcc6eeb3de4 Date: 2016-02-06 20:25 +0100 http://bitbucket.org/cffi/cffi/changeset/bdcc6eeb3de4/ Log: issue #246: trying to be more robust against CPython's fragile interpreter shutdown logic diff --git a/c/call_python.c b/c/call_python.c --- a/c/call_python.c +++ b/c/call_python.c @@ -115,6 +115,7 @@ static int _update_cache_to_call_python(struct _cffi_externpy_s *externpy) { PyObject *interpstate_dict, *interpstate_key, *infotuple, *old1, *new1; + PyObject *old2; interpstate_dict = _get_interpstate_dict(); if (interpstate_dict == NULL) @@ -127,14 +128,17 @@ infotuple = PyDict_GetItem(interpstate_dict, interpstate_key); Py_DECREF(interpstate_key); if (infotuple == NULL) - return 1; /* no ffi.def_extern() from this subinterpreter */ + return 3; /* no ffi.def_extern() from this subinterpreter */ new1 = PyThreadState_GET()->interp->modules; Py_INCREF(new1); + Py_INCREF(infotuple); old1 = (PyObject *)externpy->reserved1; + old2 = (PyObject *)externpy->reserved2; externpy->reserved1 = new1; /* holds a reference */ - externpy->reserved2 = infotuple; /* doesn't hold a reference */ + externpy->reserved2 = infotuple; /* holds a reference (issue #246) */ Py_XDECREF(old1); + Py_XDECREF(old2); return 0; /* no error */ @@ -213,9 +217,11 @@ gil_release(state); } if (err) { - static const char *msg[2] = { + static const char *msg[] = { "no code was attached to it yet with @ffi.def_extern()", - "got internal exception (out of memory?)" }; + "got internal exception (out of memory / shutdown issue)", + "@ffi.def_extern() was not called in the current subinterpreter", + }; fprintf(stderr, "extern \"Python\": function %s() called, " "but %s. 
Returning 0.\n", externpy->name, msg[err-1]); memset(args, 0, externpy->size_of_result); From pypy.commits at gmail.com Sat Feb 6 15:02:41 2016 From: pypy.commits at gmail.com (arigo) Date: Sat, 06 Feb 2016 12:02:41 -0800 (PST) Subject: [pypy-commit] pypy cpyext-gc-support-2: borrowed results (copying from cpyext-gc-support) Message-ID: <56b65161.41df1c0a.b6f30.ffff89ae@mx.google.com> Author: Armin Rigo Branch: cpyext-gc-support-2 Changeset: r82101:da0f029832ef Date: 2016-02-06 19:13 +0100 http://bitbucket.org/pypy/pypy/changeset/da0f029832ef/ Log: borrowed results (copying from cpyext-gc-support) diff --git a/pypy/module/cpyext/modsupport.py b/pypy/module/cpyext/modsupport.py --- a/pypy/module/cpyext/modsupport.py +++ b/pypy/module/cpyext/modsupport.py @@ -1,7 +1,7 @@ from rpython.rtyper.lltypesystem import rffi, lltype from pypy.module.cpyext.api import cpython_api, cpython_struct, \ METH_STATIC, METH_CLASS, METH_COEXIST, CANNOT_FAIL, CONST_STRING -from pypy.module.cpyext.pyobject import PyObject, borrow_from +from pypy.module.cpyext.pyobject import PyObject from pypy.interpreter.module import Module from pypy.module.cpyext.methodobject import ( W_PyCFunctionObject, PyCFunction_NewEx, PyDescr_NewMethod, @@ -34,7 +34,7 @@ # This is actually the Py_InitModule4 function, # renamed to refuse modules built against CPython headers. 
@cpython_api([CONST_STRING, lltype.Ptr(PyMethodDef), CONST_STRING, - PyObject, rffi.INT_real], PyObject) + PyObject, rffi.INT_real], PyObject, result_borrowed=True) def _Py_InitPyPyModule(space, name, methods, doc, w_self, apiver): """ Create a new module object based on a name and table of functions, returning @@ -69,7 +69,7 @@ if doc: space.setattr(w_mod, space.wrap("__doc__"), space.wrap(rffi.charp2str(doc))) - return borrow_from(None, w_mod) + return w_mod # borrowed result kept alive in PyImport_AddModule() def convert_method_defs(space, dict_w, methods, w_type, w_self=None, name=None): @@ -114,12 +114,12 @@ return int(space.is_w(w_type, w_obj_type) or space.is_true(space.issubtype(w_obj_type, w_type))) - at cpython_api([PyObject], PyObject) + at cpython_api([PyObject], PyObject, result_borrowed=True) def PyModule_GetDict(space, w_mod): if PyModule_Check(space, w_mod): assert isinstance(w_mod, Module) w_dict = w_mod.getdict(space) - return borrow_from(w_mod, w_dict) + return w_dict # borrowed reference, likely from w_mod.w_dict else: PyErr_BadInternalCall(space) From pypy.commits at gmail.com Sat Feb 6 15:02:40 2016 From: pypy.commits at gmail.com (arigo) Date: Sat, 06 Feb 2016 12:02:40 -0800 (PST) Subject: [pypy-commit] pypy cpyext-gc-support-2: Pass a few more tests Message-ID: <56b65160.2a06c20a.98d3f.ffffceaa@mx.google.com> Author: Armin Rigo Branch: cpyext-gc-support-2 Changeset: r82100:2eef9245adc0 Date: 2016-02-06 19:10 +0100 http://bitbucket.org/pypy/pypy/changeset/2eef9245adc0/ Log: Pass a few more tests diff --git a/pypy/module/cpyext/api.py b/pypy/module/cpyext/api.py --- a/pypy/module/cpyext/api.py +++ b/pypy/module/cpyext/api.py @@ -616,8 +616,7 @@ @specialize.ll() def wrapper(*args): - from pypy.module.cpyext.pyobject import make_ref, from_ref - from pypy.module.cpyext.pyobject import Reference + from pypy.module.cpyext.pyobject import make_ref, from_ref, is_pyobj # we hope that malloc removal removes the newtuple() that is # inserted exactly 
here by the varargs specializer if gil_acquire: diff --git a/pypy/module/cpyext/pyobject.py b/pypy/module/cpyext/pyobject.py --- a/pypy/module/cpyext/pyobject.py +++ b/pypy/module/cpyext/pyobject.py @@ -227,7 +227,7 @@ def debug_collect(): - rawrefcount._collect(track_allocation=False) + rawrefcount._collect() def as_pyobj(space, w_obj): diff --git a/pypy/module/cpyext/test/test_cpyext.py b/pypy/module/cpyext/test/test_cpyext.py --- a/pypy/module/cpyext/test/test_cpyext.py +++ b/pypy/module/cpyext/test/test_cpyext.py @@ -130,11 +130,10 @@ def check_and_print_leaks(self): debug_collect() - return #ZZZ # check for sane refcnts import gc - if not self.enable_leak_checking: + if 1: #ZZZ not self.enable_leak_checking: leakfinder.stop_tracking_allocations(check=False) return False @@ -199,6 +198,9 @@ "the test actually passed in the first place; if it failed " "it is likely to reach this place.") + def test_only_import(self): + import cpyext + def test_load_error(self): import cpyext raises(ImportError, cpyext.load_module, "missing.file", "foo") From pypy.commits at gmail.com Sat Feb 6 21:08:29 2016 From: pypy.commits at gmail.com (Manuel Jacob) Date: Sat, 06 Feb 2016 18:08:29 -0800 (PST) Subject: [pypy-commit] pypy llimpl: Replace test using running_on_llinterp directly by another test that uses running_on_llinterp indirectly through register_external. Message-ID: <56b6a71d.05e41c0a.992b1.ffffa301@mx.google.com> Author: Manuel Jacob Branch: llimpl Changeset: r82102:16c586e89bf4 Date: 2014-01-27 19:58 +0100 http://bitbucket.org/pypy/pypy/changeset/16c586e89bf4/ Log: Replace test using running_on_llinterp directly by another test that uses running_on_llinterp indirectly through register_external. 
(grafted from cd8bd484b255) diff --git a/rpython/rtyper/test/test_extfunc.py b/rpython/rtyper/test/test_extfunc.py --- a/rpython/rtyper/test/test_extfunc.py +++ b/rpython/rtyper/test/test_extfunc.py @@ -182,3 +182,22 @@ # fails with TooLateForChange a.build_types(g, [[str]]) a.build_types(g, [[str0]]) # Does not raise + + def test_register_external_llfakeimpl(self): + def a(i): + return i + def a_llimpl(i): + return i * 2 + def a_llfakeimpl(i): + return i * 3 + register_external(a, [int], int, llimpl=a_llimpl, + llfakeimpl=a_llfakeimpl) + def f(i): + return a(i) + + res = interpret(f, [7]) + assert res == 21 + + from rpython.translator.c.test.test_genc import compile + fc = compile(f, [int]) + assert fc(7) == 14 diff --git a/rpython/rtyper/test/test_rbuiltin.py b/rpython/rtyper/test/test_rbuiltin.py --- a/rpython/rtyper/test/test_rbuiltin.py +++ b/rpython/rtyper/test/test_rbuiltin.py @@ -3,8 +3,7 @@ import py -from rpython.rlib.debug import llinterpcall -from rpython.rlib.objectmodel import instantiate, running_on_llinterp, compute_unique_id, current_object_addr_as_int +from rpython.rlib.objectmodel import instantiate, compute_unique_id, current_object_addr_as_int from rpython.rlib.rarithmetic import (intmask, longlongmask, r_int64, is_valid_int, r_int, r_uint, r_longlong, r_ulonglong) from rpython.rlib.rstring import StringBuilder, UnicodeBuilder @@ -456,26 +455,6 @@ res = self.interpret(fn, [3.25]) assert res == 7.25 - def test_debug_llinterpcall(self): - S = lltype.Struct('S', ('m', lltype.Signed)) - SPTR = lltype.Ptr(S) - def foo(n): - "NOT_RPYTHON" - s = lltype.malloc(S, immortal=True) - s.m = eval("n*6", locals()) - return s - def fn(n): - if running_on_llinterp: - return llinterpcall(SPTR, foo, n).m - else: - return 321 - res = self.interpret(fn, [7]) - assert res == 42 - from rpython.translator.c.test.test_genc import compile - f = compile(fn, [int]) - res = f(7) - assert res == 321 - def test_id(self): class A: pass From pypy.commits at gmail.com Sat Feb 6 
21:08:31 2016 From: pypy.commits at gmail.com (Manuel Jacob) Date: Sat, 06 Feb 2016 18:08:31 -0800 (PST) Subject: [pypy-commit] pypy llimpl: Kill running_on_llinterp. Instead we attach the fakeimpl to the function object and fish it in llinterp. Message-ID: <56b6a71f.4e8e1c0a.12513.ffffe131@mx.google.com> Author: Manuel Jacob Branch: llimpl Changeset: r82103:dbd0602d5f13 Date: 2014-01-27 21:58 +0100 http://bitbucket.org/pypy/pypy/changeset/dbd0602d5f13/ Log: Kill running_on_llinterp. Instead we attach the fakeimpl to the function object and fish it in llinterp. (grafted from 39eb895a3a295c0a20a0fdd467e33dec0eb2fcfb) diff --git a/rpython/rlib/objectmodel.py b/rpython/rlib/objectmodel.py --- a/rpython/rlib/objectmodel.py +++ b/rpython/rlib/objectmodel.py @@ -275,8 +275,6 @@ return lltype.Signed malloc_zero_filled = CDefinedIntSymbolic('MALLOC_ZERO_FILLED', default=0) -running_on_llinterp = CDefinedIntSymbolic('RUNNING_ON_LLINTERP', default=1) -# running_on_llinterp is meant to have the value 0 in all backends # ____________________________________________________________ diff --git a/rpython/rtyper/extfunc.py b/rpython/rtyper/extfunc.py --- a/rpython/rtyper/extfunc.py +++ b/rpython/rtyper/extfunc.py @@ -52,38 +52,12 @@ make_sandbox_trampoline) impl = make_sandbox_trampoline( self.name, signature_args, s_result) - if hasattr(self, 'lltypefakeimpl'): - # If we have both an llimpl and an llfakeimpl, - # we need a wrapper that selects the proper one and calls it - from rpython.tool.sourcetools import func_with_new_name - # Using '*args' is delicate because this wrapper is also - # created for init-time functions like llarena.arena_malloc - # which are called before the GC is fully initialized - args = ', '.join(['arg%d' % i for i in range(len(args_ll))]) - d = {'original_impl': impl, - 's_result': s_result, - 'fakeimpl': fakeimpl, - '__name__': __name__, - } - exec py.code.compile(""" - from rpython.rlib.objectmodel import running_on_llinterp - from rpython.rlib.debug 
import llinterpcall - from rpython.rlib.jit import dont_look_inside - # note: we say 'dont_look_inside' mostly because the - # JIT does not support 'running_on_llinterp', but in - # theory it is probably right to stop jitting anyway. - @dont_look_inside - def ll_wrapper(%s): - if running_on_llinterp: - return llinterpcall(s_result, fakeimpl, %s) - else: - return original_impl(%s) - """ % (args, args, args)) in d - impl = func_with_new_name(d['ll_wrapper'], name + '_wrapper') # store some attributes to the 'impl' function, where # the eventual call to rtyper.getcallable() will find them # and transfer them to the final lltype.functionptr(). impl._llfnobjattrs_ = {'_name': self.name} + if hasattr(self, 'lltypefakeimpl'): + impl._llfnobjattrs_['_fakeimpl'] = fakeimpl obj = rtyper.getannmixlevel().delayedfunction( impl, signature_args, hop.s_result) else: diff --git a/rpython/rtyper/llinterp.py b/rpython/rtyper/llinterp.py --- a/rpython/rtyper/llinterp.py +++ b/rpython/rtyper/llinterp.py @@ -667,6 +667,14 @@ return frame.eval() def op_direct_call(self, f, *args): + pythonfunction = getattr(f._obj, '_fakeimpl', None) + if pythonfunction is not None: + try: + return pythonfunction(*args) + except: + self.make_llexception() + return + FTYPE = lltype.typeOf(f).TO return self.perform_call(f, FTYPE.ARGS, args) From pypy.commits at gmail.com Sat Feb 6 21:08:33 2016 From: pypy.commits at gmail.com (rlamy) Date: Sat, 06 Feb 2016 18:08:33 -0800 (PST) Subject: [pypy-commit] pypy llimpl: Hack the functionptr when passing an llexternal as llimpl in register_external() Message-ID: <56b6a721.cb571c0a.55fbd.ffffe2d8@mx.google.com> Author: Ronan Lamy Branch: llimpl Changeset: r82104:58ecd8c5102c Date: 2016-02-07 02:00 +0000 http://bitbucket.org/pypy/pypy/changeset/58ecd8c5102c/ Log: Hack the functionptr when passing an llexternal as llimpl in register_external() diff --git a/rpython/rtyper/extfunc.py b/rpython/rtyper/extfunc.py --- a/rpython/rtyper/extfunc.py +++ 
b/rpython/rtyper/extfunc.py @@ -1,10 +1,8 @@ from rpython.rtyper.extregistry import ExtRegistryEntry -from rpython.rtyper.lltypesystem.lltype import typeOf, FuncType, functionptr +from rpython.rtyper.lltypesystem.lltype import typeOf, FuncType, functionptr, _ptr from rpython.annotator.model import unionof from rpython.annotator.signature import annotation, SignatureError -import py - class ExtFuncEntry(ExtRegistryEntry): safe_not_sandboxed = False @@ -52,14 +50,17 @@ make_sandbox_trampoline) impl = make_sandbox_trampoline( self.name, signature_args, s_result) - # store some attributes to the 'impl' function, where - # the eventual call to rtyper.getcallable() will find them - # and transfer them to the final lltype.functionptr(). - impl._llfnobjattrs_ = {'_name': self.name} - if hasattr(self, 'lltypefakeimpl'): - impl._llfnobjattrs_['_fakeimpl'] = fakeimpl - obj = rtyper.getannmixlevel().delayedfunction( - impl, signature_args, hop.s_result) + if isinstance(impl, _ptr): + obj = impl + else: + # store some attributes to the 'impl' function, where + # the eventual call to rtyper.getcallable() will find them + # and transfer them to the final lltype.functionptr(). + impl._llfnobjattrs_ = {'_name': self.name} + if hasattr(self, 'lltypefakeimpl'): + impl._llfnobjattrs_['_fakeimpl'] = fakeimpl + obj = rtyper.getannmixlevel().delayedfunction( + impl, signature_args, hop.s_result) else: FT = FuncType(args_ll, ll_result) obj = functionptr(FT, name, _external_name=self.name, @@ -84,6 +85,10 @@ if export_name is None: export_name = function.__name__ + if isinstance(llimpl, _ptr) and llfakeimpl: + llimpl._obj.__dict__['_fakeimpl'] = llfakeimpl + llfakeimpl = None + class FunEntry(ExtFuncEntry): _about_ = function safe_not_sandboxed = sandboxsafe From pypy.commits at gmail.com Sun Feb 7 07:56:48 2016 From: pypy.commits at gmail.com (mjacob) Date: Sun, 07 Feb 2016 04:56:48 -0800 (PST) Subject: [pypy-commit] extradoc extradoc: Change my departure day from 28th to 27th. 
Message-ID: <56b73f10.6953c20a.c061.ffffc6da@mx.google.com> Author: Manuel Jacob Branch: extradoc Changeset: r5603:600e94781178 Date: 2016-02-07 13:56 +0100 http://bitbucket.org/pypy/extradoc/changeset/600e94781178/ Log: Change my departure day from 28th to 27th. diff --git a/sprintinfo/leysin-winter-2016/people.txt b/sprintinfo/leysin-winter-2016/people.txt --- a/sprintinfo/leysin-winter-2016/people.txt +++ b/sprintinfo/leysin-winter-2016/people.txt @@ -14,7 +14,7 @@ Remi Meier 21-27 Ermina Carl Friedrich Bolz 20-27 Ermina? Matti Picus 20-25 Ermina -Manuel Jacob 20-28 Ermina +Manuel Jacob 20-27 Ermina Richard Plangger 20-28 Ermina Maciej Fijalkowski 20-? Ermina (big room preferred) Ronan Lamy 20-27 Ermina? From pypy.commits at gmail.com Sun Feb 7 08:34:50 2016 From: pypy.commits at gmail.com (arigo) Date: Sun, 07 Feb 2016 05:34:50 -0800 (PST) Subject: [pypy-commit] pypy default: issue #2236: ignore xchgb Message-ID: <56b747fa.86e31c0a.fc09c.ffffb208@mx.google.com> Author: Armin Rigo Branch: Changeset: r82105:8581149fc8c5 Date: 2016-02-07 14:33 +0100 http://bitbucket.org/pypy/pypy/changeset/8581149fc8c5/ Log: issue #2236: ignore xchgb diff --git a/rpython/translator/c/gcc/trackgcroot.py b/rpython/translator/c/gcc/trackgcroot.py --- a/rpython/translator/c/gcc/trackgcroot.py +++ b/rpython/translator/c/gcc/trackgcroot.py @@ -528,6 +528,8 @@ 'rex64', # movbe, converts from big-endian, so most probably not GC pointers 'movbe', + # xchgb, byte-sized, so not GC pointers + 'xchgb', ]) # a partial list is hopefully good enough for now; it's all to support From pypy.commits at gmail.com Sun Feb 7 08:34:52 2016 From: pypy.commits at gmail.com (arigo) Date: Sun, 07 Feb 2016 05:34:52 -0800 (PST) Subject: [pypy-commit] pypy default: merge heads Message-ID: <56b747fc.ccaa1c0a.c3d25.ffff8515@mx.google.com> Author: Armin Rigo Branch: Changeset: r82106:90f38a4668e2 Date: 2016-02-07 14:34 +0100 http://bitbucket.org/pypy/pypy/changeset/90f38a4668e2/ Log: merge heads diff too long, 
truncating to 2000 out of 3793 lines diff --git a/.hgignore b/.hgignore --- a/.hgignore +++ b/.hgignore @@ -75,6 +75,7 @@ ^lib_pypy/__pycache__$ ^lib_pypy/ctypes_config_cache/_.+_cache\.py$ ^lib_pypy/ctypes_config_cache/_.+_.+_\.py$ +^lib_pypy/_libmpdec/.+.o$ ^rpython/translator/cli/query-descriptions$ ^pypy/doc/discussion/.+\.html$ ^include/.+\.h$ diff --git a/dotviewer/drawgraph.py b/dotviewer/drawgraph.py --- a/dotviewer/drawgraph.py +++ b/dotviewer/drawgraph.py @@ -14,12 +14,661 @@ FONT = os.path.join(this_dir, 'font', 'DroidSans.ttf') FIXEDFONT = os.path.join(this_dir, 'font', 'DroidSansMono.ttf') COLOR = { - 'black': (0,0,0), - 'white': (255,255,255), - 'red': (255,0,0), - 'green': (0,255,0), - 'blue': (0,0,255), - 'yellow': (255,255,0), + 'aliceblue': (240, 248, 255), + 'antiquewhite': (250, 235, 215), + 'antiquewhite1': (255, 239, 219), + 'antiquewhite2': (238, 223, 204), + 'antiquewhite3': (205, 192, 176), + 'antiquewhite4': (139, 131, 120), + 'aquamarine': (127, 255, 212), + 'aquamarine1': (127, 255, 212), + 'aquamarine2': (118, 238, 198), + 'aquamarine3': (102, 205, 170), + 'aquamarine4': (69, 139, 116), + 'azure': (240, 255, 255), + 'azure1': (240, 255, 255), + 'azure2': (224, 238, 238), + 'azure3': (193, 205, 205), + 'azure4': (131, 139, 139), + 'beige': (245, 245, 220), + 'bisque': (255, 228, 196), + 'bisque1': (255, 228, 196), + 'bisque2': (238, 213, 183), + 'bisque3': (205, 183, 158), + 'bisque4': (139, 125, 107), + 'black': (0, 0, 0), + 'blanchedalmond': (255, 235, 205), + 'blue': (0, 0, 255), + 'blue1': (0, 0, 255), + 'blue2': (0, 0, 238), + 'blue3': (0, 0, 205), + 'blue4': (0, 0, 139), + 'blueviolet': (138, 43, 226), + 'brown': (165, 42, 42), + 'brown1': (255, 64, 64), + 'brown2': (238, 59, 59), + 'brown3': (205, 51, 51), + 'brown4': (139, 35, 35), + 'burlywood': (222, 184, 135), + 'burlywood1': (255, 211, 155), + 'burlywood2': (238, 197, 145), + 'burlywood3': (205, 170, 125), + 'burlywood4': (139, 115, 85), + 'cadetblue': (95, 158, 160), + 
'cadetblue1': (152, 245, 255), + 'cadetblue2': (142, 229, 238), + 'cadetblue3': (122, 197, 205), + 'cadetblue4': (83, 134, 139), + 'chartreuse': (127, 255, 0), + 'chartreuse1': (127, 255, 0), + 'chartreuse2': (118, 238, 0), + 'chartreuse3': (102, 205, 0), + 'chartreuse4': (69, 139, 0), + 'chocolate': (210, 105, 30), + 'chocolate1': (255, 127, 36), + 'chocolate2': (238, 118, 33), + 'chocolate3': (205, 102, 29), + 'chocolate4': (139, 69, 19), + 'coral': (255, 127, 80), + 'coral1': (255, 114, 86), + 'coral2': (238, 106, 80), + 'coral3': (205, 91, 69), + 'coral4': (139, 62, 47), + 'cornflowerblue': (100, 149, 237), + 'cornsilk': (255, 248, 220), + 'cornsilk1': (255, 248, 220), + 'cornsilk2': (238, 232, 205), + 'cornsilk3': (205, 200, 177), + 'cornsilk4': (139, 136, 120), + 'crimson': (220, 20, 60), + 'cyan': (0, 255, 255), + 'cyan1': (0, 255, 255), + 'cyan2': (0, 238, 238), + 'cyan3': (0, 205, 205), + 'cyan4': (0, 139, 139), + 'darkgoldenrod': (184, 134, 11), + 'darkgoldenrod1': (255, 185, 15), + 'darkgoldenrod2': (238, 173, 14), + 'darkgoldenrod3': (205, 149, 12), + 'darkgoldenrod4': (139, 101, 8), + 'darkgreen': (0, 100, 0), + 'darkkhaki': (189, 183, 107), + 'darkolivegreen': (85, 107, 47), + 'darkolivegreen1': (202, 255, 112), + 'darkolivegreen2': (188, 238, 104), + 'darkolivegreen3': (162, 205, 90), + 'darkolivegreen4': (110, 139, 61), + 'darkorange': (255, 140, 0), + 'darkorange1': (255, 127, 0), + 'darkorange2': (238, 118, 0), + 'darkorange3': (205, 102, 0), + 'darkorange4': (139, 69, 0), + 'darkorchid': (153, 50, 204), + 'darkorchid1': (191, 62, 255), + 'darkorchid2': (178, 58, 238), + 'darkorchid3': (154, 50, 205), + 'darkorchid4': (104, 34, 139), + 'darksalmon': (233, 150, 122), + 'darkseagreen': (143, 188, 143), + 'darkseagreen1': (193, 255, 193), + 'darkseagreen2': (180, 238, 180), + 'darkseagreen3': (155, 205, 155), + 'darkseagreen4': (105, 139, 105), + 'darkslateblue': (72, 61, 139), + 'darkslategray': (47, 79, 79), + 'darkslategray1': (151, 255, 255), + 
'darkslategray2': (141, 238, 238), + 'darkslategray3': (121, 205, 205), + 'darkslategray4': (82, 139, 139), + 'darkslategrey': (47, 79, 79), + 'darkturquoise': (0, 206, 209), + 'darkviolet': (148, 0, 211), + 'deeppink': (255, 20, 147), + 'deeppink1': (255, 20, 147), + 'deeppink2': (238, 18, 137), + 'deeppink3': (205, 16, 118), + 'deeppink4': (139, 10, 80), + 'deepskyblue': (0, 191, 255), + 'deepskyblue1': (0, 191, 255), + 'deepskyblue2': (0, 178, 238), + 'deepskyblue3': (0, 154, 205), + 'deepskyblue4': (0, 104, 139), + 'dimgray': (105, 105, 105), + 'dimgrey': (105, 105, 105), + 'dodgerblue': (30, 144, 255), + 'dodgerblue1': (30, 144, 255), + 'dodgerblue2': (28, 134, 238), + 'dodgerblue3': (24, 116, 205), + 'dodgerblue4': (16, 78, 139), + 'firebrick': (178, 34, 34), + 'firebrick1': (255, 48, 48), + 'firebrick2': (238, 44, 44), + 'firebrick3': (205, 38, 38), + 'firebrick4': (139, 26, 26), + 'floralwhite': (255, 250, 240), + 'forestgreen': (34, 139, 34), + 'gainsboro': (220, 220, 220), + 'ghostwhite': (248, 248, 255), + 'gold': (255, 215, 0), + 'gold1': (255, 215, 0), + 'gold2': (238, 201, 0), + 'gold3': (205, 173, 0), + 'gold4': (139, 117, 0), + 'goldenrod': (218, 165, 32), + 'goldenrod1': (255, 193, 37), + 'goldenrod2': (238, 180, 34), + 'goldenrod3': (205, 155, 29), + 'goldenrod4': (139, 105, 20), + 'gray': (192, 192, 192), + 'gray0': (0, 0, 0), + 'gray1': (3, 3, 3), + 'gray10': (26, 26, 26), + 'gray100': (255, 255, 255), + 'gray11': (28, 28, 28), + 'gray12': (31, 31, 31), + 'gray13': (33, 33, 33), + 'gray14': (36, 36, 36), + 'gray15': (38, 38, 38), + 'gray16': (41, 41, 41), + 'gray17': (43, 43, 43), + 'gray18': (46, 46, 46), + 'gray19': (48, 48, 48), + 'gray2': (5, 5, 5), + 'gray20': (51, 51, 51), + 'gray21': (54, 54, 54), + 'gray22': (56, 56, 56), + 'gray23': (59, 59, 59), + 'gray24': (61, 61, 61), + 'gray25': (64, 64, 64), + 'gray26': (66, 66, 66), + 'gray27': (69, 69, 69), + 'gray28': (71, 71, 71), + 'gray29': (74, 74, 74), + 'gray3': (8, 8, 8), + 'gray30': 
(77, 77, 77), + 'gray31': (79, 79, 79), + 'gray32': (82, 82, 82), + 'gray33': (84, 84, 84), + 'gray34': (87, 87, 87), + 'gray35': (89, 89, 89), + 'gray36': (92, 92, 92), + 'gray37': (94, 94, 94), + 'gray38': (97, 97, 97), + 'gray39': (99, 99, 99), + 'gray4': (10, 10, 10), + 'gray40': (102, 102, 102), + 'gray41': (105, 105, 105), + 'gray42': (107, 107, 107), + 'gray43': (110, 110, 110), + 'gray44': (112, 112, 112), + 'gray45': (115, 115, 115), + 'gray46': (117, 117, 117), + 'gray47': (120, 120, 120), + 'gray48': (122, 122, 122), + 'gray49': (125, 125, 125), + 'gray5': (13, 13, 13), + 'gray50': (127, 127, 127), + 'gray51': (130, 130, 130), + 'gray52': (133, 133, 133), + 'gray53': (135, 135, 135), + 'gray54': (138, 138, 138), + 'gray55': (140, 140, 140), + 'gray56': (143, 143, 143), + 'gray57': (145, 145, 145), + 'gray58': (148, 148, 148), + 'gray59': (150, 150, 150), + 'gray6': (15, 15, 15), + 'gray60': (153, 153, 153), + 'gray61': (156, 156, 156), + 'gray62': (158, 158, 158), + 'gray63': (161, 161, 161), + 'gray64': (163, 163, 163), + 'gray65': (166, 166, 166), + 'gray66': (168, 168, 168), + 'gray67': (171, 171, 171), + 'gray68': (173, 173, 173), + 'gray69': (176, 176, 176), + 'gray7': (18, 18, 18), + 'gray70': (179, 179, 179), + 'gray71': (181, 181, 181), + 'gray72': (184, 184, 184), + 'gray73': (186, 186, 186), + 'gray74': (189, 189, 189), + 'gray75': (191, 191, 191), + 'gray76': (194, 194, 194), + 'gray77': (196, 196, 196), + 'gray78': (199, 199, 199), + 'gray79': (201, 201, 201), + 'gray8': (20, 20, 20), + 'gray80': (204, 204, 204), + 'gray81': (207, 207, 207), + 'gray82': (209, 209, 209), + 'gray83': (212, 212, 212), + 'gray84': (214, 214, 214), + 'gray85': (217, 217, 217), + 'gray86': (219, 219, 219), + 'gray87': (222, 222, 222), + 'gray88': (224, 224, 224), + 'gray89': (227, 227, 227), + 'gray9': (23, 23, 23), + 'gray90': (229, 229, 229), + 'gray91': (232, 232, 232), + 'gray92': (235, 235, 235), + 'gray93': (237, 237, 237), + 'gray94': (240, 240, 240), + 
'gray95': (242, 242, 242), + 'gray96': (245, 245, 245), + 'gray97': (247, 247, 247), + 'gray98': (250, 250, 250), + 'gray99': (252, 252, 252), + 'green': (0, 255, 0), + 'green1': (0, 255, 0), + 'green2': (0, 238, 0), + 'green3': (0, 205, 0), + 'green4': (0, 139, 0), + 'greenyellow': (173, 255, 47), + 'grey': (192, 192, 192), + 'grey0': (0, 0, 0), + 'grey1': (3, 3, 3), + 'grey10': (26, 26, 26), + 'grey100': (255, 255, 255), + 'grey11': (28, 28, 28), + 'grey12': (31, 31, 31), + 'grey13': (33, 33, 33), + 'grey14': (36, 36, 36), + 'grey15': (38, 38, 38), + 'grey16': (41, 41, 41), + 'grey17': (43, 43, 43), + 'grey18': (46, 46, 46), + 'grey19': (48, 48, 48), + 'grey2': (5, 5, 5), + 'grey20': (51, 51, 51), + 'grey21': (54, 54, 54), + 'grey22': (56, 56, 56), + 'grey23': (59, 59, 59), + 'grey24': (61, 61, 61), + 'grey25': (64, 64, 64), + 'grey26': (66, 66, 66), + 'grey27': (69, 69, 69), + 'grey28': (71, 71, 71), + 'grey29': (74, 74, 74), + 'grey3': (8, 8, 8), + 'grey30': (77, 77, 77), + 'grey31': (79, 79, 79), + 'grey32': (82, 82, 82), + 'grey33': (84, 84, 84), + 'grey34': (87, 87, 87), + 'grey35': (89, 89, 89), + 'grey36': (92, 92, 92), + 'grey37': (94, 94, 94), + 'grey38': (97, 97, 97), + 'grey39': (99, 99, 99), + 'grey4': (10, 10, 10), + 'grey40': (102, 102, 102), + 'grey41': (105, 105, 105), + 'grey42': (107, 107, 107), + 'grey43': (110, 110, 110), + 'grey44': (112, 112, 112), + 'grey45': (115, 115, 115), + 'grey46': (117, 117, 117), + 'grey47': (120, 120, 120), + 'grey48': (122, 122, 122), + 'grey49': (125, 125, 125), + 'grey5': (13, 13, 13), + 'grey50': (127, 127, 127), + 'grey51': (130, 130, 130), + 'grey52': (133, 133, 133), + 'grey53': (135, 135, 135), + 'grey54': (138, 138, 138), + 'grey55': (140, 140, 140), + 'grey56': (143, 143, 143), + 'grey57': (145, 145, 145), + 'grey58': (148, 148, 148), + 'grey59': (150, 150, 150), + 'grey6': (15, 15, 15), + 'grey60': (153, 153, 153), + 'grey61': (156, 156, 156), + 'grey62': (158, 158, 158), + 'grey63': (161, 161, 161), + 
'grey64': (163, 163, 163), + 'grey65': (166, 166, 166), + 'grey66': (168, 168, 168), + 'grey67': (171, 171, 171), + 'grey68': (173, 173, 173), + 'grey69': (176, 176, 176), + 'grey7': (18, 18, 18), + 'grey70': (179, 179, 179), + 'grey71': (181, 181, 181), + 'grey72': (184, 184, 184), + 'grey73': (186, 186, 186), + 'grey74': (189, 189, 189), + 'grey75': (191, 191, 191), + 'grey76': (194, 194, 194), + 'grey77': (196, 196, 196), + 'grey78': (199, 199, 199), + 'grey79': (201, 201, 201), + 'grey8': (20, 20, 20), + 'grey80': (204, 204, 204), + 'grey81': (207, 207, 207), + 'grey82': (209, 209, 209), + 'grey83': (212, 212, 212), + 'grey84': (214, 214, 214), + 'grey85': (217, 217, 217), + 'grey86': (219, 219, 219), + 'grey87': (222, 222, 222), + 'grey88': (224, 224, 224), + 'grey89': (227, 227, 227), + 'grey9': (23, 23, 23), + 'grey90': (229, 229, 229), + 'grey91': (232, 232, 232), + 'grey92': (235, 235, 235), + 'grey93': (237, 237, 237), + 'grey94': (240, 240, 240), + 'grey95': (242, 242, 242), + 'grey96': (245, 245, 245), + 'grey97': (247, 247, 247), + 'grey98': (250, 250, 250), + 'grey99': (252, 252, 252), + 'honeydew': (240, 255, 240), + 'honeydew1': (240, 255, 240), + 'honeydew2': (224, 238, 224), + 'honeydew3': (193, 205, 193), + 'honeydew4': (131, 139, 131), + 'hotpink': (255, 105, 180), + 'hotpink1': (255, 110, 180), + 'hotpink2': (238, 106, 167), + 'hotpink3': (205, 96, 144), + 'hotpink4': (139, 58, 98), + 'indianred': (205, 92, 92), + 'indianred1': (255, 106, 106), + 'indianred2': (238, 99, 99), + 'indianred3': (205, 85, 85), + 'indianred4': (139, 58, 58), + 'indigo': (75, 0, 130), + 'invis': (255, 255, 254), + 'ivory': (255, 255, 240), + 'ivory1': (255, 255, 240), + 'ivory2': (238, 238, 224), + 'ivory3': (205, 205, 193), + 'ivory4': (139, 139, 131), + 'khaki': (240, 230, 140), + 'khaki1': (255, 246, 143), + 'khaki2': (238, 230, 133), + 'khaki3': (205, 198, 115), + 'khaki4': (139, 134, 78), + 'lavender': (230, 230, 250), + 'lavenderblush': (255, 240, 245), + 
'lavenderblush1': (255, 240, 245), + 'lavenderblush2': (238, 224, 229), + 'lavenderblush3': (205, 193, 197), + 'lavenderblush4': (139, 131, 134), + 'lawngreen': (124, 252, 0), + 'lemonchiffon': (255, 250, 205), + 'lemonchiffon1': (255, 250, 205), + 'lemonchiffon2': (238, 233, 191), + 'lemonchiffon3': (205, 201, 165), + 'lemonchiffon4': (139, 137, 112), + 'lightblue': (173, 216, 230), + 'lightblue1': (191, 239, 255), + 'lightblue2': (178, 223, 238), + 'lightblue3': (154, 192, 205), + 'lightblue4': (104, 131, 139), + 'lightcoral': (240, 128, 128), + 'lightcyan': (224, 255, 255), + 'lightcyan1': (224, 255, 255), + 'lightcyan2': (209, 238, 238), + 'lightcyan3': (180, 205, 205), + 'lightcyan4': (122, 139, 139), + 'lightgoldenrod': (238, 221, 130), + 'lightgoldenrod1': (255, 236, 139), + 'lightgoldenrod2': (238, 220, 130), + 'lightgoldenrod3': (205, 190, 112), + 'lightgoldenrod4': (139, 129, 76), + 'lightgoldenrodyellow': (250, 250, 210), + 'lightgray': (211, 211, 211), + 'lightgrey': (211, 211, 211), + 'lightpink': (255, 182, 193), + 'lightpink1': (255, 174, 185), + 'lightpink2': (238, 162, 173), + 'lightpink3': (205, 140, 149), + 'lightpink4': (139, 95, 101), + 'lightsalmon': (255, 160, 122), + 'lightsalmon1': (255, 160, 122), + 'lightsalmon2': (238, 149, 114), + 'lightsalmon3': (205, 129, 98), + 'lightsalmon4': (139, 87, 66), + 'lightseagreen': (32, 178, 170), + 'lightskyblue': (135, 206, 250), + 'lightskyblue1': (176, 226, 255), + 'lightskyblue2': (164, 211, 238), + 'lightskyblue3': (141, 182, 205), + 'lightskyblue4': (96, 123, 139), + 'lightslateblue': (132, 112, 255), + 'lightslategray': (119, 136, 153), + 'lightslategrey': (119, 136, 153), + 'lightsteelblue': (176, 196, 222), + 'lightsteelblue1': (202, 225, 255), + 'lightsteelblue2': (188, 210, 238), + 'lightsteelblue3': (162, 181, 205), + 'lightsteelblue4': (110, 123, 139), + 'lightyellow': (255, 255, 224), + 'lightyellow1': (255, 255, 224), + 'lightyellow2': (238, 238, 209), + 'lightyellow3': (205, 205, 180), + 
'lightyellow4': (139, 139, 122), + 'limegreen': (50, 205, 50), + 'linen': (250, 240, 230), + 'magenta': (255, 0, 255), + 'magenta1': (255, 0, 255), + 'magenta2': (238, 0, 238), + 'magenta3': (205, 0, 205), + 'magenta4': (139, 0, 139), + 'maroon': (176, 48, 96), + 'maroon1': (255, 52, 179), + 'maroon2': (238, 48, 167), + 'maroon3': (205, 41, 144), + 'maroon4': (139, 28, 98), + 'mediumaquamarine': (102, 205, 170), + 'mediumblue': (0, 0, 205), + 'mediumorchid': (186, 85, 211), + 'mediumorchid1': (224, 102, 255), + 'mediumorchid2': (209, 95, 238), + 'mediumorchid3': (180, 82, 205), + 'mediumorchid4': (122, 55, 139), + 'mediumpurple': (147, 112, 219), + 'mediumpurple1': (171, 130, 255), + 'mediumpurple2': (159, 121, 238), + 'mediumpurple3': (137, 104, 205), + 'mediumpurple4': (93, 71, 139), + 'mediumseagreen': (60, 179, 113), + 'mediumslateblue': (123, 104, 238), + 'mediumspringgreen': (0, 250, 154), + 'mediumturquoise': (72, 209, 204), + 'mediumvioletred': (199, 21, 133), + 'midnightblue': (25, 25, 112), + 'mintcream': (245, 255, 250), + 'mistyrose': (255, 228, 225), + 'mistyrose1': (255, 228, 225), + 'mistyrose2': (238, 213, 210), + 'mistyrose3': (205, 183, 181), + 'mistyrose4': (139, 125, 123), + 'moccasin': (255, 228, 181), + 'navajowhite': (255, 222, 173), + 'navajowhite1': (255, 222, 173), + 'navajowhite2': (238, 207, 161), + 'navajowhite3': (205, 179, 139), + 'navajowhite4': (139, 121, 94), + 'navy': (0, 0, 128), + 'navyblue': (0, 0, 128), + 'none': (255, 255, 254), + 'oldlace': (253, 245, 230), + 'olivedrab': (107, 142, 35), + 'olivedrab1': (192, 255, 62), + 'olivedrab2': (179, 238, 58), + 'olivedrab3': (154, 205, 50), + 'olivedrab4': (105, 139, 34), + 'orange': (255, 165, 0), + 'orange1': (255, 165, 0), + 'orange2': (238, 154, 0), + 'orange3': (205, 133, 0), + 'orange4': (139, 90, 0), + 'orangered': (255, 69, 0), + 'orangered1': (255, 69, 0), + 'orangered2': (238, 64, 0), + 'orangered3': (205, 55, 0), + 'orangered4': (139, 37, 0), + 'orchid': (218, 112, 214), + 
'orchid1': (255, 131, 250), + 'orchid2': (238, 122, 233), + 'orchid3': (205, 105, 201), + 'orchid4': (139, 71, 137), + 'palegoldenrod': (238, 232, 170), + 'palegreen': (152, 251, 152), + 'palegreen1': (154, 255, 154), + 'palegreen2': (144, 238, 144), + 'palegreen3': (124, 205, 124), + 'palegreen4': (84, 139, 84), + 'paleturquoise': (175, 238, 238), + 'paleturquoise1': (187, 255, 255), + 'paleturquoise2': (174, 238, 238), + 'paleturquoise3': (150, 205, 205), + 'paleturquoise4': (102, 139, 139), + 'palevioletred': (219, 112, 147), + 'palevioletred1': (255, 130, 171), + 'palevioletred2': (238, 121, 159), + 'palevioletred3': (205, 104, 137), + 'palevioletred4': (139, 71, 93), + 'papayawhip': (255, 239, 213), + 'peachpuff': (255, 218, 185), + 'peachpuff1': (255, 218, 185), + 'peachpuff2': (238, 203, 173), + 'peachpuff3': (205, 175, 149), + 'peachpuff4': (139, 119, 101), + 'peru': (205, 133, 63), + 'pink': (255, 192, 203), + 'pink1': (255, 181, 197), + 'pink2': (238, 169, 184), + 'pink3': (205, 145, 158), + 'pink4': (139, 99, 108), + 'plum': (221, 160, 221), + 'plum1': (255, 187, 255), + 'plum2': (238, 174, 238), + 'plum3': (205, 150, 205), + 'plum4': (139, 102, 139), + 'powderblue': (176, 224, 230), + 'purple': (160, 32, 240), + 'purple1': (155, 48, 255), + 'purple2': (145, 44, 238), + 'purple3': (125, 38, 205), + 'purple4': (85, 26, 139), + 'red': (255, 0, 0), + 'red1': (255, 0, 0), + 'red2': (238, 0, 0), + 'red3': (205, 0, 0), + 'red4': (139, 0, 0), + 'rosybrown': (188, 143, 143), + 'rosybrown1': (255, 193, 193), + 'rosybrown2': (238, 180, 180), + 'rosybrown3': (205, 155, 155), + 'rosybrown4': (139, 105, 105), + 'royalblue': (65, 105, 225), + 'royalblue1': (72, 118, 255), + 'royalblue2': (67, 110, 238), + 'royalblue3': (58, 95, 205), + 'royalblue4': (39, 64, 139), + 'saddlebrown': (139, 69, 19), + 'salmon': (250, 128, 114), + 'salmon1': (255, 140, 105), + 'salmon2': (238, 130, 98), + 'salmon3': (205, 112, 84), + 'salmon4': (139, 76, 57), + 'sandybrown': (244, 164, 
96), + 'seagreen': (46, 139, 87), + 'seagreen1': (84, 255, 159), + 'seagreen2': (78, 238, 148), + 'seagreen3': (67, 205, 128), + 'seagreen4': (46, 139, 87), + 'seashell': (255, 245, 238), + 'seashell1': (255, 245, 238), + 'seashell2': (238, 229, 222), + 'seashell3': (205, 197, 191), + 'seashell4': (139, 134, 130), + 'sienna': (160, 82, 45), + 'sienna1': (255, 130, 71), + 'sienna2': (238, 121, 66), + 'sienna3': (205, 104, 57), + 'sienna4': (139, 71, 38), + 'skyblue': (135, 206, 235), + 'skyblue1': (135, 206, 255), + 'skyblue2': (126, 192, 238), + 'skyblue3': (108, 166, 205), + 'skyblue4': (74, 112, 139), + 'slateblue': (106, 90, 205), + 'slateblue1': (131, 111, 255), + 'slateblue2': (122, 103, 238), + 'slateblue3': (105, 89, 205), + 'slateblue4': (71, 60, 139), + 'slategray': (112, 128, 144), + 'slategray1': (198, 226, 255), + 'slategray2': (185, 211, 238), + 'slategray3': (159, 182, 205), + 'slategray4': (108, 123, 139), + 'slategrey': (112, 128, 144), + 'snow': (255, 250, 250), + 'snow1': (255, 250, 250), + 'snow2': (238, 233, 233), + 'snow3': (205, 201, 201), + 'snow4': (139, 137, 137), + 'springgreen': (0, 255, 127), + 'springgreen1': (0, 255, 127), + 'springgreen2': (0, 238, 118), + 'springgreen3': (0, 205, 102), + 'springgreen4': (0, 139, 69), + 'steelblue': (70, 130, 180), + 'steelblue1': (99, 184, 255), + 'steelblue2': (92, 172, 238), + 'steelblue3': (79, 148, 205), + 'steelblue4': (54, 100, 139), + 'tan': (210, 180, 140), + 'tan1': (255, 165, 79), + 'tan2': (238, 154, 73), + 'tan3': (205, 133, 63), + 'tan4': (139, 90, 43), + 'thistle': (216, 191, 216), + 'thistle1': (255, 225, 255), + 'thistle2': (238, 210, 238), + 'thistle3': (205, 181, 205), + 'thistle4': (139, 123, 139), + 'tomato': (255, 99, 71), + 'tomato1': (255, 99, 71), + 'tomato2': (238, 92, 66), + 'tomato3': (205, 79, 57), + 'tomato4': (139, 54, 38), + 'transparent': (255, 255, 254), + 'turquoise': (64, 224, 208), + 'turquoise1': (0, 245, 255), + 'turquoise2': (0, 229, 238), + 'turquoise3': (0, 
197, 205), + 'turquoise4': (0, 134, 139), + 'violet': (238, 130, 238), + 'violetred': (208, 32, 144), + 'violetred1': (255, 62, 150), + 'violetred2': (238, 58, 140), + 'violetred3': (205, 50, 120), + 'violetred4': (139, 34, 82), + 'wheat': (245, 222, 179), + 'wheat1': (255, 231, 186), + 'wheat2': (238, 216, 174), + 'wheat3': (205, 186, 150), + 'wheat4': (139, 126, 102), + 'white': (255, 255, 255), + 'whitesmoke': (245, 245, 245), + 'yellow': (255, 255, 0), + 'yellow1': (255, 255, 0), + 'yellow2': (238, 238, 0), + 'yellow3': (205, 205, 0), + 'yellow4': (139, 139, 0), + 'yellowgreen': (154, 205, 50), } re_nonword=re.compile(r'([^0-9a-zA-Z_.]+)') re_linewidth=re.compile(r'setlinewidth\((\d+(\.\d*)?|\.\d+)\)') diff --git a/pypy/doc/whatsnew-head.rst b/pypy/doc/whatsnew-head.rst --- a/pypy/doc/whatsnew-head.rst +++ b/pypy/doc/whatsnew-head.rst @@ -139,3 +139,7 @@ Refactor sandboxing to operate at a higher level. .. branch: cpyext-bootstrap + +.. branch: vmprof-newstack + +Refactor vmprof to work cross-operating-system. 
\ No newline at end of file diff --git a/pypy/module/_cffi_backend/embedding.py b/pypy/module/_cffi_backend/embedding.py --- a/pypy/module/_cffi_backend/embedding.py +++ b/pypy/module/_cffi_backend/embedding.py @@ -84,11 +84,68 @@ return rffi.cast(rffi.INT, res) # ____________________________________________________________ +if os.name == 'nt': + do_startup = r''' +#include +#define WIN32_LEAN_AND_MEAN +#include +RPY_EXPORTED void rpython_startup_code(void); +RPY_EXPORTED int pypy_setup_home(char *, int); +static unsigned char _cffi_ready = 0; +static const char *volatile _cffi_module_name; -eci = ExternalCompilationInfo(separate_module_sources=[ -r""" -/* XXX Windows missing */ +static void _cffi_init_error(const char *msg, const char *extra) +{ + fprintf(stderr, + "\nPyPy initialization failure when loading module '%s':\n%s%s\n", + _cffi_module_name, msg, extra); +} + +BOOL CALLBACK _cffi_init(PINIT_ONCE InitOnce, PVOID Parameter, PVOID *lpContex) +{ + + HMODULE hModule; + TCHAR home[_MAX_PATH]; + rpython_startup_code(); + RPyGilAllocate(); + + GetModuleHandleEx(GET_MODULE_HANDLE_EX_FLAG_FROM_ADDRESS | + GET_MODULE_HANDLE_EX_FLAG_UNCHANGED_REFCOUNT, + (LPCTSTR)&_cffi_init, &hModule); + if (hModule == 0 ) { + /* TODO turn the int into a string with FormatMessage */ + + _cffi_init_error("dladdr() failed: ", ""); + return TRUE; + } + GetModuleFileName(hModule, home, _MAX_PATH); + if (pypy_setup_home(home, 1) != 0) { + _cffi_init_error("pypy_setup_home() failed", ""); + return TRUE; + } + _cffi_ready = 1; + fprintf(stderr, "startup succeeded, home %s\n", home); + return TRUE; +} + +RPY_EXPORTED +int pypy_carefully_make_gil(const char *name) +{ + /* For CFFI: this initializes the GIL and loads the home path. + It can be called completely concurrently from unrelated threads. + It assumes that we don't hold the GIL before (if it exists), and we + don't hold it afterwards. 
+ */ + static INIT_ONCE s_init_once; + + _cffi_module_name = name; /* not really thread-safe, but better than + nothing */ + InitOnceExecuteOnce(&s_init_once, _cffi_init, NULL, NULL); + return (int)_cffi_ready - 1; +}''' +else: + do_startup = r""" #include #include #include @@ -141,6 +198,7 @@ pthread_once(&once_control, _cffi_init); return (int)_cffi_ready - 1; } -"""]) +""" +eci = ExternalCompilationInfo(separate_module_sources=[do_startup]) declare_c_function = rffi.llexternal_use_eci(eci) diff --git a/pypy/module/_socket/test/test_sock_app.py b/pypy/module/_socket/test/test_sock_app.py --- a/pypy/module/_socket/test/test_sock_app.py +++ b/pypy/module/_socket/test/test_sock_app.py @@ -102,7 +102,7 @@ fd = space.appexec([w_socket, space.wrap(orig_fd.fileno()), space.wrap(socket.AF_INET), space.wrap(socket.SOCK_STREAM), space.wrap(0)], - """(_socket, fd, family, type, proto): + """(_socket, fd, family, type, proto): return _socket.fromfd(fd, family, type, proto)""") assert space.unwrap(space.call_method(fd, 'fileno')) @@ -326,7 +326,7 @@ def test_ntoa_exception(self): import _socket - raises(_socket.error, _socket.inet_ntoa, "ab") + raises(_socket.error, _socket.inet_ntoa, b"ab") def test_aton_exceptions(self): import _socket @@ -418,7 +418,7 @@ # it if there is no connection. 
try: s.connect(("www.python.org", 80)) - except _socket.gaierror, ex: + except _socket.gaierror as ex: skip("GAIError - probably no connection: %s" % str(ex.args)) name = s.getpeername() # Will raise socket.error if not connected assert name[1] == 80 @@ -465,7 +465,7 @@ sizes = {socket.htonl: 32, socket.ntohl: 32, socket.htons: 16, socket.ntohs: 16} for func, size in sizes.items(): - mask = (1L<f_lineno = 48; /* Does not work with CPython */ @@ -51,6 +52,7 @@ Py_XDECREF(empty_string); Py_XDECREF(empty_tuple); Py_XDECREF(py_globals); + Py_XDECREF(py_locals); Py_XDECREF(py_code); Py_XDECREF(py_frame); return NULL; diff --git a/pypy/objspace/std/mapdict.py b/pypy/objspace/std/mapdict.py --- a/pypy/objspace/std/mapdict.py +++ b/pypy/objspace/std/mapdict.py @@ -116,7 +116,7 @@ def _find_map_attr(self, name, index): while isinstance(self, PlainAttribute): - if name == self.name and index == self.index: + if index == self.index and name == self.name: return self self = self.back return None @@ -156,7 +156,6 @@ jit.isconstant(name) and jit.isconstant(index)) def add_attr(self, obj, name, index, w_value): - # grumble, jit needs this attr = self._get_new_attr(name, index) oldattr = obj._get_mapdict_map() if not jit.we_are_jitted(): @@ -296,7 +295,7 @@ new_obj._get_mapdict_map().add_attr(new_obj, self.name, self.index, w_value) def delete(self, obj, name, index): - if name == self.name and index == self.index: + if index == self.index and name == self.name: # ok, attribute is deleted if not self.ever_mutated: self.ever_mutated = True diff --git a/pypy/objspace/std/setobject.py b/pypy/objspace/std/setobject.py --- a/pypy/objspace/std/setobject.py +++ b/pypy/objspace/std/setobject.py @@ -942,7 +942,7 @@ return False if w_set.length() == 0: return True - # it's possible to have 0-lenght strategy that's not empty + # it's possible to have 0-length strategy that's not empty if w_set.strategy is w_other.strategy: return self._issubset_unwrapped(w_set, w_other) if not 
self.may_contain_equal_elements(w_other.strategy): diff --git a/pypy/test_all.py b/pypy/test_all.py --- a/pypy/test_all.py +++ b/pypy/test_all.py @@ -26,11 +26,10 @@ #Add toplevel repository dir to sys.path sys.path.insert(0,os.path.dirname(os.path.dirname(os.path.realpath(__file__)))) import pytest - import pytest_cov if sys.platform == 'win32': #Try to avoid opeing a dialog box if one of the tests causes a system error # We do this in runner.py, but buildbots run twisted which ruins inheritance - # in windows subprocesses. + # in windows subprocesses. import ctypes winapi = ctypes.windll.kernel32 SetErrorMode = winapi.SetErrorMode @@ -44,4 +43,4 @@ old_mode = SetErrorMode(flags) SetErrorMode(old_mode | flags) - sys.exit(pytest.main(plugins=[pytest_cov])) + sys.exit(pytest.main()) diff --git a/pytest_cov.py b/pytest_cov.py deleted file mode 100644 --- a/pytest_cov.py +++ /dev/null @@ -1,353 +0,0 @@ -"""produce code coverage reports using the 'coverage' package, including support for distributed testing. - -This plugin produces coverage reports. It supports centralised testing and distributed testing in -both load and each modes. It also supports coverage of subprocesses. - -All features offered by the coverage package should be available, either through pytest-cov or -through coverage's config file. - - -Installation ------------- - -The `pytest-cov`_ package may be installed with pip or easy_install:: - - pip install pytest-cov - easy_install pytest-cov - -.. _`pytest-cov`: http://pypi.python.org/pypi/pytest-cov/ - - -Uninstallation --------------- - -Uninstalling packages is supported by pip:: - - pip uninstall pytest-cov - -However easy_install does not provide an uninstall facility. - -.. IMPORTANT:: - - Ensure that you manually delete the init_cov_core.pth file in your site-packages directory. - - This file starts coverage collection of subprocesses if appropriate during site initialisation - at python startup. 
- - -Usage ------ - -Centralised Testing -~~~~~~~~~~~~~~~~~~~ - -Centralised testing will report on the combined coverage of the main process and all of it's -subprocesses. - -Running centralised testing:: - - py.test --cov myproj tests/ - -Shows a terminal report:: - - -------------------- coverage: platform linux2, python 2.6.4-final-0 --------------------- - Name Stmts Miss Cover - ---------------------------------------- - myproj/__init__ 2 0 100% - myproj/myproj 257 13 94% - myproj/feature4286 94 7 92% - ---------------------------------------- - TOTAL 353 20 94% - - -Distributed Testing: Load -~~~~~~~~~~~~~~~~~~~~~~~~~ - -Distributed testing with dist mode set to load will report on the combined coverage of all slaves. -The slaves may be spread out over any number of hosts and each slave may be located anywhere on the -file system. Each slave will have it's subprocesses measured. - -Running distributed testing with dist mode set to load:: - - py.test --cov myproj -n 2 tests/ - -Shows a terminal report:: - - -------------------- coverage: platform linux2, python 2.6.4-final-0 --------------------- - Name Stmts Miss Cover - ---------------------------------------- - myproj/__init__ 2 0 100% - myproj/myproj 257 13 94% - myproj/feature4286 94 7 92% - ---------------------------------------- - TOTAL 353 20 94% - - -Again but spread over different hosts and different directories:: - - py.test --cov myproj --dist load - --tx ssh=memedough at host1//chdir=testenv1 - --tx ssh=memedough at host2//chdir=/tmp/testenv2//python=/tmp/env1/bin/python - --rsyncdir myproj --rsyncdir tests --rsync examples - tests/ - -Shows a terminal report:: - - -------------------- coverage: platform linux2, python 2.6.4-final-0 --------------------- - Name Stmts Miss Cover - ---------------------------------------- - myproj/__init__ 2 0 100% - myproj/myproj 257 13 94% - myproj/feature4286 94 7 92% - ---------------------------------------- - TOTAL 353 20 94% - - -Distributed Testing: Each 
-~~~~~~~~~~~~~~~~~~~~~~~~~ - -Distributed testing with dist mode set to each will report on the combined coverage of all slaves. -Since each slave is running all tests this allows generating a combined coverage report for multiple -environments. - -Running distributed testing with dist mode set to each:: - - py.test --cov myproj --dist each - --tx popen//chdir=/tmp/testenv3//python=/usr/local/python27/bin/python - --tx ssh=memedough at host2//chdir=/tmp/testenv4//python=/tmp/env2/bin/python - --rsyncdir myproj --rsyncdir tests --rsync examples - tests/ - -Shows a terminal report:: - - ---------------------------------------- coverage ---------------------------------------- - platform linux2, python 2.6.5-final-0 - platform linux2, python 2.7.0-final-0 - Name Stmts Miss Cover - ---------------------------------------- - myproj/__init__ 2 0 100% - myproj/myproj 257 13 94% - myproj/feature4286 94 7 92% - ---------------------------------------- - TOTAL 353 20 94% - - -Reporting ---------- - -It is possible to generate any combination of the reports for a single test run. - -The available reports are terminal (with or without missing line numbers shown), HTML, XML and -annotated source code. 
- -The terminal report without line numbers (default):: - - py.test --cov-report term --cov myproj tests/ - - -------------------- coverage: platform linux2, python 2.6.4-final-0 --------------------- - Name Stmts Miss Cover - ---------------------------------------- - myproj/__init__ 2 0 100% - myproj/myproj 257 13 94% - myproj/feature4286 94 7 92% - ---------------------------------------- - TOTAL 353 20 94% - - -The terminal report with line numbers:: - - py.test --cov-report term-missing --cov myproj tests/ - - -------------------- coverage: platform linux2, python 2.6.4-final-0 --------------------- - Name Stmts Miss Cover Missing - -------------------------------------------------- - myproj/__init__ 2 0 100% - myproj/myproj 257 13 94% 24-26, 99, 149, 233-236, 297-298, 369-370 - myproj/feature4286 94 7 92% 183-188, 197 - -------------------------------------------------- - TOTAL 353 20 94% - - -The remaining three reports output to files without showing anything on the terminal (useful for -when the output is going to a continuous integration server):: - - py.test --cov-report html - --cov-report xml - --cov-report annotate - --cov myproj tests/ - - -Coverage Data File ------------------- - -The data file is erased at the beginning of testing to ensure clean data for each test run. - -The data file is left at the end of testing so that it is possible to use normal coverage tools to -examine it. - - -Coverage Config File --------------------- - -This plugin provides a clean minimal set of command line options that are added to pytest. For -further control of coverage use a coverage config file. 
- -For example if tests are contained within the directory tree being measured the tests may be -excluded if desired by using a .coveragerc file with the omit option set:: - - py.test --cov-config .coveragerc - --cov myproj - myproj/tests/ - -Where the .coveragerc file contains file globs:: - - [run] - omit = tests/* - -For full details refer to the `coverage config file`_ documentation. - -.. _`coverage config file`: http://nedbatchelder.com/code/coverage/config.html - -Note that this plugin controls some options and setting the option in the config file will have no -effect. These include specifying source to be measured (source option) and all data file handling -(data_file and parallel options). - - -Limitations ------------ - -For distributed testing the slaves must have the pytest-cov package installed. This is needed since -the plugin must be registered through setuptools / distribute for pytest to start the plugin on the -slave. - -For subprocess measurement environment variables must make it from the main process to the -subprocess. The python used by the subprocess must have pytest-cov installed. The subprocess must -do normal site initialisation so that the environment variables can be detected and coverage -started. - - -Acknowledgements ----------------- - -Whilst this plugin has been built fresh from the ground up it has been influenced by the work done -on pytest-coverage (Ross Lawley, James Mills, Holger Krekel) and nose-cover (Jason Pellerin) which are -other coverage plugins. - -Ned Batchelder for coverage and its ability to combine the coverage results of parallel runs. - -Holger Krekel for pytest with its distributed testing support. - -Jason Pellerin for nose. - -Michael Foord for unittest2. - -No doubt others have contributed to these tools as well. 
-""" - - -def pytest_addoption(parser): - """Add options to control coverage.""" - - group = parser.getgroup('coverage reporting with distributed testing support') - group.addoption('--cov', action='append', default=[], metavar='path', - dest='cov_source', - help='measure coverage for filesystem path (multi-allowed)') - group.addoption('--cov-report', action='append', default=[], metavar='type', - choices=['term', 'term-missing', 'annotate', 'html', 'xml'], - dest='cov_report', - help='type of report to generate: term, term-missing, annotate, html, xml (multi-allowed)') - group.addoption('--cov-config', action='store', default='.coveragerc', metavar='path', - dest='cov_config', - help='config file for coverage, default: .coveragerc') - - -def pytest_configure(config): - """Activate coverage plugin if appropriate.""" - - if config.getvalue('cov_source'): - config.pluginmanager.register(CovPlugin(), '_cov') - - -class CovPlugin(object): - """Use coverage package to produce code coverage reports. - - Delegates all work to a particular implementation based on whether - this test process is centralised, a distributed master or a - distributed slave. - """ - - def __init__(self): - """Creates a coverage pytest plugin. - - We read the rc file that coverage uses to get the data file - name. This is needed since we give coverage through it's API - the data file name. - """ - - # Our implementation is unknown at this time. 
- self.cov_controller = None - - def pytest_sessionstart(self, session): - """At session start determine our implementation and delegate to it.""" - - import cov_core - - cov_source = session.config.getvalue('cov_source') - cov_report = session.config.getvalue('cov_report') or ['term'] - cov_config = session.config.getvalue('cov_config') - - session_name = session.__class__.__name__ - is_master = (session.config.pluginmanager.hasplugin('dsession') or - session_name == 'DSession') - is_slave = (hasattr(session.config, 'slaveinput') or - session_name == 'SlaveSession') - nodeid = None - - if is_master: - controller_cls = cov_core.DistMaster - elif is_slave: - controller_cls = cov_core.DistSlave - nodeid = session.config.slaveinput.get('slaveid', getattr(session, 'nodeid')) - else: - controller_cls = cov_core.Central - - self.cov_controller = controller_cls(cov_source, - cov_report, - cov_config, - session.config, - nodeid) - - self.cov_controller.start() - - def pytest_configure_node(self, node): - """Delegate to our implementation.""" - - self.cov_controller.configure_node(node) - pytest_configure_node.optionalhook = True - - def pytest_testnodedown(self, node, error): - """Delegate to our implementation.""" - - self.cov_controller.testnodedown(node, error) - pytest_testnodedown.optionalhook = True - - def pytest_sessionfinish(self, session, exitstatus): - """Delegate to our implementation.""" - - self.cov_controller.finish() - - def pytest_terminal_summary(self, terminalreporter): - """Delegate to our implementation.""" - - self.cov_controller.summary(terminalreporter._tw) - - -def pytest_funcarg__cov(request): - """A pytest funcarg that provides access to the underlying coverage object.""" - - # Check with hasplugin to avoid getplugin exception in older pytest. 
- if request.config.pluginmanager.hasplugin('_cov'): - plugin = request.config.pluginmanager.getplugin('_cov') - if plugin.cov_controller: - return plugin.cov_controller.cov - return None diff --git a/rpython/annotator/builtin.py b/rpython/annotator/builtin.py --- a/rpython/annotator/builtin.py +++ b/rpython/annotator/builtin.py @@ -39,8 +39,9 @@ return s_result s_realresult = immutablevalue(realresult) if not s_result.contains(s_realresult): - raise Exception("%s%r returned %r, which is not contained in %s" % ( - func, args, realresult, s_result)) + raise AnnotatorError( + "%s%r returned %r, which is not contained in %s" % ( + func, args, realresult, s_result)) return s_realresult # ____________________________________________________________ @@ -56,14 +57,14 @@ s_start, s_stop = args[:2] s_step = args[2] else: - raise Exception("range() takes 1 to 3 arguments") + raise AnnotatorError("range() takes 1 to 3 arguments") empty = False # so far if not s_step.is_constant(): step = 0 # this case signals a variable step else: step = s_step.const if step == 0: - raise Exception("range() with step zero") + raise AnnotatorError("range() with step zero") if s_start.is_constant() and s_stop.is_constant(): try: if len(xrange(s_start.const, s_stop.const, step)) == 0: @@ -285,7 +286,8 @@ else: @analyzer_for(unicodedata.decimal) def unicodedata_decimal(s_uchr): - raise TypeError("unicodedate.decimal() calls should not happen at interp-level") + raise AnnotatorError( + "unicodedate.decimal() calls should not happen at interp-level") @analyzer_for(OrderedDict) def analyze(): @@ -299,9 +301,9 @@ @analyzer_for(weakref.ref) def weakref_ref(s_obj): if not isinstance(s_obj, SomeInstance): - raise Exception("cannot take a weakref to %r" % (s_obj,)) + raise AnnotatorError("cannot take a weakref to %r" % (s_obj,)) if s_obj.can_be_None: - raise Exception("should assert that the instance we take " + raise AnnotatorError("should assert that the instance we take " "a weakref to cannot be 
None") return SomeWeakRef(s_obj.classdef) @@ -311,3 +313,14 @@ @analyzer_for(rpython.rlib.objectmodel.free_non_gc_object) def robjmodel_free_non_gc_object(obj): pass + +#________________________________ +# pdb + +import pdb + + at analyzer_for(pdb.set_trace) +def pdb_set_trace(*args_s): + raise AnnotatorError( + "you left pdb.set_trace() in your interpreter! " + "If you want to attach a gdb instead, call rlib.debug.attach_gdb()") diff --git a/rpython/jit/backend/llsupport/test/zrpy_vmprof_test.py b/rpython/jit/backend/llsupport/test/zrpy_vmprof_test.py new file mode 100644 --- /dev/null +++ b/rpython/jit/backend/llsupport/test/zrpy_vmprof_test.py @@ -0,0 +1,90 @@ + +import os, py +from rpython.jit.backend.test.support import CCompiledMixin +from rpython.rlib.jit import JitDriver +from rpython.tool.udir import udir +from rpython.translator.translator import TranslationContext +from rpython.jit.backend.detect_cpu import getcpuclass + +class CompiledVmprofTest(CCompiledMixin): + CPUClass = getcpuclass() + + def setup(self): + if self.CPUClass.backend_name != 'x86_64': + py.test.skip("vmprof only supports x86-64 CPUs at the moment") + + def _get_TranslationContext(self): + t = TranslationContext() + t.config.translation.gc = 'incminimark' + t.config.translation.list_comprehension_operations = True + return t + + def test_vmprof(self): + from rpython.rlib import rvmprof + + class MyCode: + _vmprof_unique_id = 0 + def __init__(self, name): + self.name = name + + def get_name(code): + return code.name + + code2 = MyCode("py:y:foo:4") + rvmprof.register_code(code2, get_name) + + try: + rvmprof.register_code_object_class(MyCode, get_name) + except rvmprof.VMProfPlatformUnsupported, e: + py.test.skip(str(e)) + + def get_unique_id(code): + return rvmprof.get_unique_id(code) + + driver = JitDriver(greens = ['code'], reds = ['i', 's', 'num'], + is_recursive=True, get_unique_id=get_unique_id) + + @rvmprof.vmprof_execute_code("xcode13", lambda code, num: code) + def main(code, 
num): + return main_jitted(code, num) + + def main_jitted(code, num): + s = 0 + i = 0 + while i < num: + driver.jit_merge_point(code=code, i=i, s=s, num=num) + s += (i << 1) + if i % 3 == 0 and code is not code2: + main(code2, 100) + i += 1 + return s + + tmpfilename = str(udir.join('test_rvmprof')) + + def f(num): + code = MyCode("py:x:foo:3") + rvmprof.register_code(code, get_name) + fd = os.open(tmpfilename, os.O_WRONLY | os.O_CREAT, 0666) + period = 0.0001 + rvmprof.enable(fd, period) + res = main(code, num) + #assert res == 499999500000 + rvmprof.disable() + os.close(fd) + return 0 + + def check_vmprof_output(): + from vmprof import read_profile + tmpfile = str(udir.join('test_rvmprof')) + stats = read_profile(tmpfile) + t = stats.get_tree() + assert t.name == 'py:x:foo:3' + assert len(t.children) == 1 # jit + + self.meta_interp(f, [1000000], inline=True) + try: + import vmprof + except ImportError: + pass + else: + check_vmprof_output() diff --git a/rpython/jit/backend/test/test_rvmprof.py b/rpython/jit/backend/test/test_rvmprof.py new file mode 100644 --- /dev/null +++ b/rpython/jit/backend/test/test_rvmprof.py @@ -0,0 +1,49 @@ +import py +from rpython.rlib import jit +from rpython.rtyper.annlowlevel import llhelper +from rpython.rtyper.lltypesystem import lltype, rffi +from rpython.rlib.rvmprof import cintf +from rpython.jit.backend.x86.arch import WORD +from rpython.jit.codewriter.policy import JitPolicy + +class BaseRVMProfTest(object): + def test_one(self): + py.test.skip("needs thread-locals in the JIT, which is only available " + "after translation") + visited = [] + + def helper(): + stack = cintf.vmprof_tl_stack.getraw() + if stack: + # not during tracing + visited.append(stack.c_value) + else: + visited.append(0) + + llfn = llhelper(lltype.Ptr(lltype.FuncType([], lltype.Void)), helper) + + driver = jit.JitDriver(greens=[], reds='auto') + + def f(n): + i = 0 + while i < n: + driver.jit_merge_point() + i += 1 + llfn() + + class 
Hooks(jit.JitHookInterface): + def after_compile(self, debug_info): + self.raw_start = debug_info.asminfo.rawstart + + hooks = Hooks() + + null = lltype.nullptr(cintf.VMPROFSTACK) + cintf.vmprof_tl_stack.setraw(null) # make it empty + self.meta_interp(f, [10], policy=JitPolicy(hooks)) + v = set(visited) + assert 0 in v + v.remove(0) + assert len(v) == 1 + assert 0 <= list(v)[0] - hooks.raw_start <= 10*1024 + assert cintf.vmprof_tl_stack.getraw() == null + # ^^^ make sure we didn't leave anything dangling diff --git a/rpython/jit/backend/x86/arch.py b/rpython/jit/backend/x86/arch.py --- a/rpython/jit/backend/x86/arch.py +++ b/rpython/jit/backend/x86/arch.py @@ -31,7 +31,7 @@ if WORD == 4: # ebp + ebx + esi + edi + 15 extra words = 19 words - FRAME_FIXED_SIZE = 19 + FRAME_FIXED_SIZE = 19 + 4 # 4 for vmprof, XXX make more compact! PASS_ON_MY_FRAME = 15 JITFRAME_FIXED_SIZE = 6 + 8 * 2 # 6 GPR + 8 XMM * 2 WORDS/float # 'threadlocal_addr' is passed as 2nd argument on the stack, @@ -41,7 +41,7 @@ THREADLOCAL_OFS = (FRAME_FIXED_SIZE + 2) * WORD else: # rbp + rbx + r12 + r13 + r14 + r15 + threadlocal + 12 extra words = 19 - FRAME_FIXED_SIZE = 19 + FRAME_FIXED_SIZE = 19 + 4 # 4 for vmprof, XXX make more compact! 
PASS_ON_MY_FRAME = 12 JITFRAME_FIXED_SIZE = 28 # 13 GPR + 15 XMM # 'threadlocal_addr' is passed as 2nd argument in %esi, diff --git a/rpython/jit/backend/x86/assembler.py b/rpython/jit/backend/x86/assembler.py --- a/rpython/jit/backend/x86/assembler.py +++ b/rpython/jit/backend/x86/assembler.py @@ -12,7 +12,7 @@ from rpython.jit.metainterp.compile import ResumeGuardDescr from rpython.rtyper.lltypesystem import lltype, rffi, rstr, llmemory from rpython.rtyper.lltypesystem.lloperation import llop -from rpython.rtyper.annlowlevel import llhelper, cast_instance_to_gcref +from rpython.rtyper.annlowlevel import cast_instance_to_gcref from rpython.rtyper import rclass from rpython.rlib.jit import AsmInfo from rpython.jit.backend.model import CompiledLoopToken @@ -837,11 +837,56 @@ frame_depth = max(frame_depth, target_frame_depth) return frame_depth + def _call_header_vmprof(self): + from rpython.rlib.rvmprof.rvmprof import cintf, VMPROF_JITTED_TAG + + # tloc = address of pypy_threadlocal_s + if IS_X86_32: + # Can't use esi here, its old value is not saved yet. + # But we can use eax and ecx. + self.mc.MOV_rs(edx.value, THREADLOCAL_OFS) + tloc = edx + old = ecx + else: + # The thread-local value is already in esi. + # We should avoid if possible to use ecx or edx because they + # would be used to pass arguments #3 and #4 (even though, so + # far, the assembler only receives two arguments). 
+ tloc = esi + old = r11 + # eax = address in the stack of a 3-words struct vmprof_stack_s + self.mc.LEA_rs(eax.value, (FRAME_FIXED_SIZE - 4) * WORD) + # old = current value of vmprof_tl_stack + offset = cintf.vmprof_tl_stack.getoffset() + self.mc.MOV_rm(old.value, (tloc.value, offset)) + # eax->next = old + self.mc.MOV_mr((eax.value, 0), old.value) + # eax->value = my esp + self.mc.MOV_mr((eax.value, WORD), esp.value) + # eax->kind = VMPROF_JITTED_TAG + self.mc.MOV_mi((eax.value, WORD * 2), VMPROF_JITTED_TAG) + # save in vmprof_tl_stack the new eax + self.mc.MOV_mr((tloc.value, offset), eax.value) + + def _call_footer_vmprof(self): + from rpython.rlib.rvmprof.rvmprof import cintf + # edx = address of pypy_threadlocal_s + self.mc.MOV_rs(edx.value, THREADLOCAL_OFS) + self.mc.AND_ri(edx.value, ~1) + # eax = (our local vmprof_tl_stack).next + self.mc.MOV_rs(eax.value, (FRAME_FIXED_SIZE - 4 + 0) * WORD) + # save in vmprof_tl_stack the value eax + offset = cintf.vmprof_tl_stack.getoffset() + self.mc.MOV_mr((edx.value, offset), eax.value) + def _call_header(self): self.mc.SUB_ri(esp.value, FRAME_FIXED_SIZE * WORD) self.mc.MOV_sr(PASS_ON_MY_FRAME * WORD, ebp.value) if IS_X86_64: self.mc.MOV_sr(THREADLOCAL_OFS, esi.value) + if self.cpu.translate_support_code: + self._call_header_vmprof() # on X86_64, this uses esi + if IS_X86_64: self.mc.MOV_rr(ebp.value, edi.value) else: self.mc.MOV_rs(ebp.value, (FRAME_FIXED_SIZE + 1) * WORD) @@ -873,6 +918,8 @@ def _call_footer(self): # the return value is the jitframe + if self.cpu.translate_support_code: + self._call_footer_vmprof() self.mc.MOV_rr(eax.value, ebp.value) gcrootmap = self.cpu.gc_ll_descr.gcrootmap diff --git a/rpython/jit/backend/x86/test/test_rvmprof.py b/rpython/jit/backend/x86/test/test_rvmprof.py new file mode 100644 --- /dev/null +++ b/rpython/jit/backend/x86/test/test_rvmprof.py @@ -0,0 +1,7 @@ + +import py +from rpython.jit.backend.test.test_rvmprof import BaseRVMProfTest +from 
rpython.jit.backend.x86.test.test_basic import Jit386Mixin + +class TestFfiCall(Jit386Mixin, BaseRVMProfTest): + pass \ No newline at end of file diff --git a/rpython/jit/backend/x86/test/test_zrpy_vmprof.py b/rpython/jit/backend/x86/test/test_zrpy_vmprof.py new file mode 100644 --- /dev/null +++ b/rpython/jit/backend/x86/test/test_zrpy_vmprof.py @@ -0,0 +1,7 @@ + +from rpython.jit.backend.llsupport.test.zrpy_vmprof_test import CompiledVmprofTest + +class TestZVMprof(CompiledVmprofTest): + + gcrootfinder = "shadowstack" + gc = "incminimark" \ No newline at end of file diff --git a/rpython/jit/backend/x86/test/test_zvmprof.py b/rpython/jit/backend/x86/test/test_zvmprof.py new file mode 100644 --- /dev/null +++ b/rpython/jit/backend/x86/test/test_zvmprof.py @@ -0,0 +1,7 @@ + +from rpython.jit.backend.llsupport.test.zrpy_vmprof_test import CompiledVmprofTest + +class TestZVMprof(CompiledVmprofTest): + + gcrootfinder = "shadowstack" + gc = "incminimark" \ No newline at end of file diff --git a/rpython/jit/codewriter/test/test_jtransform.py b/rpython/jit/codewriter/test/test_jtransform.py --- a/rpython/jit/codewriter/test/test_jtransform.py +++ b/rpython/jit/codewriter/test/test_jtransform.py @@ -1332,7 +1332,7 @@ tlfield = ThreadLocalField(lltype.Signed, 'foobar_test_', loop_invariant=loop_inv) OS_THREADLOCALREF_GET = effectinfo.EffectInfo.OS_THREADLOCALREF_GET - c = const(tlfield.offset) + c = const(tlfield.getoffset()) v = varoftype(lltype.Signed) op = SpaceOperation('threadlocalref_get', [c], v) cc = FakeBuiltinCallControl() diff --git a/rpython/jit/metainterp/quasiimmut.py b/rpython/jit/metainterp/quasiimmut.py --- a/rpython/jit/metainterp/quasiimmut.py +++ b/rpython/jit/metainterp/quasiimmut.py @@ -51,6 +51,7 @@ class QuasiImmut(object): llopaque = True compress_limit = 30 + looptokens_wrefs = None def __init__(self, cpu): self.cpu = cpu @@ -75,7 +76,7 @@ def compress_looptokens_list(self): self.looptokens_wrefs = [wref for wref in self.looptokens_wrefs if wref() 
is not None] - # NB. we must keep around the looptoken_wrefs that are + # NB. we must keep around the looptokens_wrefs that are # already invalidated; see below self.compress_limit = (len(self.looptokens_wrefs) + 15) * 2 @@ -83,6 +84,9 @@ # When this is called, all the loops that we record become # invalid: all GUARD_NOT_INVALIDATED in these loops (and # in attached bridges) must now fail. + if self.looptokens_wrefs is None: + # can't happen, but helps compiled tests + return wrefs = self.looptokens_wrefs self.looptokens_wrefs = [] for wref in wrefs: diff --git a/rpython/jit/metainterp/test/test_jitdriver.py b/rpython/jit/metainterp/test/test_jitdriver.py --- a/rpython/jit/metainterp/test/test_jitdriver.py +++ b/rpython/jit/metainterp/test/test_jitdriver.py @@ -193,7 +193,7 @@ return pc + 1 driver = JitDriver(greens=["pc"], reds='auto', - get_unique_id=get_unique_id) + get_unique_id=get_unique_id, is_recursive=True) def f(arg): i = 0 diff --git a/rpython/jit/metainterp/test/test_recursive.py b/rpython/jit/metainterp/test/test_recursive.py --- a/rpython/jit/metainterp/test/test_recursive.py +++ b/rpython/jit/metainterp/test/test_recursive.py @@ -1312,7 +1312,7 @@ return (code + 1) * 2 driver = JitDriver(greens=["pc", "code"], reds='auto', - get_unique_id=get_unique_id) + get_unique_id=get_unique_id, is_recursive=True) def f(pc, code): i = 0 diff --git a/rpython/rlib/debug.py b/rpython/rlib/debug.py --- a/rpython/rlib/debug.py +++ b/rpython/rlib/debug.py @@ -1,76 +1,41 @@ -import sys, time +import sys +import time + from rpython.rtyper.extregistry import ExtRegistryEntry from rpython.rlib.objectmodel import we_are_translated from rpython.rlib.rarithmetic import is_valid_int -from rpython.rtyper.extfunc import ExtFuncEntry +from rpython.rtyper.extfunc import register_external from rpython.rtyper.lltypesystem import lltype +from rpython.rtyper.lltypesystem import rffi from rpython.translator.tool.cbuild import ExternalCompilationInfo - -def ll_assert(x, msg): - 
"""After translation to C, this becomes an RPyAssert.""" - assert type(x) is bool, "bad type! got %r" % (type(x),) - assert x, msg - -class Entry(ExtRegistryEntry): - _about_ = ll_assert - - def compute_result_annotation(self, s_x, s_msg): - assert s_msg.is_constant(), ("ll_assert(x, msg): " - "the msg must be constant") - return None - - def specialize_call(self, hop): - vlist = hop.inputargs(lltype.Bool, lltype.Void) - hop.exception_cannot_occur() - hop.genop('debug_assert', vlist) From pypy.commits at gmail.com Sun Feb 7 13:41:28 2016 From: pypy.commits at gmail.com (rlamy) Date: Sun, 07 Feb 2016 10:41:28 -0800 (PST) Subject: [pypy-commit] pypy llimpl: Create rtyper.backend and use it to choose the implementation when using register_external() Message-ID: <56b78fd8.8e301c0a.b4751.fffff05f@mx.google.com> Author: Ronan Lamy Branch: llimpl Changeset: r82108:cea1893831f4 Date: 2016-02-07 18:40 +0000 http://bitbucket.org/pypy/pypy/changeset/cea1893831f4/ Log: Create rtyper.backend and use it to choose the implementation when using register_external() diff --git a/rpython/memory/test/test_transformed_gc.py b/rpython/memory/test/test_transformed_gc.py --- a/rpython/memory/test/test_transformed_gc.py +++ b/rpython/memory/test/test_transformed_gc.py @@ -14,6 +14,7 @@ from rpython.conftest import option from rpython.rlib.rstring import StringBuilder from rpython.rlib.rarithmetic import LONG_BIT +from rpython.rtyper.rtyper import llinterp_backend WORD = LONG_BIT // 8 @@ -29,9 +30,11 @@ t.config.set(**extraconfigopts) ann = t.buildannotator() ann.build_types(func, inputtypes) + rtyper = t.buildrtyper() + rtyper.backend = llinterp_backend if specialize: - t.buildrtyper().specialize() + rtyper.specialize() if backendopt: from rpython.translator.backendopt.all import backend_optimizations backend_optimizations(t) diff --git a/rpython/rtyper/extfunc.py b/rpython/rtyper/extfunc.py --- a/rpython/rtyper/extfunc.py +++ b/rpython/rtyper/extfunc.py @@ -1,3 +1,4 @@ +from 
rpython.tool.sourcetools import func_with_new_name from rpython.rtyper.extregistry import ExtRegistryEntry from rpython.rtyper.lltypesystem.lltype import typeOf, FuncType, functionptr, _ptr from rpython.annotator.model import unionof @@ -33,6 +34,7 @@ return self.signature_result def specialize_call(self, hop): + from rpython.rtyper.rtyper import llinterp_backend rtyper = hop.rtyper signature_args = self.normalize_args(*hop.args_s) args_r = [rtyper.getrepr(s_arg) for s_arg in signature_args] @@ -49,14 +51,18 @@ if isinstance(impl, _ptr): obj = impl else: - # store some attributes to the 'impl' function, where - # the eventual call to rtyper.getcallable() will find them - # and transfer them to the final lltype.functionptr(). - impl._llfnobjattrs_ = {'_name': self.name} - if hasattr(self, 'lltypefakeimpl'): - impl._llfnobjattrs_['_fakeimpl'] = fakeimpl - obj = rtyper.getannmixlevel().delayedfunction( - impl, signature_args, hop.s_result) + if hasattr(self, 'lltypefakeimpl') and rtyper.backend is llinterp_backend: + FT = FuncType(args_ll, ll_result) + obj = functionptr(FT, name, _external_name=self.name, + _callable=fakeimpl, + _safe_not_sandboxed=self.safe_not_sandboxed) + else: + # store some attributes to the 'impl' function, where + # the eventual call to rtyper.getcallable() will find them + # and transfer them to the final lltype.functionptr(). 
+ impl._llfnobjattrs_ = {'_name': self.name} + obj = rtyper.getannmixlevel().delayedfunction( + impl, signature_args, hop.s_result) else: FT = FuncType(args_ll, ll_result) obj = functionptr(FT, name, _external_name=self.name, diff --git a/rpython/rtyper/rtyper.py b/rpython/rtyper/rtyper.py --- a/rpython/rtyper/rtyper.py +++ b/rpython/rtyper/rtyper.py @@ -32,11 +32,24 @@ from rpython.translator.sandbox.rsandbox import make_sandbox_trampoline +class RTyperBackend(object): + pass + +class GenCBackend(RTyperBackend): + pass +genc_backend = GenCBackend() + +class LLInterpBackend(RTyperBackend): + pass +llinterp_backend = LLInterpBackend() + + class RPythonTyper(object): from rpython.rtyper.rmodel import log - def __init__(self, annotator): + def __init__(self, annotator, backend=genc_backend): self.annotator = annotator + self.backend = backend self.lowlevel_ann_policy = LowLevelAnnotatorPolicy(self) self.reprs = {} self._reprs_must_call_setup = [] diff --git a/rpython/rtyper/test/test_llinterp.py b/rpython/rtyper/test/test_llinterp.py --- a/rpython/rtyper/test/test_llinterp.py +++ b/rpython/rtyper/test/test_llinterp.py @@ -13,7 +13,7 @@ from rpython.rlib.rarithmetic import r_uint, ovfcheck from rpython.tool import leakfinder from rpython.conftest import option - +from rpython.rtyper.rtyper import llinterp_backend # switch on logging of interp to show more info on failing tests @@ -39,6 +39,7 @@ t.view() global typer # we need it for find_exception typer = t.buildrtyper() + typer.backend = llinterp_backend typer.specialize() #t.view() t.checkgraphs() diff --git a/rpython/translator/c/node.py b/rpython/translator/c/node.py --- a/rpython/translator/c/node.py +++ b/rpython/translator/c/node.py @@ -913,6 +913,7 @@ return [] def new_funcnode(db, T, obj, forcename=None): + from rpython.rtyper.rtyper import llinterp_backend if db.sandbox: if (getattr(obj, 'external', None) is not None and not obj._safe_not_sandboxed): @@ -934,6 +935,9 @@ return ExternalFuncNode(db, T, obj, 
name) elif hasattr(obj._callable, "c_name"): return ExternalFuncNode(db, T, obj, name) # this case should only be used for entrypoints + elif db.translator.rtyper.backend is llinterp_backend: + # on llinterp, anything goes + return ExternalFuncNode(db, T, obj, name) else: raise ValueError("don't know how to generate code for %r" % (obj,)) From pypy.commits at gmail.com Sun Feb 7 13:41:26 2016 From: pypy.commits at gmail.com (rlamy) Date: Sun, 07 Feb 2016 10:41:26 -0800 (PST) Subject: [pypy-commit] pypy llimpl: Extract some code out of ExtFuncEntry Message-ID: <56b78fd6.01cdc20a.19169.3f37@mx.google.com> Author: Ronan Lamy Branch: llimpl Changeset: r82107:ad357db1fd3f Date: 2016-02-07 02:01 +0000 http://bitbucket.org/pypy/pypy/changeset/ad357db1fd3f/ Log: Extract some code out of ExtFuncEntry diff --git a/rpython/rtyper/extfunc.py b/rpython/rtyper/extfunc.py --- a/rpython/rtyper/extfunc.py +++ b/rpython/rtyper/extfunc.py @@ -44,12 +44,8 @@ impl = getattr(self, 'lltypeimpl', None) fakeimpl = getattr(self, 'lltypefakeimpl', self.instance) if impl: - if (rtyper.annotator.translator.config.translation.sandbox - and not self.safe_not_sandboxed): - from rpython.translator.sandbox.rsandbox import ( - make_sandbox_trampoline) - impl = make_sandbox_trampoline( - self.name, signature_args, s_result) + impl = make_impl(rtyper, impl, self.safe_not_sandboxed, self.name, + signature_args, s_result) if isinstance(impl, _ptr): obj = impl else: @@ -70,6 +66,13 @@ hop.exception_is_here() return hop.genop('direct_call', vlist, r_result) +def make_impl(rtyper, impl, sandboxsafe, name, args_s, s_result): + if (rtyper.annotator.translator.config.translation.sandbox + and not sandboxsafe): + from rpython.translator.sandbox.rsandbox import make_sandbox_trampoline + impl = make_sandbox_trampoline(name, args_s, s_result) + return impl + def register_external(function, args, result=None, export_name=None, llimpl=None, llfakeimpl=None, sandboxsafe=False): """ From pypy.commits at gmail.com Sun 
Feb 7 15:22:59 2016 From: pypy.commits at gmail.com (rlamy) Date: Sun, 07 Feb 2016 12:22:59 -0800 (PST) Subject: [pypy-commit] pypy llimpl: fix gctransform tests Message-ID: <56b7a7a3.c8921c0a.1e5c9.166f@mx.google.com> Author: Ronan Lamy Branch: llimpl Changeset: r82109:d6f84983b21e Date: 2016-02-07 20:21 +0000 http://bitbucket.org/pypy/pypy/changeset/d6f84983b21e/ Log: fix gctransform tests diff --git a/rpython/memory/gctransform/test/test_transform.py b/rpython/memory/gctransform/test/test_transform.py --- a/rpython/memory/gctransform/test/test_transform.py +++ b/rpython/memory/gctransform/test/test_transform.py @@ -5,6 +5,7 @@ from rpython.translator.exceptiontransform import ExceptionTransformer from rpython.rtyper.lltypesystem import lltype from rpython.conftest import option +from rpython.rtyper.rtyper import llinterp_backend class LLInterpedTranformerTests: @@ -131,8 +132,10 @@ def rtype(func, inputtypes, specialize=True): t = TranslationContext() t.buildannotator().build_types(func, inputtypes) + rtyper = t.buildrtyper() + rtyper.backend = llinterp_backend if specialize: - t.buildrtyper().specialize() + rtyper.specialize() if option.view: t.view() return t From pypy.commits at gmail.com Sun Feb 7 15:43:35 2016 From: pypy.commits at gmail.com (rlamy) Date: Sun, 07 Feb 2016 12:43:35 -0800 (PST) Subject: [pypy-commit] pypy llimpl: Don't hack the funcptr in register_external() Message-ID: <56b7ac77.cf821c0a.e346a.1db7@mx.google.com> Author: Ronan Lamy Branch: llimpl Changeset: r82110:5c4c76a4b9c4 Date: 2016-02-07 20:42 +0000 http://bitbucket.org/pypy/pypy/changeset/5c4c76a4b9c4/ Log: Don't hack the funcptr in register_external() diff --git a/rpython/rtyper/extfunc.py b/rpython/rtyper/extfunc.py --- a/rpython/rtyper/extfunc.py +++ b/rpython/rtyper/extfunc.py @@ -48,21 +48,19 @@ if impl: impl = make_impl(rtyper, impl, self.safe_not_sandboxed, self.name, signature_args, s_result) - if isinstance(impl, _ptr): + if hasattr(self, 'lltypefakeimpl') and rtyper.backend 
is llinterp_backend: + FT = FuncType(args_ll, ll_result) + obj = functionptr(FT, name, _external_name=self.name, + _callable=fakeimpl) + elif isinstance(impl, _ptr): obj = impl else: - if hasattr(self, 'lltypefakeimpl') and rtyper.backend is llinterp_backend: - FT = FuncType(args_ll, ll_result) - obj = functionptr(FT, name, _external_name=self.name, - _callable=fakeimpl, - _safe_not_sandboxed=self.safe_not_sandboxed) - else: - # store some attributes to the 'impl' function, where - # the eventual call to rtyper.getcallable() will find them - # and transfer them to the final lltype.functionptr(). - impl._llfnobjattrs_ = {'_name': self.name} - obj = rtyper.getannmixlevel().delayedfunction( - impl, signature_args, hop.s_result) + # store some attributes to the 'impl' function, where + # the eventual call to rtyper.getcallable() will find them + # and transfer them to the final lltype.functionptr(). + impl._llfnobjattrs_ = {'_name': self.name} + obj = rtyper.getannmixlevel().delayedfunction( + impl, signature_args, hop.s_result) else: FT = FuncType(args_ll, ll_result) obj = functionptr(FT, name, _external_name=self.name, @@ -94,10 +92,6 @@ if export_name is None: export_name = function.__name__ - if isinstance(llimpl, _ptr) and llfakeimpl: - llimpl._obj.__dict__['_fakeimpl'] = llfakeimpl - llfakeimpl = None - class FunEntry(ExtFuncEntry): _about_ = function safe_not_sandboxed = sandboxsafe From pypy.commits at gmail.com Sun Feb 7 21:37:24 2016 From: pypy.commits at gmail.com (rlamy) Date: Sun, 07 Feb 2016 18:37:24 -0800 (PST) Subject: [pypy-commit] pypy llimpl: '_fakeimpl' is not used any more Message-ID: <56b7ff64.53ad1c0a.ac3ba.5ca9@mx.google.com> Author: Ronan Lamy Branch: llimpl Changeset: r82111:ec8879a186b6 Date: 2016-02-08 02:35 +0000 http://bitbucket.org/pypy/pypy/changeset/ec8879a186b6/ Log: '_fakeimpl' is not used any more diff --git a/rpython/rtyper/llinterp.py b/rpython/rtyper/llinterp.py --- a/rpython/rtyper/llinterp.py +++ b/rpython/rtyper/llinterp.py @@ 
-667,14 +667,6 @@ return frame.eval() def op_direct_call(self, f, *args): - pythonfunction = getattr(f._obj, '_fakeimpl', None) - if pythonfunction is not None: - try: - return pythonfunction(*args) - except: - self.make_llexception() - return - FTYPE = lltype.typeOf(f).TO return self.perform_call(f, FTYPE.ARGS, args) From pypy.commits at gmail.com Sun Feb 7 21:37:26 2016 From: pypy.commits at gmail.com (rlamy) Date: Sun, 07 Feb 2016 18:37:26 -0800 (PST) Subject: [pypy-commit] pypy llimpl: ExtFuncEntry always has a .name defined Message-ID: <56b7ff66.6953c20a.c061.ffffad6c@mx.google.com> Author: Ronan Lamy Branch: llimpl Changeset: r82112:1a459b2a58cb Date: 2016-02-08 02:36 +0000 http://bitbucket.org/pypy/pypy/changeset/1a459b2a58cb/ Log: ExtFuncEntry always has a .name defined diff --git a/rpython/rtyper/extfunc.py b/rpython/rtyper/extfunc.py --- a/rpython/rtyper/extfunc.py +++ b/rpython/rtyper/extfunc.py @@ -17,16 +17,10 @@ for i, expected in enumerate(signature_args): arg = unionof(args_s[i], expected) if not expected.contains(arg): - name = getattr(self, 'name', None) - if not name: - try: - name = self.instance.__name__ - except AttributeError: - name = '?' 
raise SignatureError("In call to external function %r:\n" "arg %d must be %s,\n" " got %s" % ( - name, i+1, expected, args_s[i])) + self.name, i+1, expected, args_s[i])) return signature_args def compute_result_annotation(self, *args_s): @@ -42,7 +36,6 @@ s_result = hop.s_result r_result = rtyper.getrepr(s_result) ll_result = r_result.lowleveltype - name = getattr(self, 'name', None) or self.instance.__name__ impl = getattr(self, 'lltypeimpl', None) fakeimpl = getattr(self, 'lltypefakeimpl', self.instance) if impl: @@ -50,7 +43,7 @@ signature_args, s_result) if hasattr(self, 'lltypefakeimpl') and rtyper.backend is llinterp_backend: FT = FuncType(args_ll, ll_result) - obj = functionptr(FT, name, _external_name=self.name, + obj = functionptr(FT, self.name, _external_name=self.name, _callable=fakeimpl) elif isinstance(impl, _ptr): obj = impl @@ -63,7 +56,7 @@ impl, signature_args, hop.s_result) else: FT = FuncType(args_ll, ll_result) - obj = functionptr(FT, name, _external_name=self.name, + obj = functionptr(FT, self.name, _external_name=self.name, _callable=fakeimpl, _safe_not_sandboxed=self.safe_not_sandboxed) vlist = [hop.inputconst(typeOf(obj), obj)] + hop.inputargs(*args_r) From pypy.commits at gmail.com Mon Feb 8 02:26:39 2016 From: pypy.commits at gmail.com (plan_rich) Date: Sun, 07 Feb 2016 23:26:39 -0800 (PST) Subject: [pypy-commit] pypy s390x-backend: adding signed char to the list of bigendian conversions in rawffi.alt module Message-ID: <56b8432f.8e811c0a.dc917.ffffa2b6@mx.google.com> Author: Richard Plangger Branch: s390x-backend Changeset: r82113:398668348638 Date: 2016-02-08 08:25 +0100 http://bitbucket.org/pypy/pypy/changeset/398668348638/ Log: adding signed char to the list of bigendian conversions in rawffi.alt module diff --git a/rpython/rlib/libffi.py b/rpython/rlib/libffi.py --- a/rpython/rlib/libffi.py +++ b/rpython/rlib/libffi.py @@ -215,7 +215,7 @@ # ====================================================================== -NARROW_INTEGER_TYPES = 
unrolling_iterable([rffi.CHAR, +NARROW_INTEGER_TYPES = unrolling_iterable([rffi.CHAR, rffi.SIGNEDCHAR, rffi.UCHAR, rffi.SHORT, rffi.USHORT, rffi.INT, rffi.UINT]) class Func(AbstractFuncPtr): @@ -338,7 +338,7 @@ @jit.dont_look_inside @specialize.arg(3) def _do_call_int(self, funcsym, ll_args, TP): - return self._do_call(funcsym, ll_args, TP) + return rffi.cast(rffi.SIGNED, self._do_call(funcsym, ll_args, TP)) #@jit.oopspec('libffi_call_float(self, funcsym, ll_args)') @jit.dont_look_inside From pypy.commits at gmail.com Mon Feb 8 02:43:54 2016 From: pypy.commits at gmail.com (plan_rich) Date: Sun, 07 Feb 2016 23:43:54 -0800 (PST) Subject: [pypy-commit] pypy s390x-backend: LGR should have been AGR Message-ID: <56b8473a.c3e01c0a.be4eb.ffffad2b@mx.google.com> Author: Richard Plangger Branch: s390x-backend Changeset: r82114:e6ff00efc05f Date: 2016-02-08 08:43 +0100 http://bitbucket.org/pypy/pypy/changeset/e6ff00efc05f/ Log: LGR should have been AGR diff --git a/rpython/jit/backend/zarch/opassembler.py b/rpython/jit/backend/zarch/opassembler.py --- a/rpython/jit/backend/zarch/opassembler.py +++ b/rpython/jit/backend/zarch/opassembler.py @@ -751,7 +751,7 @@ self._read_typeid(r.SCRATCH2, loc_object) self.mc.load_imm(r.SCRATCH, base_type_info + infobits_offset) assert shift_by == 0 - self.mc.LGR(r.SCRATCH, r.SCRATCH2) + self.mc.AGR(r.SCRATCH, r.SCRATCH2) self.mc.LLGC(r.SCRATCH2, l.addr(0, r.SCRATCH)) # cannot use r.r0 as index reg self.mc.NILL(r.SCRATCH2, l.imm(IS_OBJECT_FLAG & 0xff)) self.guard_success_cc = c.NE From pypy.commits at gmail.com Mon Feb 8 07:40:16 2016 From: pypy.commits at gmail.com (arigo) Date: Mon, 08 Feb 2016 04:40:16 -0800 (PST) Subject: [pypy-commit] pypy.org extradoc: update the values Message-ID: <56b88cb0.8916c20a.8fa3b.6c53@mx.google.com> Author: Armin Rigo Branch: extradoc Changeset: r700:dcc5d23c5099 Date: 2016-02-08 13:40 +0100 http://bitbucket.org/pypy/pypy.org/changeset/dcc5d23c5099/ Log: update the values diff --git a/don1.html b/don1.html --- 
a/don1.html +++ b/don1.html @@ -9,13 +9,13 @@ - $62841 of $105000 (59.8%) + $62850 of $105000 (59.9%)
    @@ -23,7 +23,7 @@
  • diff --git a/don3.html b/don3.html --- a/don3.html +++ b/don3.html @@ -15,7 +15,7 @@ - $53142 of $60000 (88.6%) + $53152 of $60000 (88.6%)
    @@ -23,7 +23,7 @@
  • From pypy.commits at gmail.com Mon Feb 8 11:05:14 2016 From: pypy.commits at gmail.com (rlamy) Date: Mon, 08 Feb 2016 08:05:14 -0800 (PST) Subject: [pypy-commit] pypy llimpl: Remove undocumented and unused alternative input types for 'args' in register_external() Message-ID: <56b8bcba.42cbc20a.37573.ffffc9d3@mx.google.com> Author: Ronan Lamy Branch: llimpl Changeset: r82115:729ec6bd5cd5 Date: 2016-02-08 15:50 +0000 http://bitbucket.org/pypy/pypy/changeset/729ec6bd5cd5/ Log: Remove undocumented and unused alternative input types for 'args' in register_external() diff --git a/rpython/rtyper/extfunc.py b/rpython/rtyper/extfunc.py --- a/rpython/rtyper/extfunc.py +++ b/rpython/rtyper/extfunc.py @@ -1,4 +1,3 @@ -from rpython.tool.sourcetools import func_with_new_name from rpython.rtyper.extregistry import ExtRegistryEntry from rpython.rtyper.lltypesystem.lltype import typeOf, FuncType, functionptr, _ptr from rpython.annotator.model import unionof @@ -88,16 +87,7 @@ class FunEntry(ExtFuncEntry): _about_ = function safe_not_sandboxed = sandboxsafe - - if args is None: - def normalize_args(self, *args_s): - return args_s # accept any argument unmodified - elif callable(args): - # custom annotation normalizer (see e.g. os.utime()) - normalize_args = staticmethod(args) - else: # use common case behavior - signature_args = args - + signature_args = args signature_result = annotation(result, None) name = export_name if llimpl: diff --git a/rpython/rtyper/test/test_extfunc.py b/rpython/rtyper/test/test_extfunc.py --- a/rpython/rtyper/test/test_extfunc.py +++ b/rpython/rtyper/test/test_extfunc.py @@ -121,23 +121,6 @@ s = a.build_types(f, []) assert isinstance(s, SomeInteger) - def test_register_external_specialcase(self): - """ - When args=None, the external function accepts any arguments unmodified. 
- """ - def function_withspecialcase(arg): - return repr(arg) - register_external(function_withspecialcase, args=None, result=str) - - def f(): - x = function_withspecialcase - return x(33) + x("aaa") + x([]) + "\n" - - policy = AnnotatorPolicy() - a = RPythonAnnotator(policy=policy) - s = a.build_types(f, []) - assert isinstance(s, SomeString) - def test_str0(self): str0 = SomeString(no_nul=True) def os_open(s): From pypy.commits at gmail.com Mon Feb 8 12:05:11 2016 From: pypy.commits at gmail.com (rlamy) Date: Mon, 08 Feb 2016 09:05:11 -0800 (PST) Subject: [pypy-commit] pypy llimpl: Test register_external(), not its internal implementation Message-ID: <56b8cac7.4c0c1c0a.a3bb2.ffffa1ab@mx.google.com> Author: Ronan Lamy Branch: llimpl Changeset: r82116:d159c6726d45 Date: 2016-02-08 17:04 +0000 http://bitbucket.org/pypy/pypy/changeset/d159c6726d45/ Log: Test register_external(), not its internal implementation diff --git a/rpython/rtyper/test/test_extfunc.py b/rpython/rtyper/test/test_extfunc.py --- a/rpython/rtyper/test/test_extfunc.py +++ b/rpython/rtyper/test/test_extfunc.py @@ -1,7 +1,6 @@ import py -from rpython.rtyper.extfunc import ExtFuncEntry, register_external,\ - is_external +from rpython.rtyper.extfunc import register_external from rpython.annotator.model import SomeInteger, SomeString, AnnotatorError from rpython.annotator.annrpython import RPythonAnnotator from rpython.annotator.policy import AnnotatorPolicy @@ -19,11 +18,7 @@ "NOT_RPYTHON" return eval("x+40") - class BTestFuncEntry(ExtFuncEntry): - _about_ = b - name = 'b' - signature_args = [SomeInteger()] - signature_result = SomeInteger() + register_external(b, [int], result=int) def f(): return b(2) @@ -43,15 +38,11 @@ def c(y, x): yyy - class CTestFuncEntry(ExtFuncEntry): - _about_ = c - name = 'ccc' - signature_args = [SomeInteger()] * 2 - signature_result = SomeInteger() + def llimpl(y, x): + return y + x - def lltypeimpl(y, x): - return y + x - lltypeimpl = staticmethod(lltypeimpl) + 
register_external(c, [int, int], result=int, llimpl=llimpl, + export_name='ccc') def f(): return c(3, 4) @@ -59,22 +50,6 @@ res = interpret(f, []) assert res == 7 - def test_register_external_signature(self): - """ - Test the standard interface for external functions. - """ - def dd(): - pass - register_external(dd, [int], int) - - def f(): - return dd(3) - - policy = AnnotatorPolicy() - a = RPythonAnnotator(policy=policy) - s = a.build_types(f, []) - assert isinstance(s, SomeInteger) - def test_register_external_tuple_args(self): """ Verify the annotation of a registered external function which takes a From pypy.commits at gmail.com Mon Feb 8 13:34:24 2016 From: pypy.commits at gmail.com (fijal) Date: Mon, 08 Feb 2016 10:34:24 -0800 (PST) Subject: [pypy-commit] buildbot default: try to make LINUX32 run an hour later Message-ID: <56b8dfb0.8916c20a.8fa3b.03ee@mx.google.com> Author: fijal Branch: Changeset: r982:5ae8ba9b9554 Date: 2016-02-08 19:34 +0100 http://bitbucket.org/pypy/buildbot/changeset/5ae8ba9b9554/ Log: try to make LINUX32 run an hour later diff --git a/bot2/pypybuildbot/master.py b/bot2/pypybuildbot/master.py --- a/bot2/pypybuildbot/master.py +++ b/bot2/pypybuildbot/master.py @@ -270,7 +270,6 @@ Nightly("nightly-0-00", [ # benchmarks # linux tests - LINUX32, # on tannit32, uses all cores LINUX64, # on speed-old, uses all cores JITLINUX32, # on tannit32, uses 1 core JITLINUX64, # on speed-old, uses 1 core @@ -295,6 +294,7 @@ ], branch='s390x-backend', hour=2, minute=0), Nightly("nightly-1-00", [ + LINUX32, # on tannit32, uses all cores JITBENCH, # on tannit32, uses 1 core (in part exclusively) JITBENCH64, # on tannit64, uses 1 core (in part exclusively) JITBENCH64_NEW, # on speed64, uses 1 core (in part exclusively) From pypy.commits at gmail.com Mon Feb 8 13:34:41 2016 From: pypy.commits at gmail.com (fijal) Date: Mon, 08 Feb 2016 10:34:41 -0800 (PST) Subject: [pypy-commit] buildbot default: Backed out changeset 5ae8ba9b9554 Message-ID: 
<56b8dfc1.418f1c0a.3a711.ffffb7dc@mx.google.com> Author: fijal Branch: Changeset: r983:f76d0f67ea99 Date: 2016-02-08 19:34 +0100 http://bitbucket.org/pypy/buildbot/changeset/f76d0f67ea99/ Log: Backed out changeset 5ae8ba9b9554 diff --git a/bot2/pypybuildbot/master.py b/bot2/pypybuildbot/master.py --- a/bot2/pypybuildbot/master.py +++ b/bot2/pypybuildbot/master.py @@ -270,6 +270,7 @@ Nightly("nightly-0-00", [ # benchmarks # linux tests + LINUX32, # on tannit32, uses all cores LINUX64, # on speed-old, uses all cores JITLINUX32, # on tannit32, uses 1 core JITLINUX64, # on speed-old, uses 1 core @@ -294,7 +295,6 @@ ], branch='s390x-backend', hour=2, minute=0), Nightly("nightly-1-00", [ - LINUX32, # on tannit32, uses all cores JITBENCH, # on tannit32, uses 1 core (in part exclusively) JITBENCH64, # on tannit64, uses 1 core (in part exclusively) JITBENCH64_NEW, # on speed64, uses 1 core (in part exclusively) From pypy.commits at gmail.com Mon Feb 8 13:35:17 2016 From: pypy.commits at gmail.com (fijal) Date: Mon, 08 Feb 2016 10:35:17 -0800 (PST) Subject: [pypy-commit] buildbot default: move LINUX64 to run an hour later Message-ID: <56b8dfe5.05e41c0a.992b1.ffff8011@mx.google.com> Author: fijal Branch: Changeset: r984:d3ca85cd39a5 Date: 2016-02-08 19:35 +0100 http://bitbucket.org/pypy/buildbot/changeset/d3ca85cd39a5/ Log: move LINUX64 to run an hour later diff --git a/bot2/pypybuildbot/master.py b/bot2/pypybuildbot/master.py --- a/bot2/pypybuildbot/master.py +++ b/bot2/pypybuildbot/master.py @@ -271,7 +271,6 @@ # benchmarks # linux tests LINUX32, # on tannit32, uses all cores - LINUX64, # on speed-old, uses all cores JITLINUX32, # on tannit32, uses 1 core JITLINUX64, # on speed-old, uses 1 core #APPLVLLINUX32, # on tannit32, uses 1 core @@ -295,6 +294,7 @@ ], branch='s390x-backend', hour=2, minute=0), Nightly("nightly-1-00", [ + LINUX64, # on speed-old, uses all cores JITBENCH, # on tannit32, uses 1 core (in part exclusively) JITBENCH64, # on tannit64, uses 1 core (in part 
exclusively) JITBENCH64_NEW, # on speed64, uses 1 core (in part exclusively) From pypy.commits at gmail.com Mon Feb 8 14:55:39 2016 From: pypy.commits at gmail.com (fijal) Date: Mon, 08 Feb 2016 11:55:39 -0800 (PST) Subject: [pypy-commit] pypy default: fix a test Message-ID: <56b8f2bb.0cb81c0a.6083f.ffffed71@mx.google.com> Author: fijal Branch: Changeset: r82117:b88cc59e72a4 Date: 2016-02-08 20:54 +0100 http://bitbucket.org/pypy/pypy/changeset/b88cc59e72a4/ Log: fix a test diff --git a/rpython/tool/jitlogparser/test/logtest2.log b/rpython/tool/jitlogparser/test/logtest2.log --- a/rpython/tool/jitlogparser/test/logtest2.log +++ b/rpython/tool/jitlogparser/test/logtest2.log @@ -139,7 +139,7 @@ debug_merge_point(0, 0, ' #12 LOAD_CONST') +289: guard_value(p4, ConstPtr(ptr22), descr=) [p1, p0, p4, p2, p3, p6, p11, p13, p17] debug_merge_point(0, 0, ' #15 COMPARE_OP') -+308: i23 = getfield_gc_pure_i(p11, descr=) ++308: i23 = getfield_gc_i(p11, descr=) +312: i25 = int_lt(i23, 10) guard_true(i25, descr=) [p1, p0, p11, p2, p3, p6, p13] debug_merge_point(0, 0, ' #18 POP_JUMP_IF_FALSE') @@ -285,9 +285,9 @@ +283: p23 = getfield_gc_r(p21, descr=) +287: guard_class(p23, 26517736, descr=) [p1, p0, p15, i22, p23, p21, p2, p3, p4, i5, p6, p11, p13, p17] +299: p25 = getfield_gc_r(p21, descr=) -+303: i26 = getfield_gc_pure_i(p25, descr=) -+307: i27 = getfield_gc_pure_i(p25, descr=) -+311: i28 = getfield_gc_pure_i(p25, descr=) ++303: i26 = getfield_gc_i(p25, descr=) ++307: i27 = getfield_gc_i(p25, descr=) ++311: i28 = getfield_gc_i(p25, descr=) +315: i30 = int_lt(i22, 0) guard_false(i30, descr=) [p1, p0, p15, i22, i28, i27, i26, p2, p3, p4, i5, p6, p11, p13, p17] +325: i31 = int_ge(i22, i28) From pypy.commits at gmail.com Mon Feb 8 16:04:34 2016 From: pypy.commits at gmail.com (fijal) Date: Mon, 08 Feb 2016 13:04:34 -0800 (PST) Subject: [pypy-commit] pypy default: kill some special code that we're not using any more Message-ID: <56b902e2.e906c20a.52ae6.420a@mx.google.com> Author: fijal 
Branch: Changeset: r82118:a204ce60d060 Date: 2016-02-08 22:03 +0100 http://bitbucket.org/pypy/pypy/changeset/a204ce60d060/ Log: kill some special code that we're not using any more diff --git a/rpython/rlib/rvmprof/src/vmprof_getpc.h b/rpython/rlib/rvmprof/src/vmprof_getpc.h --- a/rpython/rlib/rvmprof/src/vmprof_getpc.h +++ b/rpython/rlib/rvmprof/src/vmprof_getpc.h @@ -111,47 +111,9 @@ // PC_FROM_UCONTEXT in config.h. The only thing we need to do here, // then, is to do the magic call-unrolling for systems that support it. -// -- Special case 1: linux x86, for which we have CallUnrollInfo #if defined(__linux) && defined(__i386) && defined(__GNUC__) -static const CallUnrollInfo callunrollinfo[] = { - // Entry to a function: push %ebp; mov %esp,%ebp - // Top-of-stack contains the caller IP. - { 0, - {0x55, 0x89, 0xe5}, 3, - 0 - }, - // Entry to a function, second instruction: push %ebp; mov %esp,%ebp - // Top-of-stack contains the old frame, caller IP is +4. - { -1, - {0x55, 0x89, 0xe5}, 3, - 4 - }, - // Return from a function: RET. - // Top-of-stack contains the caller IP. - { 0, - {0xc3}, 1, - 0 - } -}; - intptr_t GetPC(ucontext_t *signal_ucontext) { - // See comment above struct CallUnrollInfo. Only try instruction - // flow matching if both eip and esp looks reasonable. - const int eip = signal_ucontext->uc_mcontext.gregs[REG_EIP]; - const int esp = signal_ucontext->uc_mcontext.gregs[REG_ESP]; - if ((eip & 0xffff0000) != 0 && (~eip & 0xffff0000) != 0 && - (esp & 0xffff0000) != 0) { - char* eip_char = reinterpret_cast(eip); - for (int i = 0; i < sizeof(callunrollinfo)/sizeof(*callunrollinfo); ++i) { - if (!memcmp(eip_char + callunrollinfo[i].pc_offset, - callunrollinfo[i].ins, callunrollinfo[i].ins_size)) { - // We have a match. 
- intptr_t *retaddr = (intptr_t*)(esp + callunrollinfo[i].return_sp_offset); - return *retaddr; - } - } - } - return eip; + return signal_ucontext->uc_mcontext.gregs[REG_EIP]; } // Special case #2: Windows, which has to do something totally different. From pypy.commits at gmail.com Mon Feb 8 21:14:03 2016 From: pypy.commits at gmail.com (rlamy) Date: Mon, 08 Feb 2016 18:14:03 -0800 (PST) Subject: [pypy-commit] pypy llimpl: Compute signature eagerly in register_external() Message-ID: <56b94b6b.42711c0a.6a736.4266@mx.google.com> Author: Ronan Lamy Branch: llimpl Changeset: r82119:c1b013088ffd Date: 2016-02-09 02:13 +0000 http://bitbucket.org/pypy/pypy/changeset/c1b013088ffd/ Log: Compute signature eagerly in register_external() diff --git a/rpython/rtyper/extfunc.py b/rpython/rtyper/extfunc.py --- a/rpython/rtyper/extfunc.py +++ b/rpython/rtyper/extfunc.py @@ -6,31 +6,29 @@ class ExtFuncEntry(ExtRegistryEntry): safe_not_sandboxed = False - # common case: args is a list of annotation or types - def normalize_args(self, *args_s): - args = self.signature_args - signature_args = [annotation(arg, None) for arg in args] - assert len(args_s) == len(signature_args),\ + def check_args(self, *args_s): + params_s = self.signature_args + assert len(args_s) == len(params_s),\ "Argument number mismatch" - for i, expected in enumerate(signature_args): - arg = unionof(args_s[i], expected) - if not expected.contains(arg): + for i, s_param in enumerate(params_s): + arg = unionof(args_s[i], s_param) + if not s_param.contains(arg): raise SignatureError("In call to external function %r:\n" "arg %d must be %s,\n" " got %s" % ( - self.name, i+1, expected, args_s[i])) - return signature_args + self.name, i+1, s_param, args_s[i])) + return params_s def compute_result_annotation(self, *args_s): - self.normalize_args(*args_s) # check arguments + self.check_args(*args_s) return self.signature_result def specialize_call(self, hop): from rpython.rtyper.rtyper import llinterp_backend rtyper = 
hop.rtyper - signature_args = self.normalize_args(*hop.args_s) - args_r = [rtyper.getrepr(s_arg) for s_arg in signature_args] + signature_args = self.signature_args + args_r = [rtyper.getrepr(s_arg) for s_arg in self.signature_args] args_ll = [r_arg.lowleveltype for r_arg in args_r] s_result = hop.s_result r_result = rtyper.getrepr(s_result) @@ -83,23 +81,20 @@ if export_name is None: export_name = function.__name__ + params_s = [annotation(arg) for arg in args] + s_result = annotation(result) class FunEntry(ExtFuncEntry): _about_ = function safe_not_sandboxed = sandboxsafe - signature_args = args - signature_result = annotation(result, None) + signature_args = params_s + signature_result = s_result name = export_name if llimpl: lltypeimpl = staticmethod(llimpl) if llfakeimpl: lltypefakeimpl = staticmethod(llfakeimpl) - if export_name: - FunEntry.__name__ = export_name - else: - FunEntry.__name__ = function.func_name - def is_external(func): if hasattr(func, 'value'): func = func.value From pypy.commits at gmail.com Tue Feb 9 03:28:51 2016 From: pypy.commits at gmail.com (arigo) Date: Tue, 09 Feb 2016 00:28:51 -0800 (PST) Subject: [pypy-commit] pypy cpyext-gc-support-2: imports, and "oops" Message-ID: <56b9a343.c3e01c0a.be4eb.ffff9a34@mx.google.com> Author: Armin Rigo Branch: cpyext-gc-support-2 Changeset: r82120:0016cf6f514f Date: 2016-02-09 09:28 +0100 http://bitbucket.org/pypy/pypy/changeset/0016cf6f514f/ Log: imports, and "oops" diff --git a/pypy/module/cpyext/api.py b/pypy/module/cpyext/api.py --- a/pypy/module/cpyext/api.py +++ b/pypy/module/cpyext/api.py @@ -617,6 +617,7 @@ @specialize.ll() def wrapper(*args): from pypy.module.cpyext.pyobject import make_ref, from_ref, is_pyobj + from pypy.module.cpyext.pyobject import as_pyobj # we hope that malloc removal removes the newtuple() that is # inserted exactly here by the varargs specializer if gil_acquire: @@ -671,7 +672,7 @@ if is_pyobj(result): retval = result else: - if result is None: + if result is not 
None: if callable.api_func.result_borrowed: retval = as_pyobj(space, result) else: @@ -1262,7 +1263,8 @@ @specialize.memo() def make_generic_cpy_call(FT, expect_null): from pypy.module.cpyext.pyobject import make_ref, from_ref, Py_DecRef - from pypy.module.cpyext.pyobject import RefcountState + from pypy.module.cpyext.pyobject import is_pyobj, as_pyobj + from pypy.module.cpyext.pyobject import get_w_obj_and_decref from pypy.module.cpyext.pyerrors import PyErr_Occurred unrolling_arg_types = unrolling_iterable(enumerate(FT.ARGS)) RESULT_TYPE = FT.RESULT From pypy.commits at gmail.com Tue Feb 9 03:30:30 2016 From: pypy.commits at gmail.com (cfbolz) Date: Tue, 09 Feb 2016 00:30:30 -0800 (PST) Subject: [pypy-commit] pypy statistics-maps: add some code to print statistics about maps Message-ID: <56b9a3a6.0772c20a.50d1d.fffff259@mx.google.com> Author: Carl Friedrich Bolz Branch: statistics-maps Changeset: r82121:8b18aadfc032 Date: 2016-02-09 09:18 +0100 http://bitbucket.org/pypy/pypy/changeset/8b18aadfc032/ Log: add some code to print statistics about maps diff --git a/pypy/bin/pyinteractive.py b/pypy/bin/pyinteractive.py --- a/pypy/bin/pyinteractive.py +++ b/pypy/bin/pyinteractive.py @@ -192,6 +192,8 @@ exit_status = 0 finally: def doit(): + from pypy.objspace.std.mapdict import _print_stats + _print_stats(space) space.finish() main.run_toplevel(space, doit, verbose=interactiveconfig.verbose) diff --git a/pypy/goal/targetpypystandalone.py b/pypy/goal/targetpypystandalone.py --- a/pypy/goal/targetpypystandalone.py +++ b/pypy/goal/targetpypystandalone.py @@ -69,6 +69,8 @@ debug(" operror-value: " + space.str_w(space.str(e.get_w_value(space)))) return 1 finally: + from pypy.objspace.std.mapdict import _print_stats + _print_stats(space) try: space.finish() except OperationError, e: diff --git a/pypy/objspace/std/mapdict.py b/pypy/objspace/std/mapdict.py --- a/pypy/objspace/std/mapdict.py +++ b/pypy/objspace/std/mapdict.py @@ -1,4 +1,4 @@ -import weakref +import weakref, os 
from rpython.rlib import jit, objectmodel, debug, rerased from rpython.rlib.rarithmetic import intmask, r_uint @@ -22,15 +22,110 @@ # note: we use "x * NUM_DIGITS_POW2" instead of "x << NUM_DIGITS" because # we want to propagate knowledge that the result cannot be negative +class All(object): + def __init__(self): + self.all = [] + + at objectmodel.specialize.argtype(1) +def _print_line(key, value, indent=0): + from pypy.objspace.std.bytesobject import string_escape_encode + if isinstance(value, int): + value = str(value) + else: + value = string_escape_encode(str(value), '"') + return "%s%s: %s," % (" " * (indent * 4), string_escape_encode(key, '"'), value) + +def _print_stats(space): + fn = space.bytes_w(space.getitem(space.getattr(space.sys, space.wrap('argv')), space.wrap(0))) + if fn.endswith(".py"): + end = len(fn) - len('.py') + assert end >= 0 + fn = fn[:end] + elif fn.endswith(".pyc"): + end = len(fn) - len('.pyc') + assert end >= 0 + fn = fn[:end] + if "/" in fn: + index = fn.rfind("/") + assert index >= 0 + fn = fn[index+1:] + f = file("mapstats-%s-%s.txt" % (fn, os.getpid(), ), "w") + try: + f.write("[\n") + for map in AbstractAttribute._all_maps.all: + map.print_counts(f) + f.write("]\n") + finally: + f.close() + class AbstractAttribute(object): _immutable_fields_ = ['terminator'] cache_attrs = None _size_estimate = 0 + _number_instantiated = 0 + _number_unnecessary_writes = 0 + _number_writes = None + _number_reads = None + _number_transitions = None + _all_maps = All() def __init__(self, space, terminator): self.space = space assert isinstance(terminator, Terminator) self.terminator = terminator + self._number_reads = {} + self._number_writes = {} + self._number_transitions = {} + self._all_maps.all.append(self) + + def _count_read(self, name, index): + key = (name, index) + self._number_reads[key] = self._number_reads.get(key, 0) + 1 + + def _count_write(self, name, index, w_value): + key = (name, index, w_value.__class__.__name__) + 
self._number_writes[key] = self._number_writes.get(key, 0) + 1 + + def _count_transition(self, key): + self._number_transitions[key] = self._number_transitions.get(key, 0) + 1 + + def print_counts(self, f): + lines = ["{"] + lines.append(_print_line('type', self.__class__.__name__, 1)) + lines.append(_print_line('id', str(objectmodel.compute_unique_id(self)), 1)) + lines.append(_print_line('instances', self._number_instantiated, 1)) + if isinstance(self, PlainAttribute): + lines.append(_print_line('back', str(objectmodel.compute_unique_id(self.back)), 1)) + lines.append(_print_line('name', self.name, 1)) + lines.append(_print_line('index', self.index, 1)) + lines.append(_print_line('ever_mutated', self.ever_mutated, 1)) + lines.append(_print_line('can_contain_mutable_cell', self.can_contain_mutable_cell, 1)) + lines.append(_print_line('number_unnecessary_writes', self._number_unnecessary_writes, 1)) + lines.append(_print_line('_hprof_status', str(self._hprof_status), 1)) + if self.class_is_known(): + lines.append(_print_line('_hprof_const_cls', self.read_constant_cls().__name__, 1)) + elif isinstance(self, Terminator): + if self.w_cls is not None: + lines.append(_print_line('w_cls', self.w_cls.name, 1)) + if self._number_reads: + lines.append(' "reads": {') + for key, value in self._number_reads.items(): + lines.append(_print_line(str(key), value, 2)) + lines.append(" },") + if self._number_writes: + lines.append(' "writes": {') + for key, value in self._number_writes.items(): + lines.append(_print_line(str(key), value, 2)) + lines.append(" },") + if self._number_transitions: + lines.append(' "transitions": {') + for key, value in self._number_transitions.items(): + lines.append(_print_line(str(objectmodel.compute_unique_id(key)), value, 2)) + lines.append(" },") + lines.append("},") + lines.append("") + lines.append("") + f.write("\n".join(lines)) def read(self, obj, name, index): from pypy.objspace.std.intobject import W_IntObject @@ -38,6 +133,7 @@ attr = 
self.find_map_attr(name, index) if attr is None: return self.terminator._read_terminator(obj, name, index) + attr._count_read(name, index) # XXX move to PlainAttribute? if jit.we_are_jitted(): if attr.can_fold_read_int(): @@ -74,11 +170,13 @@ attr = self.find_map_attr(name, index) if attr is None: return self.terminator._write_terminator(obj, name, index, w_value) + attr._count_write(name, index, w_value) # if the write is not necessary, the storage is already filled from the # time we did the map transition. Therefore, if the value profiler says # so, we can not do the write write_necessary = attr.write_necessary(w_value) if not write_necessary: + self._number_unnecessary_writes += 1 return True if not attr.ever_mutated: attr.ever_mutated = True @@ -206,6 +304,7 @@ def add_attr(self, obj, name, index, w_value): # grumble, jit needs this attr = self._get_new_attr(name, index) + attr._count_write(name, index, w_value) oldattr = obj._get_mapdict_map() if not jit.we_are_jitted(): size_est = (oldattr._size_estimate + attr.size_estimate() @@ -501,18 +600,24 @@ def _get_mapdict_map(self): return jit.promote(self.map) def _set_mapdict_map(self, map): + old = self.map + if old is not map and map: + old._count_transition(map) self.map = map # _____________________________________________ # objspace interface def getdictvalue(self, space, attrname): - return self._get_mapdict_map().read(self, attrname, DICT) + map = self._get_mapdict_map() + return map.read(self, attrname, DICT) def setdictvalue(self, space, attrname, w_value): - return self._get_mapdict_map().write(self, attrname, DICT, w_value) + map = self._get_mapdict_map() + return map.write(self, attrname, DICT, w_value) def deldictvalue(self, space, attrname): - new_obj = self._get_mapdict_map().delete(self, attrname, DICT) + map = self._get_mapdict_map() + new_obj = map.delete(self, attrname, DICT) if new_obj is None: return False self._become(new_obj) @@ -558,6 +663,7 @@ self.space = space assert (not 
self.typedef.hasdict or self.typedef is W_InstanceObject.typedef) + w_subtype.terminator._number_instantiated += 1 self._init_empty(w_subtype.terminator) def getslotvalue(self, slotindex): @@ -615,7 +721,7 @@ return len(self.storage) def _set_mapdict_storage_and_map(self, storage, map): self.storage = storage - self.map = map + self._set_mapdict_map(map) class Object(ObjectMixin, BaseMapdictObject, W_Root): pass # mainly for tests @@ -705,7 +811,7 @@ return n def _set_mapdict_storage_and_map(self, storage, map): - self.map = map + self._set_mapdict_map(map) len_storage = len(storage) for i in rangenmin1: if i < len_storage: @@ -980,7 +1086,7 @@ # used if we_are_jitted(). entry = pycode._mapdict_caches[nameindex] map = w_obj._get_mapdict_map() - if entry.is_valid_for_map(map) and entry.w_method is None: + if False: #entry.is_valid_for_map(map) and entry.w_method is None: # everything matches, it's incredibly fast return unwrap_cell( map.space, w_obj._mapdict_read_storage(entry.storageindex)) @@ -1026,6 +1132,7 @@ if index != INVALID: attr = map.find_map_attr(attrname, index) if attr is not None: + attr._count_read(attrname, index) # Note that if map.terminator is a DevolvedDictTerminator, # map.find_map_attr will always return None if index==DICT. 
_fill_cache(pycode, nameindex, map, version_tag, attr.storageindex) From pypy.commits at gmail.com Tue Feb 9 03:30:32 2016 From: pypy.commits at gmail.com (cfbolz) Date: Tue, 09 Feb 2016 00:30:32 -0800 (PST) Subject: [pypy-commit] pypy statistics-maps: a script to turn map stats files into dot files Message-ID: <56b9a3a8.463f1c0a.5880a.ffff96ac@mx.google.com> Author: Carl Friedrich Bolz Branch: statistics-maps Changeset: r82122:84c6a5b5979f Date: 2016-02-09 09:28 +0100 http://bitbucket.org/pypy/pypy/changeset/84c6a5b5979f/ Log: a script to turn map stats files into dot files diff --git a/pypy/tool/mapstatsdot.py b/pypy/tool/mapstatsdot.py new file mode 100644 --- /dev/null +++ b/pypy/tool/mapstatsdot.py @@ -0,0 +1,217 @@ +#! /usr/bin/env python +import sys + +class Getattrwrap(object): + def __init__(self, obj): + self.obj = obj + + def __getattr__(self, name): + try: + return self.obj[name] + except KeyError: + return None + + def __repr__(self): + return "<%s>" % (self.obj, ) + + +class Map(object): + allmaps = {} + instances = 0 + + def __init__(self, typ, id): + self.type = typ + self.id = id + self.allmaps[id] = self + + @staticmethod + def make(content): + typ = content.type + cls = Map + if typ == 'PlainAttribute': + cls = Attribute + elif "Terminator" in typ: + cls = Terminator + else: + import pdb; pdb.set_trace() + return cls(typ, content.id) + + def fill(self, content): + self.raw = content + self.type = content.type + self.direct_instances = content.instances + self.instances += content.instances + + transitions = content.transitions + d = {} + if transitions: + for id, count in transitions.iteritems(): + map = Map.getmap(id) + map.instances += count + d[map] = count + self.transitions = d + + @staticmethod + def getmap(id): + return Map.allmaps[id] + + def dot(self, output, seen): + if self in seen: + return + seen.add(self) + if hasattr(self, 'back'): + self.back.dot(output, seen) + if not self.instances: + return + node = output.node(self.id, 
label=self.getlabel(), + shape="box", labeljust="r", + fillcolor=self.getfillcolor()) + for next, count in self.transitions.iteritems(): + next.dot(output, seen) + output.edge(self.id, next.id, label=str(count)) + return node + + def getfillcolor(self): + if len(self.transitions) > 1: + return "red" + return "white" + + +class Terminator(Map): + def fill(self, content): + Map.fill(self, content) + self.w_cls = content.w_cls + + def getlabel(self): + return self.w_cls + +class Attribute(Map): + def fill(self, content): + Map.fill(self, content) + self.back = Map.getmap(content.back) + self.name = content.name + self.nametype = content.index + self.ever_mutated = content.ever_mutated + self.can_contain_mutable_cell = content.can_contain_mutable_cell + self.hprof_status = content._hprof_status + self.constant = False + if self.hprof_status == "i": + self.constant = True + if self.hprof_status == "o": + self.constant = True + self.constant_class = content._hprof_const_cls + self.number_unnecessary_writes = content.number_unnecessary_writes + + writes = content.writes + d = {} + if writes: + for tup, count in writes.iteritems(): + key, index, cls = tup.strip('()').split(', ') + if key.startswith('"'): + key = eval(key) + assert key == self.name + assert int(index) == self.nametype + d[cls] = count + self.writes = d + + reads = content.reads + count = 0 + if reads: + assert len(reads) == 1 + for tup, count in reads.iteritems(): + key, index = tup.strip('()').split(', ') + if key.startswith('"'): + key = eval(key) + assert key == self.name + assert int(index) == self.nametype + self.reads = count + + def getlabel(self): + if self.nametype == 0: + name = self.name + else: + name = self.name + " " + str(self.nametype) + if self.hprof_status == "i": + name += " (constant int)" + if self.hprof_status == "o": + name += " (constant obj)" + label = [name] + label.append("reads: %s" % self.reads) + label.append("writes:") + for write, count in self.writes.items(): + 
label.append(" %s: %s" % (write, count)) + if self.number_unnecessary_writes and self.constant: + assert len(self.writes) == 1 + label[-1] += " (%s unnecessary)" % (self.number_unnecessary_writes, ) + if not self.ever_mutated: + label.append('immutable') + if self.can_contain_mutable_cell and not self.constant_class: + label.append('may be a cell') + if self.constant_class: + label.append('constant class: ' + self.constant_class) + return "\\l".join(label) + + def getfillcolor(self): + if len(self.transitions) > 1: + return "red" + if len(self.writes) > 1: # more than one type + return "yellow" + if self.constant: + return "green" + if self.constant_class: + return "greenyellow" + return "white" + +def dot(allmaps): + import graphviz + output = graphviz.Digraph() + seen = set() + #allmaps = [map for map in allmaps if map.instances and map.getfillcolor() != "white"] + allmaps.sort(key=lambda map: getattr(map, "instances", 0)) + allmaps.reverse() + for map in allmaps: + map.dot(output, seen) + print output.source + + +def main(): + input = eval(file(sys.argv[1]).read()) + input = [Getattrwrap(obj) for obj in input] + allmaps = [] + for mp in input: + allmaps.append(Map.make(mp)) + for content in input: + mp = Map.getmap(content.id) + mp.fill(content) + totalreads = 0 + goodreads = 0 + totalwrites = 0 + goodwrites = 0 + totalattrs = 0 + goodattrs = 0 + unnecessary = 0 + + for mp in allmaps: + if not isinstance(mp, Attribute): + continue + totalwrites += sum(mp.writes.values()) + totalreads += mp.reads + totalattrs += 1 + if len(mp.writes) == 1: + goodwrites += sum(mp.writes.values()) + goodreads += mp.reads + goodattrs += 1 + if mp.constant: + unnecessary += mp.number_unnecessary_writes + with file("out.csv", "a") as f: + print >> f, ", ".join(map(str, [sys.argv[1], totalreads, goodreads, totalwrites, goodwrites, unnecessary, totalattrs, goodattrs])) + print >> sys.stderr, "reads:", totalreads, goodreads, float(goodreads) / totalreads + print >> sys.stderr, "writes:", 
totalwrites, goodwrites, float(goodwrites) / totalwrites + print >> sys.stderr, "unnecessary writes:", unnecessary, totalwrites, float(unnecessary) / totalwrites + print >> sys.stderr, "attrs:", totalattrs, goodattrs, float(goodattrs) / totalattrs + print >> sys.stderr, "reads / writes", float(totalreads) / totalwrites + + dot(allmaps) + +if __name__ == '__main__': + main() From pypy.commits at gmail.com Tue Feb 9 07:31:44 2016 From: pypy.commits at gmail.com (arigo) Date: Tue, 09 Feb 2016 04:31:44 -0800 (PST) Subject: [pypy-commit] cffi default: Document whatsnew Message-ID: <56b9dc30.2851c20a.765b9.46a9@mx.google.com> Author: Armin Rigo Branch: Changeset: r2620:66c37829e1f5 Date: 2016-02-09 13:31 +0100 http://bitbucket.org/cffi/cffi/changeset/66c37829e1f5/ Log: Document whatsnew diff --git a/doc/source/whatsnew.rst b/doc/source/whatsnew.rst --- a/doc/source/whatsnew.rst +++ b/doc/source/whatsnew.rst @@ -3,6 +3,18 @@ ====================== +v1.5.1 +====== + +* A few installation-time tweaks (thanks Stefano!) 
+ +* Issue #245: Win32: ``__stdcall`` was never generated for + ``extern "Python"`` functions + +* Issue #246: trying to be more robust against CPython's fragile + interpreter shutdown logic + + v1.5.0 ====== From pypy.commits at gmail.com Tue Feb 9 07:44:32 2016 From: pypy.commits at gmail.com (arigo) Date: Tue, 09 Feb 2016 04:44:32 -0800 (PST) Subject: [pypy-commit] cffi default: bump the version number Message-ID: <56b9df30.0cb81c0a.6083f.0371@mx.google.com> Author: Armin Rigo Branch: Changeset: r2621:5833cd037e9e Date: 2016-02-09 13:44 +0100 http://bitbucket.org/cffi/cffi/changeset/5833cd037e9e/ Log: bump the version number diff --git a/c/_cffi_backend.c b/c/_cffi_backend.c --- a/c/_cffi_backend.c +++ b/c/_cffi_backend.c @@ -2,7 +2,7 @@ #include #include "structmember.h" -#define CFFI_VERSION "1.5.0" +#define CFFI_VERSION "1.5.1" #ifdef MS_WIN32 #include diff --git a/c/test_c.py b/c/test_c.py --- a/c/test_c.py +++ b/c/test_c.py @@ -12,7 +12,7 @@ # ____________________________________________________________ import sys -assert __version__ == "1.5.0", ("This test_c.py file is for testing a version" +assert __version__ == "1.5.1", ("This test_c.py file is for testing a version" " of cffi that differs from the one that we" " get from 'import _cffi_backend'") if sys.version_info < (3,): diff --git a/cffi/__init__.py b/cffi/__init__.py --- a/cffi/__init__.py +++ b/cffi/__init__.py @@ -4,8 +4,8 @@ from .api import FFI, CDefError, FFIError from .ffiplatform import VerificationError, VerificationMissing -__version__ = "1.5.0" -__version_info__ = (1, 5, 0) +__version__ = "1.5.1" +__version_info__ = (1, 5, 1) # The verifier module file names are based on the CRC32 of a string that # contains the following version number. 
It may be older than __version__ diff --git a/cffi/_embedding.h b/cffi/_embedding.h --- a/cffi/_embedding.h +++ b/cffi/_embedding.h @@ -233,7 +233,7 @@ f = PySys_GetObject((char *)"stderr"); if (f != NULL && f != Py_None) { PyFile_WriteString("\nFrom: " _CFFI_MODULE_NAME - "\ncompiled with cffi version: 1.5.0" + "\ncompiled with cffi version: 1.5.1" "\n_cffi_backend module: ", f); modules = PyImport_GetModuleDict(); mod = PyDict_GetItemString(modules, "_cffi_backend"); diff --git a/doc/source/conf.py b/doc/source/conf.py --- a/doc/source/conf.py +++ b/doc/source/conf.py @@ -47,7 +47,7 @@ # The short X.Y version. version = '1.5' # The full version, including alpha/beta/rc tags. -release = '1.5.0' +release = '1.5.1' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. diff --git a/doc/source/installation.rst b/doc/source/installation.rst --- a/doc/source/installation.rst +++ b/doc/source/installation.rst @@ -51,11 +51,11 @@ Download and Installation: -* http://pypi.python.org/packages/source/c/cffi/cffi-1.5.0.tar.gz +* http://pypi.python.org/packages/source/c/cffi/cffi-1.5.1.tar.gz - - MD5: dec8441e67880494ee881305059af656 + - MD5: ... - - SHA: fd21011ba2a3cab627001b52c69fd7274517e549 + - SHA: ... 
* Or grab the most current version from the `Bitbucket page`_: ``hg clone https://bitbucket.org/cffi/cffi`` diff --git a/setup.py b/setup.py --- a/setup.py +++ b/setup.py @@ -144,7 +144,7 @@ `Mailing list `_ """, - version='1.5.0', + version='1.5.1', packages=['cffi'] if cpython else [], package_data={'cffi': ['_cffi_include.h', 'parse_c_type.h', '_embedding.h']} From pypy.commits at gmail.com Tue Feb 9 08:24:54 2016 From: pypy.commits at gmail.com (plan_rich) Date: Tue, 09 Feb 2016 05:24:54 -0800 (PST) Subject: [pypy-commit] pypy s390x-backend: LE set OF flag, GE set OF flag, replace guard jump from a 12bit jump to 20bit jump (relative) Message-ID: <56b9e8a6.2968c20a.215b1.4d8e@mx.google.com> Author: Richard Plangger Branch: s390x-backend Changeset: r82123:eda46a48b3de Date: 2016-02-09 11:52 +0100 http://bitbucket.org/pypy/pypy/changeset/eda46a48b3de/ Log: LE set OF flag, GE set OF flag, replace guard jump from a 12bit jump to 20bit jump (relative) diff --git a/rpython/jit/backend/zarch/assembler.py b/rpython/jit/backend/zarch/assembler.py --- a/rpython/jit/backend/zarch/assembler.py +++ b/rpython/jit/backend/zarch/assembler.py @@ -144,7 +144,6 @@ self.mc.LG(r.r14, l.pool(offset)) self.mc.load_imm(r.SCRATCH, fail_descr) - #self.mc.LGFI(r.SCRATCH, l.imm(fail_descr)) self.mc.BCR(l.imm(0xf), r.r14) return startpos diff --git a/rpython/jit/backend/zarch/codebuilder.py b/rpython/jit/backend/zarch/codebuilder.py --- a/rpython/jit/backend/zarch/codebuilder.py +++ b/rpython/jit/backend/zarch/codebuilder.py @@ -123,14 +123,14 @@ def b_cond_offset(self, offset, condition): assert condition != c.cond_none - self.BRC(condition, l.imm(offset)) + self.BRCL(condition, l.imm(offset)) def b_offset(self, reladdr): offset = reladdr - self.get_relative_pos() self.BRC(c.ANY, l.imm(offset)) def reserve_guard_branch(self): - self.BRC(l.imm(0x0), l.imm(0)) + self.BRCL(l.imm(0x0), l.imm(0)) def trap(self): self.TRAP2() @@ -168,7 +168,7 @@ def load_imm(self, dest_reg, word): - if -32768 <= 
word <= 32767: + if -2**15 <= word <= 2**15-1: self.LGHI(dest_reg, l.imm(word)) elif -2**31 <= word <= 2**31-1: self.LGFI(dest_reg, l.imm(word)) diff --git a/rpython/jit/backend/zarch/conditions.py b/rpython/jit/backend/zarch/conditions.py --- a/rpython/jit/backend/zarch/conditions.py +++ b/rpython/jit/backend/zarch/conditions.py @@ -23,39 +23,41 @@ LT = ConditionLocation(0x4) GT = ConditionLocation(0x2) OF = ConditionLocation(0x1) # overflow -LE = ConditionLocation(EQ.value | LT.value) -GE = ConditionLocation(EQ.value | GT.value) + +LE = ConditionLocation(EQ.value | LT.value | OF.value) +GE = ConditionLocation(EQ.value | GT.value | OF.value) NE = ConditionLocation(LT.value | GT.value | OF.value) NO = ConditionLocation(0xe) # NO overflow + ANY = ConditionLocation(0xf) FP_ROUND_DEFAULT = loc.imm(0x0) FP_TOWARDS_ZERO = loc.imm(0x5) -cond_none = loc.imm(0x0) +cond_none = loc.imm(-1) def negate(cond): - isfloat = (cond.value & 0x10) != 0 + val = cond.value + isfloat = (val & 0x10) != 0 + cc = (~val) & 0xf if isfloat: # inverting is handeled differently for floats - # overflow is never inverted - value = (~cond.value) & 0xf - return ConditionLocation(value | FLOAT.value) - value = (~cond.value) & 0xf - return ConditionLocation(value) + return ConditionLocation(cc | FLOAT.value) + return ConditionLocation(cc) def prepare_float_condition(cond): newcond = ConditionLocation(cond.value | FLOAT.value) return newcond -def _assert_invert(v1, v2): - assert (v1.value & 0xe) == (v2.value & 0xe) -_assert_invert(negate(EQ), NE) -_assert_invert(negate(NE), EQ) -_assert_invert(negate(LT), GE) -_assert_invert(negate(LE), GT) -_assert_invert(negate(GT), LE) -_assert_invert(negate(GE), LT) -assert negate(NO).value == OF.value -assert negate(OF).value == NO.value -del _assert_invert +def _assert_value(v1, v2): + assert v1.value == v2.value + +_assert_value(negate(EQ), NE) +_assert_value(negate(NE), EQ) +_assert_value(negate(LT), GE) +_assert_value(negate(LE), GT) 
+_assert_value(negate(GT), LE) +_assert_value(negate(GE), LT) +_assert_value(negate(NO), OF) +_assert_value(negate(OF), NO) +del _assert_value diff --git a/rpython/jit/backend/zarch/opassembler.py b/rpython/jit/backend/zarch/opassembler.py --- a/rpython/jit/backend/zarch/opassembler.py +++ b/rpython/jit/backend/zarch/opassembler.py @@ -365,7 +365,7 @@ def emit_cond_call(self, op, arglocs, regalloc): fcond = self.guard_success_cc self.guard_success_cc = c.cond_none - assert fcond != c.cond_none + assert fcond.value != c.cond_none.value fcond = c.negate(fcond) jmp_adr = self.mc.get_relative_pos() @@ -631,7 +631,7 @@ else: fcond = self.guard_success_cc self.guard_success_cc = c.cond_none - assert fcond != c.cond_none + assert fcond.value != c.cond_none.value fcond = c.negate(fcond) token = self.build_guard_token(op, arglocs[0].value, arglocs[1:], fcond) token.pos_jump_offset = self.mc.currpos() From pypy.commits at gmail.com Tue Feb 9 08:25:21 2016 From: pypy.commits at gmail.com (fijal) Date: Tue, 09 Feb 2016 05:25:21 -0800 (PST) Subject: [pypy-commit] pypy windows-vmprof-support: start fighting windows Message-ID: <56b9e8c1.9a6f1c0a.b0f77.09f1@mx.google.com> Author: fijal Branch: windows-vmprof-support Changeset: r82124:ccfe8a95ee02 Date: 2016-02-09 14:24 +0100 http://bitbucket.org/pypy/pypy/changeset/ccfe8a95ee02/ Log: start fighting windows diff --git a/rpython/rlib/rvmprof/cintf.py b/rpython/rlib/rvmprof/cintf.py --- a/rpython/rlib/rvmprof/cintf.py +++ b/rpython/rlib/rvmprof/cintf.py @@ -30,11 +30,11 @@ def setup(): + compile_extra = ['-DRPYTHON_LL2CTYPES'] platform.verify_eci(ExternalCompilationInfo( - compile_extra=['-DRPYTHON_LL2CTYPES'], + compile_extra=compile_extra, **eci_kwds)) - eci = global_eci vmprof_init = rffi.llexternal("vmprof_init", [rffi.INT, rffi.DOUBLE, rffi.CCHARP], @@ -89,9 +89,11 @@ s.c_next = vmprof_tl_stack.get_or_make_raw() s.c_value = unique_id s.c_kind = VMPROF_CODE_TAG + print s vmprof_tl_stack.setraw(s) return s def leave_code(s): 
vmprof_tl_stack.setraw(s.c_next) + print "pop" lltype.free(s, flavor='raw') diff --git a/rpython/rlib/rvmprof/src/rvmprof.c b/rpython/rlib/rvmprof/src/rvmprof.c --- a/rpython/rlib/rvmprof/src/rvmprof.c +++ b/rpython/rlib/rvmprof/src/rvmprof.c @@ -1,23 +1,21 @@ #define _GNU_SOURCE 1 - #ifdef RPYTHON_LL2CTYPES /* only for testing: ll2ctypes sets RPY_EXTERN from the command-line */ -# ifndef RPY_EXTERN -# define RPY_EXTERN RPY_EXPORTED -# endif -# define RPY_EXPORTED extern __attribute__((visibility("default"))) -# define VMPROF_ADDR_OF_TRAMPOLINE(addr) 0 +#ifndef RPY_EXTERN +#define RPY_EXTERN RPY_EXPORTED +#endif +#ifdef _WIN32 +#define RPY_EXPORTED __declspec(dllexport) +#else +#define RPY_EXPORTED extern __attribute__((visibility("default"))) +#endif #else - # include "common_header.h" # include "structdef.h" # include "src/threadlocal.h" # include "rvmprof.h" -/*# ifndef VMPROF_ADDR_OF_TRAMPOLINE -# error "RPython program using rvmprof, but not calling vmprof_execute_code()" -# endif*/ #endif diff --git a/rpython/rlib/rvmprof/src/vmprof_common.h b/rpython/rlib/rvmprof/src/vmprof_common.h --- a/rpython/rlib/rvmprof/src/vmprof_common.h +++ b/rpython/rlib/rvmprof/src/vmprof_common.h @@ -7,9 +7,6 @@ static long profile_interval_usec = 0; static int opened_profile(char *interp_name); -#define MAX_STACK_DEPTH \ - ((SINGLE_BUF_SIZE - sizeof(struct prof_stacktrace_s)) / sizeof(void *)) - #define MARKER_STACKTRACE '\x01' #define MARKER_VIRTUAL_IP '\x02' #define MARKER_TRAILER '\x03' @@ -20,6 +17,9 @@ #define VERSION_THREAD_ID '\x01' #define VERSION_TAG '\x02' +#define MAX_STACK_DEPTH \ + ((SINGLE_BUF_SIZE - sizeof(struct prof_stacktrace_s)) / sizeof(void *)) + typedef struct prof_stacktrace_s { char padding[sizeof(long) - 1]; char marker; diff --git a/rpython/rlib/rvmprof/src/vmprof_main_win32.h b/rpython/rlib/rvmprof/src/vmprof_main_win32.h --- a/rpython/rlib/rvmprof/src/vmprof_main_win32.h +++ b/rpython/rlib/rvmprof/src/vmprof_main_win32.h @@ -10,6 +10,20 @@ return 0; } 
+#if defined(_MSC_VER) +#include +typedef SSIZE_T ssize_t; +#endif + +#include +#include +#include +#include +#include +#include +#include +#include +#include #include "vmprof_stack.h" #include "vmprof_common.h" #include @@ -17,6 +31,8 @@ // This file has been inspired (but not copied from since the LICENSE // would not allow it) from verysleepy profiler +#define SINGLE_BUF_SIZE 8192 + volatile int thread_started = 0; volatile int enabled = 0; @@ -55,9 +71,42 @@ return 0; } -int vmprof_snapshot_thread(DWORD thread_id, PyThreadState *tstate, prof_stacktrace_s *stack) +int vmprof_snapshot_thread(prof_stacktrace_s *stack) { - HRESULT result; + void *addr; + vmprof_stack_t *cur; + long tid; + HANDLE hThread; + +#ifdef RPYTHON_LL2CTYPES + return 0; // not much we can do +#else + OP_THREADLOCALREF_ADDR(addr); +#ifdef RPY_TLOFS_thread_ident // compiled with threads + tid = *(long*)((char*)addr + RPY_TLOFS_thread_ident); + hThread = OpenThread(THREAD_ALL_ACCESS, FALSE, tid); + if (!hThread) { + return -1; + } + result = SuspendThread(hThread); + if(result == 0xffffffff) + return -1; // possible, e.g. 
attached debugger or thread alread suspended + if (*(long*)((char*)addr + RPY_TLOFS_thread_ident) != tid) { + // swapped threads, bail + ResumeThread(hThread); + return -1; + } +#endif + cur = *(vmprof_stack_t**)((char*)addr + RPY_TLOFS_vmprof_tl_stack); + if (cur) { + printf("%p\n", cur->kind); + } else { + printf("null\n"); + } +#ifdef RPY_TLOFS_thread_ident + ResumeThread(hThread); +#endif + /* HRESULT result; HANDLE hThread = OpenThread(THREAD_ALL_ACCESS, FALSE, thread_id); int depth; if (!hThread) { @@ -74,7 +123,9 @@ stack->count = 1; stack->marker = MARKER_STACKTRACE; ResumeThread(hThread); - return depth; + return depth;*/ + return 0; +#endif } long __stdcall vmprof_mainloop(void *arg) @@ -82,23 +133,19 @@ prof_stacktrace_s *stack = (prof_stacktrace_s*)malloc(SINGLE_BUF_SIZE); HANDLE hThreadSnap = INVALID_HANDLE_VALUE; int depth; - PyThreadState *tstate; while (1) { - Sleep(profile_interval_usec * 1000); + //Sleep(profile_interval_usec * 1000); + Sleep(10); if (!enabled) { continue; } - tstate = PyInterpreterState_Head()->tstate_head; - while (tstate) { - depth = vmprof_snapshot_thread(tstate->thread_id, tstate, stack); - if (depth > 0) { - _write_all((char*)stack + offsetof(prof_stacktrace_s, marker), - depth * sizeof(void *) + - sizeof(struct prof_stacktrace_s) - - offsetof(struct prof_stacktrace_s, marker)); - } - tstate = tstate->next; + depth = vmprof_snapshot_thread(stack); + if (depth > 0) { + _write_all((char*)stack + offsetof(prof_stacktrace_s, marker), + depth * sizeof(void *) + + sizeof(struct prof_stacktrace_s) - + offsetof(struct prof_stacktrace_s, marker)); } } } diff --git a/rpython/rlib/rvmprof/src/vmprof_stack.h b/rpython/rlib/rvmprof/src/vmprof_stack.h --- a/rpython/rlib/rvmprof/src/vmprof_stack.h +++ b/rpython/rlib/rvmprof/src/vmprof_stack.h @@ -1,7 +1,11 @@ #ifndef _VMPROF_STACK_H_ #define _VMPROF_STACK_H_ +#ifdef _WIN32 +#define intptr_t long // XXX windows VC++ 2008 lacks stdint.h +#else #include +#endif #define VMPROF_CODE_TAG 1 /* <- 
also in cintf.py */ #define VMPROF_BLACKHOLE_TAG 2 From pypy.commits at gmail.com Tue Feb 9 08:31:17 2016 From: pypy.commits at gmail.com (arigo) Date: Tue, 09 Feb 2016 05:31:17 -0800 (PST) Subject: [pypy-commit] extradoc extradoc: fix Message-ID: <56b9ea25.162f1c0a.eb92f.1480@mx.google.com> Author: Armin Rigo Branch: extradoc Changeset: r5604:16b7f0cfe1fe Date: 2016-02-09 14:28 +0100 http://bitbucket.org/pypy/extradoc/changeset/16b7f0cfe1fe/ Log: fix diff --git a/talk/swisspython2016/slides.rst b/talk/swisspython2016/slides.rst --- a/talk/swisspython2016/slides.rst +++ b/talk/swisspython2016/slides.rst @@ -107,7 +107,7 @@ :: - | from cffi import FFI + | import cffi | ffi = cffi.FFI() From pypy.commits at gmail.com Tue Feb 9 08:31:19 2016 From: pypy.commits at gmail.com (arigo) Date: Tue, 09 Feb 2016 05:31:19 -0800 (PST) Subject: [pypy-commit] extradoc extradoc: expand with more recent info Message-ID: <56b9ea27.05e41c0a.992b1.ffffcc42@mx.google.com> Author: Armin Rigo Branch: extradoc Changeset: r5605:5296e53a14b9 Date: 2016-02-09 14:31 +0100 http://bitbucket.org/pypy/extradoc/changeset/5296e53a14b9/ Log: expand with more recent info diff --git a/sprintinfo/leysin-winter-2016/people.txt b/sprintinfo/leysin-winter-2016/people.txt --- a/sprintinfo/leysin-winter-2016/people.txt +++ b/sprintinfo/leysin-winter-2016/people.txt @@ -10,9 +10,8 @@ Name Arrive/Depart Accomodation ==================== ============== ======================= Armin Rigo private - (SEE NOTE BELOW) Remi Meier 21-27 Ermina -Carl Friedrich Bolz 20-27 Ermina? +Carl Friedrich Bolz 20-27 Ermina (individual room) Matti Picus 20-25 Ermina Manuel Jacob 20-27 Ermina Richard Plangger 20-28 Ermina @@ -21,10 +20,16 @@ Pierre-Yves David 20-27 Ermina ==================== ============== ======================= -**NOTE:** we might have only a single double-bed room and a big room -(5-6 individual beds). I can ask if more people want smaller rooms, -and/or recommend hotels elsewhere in Leysin. 
Please be explicit in what -you prefer. The standard booking is for the nights from Saturday to +**NOTE:** lodging is by default in Ermina in 4-5 people rooms. (One of +the two big rooms may be rented on the last day; we'll see if we have to +do other arrangements on-the-spot, but don't worry, it is possible in +the worst case to send a couple of people to a hotel for one night.) + +Please tell if you would prefer a smaller or individual room (which may +be in a nearby hotel). Of course you're free to book the hotel yourself +if you prefer. + +The standard booking is for the nights from Saturday to Saturday, but it is possible to extend that. From pypy.commits at gmail.com Tue Feb 9 11:18:02 2016 From: pypy.commits at gmail.com (rlamy) Date: Tue, 09 Feb 2016 08:18:02 -0800 (PST) Subject: [pypy-commit] pypy llimpl: Sandbox externals during annotation Message-ID: <56ba113a.d4e41c0a.65626.50f7@mx.google.com> Author: Ronan Lamy Branch: llimpl Changeset: r82125:e2ae384cca29 Date: 2016-02-09 05:08 +0000 http://bitbucket.org/pypy/pypy/changeset/e2ae384cca29/ Log: Sandbox externals during annotation diff --git a/rpython/rtyper/extfunc.py b/rpython/rtyper/extfunc.py --- a/rpython/rtyper/extfunc.py +++ b/rpython/rtyper/extfunc.py @@ -24,6 +24,16 @@ self.check_args(*args_s) return self.signature_result + def compute_annotation(self): + if (self.bookkeeper.annotator.translator.config.translation.sandbox + and not self.safe_not_sandboxed): + from rpython.translator.sandbox.rsandbox import make_sandbox_trampoline + impl = make_sandbox_trampoline(self.name, self.signature_args, + self.signature_result) + return self.bookkeeper.immutablevalue(impl) + return super(ExtFuncEntry, self).compute_annotation() + + def specialize_call(self, hop): from rpython.rtyper.rtyper import llinterp_backend rtyper = hop.rtyper @@ -36,8 +46,6 @@ impl = getattr(self, 'lltypeimpl', None) fakeimpl = getattr(self, 'lltypefakeimpl', self.instance) if impl: - impl = make_impl(rtyper, impl, 
self.safe_not_sandboxed, self.name, - signature_args, s_result) if hasattr(self, 'lltypefakeimpl') and rtyper.backend is llinterp_backend: FT = FuncType(args_ll, ll_result) obj = functionptr(FT, self.name, _external_name=self.name, @@ -60,12 +68,6 @@ hop.exception_is_here() return hop.genop('direct_call', vlist, r_result) -def make_impl(rtyper, impl, sandboxsafe, name, args_s, s_result): - if (rtyper.annotator.translator.config.translation.sandbox - and not sandboxsafe): - from rpython.translator.sandbox.rsandbox import make_sandbox_trampoline - impl = make_sandbox_trampoline(name, args_s, s_result) - return impl def register_external(function, args, result=None, export_name=None, llimpl=None, llfakeimpl=None, sandboxsafe=False): From pypy.commits at gmail.com Tue Feb 9 11:18:04 2016 From: pypy.commits at gmail.com (rlamy) Date: Tue, 09 Feb 2016 08:18:04 -0800 (PST) Subject: [pypy-commit] pypy llimpl: Simplify ExtFuncEntry.specialize_call() by extracting .get_funcptr() Message-ID: <56ba113c.89bd1c0a.f2fa7.4f38@mx.google.com> Author: Ronan Lamy Branch: llimpl Changeset: r82126:8ac8fbad47de Date: 2016-02-09 16:16 +0000 http://bitbucket.org/pypy/pypy/changeset/8ac8fbad47de/ Log: Simplify ExtFuncEntry.specialize_call() by extracting .get_funcptr() diff --git a/rpython/rtyper/extfunc.py b/rpython/rtyper/extfunc.py --- a/rpython/rtyper/extfunc.py +++ b/rpython/rtyper/extfunc.py @@ -33,40 +33,41 @@ return self.bookkeeper.immutablevalue(impl) return super(ExtFuncEntry, self).compute_annotation() + def specialize_call(self, hop): + rtyper = hop.rtyper + args_r = [rtyper.getrepr(s_arg) for s_arg in self.signature_args] + r_result = rtyper.getrepr(self.signature_result) + obj = self.get_funcptr(rtyper, args_r, r_result) + vlist = [hop.inputconst(typeOf(obj), obj)] + hop.inputargs(*args_r) + hop.exception_is_here() + return hop.genop('direct_call', vlist, r_result) - def specialize_call(self, hop): + def get_funcptr(self, rtyper, args_r, r_result): from rpython.rtyper.rtyper 
import llinterp_backend - rtyper = hop.rtyper - signature_args = self.signature_args - args_r = [rtyper.getrepr(s_arg) for s_arg in self.signature_args] args_ll = [r_arg.lowleveltype for r_arg in args_r] - s_result = hop.s_result - r_result = rtyper.getrepr(s_result) ll_result = r_result.lowleveltype impl = getattr(self, 'lltypeimpl', None) fakeimpl = getattr(self, 'lltypefakeimpl', self.instance) if impl: if hasattr(self, 'lltypefakeimpl') and rtyper.backend is llinterp_backend: FT = FuncType(args_ll, ll_result) - obj = functionptr(FT, self.name, _external_name=self.name, - _callable=fakeimpl) + return functionptr( + FT, self.name, _external_name=self.name, + _callable=fakeimpl) elif isinstance(impl, _ptr): - obj = impl + return impl else: # store some attributes to the 'impl' function, where # the eventual call to rtyper.getcallable() will find them # and transfer them to the final lltype.functionptr(). impl._llfnobjattrs_ = {'_name': self.name} - obj = rtyper.getannmixlevel().delayedfunction( - impl, signature_args, hop.s_result) + return rtyper.getannmixlevel().delayedfunction( + impl, self.signature_args, self.signature_result) else: FT = FuncType(args_ll, ll_result) - obj = functionptr(FT, self.name, _external_name=self.name, - _callable=fakeimpl, - _safe_not_sandboxed=self.safe_not_sandboxed) - vlist = [hop.inputconst(typeOf(obj), obj)] + hop.inputargs(*args_r) - hop.exception_is_here() - return hop.genop('direct_call', vlist, r_result) + return functionptr( + FT, self.name, _external_name=self.name, _callable=fakeimpl, + _safe_not_sandboxed=self.safe_not_sandboxed) def register_external(function, args, result=None, export_name=None, From pypy.commits at gmail.com Tue Feb 9 12:17:39 2016 From: pypy.commits at gmail.com (plan_rich) Date: Tue, 09 Feb 2016 09:17:39 -0800 (PST) Subject: [pypy-commit] pypy s390x-backend: reviewed guarding, made negate array lookup instead of fiddling with the mask bit Message-ID: <56ba1f33.890bc30a.36972.ffffb712@mx.google.com> 
Author: Richard Plangger Branch: s390x-backend Changeset: r82127:ce150b6b822b Date: 2016-02-09 18:16 +0100 http://bitbucket.org/pypy/pypy/changeset/ce150b6b822b/ Log: reviewed guarding, made negate array lookup instead of fiddling with the mask bit diff --git a/rpython/jit/backend/zarch/conditions.py b/rpython/jit/backend/zarch/conditions.py --- a/rpython/jit/backend/zarch/conditions.py +++ b/rpython/jit/backend/zarch/conditions.py @@ -5,8 +5,6 @@ _immutable_ = True def __repr__(self): s = "" - if self.value & 0x10 != 0: - s += "!FLOAT! " if self.value & 0x1 != 0: s += "OF" if self.value & 0x2 != 0: @@ -19,16 +17,22 @@ # normal branch instructions FLOAT = ConditionLocation(0x10) + EQ = ConditionLocation(0x8) LT = ConditionLocation(0x4) GT = ConditionLocation(0x2) OF = ConditionLocation(0x1) # overflow LE = ConditionLocation(EQ.value | LT.value | OF.value) +FLE = ConditionLocation(EQ.value | LT.value) GE = ConditionLocation(EQ.value | GT.value | OF.value) +FGE = ConditionLocation(EQ.value | GT.value) NE = ConditionLocation(LT.value | GT.value | OF.value) NO = ConditionLocation(0xe) # NO overflow +FGT = ConditionLocation(GT.value | OF.value) +FLT = ConditionLocation(LT.value | OF.value) + ANY = ConditionLocation(0xf) FP_ROUND_DEFAULT = loc.imm(0x0) @@ -36,22 +40,35 @@ cond_none = loc.imm(-1) +opposites = [None] * 16 +opposites[0] = ANY + +opposites[OF.value] = NO +opposites[GT.value] = LE +opposites[LT.value] = GE +opposites[EQ.value] = NE + +opposites[NO.value] = OF +opposites[LE.value] = GT +opposites[GE.value] = LT +opposites[NE.value] = EQ + +opposites[FGE.value] = FLT +opposites[FLE.value] = FGT + +opposites[FGT.value] = FLE +opposites[FLT.value] = FGE + +opposites[ANY.value] = ConditionLocation(0) + def negate(cond): - val = cond.value - isfloat = (val & 0x10) != 0 - cc = (~val) & 0xf - if isfloat: - # inverting is handeled differently for floats - return ConditionLocation(cc | FLOAT.value) - return ConditionLocation(cc) - -def prepare_float_condition(cond): - 
newcond = ConditionLocation(cond.value | FLOAT.value) - return newcond + cc = opposites[cond.value] + if cc is None: + assert 0, "provide a sane value to negate" + return cc def _assert_value(v1, v2): assert v1.value == v2.value - _assert_value(negate(EQ), NE) _assert_value(negate(NE), EQ) _assert_value(negate(LT), GE) @@ -60,4 +77,11 @@ _assert_value(negate(GE), LT) _assert_value(negate(NO), OF) _assert_value(negate(OF), NO) + +_assert_value(negate(FLE), FGT) +_assert_value(negate(FGT), FLE) + +_assert_value(negate(FGE), FLT) +_assert_value(negate(FLT), FGE) + del _assert_value diff --git a/rpython/jit/backend/zarch/helper/assembler.py b/rpython/jit/backend/zarch/helper/assembler.py --- a/rpython/jit/backend/zarch/helper/assembler.py +++ b/rpython/jit/backend/zarch/helper/assembler.py @@ -12,11 +12,6 @@ assert not l0.is_imm() # do the comparison self.mc.cmp_op(l0, l1, pool=l1.is_in_pool(), imm=l1.is_imm(), signed=signed, fp=fp) - - if fp: - # Support for NaNs: S390X sets condition register to 0x3 (unordered) - # as soon as any of the operands is NaN - condition = c.prepare_float_condition(condition) self.flush_cc(condition, arglocs[2]) diff --git a/rpython/jit/backend/zarch/opassembler.py b/rpython/jit/backend/zarch/opassembler.py --- a/rpython/jit/backend/zarch/opassembler.py +++ b/rpython/jit/backend/zarch/opassembler.py @@ -240,12 +240,19 @@ emit_float_mul = gen_emit_rr_or_rpool('MDBR','MDB') emit_float_truediv = gen_emit_rr_or_rpool('DDBR','DDB') + # Support for NaNs: S390X sets condition code to 0x3 (unordered) + # whenever any operand is nan. + # in the case float_le,float_ge the overflow bit is not set of + # the initial condition! + # e.g. guard_true(nan <= x): jumps 1100 inv => 0011, bit 3 set + # e.g. guard_false(nan <= x): does not jump 1100, bit 3 not set + # e.g. 
guard_true(nan >= nan): jumps 1010 inv => 0101, bit 3 set emit_float_lt = gen_emit_cmp_op(c.LT, fp=True) - emit_float_le = gen_emit_cmp_op(c.LE, fp=True) + emit_float_le = gen_emit_cmp_op(c.FLE, fp=True) emit_float_eq = gen_emit_cmp_op(c.EQ, fp=True) emit_float_ne = gen_emit_cmp_op(c.NE, fp=True) emit_float_gt = gen_emit_cmp_op(c.GT, fp=True) - emit_float_ge = gen_emit_cmp_op(c.GE, fp=True) + emit_float_ge = gen_emit_cmp_op(c.FGE, fp=True) def emit_float_neg(self, op, arglocs, regalloc): l0, = arglocs @@ -633,6 +640,7 @@ self.guard_success_cc = c.cond_none assert fcond.value != c.cond_none.value fcond = c.negate(fcond) + token = self.build_guard_token(op, arglocs[0].value, arglocs[1:], fcond) token.pos_jump_offset = self.mc.currpos() assert token.guard_not_invalidated() == is_guard_not_invalidated diff --git a/rpython/jit/backend/zarch/test/test_runner.py b/rpython/jit/backend/zarch/test/test_runner.py --- a/rpython/jit/backend/zarch/test/test_runner.py +++ b/rpython/jit/backend/zarch/test/test_runner.py @@ -24,7 +24,7 @@ cpu.setup_once() return cpu - add_loop_instructions = "lg; lgr; larl; agr; cgfi; je; j;$" + add_loop_instructions = "lg; lgr; larl; agr; cgfi; jge; j;$" # realloc frame takes the most space (from just after larl, to lay) bridge_loop_instructions = "larl; lg; cgfi; jhe; lghi; " \ "iilf;( iihf;)? iilf;( iihf;)? 
basr; lg; br;$" From pypy.commits at gmail.com Tue Feb 9 12:48:56 2016 From: pypy.commits at gmail.com (plan_rich) Date: Tue, 09 Feb 2016 09:48:56 -0800 (PST) Subject: [pypy-commit] pypy s390x-backend: fixed test_runner asmlen, since GE has the overflow bit set this test fails Message-ID: <56ba2688.c711c30a.add0.ffffce34@mx.google.com> Author: Richard Plangger Branch: s390x-backend Changeset: r82128:42e7257e5777 Date: 2016-02-09 18:47 +0100 http://bitbucket.org/pypy/pypy/changeset/42e7257e5777/ Log: fixed test_runner asmlen, since GE has the overflow bit set this test fails diff --git a/rpython/jit/backend/zarch/test/test_runner.py b/rpython/jit/backend/zarch/test/test_runner.py --- a/rpython/jit/backend/zarch/test/test_runner.py +++ b/rpython/jit/backend/zarch/test/test_runner.py @@ -26,5 +26,5 @@ add_loop_instructions = "lg; lgr; larl; agr; cgfi; jge; j;$" # realloc frame takes the most space (from just after larl, to lay) - bridge_loop_instructions = "larl; lg; cgfi; jhe; lghi; " \ + bridge_loop_instructions = "larl; lg; cgfi; jnl; lghi; " \ "iilf;( iihf;)? iilf;( iihf;)? 
basr; lg; br;$" From pypy.commits at gmail.com Tue Feb 9 15:18:12 2016 From: pypy.commits at gmail.com (arigo) Date: Tue, 09 Feb 2016 12:18:12 -0800 (PST) Subject: [pypy-commit] pypy cffi-embedding-win32: Add pypy_init_embedded_cffi_module() to test_ztranslation Message-ID: <56ba4984.2aacc20a.101ee.04f5@mx.google.com> Author: Armin Rigo Branch: cffi-embedding-win32 Changeset: r82129:6644cb289b26 Date: 2016-02-09 21:14 +0100 http://bitbucket.org/pypy/pypy/changeset/6644cb289b26/ Log: Add pypy_init_embedded_cffi_module() to test_ztranslation diff --git a/pypy/module/_cffi_backend/test/test_ztranslation.py b/pypy/module/_cffi_backend/test/test_ztranslation.py --- a/pypy/module/_cffi_backend/test/test_ztranslation.py +++ b/pypy/module/_cffi_backend/test/test_ztranslation.py @@ -4,15 +4,18 @@ # side-effect: FORMAT_LONGDOUBLE must be built before test_checkmodule() from pypy.module._cffi_backend import misc -from pypy.module._cffi_backend import cffi1_module +from pypy.module._cffi_backend import cffi1_module, embedding def test_checkmodule(): # prepare_file_argument() is not working without translating the _file # module too def dummy_prepare_file_argument(space, fileobj): - # call load_cffi1_module() too, from a random place like here - cffi1_module.load_cffi1_module(space, "foo", "foo", 42) + # call pypy_init_embedded_cffi_module() from a random place like here + # --- this calls load_cffi1_module(), too + embedding.pypy_init_embedded_cffi_module( + rffi.cast(rffi.INT, embedding.EMBED_VERSION_MIN), + 42) return lltype.nullptr(rffi.CCHARP.TO) old = ctypeptr.prepare_file_argument try: diff --git a/pypy/objspace/fake/objspace.py b/pypy/objspace/fake/objspace.py --- a/pypy/objspace/fake/objspace.py +++ b/pypy/objspace/fake/objspace.py @@ -397,9 +397,14 @@ space.wrap(value) class FakeCompiler(object): - pass + def compile(self, code, name, mode, flags): + return FakePyCode() FakeObjSpace.default_compiler = FakeCompiler() +class FakePyCode(W_Root): + def exec_code(self, 
space, w_globals, w_locals): + return W_Root() + class FakeModule(W_Root): def __init__(self): From pypy.commits at gmail.com Tue Feb 9 15:18:14 2016 From: pypy.commits at gmail.com (arigo) Date: Tue, 09 Feb 2016 12:18:14 -0800 (PST) Subject: [pypy-commit] pypy cffi-embedding-win32: in-progress: share more code between posix and win32, and use Message-ID: <56ba4986.6bb8c20a.5f459.ffffd0e6@mx.google.com> Author: Armin Rigo Branch: cffi-embedding-win32 Changeset: r82130:12211c49a141 Date: 2016-02-09 21:17 +0100 http://bitbucket.org/pypy/pypy/changeset/12211c49a141/ Log: in-progress: share more code between posix and win32, and use only win32 functions available on Windows XP diff --git a/pypy/module/_cffi_backend/embedding.py b/pypy/module/_cffi_backend/embedding.py --- a/pypy/module/_cffi_backend/embedding.py +++ b/pypy/module/_cffi_backend/embedding.py @@ -84,72 +84,87 @@ return rffi.cast(rffi.INT, res) # ____________________________________________________________ + if os.name == 'nt': - do_startup = r''' -#include -#define WIN32_LEAN_AND_MEAN + + do_includes = r""" +#define _WIN32_WINNT 0x0501 #include -RPY_EXPORTED void rpython_startup_code(void); -RPY_EXPORTED int pypy_setup_home(char *, int); -static unsigned char _cffi_ready = 0; -static const char *volatile _cffi_module_name; +#define CFFI_INIT_HOME_PATH_MAX _MAX_PATH +static void _cffi_init(void); +static void _cffi_init_error(const char *msg, const char *extra); -static void _cffi_init_error(const char *msg, const char *extra) +static int _cffi_init_home(char *output_home_path) { - fprintf(stderr, - "\nPyPy initialization failure when loading module '%s':\n%s%s\n", - _cffi_module_name, msg, extra); -} - -BOOL CALLBACK _cffi_init(PINIT_ONCE InitOnce, PVOID Parameter, PVOID *lpContex) -{ - - HMODULE hModule; - TCHAR home[_MAX_PATH]; - rpython_startup_code(); - RPyGilAllocate(); + HMODULE hModule = 0; + DWORD res; GetModuleHandleEx(GET_MODULE_HANDLE_EX_FLAG_FROM_ADDRESS | 
GET_MODULE_HANDLE_EX_FLAG_UNCHANGED_REFCOUNT, (LPCTSTR)&_cffi_init, &hModule); + if (hModule == 0 ) { - /* TODO turn the int into a string with FormatMessage */ - - _cffi_init_error("dladdr() failed: ", ""); - return TRUE; + _cffi_init_error("GetModuleHandleEx() failed", ""); + return -1; } - GetModuleFileName(hModule, home, _MAX_PATH); - if (pypy_setup_home(home, 1) != 0) { - _cffi_init_error("pypy_setup_home() failed", ""); - return TRUE; + res = GetModuleFileName(hModule, output_home_path, CFFI_INIT_HOME_PATH_MAX); + if (res >= CFFI_INIT_HOME_PATH_MAX) { + return -1; } - _cffi_ready = 1; - fprintf(stderr, "startup succeeded, home %s\n", home); - return TRUE; + return 0; } -RPY_EXPORTED -int pypy_carefully_make_gil(const char *name) +static void _cffi_init_once(void) { - /* For CFFI: this initializes the GIL and loads the home path. - It can be called completely concurrently from unrelated threads. - It assumes that we don't hold the GIL before (if it exists), and we - don't hold it afterwards. 
- */ - static INIT_ONCE s_init_once; + static LONG volatile lock = 0; + static int _init_called = 0; - _cffi_module_name = name; /* not really thread-safe, but better than - nothing */ - InitOnceExecuteOnce(&s_init_once, _cffi_init, NULL, NULL); - return (int)_cffi_ready - 1; -}''' + while (InterlockedCompareExchange(&lock, 1, 0) != 0) { + SwitchToThread(); /* spin loop */ + } + if (!_init_called) { + _cffi_init(); + _init_called = 1; + } + InterlockedCompareExchange(&lock, 0, 1); +} +""" + else: - do_startup = r""" -#include + + do_includes = r""" #include #include +#define CFFI_INIT_HOME_PATH_MAX PATH_MAX +static void _cffi_init(void); +static void _cffi_init_error(const char *msg, const char *extra); + +static int _cffi_init_home(char *output_home_path) +{ + Dl_info info; + dlerror(); /* reset */ + if (dladdr(&_cffi_init, &info) == 0) { + _cffi_init_error("dladdr() failed: ", dlerror()); + return -1; + } + if (realpath(info.dli_fname, output_home_path) == NULL) { + perror("realpath() failed"); + _cffi_init_error("realpath() failed", ""); + return -1; + } + return 0; +} + +static void _cffi_init_once(void) +{ + static pthread_once_t once_control = PTHREAD_ONCE_INIT; + pthread_once(&once_control, _cffi_init); +} +""" + +do_startup = do_includes + r""" RPY_EXPORTED void rpython_startup_code(void); RPY_EXPORTED int pypy_setup_home(char *, int); @@ -165,17 +180,13 @@ static void _cffi_init(void) { - Dl_info info; - char *home; + char home[CFFI_INIT_HOME_PATH_MAX + 1]; rpython_startup_code(); RPyGilAllocate(); - if (dladdr(&_cffi_init, &info) == 0) { - _cffi_init_error("dladdr() failed: ", dlerror()); + if (_cffi_init_home(home) != 0) return; - } - home = realpath(info.dli_fname, NULL); if (pypy_setup_home(home, 1) != 0) { _cffi_init_error("pypy_setup_home() failed", ""); return; @@ -191,11 +202,9 @@ It assumes that we don't hold the GIL before (if it exists), and we don't hold it afterwards. 
*/ - static pthread_once_t once_control = PTHREAD_ONCE_INIT; - _cffi_module_name = name; /* not really thread-safe, but better than nothing */ - pthread_once(&once_control, _cffi_init); + _cffi_init_once(); return (int)_cffi_ready - 1; } """ From pypy.commits at gmail.com Tue Feb 9 16:33:26 2016 From: pypy.commits at gmail.com (mattip) Date: Tue, 09 Feb 2016 13:33:26 -0800 (PST) Subject: [pypy-commit] pypy seperate-strucmember_h: create branch to seperate structmember.h from Python.h Message-ID: <56ba5b26.8ab71c0a.59280.2ebb@mx.google.com> Author: mattip Branch: seperate-strucmember_h Changeset: r82131:89ae4fa28821 Date: 2016-02-09 23:26 +0200 http://bitbucket.org/pypy/pypy/changeset/89ae4fa28821/ Log: create branch to seperate structmember.h from Python.h From pypy.commits at gmail.com Tue Feb 9 16:33:28 2016 From: pypy.commits at gmail.com (devin.jeanpierre) Date: Tue, 09 Feb 2016 13:33:28 -0800 (PST) Subject: [pypy-commit] pypy seperate-strucmember_h: Failing test case! This is what I want to fix. Message-ID: <56ba5b28.44e21c0a.a4518.4c0f@mx.google.com> Author: Devin Jeanpierre Branch: seperate-strucmember_h Changeset: r82132:f6a0fb5629d9 Date: 2016-01-30 23:53 -0800 http://bitbucket.org/pypy/pypy/changeset/f6a0fb5629d9/ Log: Failing test case! This is what I want to fix. Existing Python programs may (okay, do) use symbols from structmember.h, and this works in CPython because structmember.h is not included from Python.h. In PyPy, if you include Python.h, then you can't use the name RO, because it does in fact include structmember.h. (And, indeed, our Python.h says that we should not.) 
diff --git a/pypy/module/cpyext/test/test_cpyext.py b/pypy/module/cpyext/test/test_cpyext.py --- a/pypy/module/cpyext/test/test_cpyext.py +++ b/pypy/module/cpyext/test/test_cpyext.py @@ -863,3 +863,15 @@ os.unlink('_imported_already') except OSError: pass + + def test_no_structmember(self): + """structmember.h should not be included by default.""" + mod = self.import_extension('foo', [ + ('bar', 'METH_NOARGS', + ''' + /* reuse a name that is #defined in structmember.h */ + int RO; + Py_RETURN_NONE; + ''' + ), + ]) From pypy.commits at gmail.com Tue Feb 9 16:33:30 2016 From: pypy.commits at gmail.com (devin.jeanpierre) Date: Tue, 09 Feb 2016 13:33:30 -0800 (PST) Subject: [pypy-commit] pypy seperate-strucmember_h: Move structmember.h out of Python.h. Message-ID: <56ba5b2a.2aacc20a.101ee.1c06@mx.google.com> Author: Devin Jeanpierre Branch: seperate-strucmember_h Changeset: r82133:84085c039403 Date: 2016-01-31 01:49 -0800 http://bitbucket.org/pypy/pypy/changeset/84085c039403/ Log: Move structmember.h out of Python.h. Rough breakdown of changes: * Where Python.h is included by .c files, also include structmember.h * Add pypy_structmember_decl.h file which is included by structmember.h * Allow cpython_api functions to change which headers they are added to. The first two are very manual and icky, and I don't like how I did them. It feels like a hack. But I'm not sure of a better way to do it without more substantive rewrites. The last part was an API changes to cpython_api, so very invasive, but clean. Maybe the implementation is dumb. One last note: The header file being added in copy_header_files() doesn't seem to matter. I left it in anyway "just in case". Does it matter? 
diff --git a/pypy/module/cpyext/api.py b/pypy/module/cpyext/api.py --- a/pypy/module/cpyext/api.py +++ b/pypy/module/cpyext/api.py @@ -59,7 +59,7 @@ class CConfig: _compilation_info_ = ExternalCompilationInfo( include_dirs=include_dirs, - includes=['Python.h', 'stdarg.h'], + includes=['Python.h', 'stdarg.h', 'structmember.h'], compile_extra=['-DPy_BUILD_CORE'], ) @@ -129,6 +129,7 @@ for name in constant_names: setattr(CConfig_constants, name, rffi_platform.ConstantInteger(name)) udir.join('pypy_decl.h').write("/* Will be filled later */\n") +udir.join('pypy_structmember_decl.h').write("/* Will be filled later */\n") udir.join('pypy_macros.h').write("/* Will be filled later */\n") globals().update(rffi_platform.configure(CConfig_constants)) @@ -147,7 +148,7 @@ # XXX: 20 lines of code to recursively copy a directory, really?? assert dstdir.check(dir=True) headers = include_dir.listdir('*.h') + include_dir.listdir('*.inl') - for name in ("pypy_decl.h", "pypy_macros.h"): + for name in ("pypy_decl.h", "pypy_macros.h", "pypy_structmember_decl.h"): headers.append(udir.join(name)) _copy_header_files(headers, dstdir) @@ -232,7 +233,7 @@ wrapper.c_name = cpyext_namespace.uniquename(self.c_name) return wrapper -def cpython_api(argtypes, restype, error=_NOT_SPECIFIED, external=True, +def cpython_api(argtypes, restype, error=_NOT_SPECIFIED, header='pypy_decl.h', gil=None): """ Declares a function to be exported. @@ -241,8 +242,8 @@ special value 'CANNOT_FAIL' (also when restype is Void) turns an eventual exception into a wrapped SystemError. Unwrapped exceptions also cause a SytemError. - - set `external` to False to get a C function pointer, but not exported by - the API headers. + - `header` is the header file to export the function in, Set to None to get + a C function pointer, but not exported by the API headers. 
- set `gil` to "acquire", "release" or "around" to acquire the GIL, release the GIL, or both """ @@ -263,7 +264,7 @@ def decorate(func): func_name = func.func_name - if external: + if header is not None: c_name = None else: c_name = func_name @@ -271,7 +272,7 @@ c_name=c_name, gil=gil) func.api_func = api_function - if external: + if header is not None: assert func_name not in FUNCTIONS, ( "%s already registered" % func_name) @@ -363,8 +364,9 @@ unwrapper_catch = make_unwrapper(True) unwrapper_raise = make_unwrapper(False) - if external: + if header is not None: FUNCTIONS[func_name] = api_function + FUNCTIONS_BY_HEADER.setdefault(header, {})[func_name] = api_function INTERPLEVEL_API[func_name] = unwrapper_catch # used in tests return unwrapper_raise # used in 'normal' RPython code. return decorate @@ -383,6 +385,7 @@ INTERPLEVEL_API = {} FUNCTIONS = {} +FUNCTIONS_BY_HEADER = {} # These are C symbols which cpyext will export, but which are defined in .c # files somewhere in the implementation of cpyext (rather than being defined in @@ -811,6 +814,7 @@ global_code = '\n'.join(global_objects) prologue = ("#include \n" + "#include \n" "#include \n") code = (prologue + struct_declaration_code + @@ -956,34 +960,62 @@ pypy_macros_h = udir.join('pypy_macros.h') pypy_macros_h.write('\n'.join(pypy_macros)) +def _header_to_guard(header_name): + return '_PYPY_' + header_name.replace('.', '_').upper() + +def _decl_header_top(header_name): + guard = _header_to_guard(header_name) + header = [ + "#ifndef %s\n" % guard, + "#define %s\n" % guard, + "#ifndef PYPY_STANDALONE\n", + "#ifdef __cplusplus", + "extern \"C\" {", + "#endif\n", + '#define Signed long /* xxx temporary fix */\n', + '#define Unsigned unsigned long /* xxx temporary fix */\n' + ] + if header_name == 'pypy_decl.h': # XXX don't send for code review unless I'm sure this is necessary + for decl in FORWARD_DECLS: + header.append("%s;" % (decl,)) + return header + +def _decl_header_bottom(header_name): + return [ + 
'#undef Signed /* xxx temporary fix */\n', + '#undef Unsigned /* xxx temporary fix */\n', + "#ifdef __cplusplus", + "}", + "#endif", + "#endif /*PYPY_STANDALONE*/\n", + "#endif /*%s*/\n" % _header_to_guard(header_name), + ] + def generate_decls_and_callbacks(db, export_symbols, api_struct=True): "NOT_RPYTHON" # implement function callbacks and generate function decls functions = [] - pypy_decls = [] - pypy_decls.append("#ifndef _PYPY_PYPY_DECL_H\n") - pypy_decls.append("#define _PYPY_PYPY_DECL_H\n") - pypy_decls.append("#ifndef PYPY_STANDALONE\n") - pypy_decls.append("#ifdef __cplusplus") - pypy_decls.append("extern \"C\" {") - pypy_decls.append("#endif\n") - pypy_decls.append('#define Signed long /* xxx temporary fix */\n') - pypy_decls.append('#define Unsigned unsigned long /* xxx temporary fix */\n') + decls = {} - for decl in FORWARD_DECLS: - pypy_decls.append("%s;" % (decl,)) + for header_name, header_functions in FUNCTIONS_BY_HEADER.iteritems(): + if header_name not in decls: + decls[header_name] = header = [] + header.extend(_decl_header_top(header_name)) + else: + header = decls[header_name] - for name, func in sorted(FUNCTIONS.iteritems()): - restype, args = c_function_signature(db, func) - pypy_decls.append("PyAPI_FUNC(%s) %s(%s);" % (restype, name, args)) - if api_struct: - callargs = ', '.join('arg%d' % (i,) - for i in range(len(func.argtypes))) - if func.restype is lltype.Void: - body = "{ _pypyAPI.%s(%s); }" % (name, callargs) - else: - body = "{ return _pypyAPI.%s(%s); }" % (name, callargs) - functions.append('%s %s(%s)\n%s' % (restype, name, args, body)) + for name, func in sorted(header_functions.iteritems()): + restype, args = c_function_signature(db, func) + header.append("PyAPI_FUNC(%s) %s(%s);" % (restype, name, args)) + if api_struct: + callargs = ', '.join('arg%d' % (i,) + for i in range(len(func.argtypes))) + if func.restype is lltype.Void: + body = "{ _pypyAPI.%s(%s); }" % (name, callargs) + else: + body = "{ return _pypyAPI.%s(%s); }" % 
(name, callargs) + functions.append('%s %s(%s)\n%s' % (restype, name, args, body)) + pypy_decls = decls['pypy_decl.h'] for name in VA_TP_LIST: name_no_star = process_va_name(name) header = ('%s pypy_va_get_%s(va_list* vp)' % @@ -999,16 +1031,11 @@ typ = 'PyObject*' pypy_decls.append('PyAPI_DATA(%s) %s;' % (typ, name)) - pypy_decls.append('#undef Signed /* xxx temporary fix */\n') - pypy_decls.append('#undef Unsigned /* xxx temporary fix */\n') - pypy_decls.append("#ifdef __cplusplus") - pypy_decls.append("}") - pypy_decls.append("#endif") - pypy_decls.append("#endif /*PYPY_STANDALONE*/\n") - pypy_decls.append("#endif /*_PYPY_PYPY_DECL_H*/\n") + for header_name, header_decls in decls.iteritems(): + header_decls.extend(_decl_header_bottom(header_name)) - pypy_decl_h = udir.join('pypy_decl.h') - pypy_decl_h.write('\n'.join(pypy_decls)) + decl_h = udir.join(header_name) + decl_h.write('\n'.join(header_decls)) return functions separate_module_files = [source_dir / "varargwrapper.c", diff --git a/pypy/module/cpyext/bufferobject.py b/pypy/module/cpyext/bufferobject.py --- a/pypy/module/cpyext/bufferobject.py +++ b/pypy/module/cpyext/bufferobject.py @@ -73,7 +73,7 @@ "Don't know how to realize a buffer")) - at cpython_api([PyObject], lltype.Void, external=False) + at cpython_api([PyObject], lltype.Void, header=None) def buffer_dealloc(space, py_obj): py_buf = rffi.cast(PyBufferObject, py_obj) if py_buf.c_b_base: diff --git a/pypy/module/cpyext/frameobject.py b/pypy/module/cpyext/frameobject.py --- a/pypy/module/cpyext/frameobject.py +++ b/pypy/module/cpyext/frameobject.py @@ -39,7 +39,7 @@ py_frame.c_f_locals = make_ref(space, frame.get_w_locals()) rffi.setintfield(py_frame, 'c_f_lineno', frame.getorcreatedebug().f_lineno) - at cpython_api([PyObject], lltype.Void, external=False) + at cpython_api([PyObject], lltype.Void, header=None) def frame_dealloc(space, py_obj): py_frame = rffi.cast(PyFrameObject, py_obj) py_code = rffi.cast(PyObject, py_frame.c_f_code) diff --git 
a/pypy/module/cpyext/funcobject.py b/pypy/module/cpyext/funcobject.py --- a/pypy/module/cpyext/funcobject.py +++ b/pypy/module/cpyext/funcobject.py @@ -56,7 +56,7 @@ assert isinstance(w_obj, Function) py_func.c_func_name = make_ref(space, space.wrap(w_obj.name)) - at cpython_api([PyObject], lltype.Void, external=False) + at cpython_api([PyObject], lltype.Void, header=None) def function_dealloc(space, py_obj): py_func = rffi.cast(PyFunctionObject, py_obj) Py_DecRef(space, py_func.c_func_name) @@ -75,7 +75,7 @@ rffi.setintfield(py_code, 'c_co_flags', co_flags) rffi.setintfield(py_code, 'c_co_argcount', w_obj.co_argcount) - at cpython_api([PyObject], lltype.Void, external=False) + at cpython_api([PyObject], lltype.Void, header=None) def code_dealloc(space, py_obj): py_code = rffi.cast(PyCodeObject, py_obj) Py_DecRef(space, py_code.c_co_name) diff --git a/pypy/module/cpyext/include/Python.h b/pypy/module/cpyext/include/Python.h --- a/pypy/module/cpyext/include/Python.h +++ b/pypy/module/cpyext/include/Python.h @@ -132,9 +132,6 @@ /* Missing definitions */ #include "missing.h" -// XXX This shouldn't be included here -#include "structmember.h" - #include /* Define macros for inline documentation. */ diff --git a/pypy/module/cpyext/include/structmember.h b/pypy/module/cpyext/include/structmember.h --- a/pypy/module/cpyext/include/structmember.h +++ b/pypy/module/cpyext/include/structmember.h @@ -55,3 +55,6 @@ } #endif #endif /* !Py_STRUCTMEMBER_H */ + +/* API functions. 
*/ +#include "pypy_structmember_decl.h" diff --git a/pypy/module/cpyext/methodobject.py b/pypy/module/cpyext/methodobject.py --- a/pypy/module/cpyext/methodobject.py +++ b/pypy/module/cpyext/methodobject.py @@ -50,7 +50,7 @@ py_func.c_m_self = make_ref(space, w_obj.w_self) py_func.c_m_module = make_ref(space, w_obj.w_module) - at cpython_api([PyObject], lltype.Void, external=False) + at cpython_api([PyObject], lltype.Void, header=None) def cfunction_dealloc(space, py_obj): py_func = rffi.cast(PyCFunctionObject, py_obj) Py_DecRef(space, py_func.c_m_self) diff --git a/pypy/module/cpyext/pyobject.py b/pypy/module/cpyext/pyobject.py --- a/pypy/module/cpyext/pyobject.py +++ b/pypy/module/cpyext/pyobject.py @@ -70,7 +70,7 @@ alloc : allocate and basic initialization of a raw PyObject attach : Function called to tie a raw structure to a pypy object realize : Function called to create a pypy object from a raw struct - dealloc : a cpython_api(external=False), similar to PyObject_dealloc + dealloc : a cpython_api(header=None), similar to PyObject_dealloc """ tp_basestruct = kw.pop('basestruct', PyObject.TO) diff --git a/pypy/module/cpyext/pytraceback.py b/pypy/module/cpyext/pytraceback.py --- a/pypy/module/cpyext/pytraceback.py +++ b/pypy/module/cpyext/pytraceback.py @@ -41,7 +41,7 @@ rffi.setintfield(py_traceback, 'c_tb_lasti', traceback.lasti) rffi.setintfield(py_traceback, 'c_tb_lineno',traceback.get_lineno()) - at cpython_api([PyObject], lltype.Void, external=False) + at cpython_api([PyObject], lltype.Void, header=None) def traceback_dealloc(space, py_obj): py_traceback = rffi.cast(PyTracebackObject, py_obj) Py_DecRef(space, rffi.cast(PyObject, py_traceback.c_tb_next)) diff --git a/pypy/module/cpyext/sliceobject.py b/pypy/module/cpyext/sliceobject.py --- a/pypy/module/cpyext/sliceobject.py +++ b/pypy/module/cpyext/sliceobject.py @@ -36,7 +36,7 @@ py_slice.c_stop = make_ref(space, w_obj.w_stop) py_slice.c_step = make_ref(space, w_obj.w_step) - at cpython_api([PyObject], 
lltype.Void, external=False) + at cpython_api([PyObject], lltype.Void, header=None) def slice_dealloc(space, py_obj): """Frees allocated PyStringObject resources. """ diff --git a/pypy/module/cpyext/slotdefs.py b/pypy/module/cpyext/slotdefs.py --- a/pypy/module/cpyext/slotdefs.py +++ b/pypy/module/cpyext/slotdefs.py @@ -309,7 +309,7 @@ return space.wrap(generic_cpy_call(space, func_target, w_self, w_other)) - at cpython_api([PyTypeObjectPtr, PyObject, PyObject], PyObject, external=False) + at cpython_api([PyTypeObjectPtr, PyObject, PyObject], PyObject, header=None) def slot_tp_new(space, type, w_args, w_kwds): from pypy.module.cpyext.tupleobject import PyTuple_Check pyo = rffi.cast(PyObject, type) @@ -320,30 +320,30 @@ w_args_new = space.newtuple(args_w) return space.call(w_func, w_args_new, w_kwds) - at cpython_api([PyObject, PyObject, PyObject], rffi.INT_real, error=-1, external=False) + at cpython_api([PyObject, PyObject, PyObject], rffi.INT_real, error=-1, header=None) def slot_tp_init(space, w_self, w_args, w_kwds): w_descr = space.lookup(w_self, '__init__') args = Arguments.frompacked(space, w_args, w_kwds) space.get_and_call_args(w_descr, w_self, args) return 0 - at cpython_api([PyObject, PyObject, PyObject], PyObject, external=False) + at cpython_api([PyObject, PyObject, PyObject], PyObject, header=None) def slot_tp_call(space, w_self, w_args, w_kwds): return space.call(w_self, w_args, w_kwds) - at cpython_api([PyObject], PyObject, external=False) + at cpython_api([PyObject], PyObject, header=None) def slot_tp_str(space, w_self): return space.str(w_self) - at cpython_api([PyObject], PyObject, external=False) + at cpython_api([PyObject], PyObject, header=None) def slot_nb_int(space, w_self): return space.int(w_self) - at cpython_api([PyObject], PyObject, external=False) + at cpython_api([PyObject], PyObject, header=None) def slot_tp_iter(space, w_self): return space.iter(w_self) - at cpython_api([PyObject], PyObject, external=False) + at 
cpython_api([PyObject], PyObject, header=None) def slot_tp_iternext(space, w_self): return space.next(w_self) @@ -371,7 +371,7 @@ return @cpython_api([PyObject, PyObject, PyObject], rffi.INT_real, - error=-1, external=True) # XXX should not be exported + error=-1) # XXX should be header=None @func_renamer("cpyext_tp_setattro_%s" % (typedef.name,)) def slot_tp_setattro(space, w_self, w_name, w_value): if w_value is not None: @@ -385,8 +385,7 @@ if getattr_fn is None: return - @cpython_api([PyObject, PyObject], PyObject, - external=True) + @cpython_api([PyObject, PyObject], PyObject) @func_renamer("cpyext_tp_getattro_%s" % (typedef.name,)) def slot_tp_getattro(space, w_self, w_name): return space.call_function(getattr_fn, w_self, w_name) diff --git a/pypy/module/cpyext/stringobject.py b/pypy/module/cpyext/stringobject.py --- a/pypy/module/cpyext/stringobject.py +++ b/pypy/module/cpyext/stringobject.py @@ -103,7 +103,7 @@ track_reference(space, py_obj, w_obj) return w_obj - at cpython_api([PyObject], lltype.Void, external=False) + at cpython_api([PyObject], lltype.Void, header=None) def string_dealloc(space, py_obj): """Frees allocated PyStringObject resources. 
""" diff --git a/pypy/module/cpyext/structmember.py b/pypy/module/cpyext/structmember.py --- a/pypy/module/cpyext/structmember.py +++ b/pypy/module/cpyext/structmember.py @@ -31,8 +31,10 @@ (T_PYSSIZET, rffi.SSIZE_T, PyLong_AsSsize_t), ]) +_HEADER = 'pypy_structmember_decl.h' - at cpython_api([PyObject, lltype.Ptr(PyMemberDef)], PyObject) + + at cpython_api([PyObject, lltype.Ptr(PyMemberDef)], PyObject, header=_HEADER) def PyMember_GetOne(space, obj, w_member): addr = rffi.cast(ADDR, obj) addr += w_member.c_offset @@ -83,7 +85,8 @@ return w_result - at cpython_api([PyObject, lltype.Ptr(PyMemberDef), PyObject], rffi.INT_real, error=-1) + at cpython_api([PyObject, lltype.Ptr(PyMemberDef), PyObject], rffi.INT_real, + error=-1, header=_HEADER) def PyMember_SetOne(space, obj, w_member, w_value): addr = rffi.cast(ADDR, obj) addr += w_member.c_offset diff --git a/pypy/module/cpyext/test/test_translate.py b/pypy/module/cpyext/test/test_translate.py --- a/pypy/module/cpyext/test/test_translate.py +++ b/pypy/module/cpyext/test/test_translate.py @@ -19,7 +19,7 @@ @specialize.memo() def get_tp_function(space, typedef): - @cpython_api([], lltype.Signed, error=-1, external=False) + @cpython_api([], lltype.Signed, error=-1, header=None) def slot_tp_function(space): return typedef.value diff --git a/pypy/module/cpyext/typeobject.py b/pypy/module/cpyext/typeobject.py --- a/pypy/module/cpyext/typeobject.py +++ b/pypy/module/cpyext/typeobject.py @@ -183,7 +183,7 @@ if pto.c_tp_new: add_tp_new_wrapper(space, dict_w, pto) - at cpython_api([PyObject, PyObject, PyObject], PyObject, external=False) + at cpython_api([PyObject, PyObject, PyObject], PyObject, header=None) def tp_new_wrapper(space, self, w_args, w_kwds): tp_new = rffi.cast(PyTypeObjectPtr, self).c_tp_new @@ -311,7 +311,7 @@ dealloc=type_dealloc) - at cpython_api([PyObject], lltype.Void, external=False) + at cpython_api([PyObject], lltype.Void, header=None) def subtype_dealloc(space, obj): pto = obj.c_ob_type base = pto @@ 
-327,7 +327,7 @@ # hopefully this does not clash with the memory model assumed in # extension modules - at cpython_api([PyObject, Py_ssize_tP], lltype.Signed, external=False, + at cpython_api([PyObject, Py_ssize_tP], lltype.Signed, header=None, error=CANNOT_FAIL) def str_segcount(space, w_obj, ref): if ref: @@ -335,7 +335,7 @@ return 1 @cpython_api([PyObject, Py_ssize_t, rffi.VOIDPP], lltype.Signed, - external=False, error=-1) + header=None, error=-1) def str_getreadbuffer(space, w_str, segment, ref): from pypy.module.cpyext.stringobject import PyString_AsString if segment != 0: @@ -348,7 +348,7 @@ return space.len_w(w_str) @cpython_api([PyObject, Py_ssize_t, rffi.CCHARPP], lltype.Signed, - external=False, error=-1) + header=None, error=-1) def str_getcharbuffer(space, w_str, segment, ref): from pypy.module.cpyext.stringobject import PyString_AsString if segment != 0: @@ -361,7 +361,7 @@ return space.len_w(w_str) @cpython_api([PyObject, Py_ssize_t, rffi.VOIDPP], lltype.Signed, - external=False, error=-1) + header=None, error=-1) def buf_getreadbuffer(space, pyref, segment, ref): from pypy.module.cpyext.bufferobject import PyBufferObject if segment != 0: @@ -393,7 +393,7 @@ buf_getreadbuffer.api_func.get_wrapper(space)) pto.c_tp_as_buffer = c_buf - at cpython_api([PyObject], lltype.Void, external=False) + at cpython_api([PyObject], lltype.Void, header=None) def type_dealloc(space, obj): from pypy.module.cpyext.object import PyObject_dealloc obj_pto = rffi.cast(PyTypeObjectPtr, obj) diff --git a/pypy/module/cpyext/unicodeobject.py b/pypy/module/cpyext/unicodeobject.py --- a/pypy/module/cpyext/unicodeobject.py +++ b/pypy/module/cpyext/unicodeobject.py @@ -75,7 +75,7 @@ track_reference(space, py_obj, w_obj) return w_obj - at cpython_api([PyObject], lltype.Void, external=False) + at cpython_api([PyObject], lltype.Void, header=None) def unicode_dealloc(space, py_obj): py_unicode = rffi.cast(PyUnicodeObject, py_obj) if py_unicode.c_buffer: From pypy.commits at gmail.com 
Tue Feb 9 16:33:32 2016 From: pypy.commits at gmail.com (devin.jeanpierre) Date: Tue, 09 Feb 2016 13:33:32 -0800 (PST) Subject: [pypy-commit] pypy seperate-strucmember_h: Unrefactor code that was really better as it was. Message-ID: <56ba5b2c.6bb8c20a.5f459.ffffe8ef@mx.google.com> Author: Devin Jeanpierre Branch: seperate-strucmember_h Changeset: r82134:66d60a554284 Date: 2016-01-31 02:30 -0800 http://bitbucket.org/pypy/pypy/changeset/66d60a554284/ Log: Unrefactor code that was really better as it was. I didn't realize I'd end up sharing, well, basically nothing. diff --git a/pypy/module/cpyext/api.py b/pypy/module/cpyext/api.py --- a/pypy/module/cpyext/api.py +++ b/pypy/module/cpyext/api.py @@ -960,47 +960,27 @@ pypy_macros_h = udir.join('pypy_macros.h') pypy_macros_h.write('\n'.join(pypy_macros)) -def _header_to_guard(header_name): - return '_PYPY_' + header_name.replace('.', '_').upper() - -def _decl_header_top(header_name): - guard = _header_to_guard(header_name) - header = [ - "#ifndef %s\n" % guard, - "#define %s\n" % guard, - "#ifndef PYPY_STANDALONE\n", - "#ifdef __cplusplus", - "extern \"C\" {", - "#endif\n", - '#define Signed long /* xxx temporary fix */\n', - '#define Unsigned unsigned long /* xxx temporary fix */\n' - ] - if header_name == 'pypy_decl.h': # XXX don't send for code review unless I'm sure this is necessary - for decl in FORWARD_DECLS: - header.append("%s;" % (decl,)) - return header - -def _decl_header_bottom(header_name): - return [ - '#undef Signed /* xxx temporary fix */\n', - '#undef Unsigned /* xxx temporary fix */\n', - "#ifdef __cplusplus", - "}", - "#endif", - "#endif /*PYPY_STANDALONE*/\n", - "#endif /*%s*/\n" % _header_to_guard(header_name), - ] - def generate_decls_and_callbacks(db, export_symbols, api_struct=True): "NOT_RPYTHON" # implement function callbacks and generate function decls functions = [] decls = {} + pypy_decls = decls['pypy_decl.h'] = [] + pypy_decls.append("#ifndef _PYPY_PYPY_DECL_H\n") + 
pypy_decls.append("#define _PYPY_PYPY_DECL_H\n") + pypy_decls.append("#ifndef PYPY_STANDALONE\n") + pypy_decls.append("#ifdef __cplusplus") + pypy_decls.append("extern \"C\" {") + pypy_decls.append("#endif\n") + pypy_decls.append('#define Signed long /* xxx temporary fix */\n') + pypy_decls.append('#define Unsigned unsigned long /* xxx temporary fix */\n') + + for decl in FORWARD_DECLS: + pypy_decls.append("%s;" % (decl,)) for header_name, header_functions in FUNCTIONS_BY_HEADER.iteritems(): if header_name not in decls: - decls[header_name] = header = [] - header.extend(_decl_header_top(header_name)) + header = decls[header_name] = [] else: header = decls[header_name] @@ -1015,7 +995,6 @@ else: body = "{ return _pypyAPI.%s(%s); }" % (name, callargs) functions.append('%s %s(%s)\n%s' % (restype, name, args, body)) - pypy_decls = decls['pypy_decl.h'] for name in VA_TP_LIST: name_no_star = process_va_name(name) header = ('%s pypy_va_get_%s(va_list* vp)' % @@ -1031,9 +1010,15 @@ typ = 'PyObject*' pypy_decls.append('PyAPI_DATA(%s) %s;' % (typ, name)) + pypy_decls.append('#undef Signed /* xxx temporary fix */\n') + pypy_decls.append('#undef Unsigned /* xxx temporary fix */\n') + pypy_decls.append("#ifdef __cplusplus") + pypy_decls.append("}") + pypy_decls.append("#endif") + pypy_decls.append("#endif /*PYPY_STANDALONE*/\n") + pypy_decls.append("#endif /*_PYPY_PYPY_DECL_H*/\n") + for header_name, header_decls in decls.iteritems(): - header_decls.extend(_decl_header_bottom(header_name)) - decl_h = udir.join(header_name) decl_h.write('\n'.join(header_decls)) return functions diff --git a/pypy/module/cpyext/include/structmember.h b/pypy/module/cpyext/include/structmember.h --- a/pypy/module/cpyext/include/structmember.h +++ b/pypy/module/cpyext/include/structmember.h @@ -50,11 +50,18 @@ #define PY_WRITE_RESTRICTED 4 #define RESTRICTED (READ_RESTRICTED | PY_WRITE_RESTRICTED) +#define Signed long /* xxx temporary fix */ +#define Unsigned unsigned long /* xxx temporary fix */ + 
+/* API functions. */ +#include "pypy_structmember_decl.h" + +#undef Signed /* xxx temporary fix */ +#undef Unsigned /* xxx temporary fix */ #ifdef __cplusplus } #endif #endif /* !Py_STRUCTMEMBER_H */ -/* API functions. */ -#include "pypy_structmember_decl.h" + From pypy.commits at gmail.com Tue Feb 9 17:24:11 2016 From: pypy.commits at gmail.com (arigo) Date: Tue, 09 Feb 2016 14:24:11 -0800 (PST) Subject: [pypy-commit] pypy default: add missing file Message-ID: <56ba670b.05e41c0a.992b1.ffff9167@mx.google.com> Author: Armin Rigo Branch: Changeset: r82135:675f24c75537 Date: 2016-02-09 23:23 +0100 http://bitbucket.org/pypy/pypy/changeset/675f24c75537/ Log: add missing file diff --git a/lib_pypy/cffi/_embedding.h b/lib_pypy/cffi/_embedding.h new file mode 100644 --- /dev/null +++ b/lib_pypy/cffi/_embedding.h @@ -0,0 +1,517 @@ + +/***** Support code for embedding *****/ + +#if defined(_MSC_VER) +# define CFFI_DLLEXPORT __declspec(dllexport) +#elif defined(__GNUC__) +# define CFFI_DLLEXPORT __attribute__((visibility("default"))) +#else +# define CFFI_DLLEXPORT /* nothing */ +#endif + + +/* There are two global variables of type _cffi_call_python_fnptr: + + * _cffi_call_python, which we declare just below, is the one called + by ``extern "Python"`` implementations. + + * _cffi_call_python_org, which on CPython is actually part of the + _cffi_exports[] array, is the function pointer copied from + _cffi_backend. + + After initialization is complete, both are equal. However, the + first one remains equal to &_cffi_start_and_call_python until the + very end of initialization, when we are (or should be) sure that + concurrent threads also see a completely initialized world, and + only then is it changed. 
+*/ +#undef _cffi_call_python +typedef void (*_cffi_call_python_fnptr)(struct _cffi_externpy_s *, char *); +static void _cffi_start_and_call_python(struct _cffi_externpy_s *, char *); +static _cffi_call_python_fnptr _cffi_call_python = &_cffi_start_and_call_python; + + +#ifndef _MSC_VER + /* --- Assuming a GCC not infinitely old --- */ +# define cffi_compare_and_swap(l,o,n) __sync_bool_compare_and_swap(l,o,n) +# define cffi_write_barrier() __sync_synchronize() +# if !defined(__amd64__) && !defined(__x86_64__) && \ + !defined(__i386__) && !defined(__i386) +# define cffi_read_barrier() __sync_synchronize() +# else +# define cffi_read_barrier() (void)0 +# endif +#else + /* --- Windows threads version --- */ +# include +# define cffi_compare_and_swap(l,o,n) \ + (InterlockedCompareExchangePointer(l,n,o) == (o)) +# define cffi_write_barrier() InterlockedCompareExchange(&_cffi_dummy,0,0) +# define cffi_read_barrier() (void)0 +static volatile LONG _cffi_dummy; +#endif + +#ifdef WITH_THREAD +# ifndef _MSC_VER +# include + static pthread_mutex_t _cffi_embed_startup_lock; +# else + static CRITICAL_SECTION _cffi_embed_startup_lock; +# endif + static char _cffi_embed_startup_lock_ready = 0; +#endif + +static void _cffi_acquire_reentrant_mutex(void) +{ + static void *volatile lock = NULL; + + while (!cffi_compare_and_swap(&lock, NULL, (void *)1)) { + /* should ideally do a spin loop instruction here, but + hard to do it portably and doesn't really matter I + think: pthread_mutex_init() should be very fast, and + this is only run at start-up anyway. 
*/ + } + +#ifdef WITH_THREAD + if (!_cffi_embed_startup_lock_ready) { +# ifndef _MSC_VER + pthread_mutexattr_t attr; + pthread_mutexattr_init(&attr); + pthread_mutexattr_settype(&attr, PTHREAD_MUTEX_RECURSIVE); + pthread_mutex_init(&_cffi_embed_startup_lock, &attr); +# else + InitializeCriticalSection(&_cffi_embed_startup_lock); +# endif + _cffi_embed_startup_lock_ready = 1; + } +#endif + + while (!cffi_compare_and_swap(&lock, (void *)1, NULL)) + ; + +#ifndef _MSC_VER + pthread_mutex_lock(&_cffi_embed_startup_lock); +#else + EnterCriticalSection(&_cffi_embed_startup_lock); +#endif +} + +static void _cffi_release_reentrant_mutex(void) +{ +#ifndef _MSC_VER + pthread_mutex_unlock(&_cffi_embed_startup_lock); +#else + LeaveCriticalSection(&_cffi_embed_startup_lock); +#endif +} + + +/********** CPython-specific section **********/ +#ifndef PYPY_VERSION + + +#define _cffi_call_python_org _cffi_exports[_CFFI_CPIDX] + +PyMODINIT_FUNC _CFFI_PYTHON_STARTUP_FUNC(void); /* forward */ + +static void _cffi_py_initialize(void) +{ + /* XXX use initsigs=0, which "skips initialization registration of + signal handlers, which might be useful when Python is + embedded" according to the Python docs. But review and think + if it should be a user-controllable setting. + + XXX we should also give a way to write errors to a buffer + instead of to stderr. + + XXX if importing 'site' fails, CPython (any version) calls + exit(). Should we try to work around this behavior here? + */ + Py_InitializeEx(0); +} + +static int _cffi_initialize_python(void) +{ + /* This initializes Python, imports _cffi_backend, and then the + present .dll/.so is set up as a CPython C extension module. + */ + int result; + PyGILState_STATE state; + PyObject *pycode=NULL, *global_dict=NULL, *x; + +#if PY_MAJOR_VERSION >= 3 + /* see comments in _cffi_carefully_make_gil() about the + Python2/Python3 difference + */ +#else + /* Acquire the GIL. We have no threadstate here. 
If Python is + already initialized, it is possible that there is already one + existing for this thread, but it is not made current now. + */ + PyEval_AcquireLock(); + + _cffi_py_initialize(); + + /* The Py_InitializeEx() sometimes made a threadstate for us, but + not always. Indeed Py_InitializeEx() could be called and do + nothing. So do we have a threadstate, or not? We don't know, + but we can replace it with NULL in all cases. + */ + (void)PyThreadState_Swap(NULL); + + /* Now we can release the GIL and re-acquire immediately using the + logic of PyGILState(), which handles making or installing the + correct threadstate. + */ + PyEval_ReleaseLock(); +#endif + state = PyGILState_Ensure(); + + /* Call the initxxx() function from the present module. It will + create and initialize us as a CPython extension module, instead + of letting the startup Python code do it---it might reimport + the same .dll/.so and get maybe confused on some platforms. + It might also have troubles locating the .dll/.so again for all + I know. + */ + (void)_CFFI_PYTHON_STARTUP_FUNC(); + if (PyErr_Occurred()) + goto error; + + /* Now run the Python code provided to ffi.embedding_init_code(). + */ + pycode = Py_CompileString(_CFFI_PYTHON_STARTUP_CODE, + "", + Py_file_input); + if (pycode == NULL) + goto error; + global_dict = PyDict_New(); + if (global_dict == NULL) + goto error; + if (PyDict_SetItemString(global_dict, "__builtins__", + PyThreadState_GET()->interp->builtins) < 0) + goto error; + x = PyEval_EvalCode( +#if PY_MAJOR_VERSION < 3 + (PyCodeObject *) +#endif + pycode, global_dict, global_dict); + if (x == NULL) + goto error; + Py_DECREF(x); + + /* Done! Now if we've been called from + _cffi_start_and_call_python() in an ``extern "Python"``, we can + only hope that the Python code did correctly set up the + corresponding @ffi.def_extern() function. 
Otherwise, the + general logic of ``extern "Python"`` functions (inside the + _cffi_backend module) will find that the reference is still + missing and print an error. + */ + result = 0; + done: + Py_XDECREF(pycode); + Py_XDECREF(global_dict); + PyGILState_Release(state); + return result; + + error:; + { + /* Print as much information as potentially useful. + Debugging load-time failures with embedding is not fun + */ + PyObject *exception, *v, *tb, *f, *modules, *mod; + PyErr_Fetch(&exception, &v, &tb); + if (exception != NULL) { + PyErr_NormalizeException(&exception, &v, &tb); + PyErr_Display(exception, v, tb); + } + Py_XDECREF(exception); + Py_XDECREF(v); + Py_XDECREF(tb); + + f = PySys_GetObject((char *)"stderr"); + if (f != NULL && f != Py_None) { + PyFile_WriteString("\nFrom: " _CFFI_MODULE_NAME + "\ncompiled with cffi version: 1.5.1" + "\n_cffi_backend module: ", f); + modules = PyImport_GetModuleDict(); + mod = PyDict_GetItemString(modules, "_cffi_backend"); + if (mod == NULL) { + PyFile_WriteString("not loaded", f); + } + else { + v = PyObject_GetAttrString(mod, "__file__"); + PyFile_WriteObject(v, f, 0); + Py_XDECREF(v); + } + PyFile_WriteString("\nsys.path: ", f); + PyFile_WriteObject(PySys_GetObject((char *)"path"), f, 0); + PyFile_WriteString("\n\n", f); + } + } + result = -1; + goto done; +} + +PyAPI_DATA(char *) _PyParser_TokenNames[]; /* from CPython */ + +static int _cffi_carefully_make_gil(void) +{ + /* This does the basic initialization of Python. It can be called + completely concurrently from unrelated threads. It assumes + that we don't hold the GIL before (if it exists), and we don't + hold it afterwards. + + What it really does is completely different in Python 2 and + Python 3. + + Python 2 + ======== + + Initialize the GIL, without initializing the rest of Python, + by calling PyEval_InitThreads(). + + PyEval_InitThreads() must not be called concurrently at all. + So we use a global variable as a simple spin lock. 
This global + variable must be from 'libpythonX.Y.so', not from this + cffi-based extension module, because it must be shared from + different cffi-based extension modules. We choose + _PyParser_TokenNames[0] as a completely arbitrary pointer value + that is never written to. The default is to point to the + string "ENDMARKER". We change it temporarily to point to the + next character in that string. (Yes, I know it's REALLY + obscure.) + + Python 3 + ======== + + In Python 3, PyEval_InitThreads() cannot be called before + Py_InitializeEx() any more. So this function calls + Py_InitializeEx() first. It uses the same obscure logic to + make sure we never call it concurrently. + + Arguably, this is less good on the spinlock, because + Py_InitializeEx() takes much longer to run than + PyEval_InitThreads(). But I didn't find a way around it. + */ + +#ifdef WITH_THREAD + char *volatile *lock = (char *volatile *)_PyParser_TokenNames; + char *old_value; + + while (1) { /* spin loop */ + old_value = *lock; + if (old_value[0] == 'E') { + assert(old_value[1] == 'N'); + if (cffi_compare_and_swap(lock, old_value, old_value + 1)) + break; + } + else { + assert(old_value[0] == 'N'); + /* should ideally do a spin loop instruction here, but + hard to do it portably and doesn't really matter I + think: PyEval_InitThreads() should be very fast, and + this is only run at start-up anyway. 
*/ + } + } +#endif + +#if PY_MAJOR_VERSION >= 3 + /* Python 3: call Py_InitializeEx() */ + { + PyGILState_STATE state = PyGILState_UNLOCKED; + if (!Py_IsInitialized()) + _cffi_py_initialize(); + else + state = PyGILState_Ensure(); + + PyEval_InitThreads(); + PyGILState_Release(state); + } +#else + /* Python 2: call PyEval_InitThreads() */ +# ifdef WITH_THREAD + if (!PyEval_ThreadsInitialized()) { + PyEval_InitThreads(); /* makes the GIL */ + PyEval_ReleaseLock(); /* then release it */ + } + /* else: there is already a GIL, but we still needed to do the + spinlock dance to make sure that we see it as fully ready */ +# endif +#endif + +#ifdef WITH_THREAD + /* release the lock */ + while (!cffi_compare_and_swap(lock, old_value + 1, old_value)) + ; +#endif + + return 0; +} + +/********** end CPython-specific section **********/ + + +#else + + +/********** PyPy-specific section **********/ + +PyMODINIT_FUNC _CFFI_PYTHON_STARTUP_FUNC(const void *[]); /* forward */ + +static struct _cffi_pypy_init_s { + const char *name; + void (*func)(const void *[]); + const char *code; +} _cffi_pypy_init = { + _CFFI_MODULE_NAME, + _CFFI_PYTHON_STARTUP_FUNC, + _CFFI_PYTHON_STARTUP_CODE, +}; + +extern int pypy_carefully_make_gil(const char *); +extern int pypy_init_embedded_cffi_module(int, struct _cffi_pypy_init_s *); + +static int _cffi_carefully_make_gil(void) +{ + return pypy_carefully_make_gil(_CFFI_MODULE_NAME); +} + +static int _cffi_initialize_python(void) +{ + return pypy_init_embedded_cffi_module(0xB011, &_cffi_pypy_init); +} + +/********** end PyPy-specific section **********/ + + +#endif + + +#ifdef __GNUC__ +__attribute__((noinline)) +#endif +static _cffi_call_python_fnptr _cffi_start_python(void) +{ + /* Delicate logic to initialize Python. This function can be + called multiple times concurrently, e.g. when the process calls + its first ``extern "Python"`` functions in multiple threads at + once. It can also be called recursively, in which case we must + ignore it. 
We also have to consider what occurs if several + different cffi-based extensions reach this code in parallel + threads---it is a different copy of the code, then, and we + can't have any shared global variable unless it comes from + 'libpythonX.Y.so'. + + Idea: + + * _cffi_carefully_make_gil(): "carefully" call + PyEval_InitThreads() (possibly with Py_InitializeEx() first). + + * then we use a (local) custom lock to make sure that a call to this + cffi-based extension will wait if another call to the *same* + extension is running the initialization in another thread. + It is reentrant, so that a recursive call will not block, but + only one from a different thread. + + * then we grab the GIL and (Python 2) we call Py_InitializeEx(). + At this point, concurrent calls to Py_InitializeEx() are not + possible: we have the GIL. + + * do the rest of the specific initialization, which may + temporarily release the GIL but not the custom lock. + Only release the custom lock when we are done. + */ + static char called = 0; + + if (_cffi_carefully_make_gil() != 0) + return NULL; + + _cffi_acquire_reentrant_mutex(); + + /* Here the GIL exists, but we don't have it. We're only protected + from concurrency by the reentrant mutex. */ + + /* This file only initializes the embedded module once, the first + time this is called, even if there are subinterpreters. */ + if (!called) { + called = 1; /* invoke _cffi_initialize_python() only once, + but don't set '_cffi_call_python' right now, + otherwise concurrent threads won't call + this function at all (we need them to wait) */ + if (_cffi_initialize_python() == 0) { + /* now initialization is finished. Switch to the fast-path. */ + + /* We would like nobody to see the new value of + '_cffi_call_python' without also seeing the rest of the + data initialized. However, this is not possible. But + the new value of '_cffi_call_python' is the function + 'cffi_call_python()' from _cffi_backend. 
So: */ + cffi_write_barrier(); + /* ^^^ we put a write barrier here, and a corresponding + read barrier at the start of cffi_call_python(). This + ensures that after that read barrier, we see everything + done here before the write barrier. + */ + + assert(_cffi_call_python_org != NULL); + _cffi_call_python = (_cffi_call_python_fnptr)_cffi_call_python_org; + } + else { + /* initialization failed. Reset this to NULL, even if it was + already set to some other value. Future calls to + _cffi_start_python() are still forced to occur, and will + always return NULL from now on. */ + _cffi_call_python_org = NULL; + } + } + + _cffi_release_reentrant_mutex(); + + return (_cffi_call_python_fnptr)_cffi_call_python_org; +} + +static +void _cffi_start_and_call_python(struct _cffi_externpy_s *externpy, char *args) +{ + _cffi_call_python_fnptr fnptr; + int current_err = errno; +#ifdef _MSC_VER + int current_lasterr = GetLastError(); +#endif + fnptr = _cffi_start_python(); + if (fnptr == NULL) { + fprintf(stderr, "function %s() called, but initialization code " + "failed. Returning 0.\n", externpy->name); + memset(args, 0, externpy->size_of_result); + } +#ifdef _MSC_VER + SetLastError(current_lasterr); +#endif + errno = current_err; + + if (fnptr != NULL) + fnptr(externpy, args); +} + + +/* The cffi_start_python() function makes sure Python is initialized + and our cffi module is set up. It can be called manually from the + user C code. The same effect is obtained automatically from any + dll-exported ``extern "Python"`` function. This function returns + -1 if initialization failed, 0 if all is OK. 
*/ +_CFFI_UNUSED_FN +static int cffi_start_python(void) +{ + if (_cffi_call_python == &_cffi_start_and_call_python) { + if (_cffi_start_python() == NULL) + return -1; + } + cffi_read_barrier(); + return 0; +} + +#undef cffi_compare_and_swap +#undef cffi_write_barrier +#undef cffi_read_barrier From pypy.commits at gmail.com Tue Feb 9 17:33:50 2016 From: pypy.commits at gmail.com (arigo) Date: Tue, 09 Feb 2016 14:33:50 -0800 (PST) Subject: [pypy-commit] pypy default: import cffi 1.5.1 at rev. 5833cd037e9e Message-ID: <56ba694e.6217c20a.bbcb1.0ede@mx.google.com> Author: Armin Rigo Branch: Changeset: r82136:e83d573de6df Date: 2016-02-09 23:32 +0100 http://bitbucket.org/pypy/pypy/changeset/e83d573de6df/ Log: import cffi 1.5.1 at rev. 5833cd037e9e diff --git a/lib_pypy/cffi.egg-info/PKG-INFO b/lib_pypy/cffi.egg-info/PKG-INFO --- a/lib_pypy/cffi.egg-info/PKG-INFO +++ b/lib_pypy/cffi.egg-info/PKG-INFO @@ -1,6 +1,6 @@ Metadata-Version: 1.1 Name: cffi -Version: 1.5.0 +Version: 1.5.1 Summary: Foreign Function Interface for Python calling C code. Home-page: http://cffi.readthedocs.org Author: Armin Rigo, Maciej Fijalkowski diff --git a/lib_pypy/cffi/__init__.py b/lib_pypy/cffi/__init__.py --- a/lib_pypy/cffi/__init__.py +++ b/lib_pypy/cffi/__init__.py @@ -4,8 +4,8 @@ from .api import FFI, CDefError, FFIError from .ffiplatform import VerificationError, VerificationMissing -__version__ = "1.5.0" -__version_info__ = (1, 5, 0) +__version__ = "1.5.1" +__version_info__ = (1, 5, 1) # The verifier module file names are based on the CRC32 of a string that # contains the following version number. 
It may be older than __version__ diff --git a/lib_pypy/cffi/_cffi_include.h b/lib_pypy/cffi/_cffi_include.h --- a/lib_pypy/cffi/_cffi_include.h +++ b/lib_pypy/cffi/_cffi_include.h @@ -231,6 +231,12 @@ ((got_nonpos) == (expected <= 0) && \ (got) == (unsigned long long)expected) +#ifdef MS_WIN32 +# define _cffi_stdcall __stdcall +#else +# define _cffi_stdcall /* nothing */ +#endif + #ifdef __cplusplus } #endif diff --git a/lib_pypy/cffi/api.py b/lib_pypy/cffi/api.py --- a/lib_pypy/cffi/api.py +++ b/lib_pypy/cffi/api.py @@ -1,4 +1,4 @@ -import sys, types +import sys, sysconfig, types from .lock import allocate_lock try: @@ -544,28 +544,32 @@ def _apply_embedding_fix(self, kwds): # must include an argument like "-lpython2.7" for the compiler + def ensure(key, value): + lst = kwds.setdefault(key, []) + if value not in lst: + lst.append(value) + # if '__pypy__' in sys.builtin_module_names: if hasattr(sys, 'prefix'): import os - libdir = os.path.join(sys.prefix, 'bin') - dirs = kwds.setdefault('library_dirs', []) - if libdir not in dirs: - dirs.append(libdir) + ensure('library_dirs', os.path.join(sys.prefix, 'bin')) pythonlib = "pypy-c" else: if sys.platform == "win32": template = "python%d%d" - if sys.flags.debug: - template = template + '_d' + if hasattr(sys, 'gettotalrefcount'): + template += '_d' else: template = "python%d.%d" + if sysconfig.get_config_var('DEBUG_EXT'): + template += sysconfig.get_config_var('DEBUG_EXT') pythonlib = (template % (sys.hexversion >> 24, (sys.hexversion >> 16) & 0xff)) if hasattr(sys, 'abiflags'): pythonlib += sys.abiflags - libraries = kwds.setdefault('libraries', []) - if pythonlib not in libraries: - libraries.append(pythonlib) + ensure('libraries', pythonlib) + if sys.platform == "win32": + ensure('extra_link_args', '/MANIFEST') def set_source(self, module_name, source, source_extension='.c', **kwds): if hasattr(self, '_assigned_source'): @@ -631,7 +635,7 @@ compiled DLL. 
Use '*' to force distutils' choice, suitable for regular CPython C API modules. Use a file name ending in '.*' to ask for the system's default extension for dynamic libraries - (.so/.dll). + (.so/.dll/.dylib). The default is '*' when building a non-embedded C API extension, and (module_name + '.*') when building an embedded library. @@ -695,6 +699,10 @@ # self._embedding = pysource + def def_extern(self, *args, **kwds): + raise ValueError("ffi.def_extern() is only available on API-mode FFI " + "objects") + def _load_backend_lib(backend, name, flags): if name is None: diff --git a/lib_pypy/cffi/cparser.py b/lib_pypy/cffi/cparser.py --- a/lib_pypy/cffi/cparser.py +++ b/lib_pypy/cffi/cparser.py @@ -220,7 +220,7 @@ self._included_declarations = set() self._anonymous_counter = 0 self._structnode2type = weakref.WeakKeyDictionary() - self._options = None + self._options = {} self._int_constants = {} self._recomplete = [] self._uses_new_feature = None @@ -374,7 +374,7 @@ def _declare_function(self, tp, quals, decl): tp = self._get_type_pointer(tp, quals) - if self._options['dllexport']: + if self._options.get('dllexport'): tag = 'dllexport_python ' elif self._inside_extern_python: tag = 'extern_python ' @@ -450,7 +450,7 @@ prevobj, prevquals = self._declarations[name] if prevobj is obj and prevquals == quals: return - if not self._options['override']: + if not self._options.get('override'): raise api.FFIError( "multiple declarations of %s (for interactive usage, " "try cdef(xx, override=True))" % (name,)) @@ -729,7 +729,7 @@ if isinstance(tp, model.StructType) and tp.partial: raise NotImplementedError("%s: using both bitfields and '...;'" % (tp,)) - tp.packed = self._options['packed'] + tp.packed = self._options.get('packed') if tp.completed: # must be re-completed: it is not opaque any more tp.completed = 0 self._recomplete.append(tp) diff --git a/lib_pypy/cffi/ffiplatform.py b/lib_pypy/cffi/ffiplatform.py --- a/lib_pypy/cffi/ffiplatform.py +++ 
b/lib_pypy/cffi/ffiplatform.py @@ -21,14 +21,12 @@ allsources.append(os.path.normpath(src)) return Extension(name=modname, sources=allsources, **kwds) -def compile(tmpdir, ext, compiler_verbose=0, target_extension=None, - embedding=False): +def compile(tmpdir, ext, compiler_verbose=0): """Compile a C extension module using distutils.""" saved_environ = os.environ.copy() try: - outputfilename = _build(tmpdir, ext, compiler_verbose, - target_extension, embedding) + outputfilename = _build(tmpdir, ext, compiler_verbose) outputfilename = os.path.abspath(outputfilename) finally: # workaround for a distutils bugs where some env vars can @@ -38,32 +36,7 @@ os.environ[key] = value return outputfilename -def _save_val(name): - import distutils.sysconfig - config_vars = distutils.sysconfig.get_config_vars() - return config_vars.get(name, Ellipsis) - -def _restore_val(name, value): - import distutils.sysconfig - config_vars = distutils.sysconfig.get_config_vars() - config_vars[name] = value - if value is Ellipsis: - del config_vars[name] - -def _win32_hack_for_embedding(): - from distutils.msvc9compiler import MSVCCompiler - if not hasattr(MSVCCompiler, '_remove_visual_c_ref_CFFI_BAK'): - MSVCCompiler._remove_visual_c_ref_CFFI_BAK = \ - MSVCCompiler._remove_visual_c_ref - MSVCCompiler._remove_visual_c_ref = lambda self,manifest_file: manifest_file - -def _win32_unhack_for_embedding(): - from distutils.msvc9compiler import MSVCCompiler - MSVCCompiler._remove_visual_c_ref = \ - MSVCCompiler._remove_visual_c_ref_CFFI_BAK - -def _build(tmpdir, ext, compiler_verbose=0, target_extension=None, - embedding=False): +def _build(tmpdir, ext, compiler_verbose=0): # XXX compact but horrible :-( from distutils.core import Distribution import distutils.errors, distutils.log @@ -76,25 +49,14 @@ options['build_temp'] = ('ffiplatform', tmpdir) # try: - if sys.platform == 'win32' and embedding: - _win32_hack_for_embedding() old_level = distutils.log.set_threshold(0) or 0 - old_SO = 
_save_val('SO') - old_EXT_SUFFIX = _save_val('EXT_SUFFIX') try: - if target_extension is not None: - _restore_val('SO', target_extension) - _restore_val('EXT_SUFFIX', target_extension) distutils.log.set_verbosity(compiler_verbose) dist.run_command('build_ext') cmd_obj = dist.get_command_obj('build_ext') [soname] = cmd_obj.get_outputs() finally: distutils.log.set_threshold(old_level) - _restore_val('SO', old_SO) - _restore_val('EXT_SUFFIX', old_EXT_SUFFIX) - if sys.platform == 'win32' and embedding: - _win32_unhack_for_embedding() except (distutils.errors.CompileError, distutils.errors.LinkError) as e: raise VerificationError('%s: %s' % (e.__class__.__name__, e)) diff --git a/lib_pypy/cffi/recompiler.py b/lib_pypy/cffi/recompiler.py --- a/lib_pypy/cffi/recompiler.py +++ b/lib_pypy/cffi/recompiler.py @@ -1170,6 +1170,8 @@ repr_arguments = ', '.join(arguments) repr_arguments = repr_arguments or 'void' name_and_arguments = '%s(%s)' % (name, repr_arguments) + if tp.abi == "__stdcall": + name_and_arguments = '_cffi_stdcall ' + name_and_arguments # def may_need_128_bits(tp): return (isinstance(tp, model.PrimitiveType) and @@ -1357,6 +1359,58 @@ parts[-1] += extension return os.path.join(outputdir, *parts), parts + +# Aaargh. Distutils is not tested at all for the purpose of compiling +# DLLs that are not extension modules. Here are some hacks to work +# around that, in the _patch_for_*() functions... + +def _patch_meth(patchlist, cls, name, new_meth): + old = getattr(cls, name) + patchlist.append((cls, name, old)) + setattr(cls, name, new_meth) + return old + +def _unpatch_meths(patchlist): + for cls, name, old_meth in reversed(patchlist): + setattr(cls, name, old_meth) + +def _patch_for_embedding(patchlist): + if sys.platform == 'win32': + # we must not remove the manifest when building for embedding! 
+ from distutils.msvc9compiler import MSVCCompiler + _patch_meth(patchlist, MSVCCompiler, '_remove_visual_c_ref', + lambda self, manifest_file: manifest_file) + + if sys.platform == 'darwin': + # we must not make a '-bundle', but a '-dynamiclib' instead + from distutils.ccompiler import CCompiler + def my_link_shared_object(self, *args, **kwds): + if '-bundle' in self.linker_so: + self.linker_so = list(self.linker_so) + i = self.linker_so.index('-bundle') + self.linker_so[i] = '-dynamiclib' + return old_link_shared_object(self, *args, **kwds) + old_link_shared_object = _patch_meth(patchlist, CCompiler, + 'link_shared_object', + my_link_shared_object) + +def _patch_for_target(patchlist, target): + from distutils.command.build_ext import build_ext + # if 'target' is different from '*', we need to patch some internal + # method to just return this 'target' value, instead of having it + # built from module_name + if target.endswith('.*'): + target = target[:-2] + if sys.platform == 'win32': + target += '.dll' + elif sys.platform == 'darwin': + target += '.dylib' + else: + target += '.so' + _patch_meth(patchlist, build_ext, 'get_ext_filename', + lambda self, ext_name: target) + + def recompile(ffi, module_name, preamble, tmpdir='.', call_c_compiler=True, c_file=None, source_extension='.c', extradir=None, compiler_verbose=1, target=None, **kwds): @@ -1382,36 +1436,22 @@ target = '%s.*' % module_name else: target = '*' - if target == '*': - target_module_name = module_name - target_extension = None # use default - else: - if target.endswith('.*'): - target = target[:-2] - if sys.platform == 'win32': - target += '.dll' - else: - target += '.so' - # split along the first '.' 
(not the last one, otherwise the - # preceeding dots are interpreted as splitting package names) - index = target.find('.') - if index < 0: - raise ValueError("target argument %r should be a file name " - "containing a '.'" % (target,)) - target_module_name = target[:index] - target_extension = target[index:] # - ext = ffiplatform.get_extension(ext_c_file, target_module_name, **kwds) + ext = ffiplatform.get_extension(ext_c_file, module_name, **kwds) updated = make_c_source(ffi, module_name, preamble, c_file) if call_c_compiler: + patchlist = [] cwd = os.getcwd() try: + if embedding: + _patch_for_embedding(patchlist) + if target != '*': + _patch_for_target(patchlist, target) os.chdir(tmpdir) - outputfilename = ffiplatform.compile('.', ext, compiler_verbose, - target_extension, - embedding=embedding) + outputfilename = ffiplatform.compile('.', ext, compiler_verbose) finally: os.chdir(cwd) + _unpatch_meths(patchlist) return outputfilename else: return ext, updated diff --git a/pypy/module/_cffi_backend/__init__.py b/pypy/module/_cffi_backend/__init__.py --- a/pypy/module/_cffi_backend/__init__.py +++ b/pypy/module/_cffi_backend/__init__.py @@ -3,7 +3,7 @@ from rpython.rlib import rdynload, clibffi, entrypoint from rpython.rtyper.lltypesystem import rffi -VERSION = "1.5.0" +VERSION = "1.5.1" FFI_DEFAULT_ABI = clibffi.FFI_DEFAULT_ABI try: diff --git a/pypy/module/_cffi_backend/test/_backend_test_c.py b/pypy/module/_cffi_backend/test/_backend_test_c.py --- a/pypy/module/_cffi_backend/test/_backend_test_c.py +++ b/pypy/module/_cffi_backend/test/_backend_test_c.py @@ -1,7 +1,7 @@ # ____________________________________________________________ import sys -assert __version__ == "1.5.0", ("This test_c.py file is for testing a version" +assert __version__ == "1.5.1", ("This test_c.py file is for testing a version" " of cffi that differs from the one that we" " get from 'import _cffi_backend'") if sys.version_info < (3,): diff --git 
a/pypy/module/test_lib_pypy/cffi_tests/cffi0/backend_tests.py b/pypy/module/test_lib_pypy/cffi_tests/cffi0/backend_tests.py --- a/pypy/module/test_lib_pypy/cffi_tests/cffi0/backend_tests.py +++ b/pypy/module/test_lib_pypy/cffi_tests/cffi0/backend_tests.py @@ -1847,3 +1847,8 @@ thread.start_new_thread(f, ()) time.sleep(1.5) assert seen == ['init!', 'init done'] + 6 * [7] + + def test_sizeof_struct_directly(self): + # only works with the Python FFI instances + ffi = FFI(backend=self.Backend()) + assert ffi.sizeof("struct{int a;}") == ffi.sizeof("int") diff --git a/pypy/module/test_lib_pypy/cffi_tests/cffi0/test_ffi_backend.py b/pypy/module/test_lib_pypy/cffi_tests/cffi0/test_ffi_backend.py --- a/pypy/module/test_lib_pypy/cffi_tests/cffi0/test_ffi_backend.py +++ b/pypy/module/test_lib_pypy/cffi_tests/cffi0/test_ffi_backend.py @@ -420,3 +420,7 @@ ]: x = ffi.sizeof(name) assert 1 <= x <= 16 + + def test_ffi_def_extern(self): + ffi = FFI() + py.test.raises(ValueError, ffi.def_extern) diff --git a/pypy/module/test_lib_pypy/cffi_tests/cffi0/test_verify.py b/pypy/module/test_lib_pypy/cffi_tests/cffi0/test_verify.py --- a/pypy/module/test_lib_pypy/cffi_tests/cffi0/test_verify.py +++ b/pypy/module/test_lib_pypy/cffi_tests/cffi0/test_verify.py @@ -92,8 +92,8 @@ assert lib.sin(1.23) == math.sin(1.23) def _Wconversion(cdef, source, **kargs): - if sys.platform == 'win32': - py.test.skip("needs GCC or Clang") + if sys.platform in ('win32', 'darwin'): + py.test.skip("needs GCC") ffi = FFI() ffi.cdef(cdef) py.test.raises(VerificationError, ffi.verify, source, **kargs) diff --git a/pypy/module/test_lib_pypy/cffi_tests/cffi1/test_recompiler.py b/pypy/module/test_lib_pypy/cffi_tests/cffi1/test_recompiler.py --- a/pypy/module/test_lib_pypy/cffi_tests/cffi1/test_recompiler.py +++ b/pypy/module/test_lib_pypy/cffi_tests/cffi1/test_recompiler.py @@ -1714,3 +1714,33 @@ # a case where 'onerror' is not callable py.test.raises(TypeError, ffi.def_extern(name='bar', onerror=42), lambda x: x) + 
+def test_extern_python_stdcall(): + ffi = FFI() + ffi.cdef(""" + extern "Python" int __stdcall foo(int); + extern "Python" int WINAPI bar(int); + int (__stdcall * mycb1)(int); + int indirect_call(int); + """) + lib = verify(ffi, 'test_extern_python_stdcall', """ + #ifndef _MSC_VER + # define __stdcall + #endif + static int (__stdcall * mycb1)(int); + static int indirect_call(int x) { + return mycb1(x); + } + """) + # + @ffi.def_extern() + def foo(x): + return x + 42 + @ffi.def_extern() + def bar(x): + return x + 43 + assert lib.foo(100) == 142 + assert lib.bar(100) == 143 + lib.mycb1 = lib.foo + assert lib.mycb1(200) == 242 + assert lib.indirect_call(300) == 342 diff --git a/pypy/module/test_lib_pypy/cffi_tests/cffi1/test_verify1.py b/pypy/module/test_lib_pypy/cffi_tests/cffi1/test_verify1.py --- a/pypy/module/test_lib_pypy/cffi_tests/cffi1/test_verify1.py +++ b/pypy/module/test_lib_pypy/cffi_tests/cffi1/test_verify1.py @@ -72,8 +72,8 @@ assert lib.sin(1.23) == math.sin(1.23) def _Wconversion(cdef, source, **kargs): - if sys.platform == 'win32': - py.test.skip("needs GCC or Clang") + if sys.platform in ('win32', 'darwin'): + py.test.skip("needs GCC") ffi = FFI() ffi.cdef(cdef) py.test.raises(VerificationError, ffi.verify, source, **kargs) @@ -2092,20 +2092,20 @@ old = sys.getdlopenflags() try: ffi1 = FFI() - ffi1.cdef("int foo_verify_dlopen_flags;") + ffi1.cdef("int foo_verify_dlopen_flags_1;") sys.setdlopenflags(ffi1.RTLD_GLOBAL | ffi1.RTLD_NOW) - lib1 = ffi1.verify("int foo_verify_dlopen_flags;") + lib1 = ffi1.verify("int foo_verify_dlopen_flags_1;") finally: sys.setdlopenflags(old) ffi2 = FFI() ffi2.cdef("int *getptr(void);") lib2 = ffi2.verify(""" - extern int foo_verify_dlopen_flags; - static int *getptr(void) { return &foo_verify_dlopen_flags; } + extern int foo_verify_dlopen_flags_1; + static int *getptr(void) { return &foo_verify_dlopen_flags_1; } """) p = lib2.getptr() - assert ffi1.addressof(lib1, 'foo_verify_dlopen_flags') == p + assert 
ffi1.addressof(lib1, 'foo_verify_dlopen_flags_1') == p def test_consider_not_implemented_function_type(): ffi = FFI() diff --git a/pypy/module/test_lib_pypy/cffi_tests/cffi1/test_zdist.py b/pypy/module/test_lib_pypy/cffi_tests/cffi1/test_zdist.py --- a/pypy/module/test_lib_pypy/cffi_tests/cffi1/test_zdist.py +++ b/pypy/module/test_lib_pypy/cffi_tests/cffi1/test_zdist.py @@ -49,7 +49,8 @@ import setuptools except ImportError: py.test.skip("setuptools not found") - self.run(['setup.py', 'egg_info'], cwd=self.rootdir) + if os.path.exists(os.path.join(self.rootdir, 'setup.py')): + self.run(['setup.py', 'egg_info'], cwd=self.rootdir) TestDist._setuptools_ready = True def check_produced_files(self, content, curdir=None): @@ -58,7 +59,7 @@ found_so = None for name in os.listdir(curdir): if (name.endswith('.so') or name.endswith('.pyd') or - name.endswith('.dylib')): + name.endswith('.dylib') or name.endswith('.dll')): found_so = os.path.join(curdir, name) # foo.so => foo parts = name.split('.') @@ -220,23 +221,6 @@ x = ffi.compile(target="foo.bar.*") if sys.platform != 'win32': sofile = self.check_produced_files({ - 'foo.bar.SO': None, - 'mod_name_in_package': {'mymod.c': None, - 'mymod.o': None}}) - assert os.path.isabs(x) and os.path.samefile(x, sofile) - else: - self.check_produced_files({ - 'foo.bar.SO': None, - 'mod_name_in_package': {'mymod.c': None}, - 'Release': '?'}) - - @chdir_to_tmp - def test_api_compile_explicit_target_2(self): - ffi = cffi.FFI() - ffi.set_source("mod_name_in_package.mymod", "/*code would be here*/") - x = ffi.compile(target=os.path.join("mod_name_in_package", "foo.bar.*")) - if sys.platform != 'win32': - sofile = self.check_produced_files({ 'mod_name_in_package': {'foo.bar.SO': None, 'mymod.c': None, 'mymod.o': None}}) @@ -254,15 +238,16 @@ x = ffi.compile(target="foo.bar.baz") if sys.platform != 'win32': self.check_produced_files({ - 'foo.bar.baz': None, - 'mod_name_in_package': {'mymod.c': None, + 'mod_name_in_package': {'foo.bar.baz': 
None, + 'mymod.c': None, 'mymod.o': None}}) - sofile = os.path.join(str(self.udir), 'foo.bar.baz') + sofile = os.path.join(str(self.udir), + 'mod_name_in_package', 'foo.bar.baz') assert os.path.isabs(x) and os.path.samefile(x, sofile) else: self.check_produced_files({ - 'foo.bar.baz': None, - 'mod_name_in_package': {'mymod.c': None}, + 'mod_name_in_package': {'foo.bar.baz': None, + 'mymod.c': None}, 'Release': '?'}) @chdir_to_tmp diff --git a/pypy/module/test_lib_pypy/cffi_tests/embedding/test_basic.py b/pypy/module/test_lib_pypy/cffi_tests/embedding/test_basic.py --- a/pypy/module/test_lib_pypy/cffi_tests/embedding/test_basic.py +++ b/pypy/module/test_lib_pypy/cffi_tests/embedding/test_basic.py @@ -5,10 +5,6 @@ from pypy.module.test_lib_pypy.cffi_tests.udir import udir import cffi -if hasattr(sys, 'gettotalrefcount'): - py.test.skip("tried hard and failed to have these tests run " - "in a debug-mode python") - local_dir = os.path.dirname(os.path.abspath(__file__)) _link_error = '?' @@ -30,21 +26,34 @@ py.test.skip(str(_link_error)) +def prefix_pythonpath(): + cffi_base = os.path.dirname(os.path.dirname(local_dir)) + pythonpath = os.environ.get('PYTHONPATH', '').split(os.pathsep) + if cffi_base not in pythonpath: + pythonpath.insert(0, cffi_base) + return os.pathsep.join(pythonpath) + + class EmbeddingTests: _compiled_modules = {} def setup_method(self, meth): check_lib_python_found(str(udir.ensure('embedding', dir=1))) self._path = udir.join('embedding', meth.__name__) - if sys.platform == "win32": + if sys.platform == "win32" or sys.platform == "darwin": self._compiled_modules.clear() # workaround def get_path(self): return str(self._path.ensure(dir=1)) - def _run(self, args, env=None): - print(args) - popen = subprocess.Popen(args, env=env, cwd=self.get_path(), + def _run_base(self, args, env_extra={}, **kwds): + print('RUNNING:', args, env_extra, kwds) + env = os.environ.copy() + env.update(env_extra) + return subprocess.Popen(args, env=env, **kwds) + + def 
_run(self, args, env_extra={}): + popen = self._run_base(args, env_extra, cwd=self.get_path(), stdout=subprocess.PIPE, universal_newlines=True) output = popen.stdout.read() @@ -65,15 +74,16 @@ # find a solution to that: we could hack sys.path inside the # script run here, but we can't hack it in the same way in # execute(). - env = os.environ.copy() - env['PYTHONPATH'] = os.path.dirname(os.path.dirname(local_dir)) + env_extra = {'PYTHONPATH': prefix_pythonpath()} output = self._run([sys.executable, os.path.join(local_dir, filename)], - env=env) + env_extra=env_extra) match = re.compile(r"\bFILENAME: (.+)").search(output) assert match dynamic_lib_name = match.group(1) if sys.platform == 'win32': assert dynamic_lib_name.endswith('_cffi.dll') + elif sys.platform == 'darwin': + assert dynamic_lib_name.endswith('_cffi.dylib') else: assert dynamic_lib_name.endswith('_cffi.so') self._compiled_modules[name] = dynamic_lib_name @@ -98,6 +108,7 @@ assert m.endswith('.dll') libfiles.append('Release\\%s.lib' % m[:-4]) modules = libfiles + extra_preargs.append('/MANIFEST') elif threads: extra_preargs.append('-pthread') objects = c.compile([filename], macros=sorted(defines.items()), debug=True) @@ -107,21 +118,22 @@ def execute(self, name): path = self.get_path() - env = os.environ.copy() - env['PYTHONPATH'] = os.path.dirname(os.path.dirname(local_dir)) - libpath = env.get('LD_LIBRARY_PATH') + env_extra = {'PYTHONPATH': prefix_pythonpath()} + libpath = os.environ.get('LD_LIBRARY_PATH') if libpath: libpath = path + ':' + libpath else: libpath = path - env['LD_LIBRARY_PATH'] = libpath + env_extra['LD_LIBRARY_PATH'] = libpath print('running %r in %r' % (name, path)) executable_name = name if sys.platform == 'win32': executable_name = os.path.join(path, executable_name + '.exe') - popen = subprocess.Popen([executable_name], cwd=path, env=env, - stdout=subprocess.PIPE, - universal_newlines=True) + else: + executable_name = os.path.join('.', executable_name) + popen = 
self._run_base([executable_name], env_extra, cwd=path, + stdout=subprocess.PIPE, + universal_newlines=True) result = popen.stdout.read() err = popen.wait() if err: From pypy.commits at gmail.com Wed Feb 10 03:02:30 2016 From: pypy.commits at gmail.com (plan_rich) Date: Wed, 10 Feb 2016 00:02:30 -0800 (PST) Subject: [pypy-commit] pypy s390x-backend: catchup with default Message-ID: <56baee96.05e41c0a.992b1.108a@mx.google.com> Author: Richard Plangger Branch: s390x-backend Changeset: r82137:506ffad57f5e Date: 2016-02-10 09:01 +0100 http://bitbucket.org/pypy/pypy/changeset/506ffad57f5e/ Log: catchup with default diff too long, truncating to 2000 out of 4971 lines diff --git a/.hgignore b/.hgignore --- a/.hgignore +++ b/.hgignore @@ -75,6 +75,7 @@ ^lib_pypy/__pycache__$ ^lib_pypy/ctypes_config_cache/_.+_cache\.py$ ^lib_pypy/ctypes_config_cache/_.+_.+_\.py$ +^lib_pypy/_libmpdec/.+.o$ ^rpython/translator/cli/query-descriptions$ ^pypy/doc/discussion/.+\.html$ ^include/.+\.h$ diff --git a/dotviewer/drawgraph.py b/dotviewer/drawgraph.py --- a/dotviewer/drawgraph.py +++ b/dotviewer/drawgraph.py @@ -14,12 +14,661 @@ FONT = os.path.join(this_dir, 'font', 'DroidSans.ttf') FIXEDFONT = os.path.join(this_dir, 'font', 'DroidSansMono.ttf') COLOR = { - 'black': (0,0,0), - 'white': (255,255,255), - 'red': (255,0,0), - 'green': (0,255,0), - 'blue': (0,0,255), - 'yellow': (255,255,0), + 'aliceblue': (240, 248, 255), + 'antiquewhite': (250, 235, 215), + 'antiquewhite1': (255, 239, 219), + 'antiquewhite2': (238, 223, 204), + 'antiquewhite3': (205, 192, 176), + 'antiquewhite4': (139, 131, 120), + 'aquamarine': (127, 255, 212), + 'aquamarine1': (127, 255, 212), + 'aquamarine2': (118, 238, 198), + 'aquamarine3': (102, 205, 170), + 'aquamarine4': (69, 139, 116), + 'azure': (240, 255, 255), + 'azure1': (240, 255, 255), + 'azure2': (224, 238, 238), + 'azure3': (193, 205, 205), + 'azure4': (131, 139, 139), + 'beige': (245, 245, 220), + 'bisque': (255, 228, 196), + 'bisque1': (255, 228, 196), + 
'bisque2': (238, 213, 183), + 'bisque3': (205, 183, 158), + 'bisque4': (139, 125, 107), + 'black': (0, 0, 0), + 'blanchedalmond': (255, 235, 205), + 'blue': (0, 0, 255), + 'blue1': (0, 0, 255), + 'blue2': (0, 0, 238), + 'blue3': (0, 0, 205), + 'blue4': (0, 0, 139), + 'blueviolet': (138, 43, 226), + 'brown': (165, 42, 42), + 'brown1': (255, 64, 64), + 'brown2': (238, 59, 59), + 'brown3': (205, 51, 51), + 'brown4': (139, 35, 35), + 'burlywood': (222, 184, 135), + 'burlywood1': (255, 211, 155), + 'burlywood2': (238, 197, 145), + 'burlywood3': (205, 170, 125), + 'burlywood4': (139, 115, 85), + 'cadetblue': (95, 158, 160), + 'cadetblue1': (152, 245, 255), + 'cadetblue2': (142, 229, 238), + 'cadetblue3': (122, 197, 205), + 'cadetblue4': (83, 134, 139), + 'chartreuse': (127, 255, 0), + 'chartreuse1': (127, 255, 0), + 'chartreuse2': (118, 238, 0), + 'chartreuse3': (102, 205, 0), + 'chartreuse4': (69, 139, 0), + 'chocolate': (210, 105, 30), + 'chocolate1': (255, 127, 36), + 'chocolate2': (238, 118, 33), + 'chocolate3': (205, 102, 29), + 'chocolate4': (139, 69, 19), + 'coral': (255, 127, 80), + 'coral1': (255, 114, 86), + 'coral2': (238, 106, 80), + 'coral3': (205, 91, 69), + 'coral4': (139, 62, 47), + 'cornflowerblue': (100, 149, 237), + 'cornsilk': (255, 248, 220), + 'cornsilk1': (255, 248, 220), + 'cornsilk2': (238, 232, 205), + 'cornsilk3': (205, 200, 177), + 'cornsilk4': (139, 136, 120), + 'crimson': (220, 20, 60), + 'cyan': (0, 255, 255), + 'cyan1': (0, 255, 255), + 'cyan2': (0, 238, 238), + 'cyan3': (0, 205, 205), + 'cyan4': (0, 139, 139), + 'darkgoldenrod': (184, 134, 11), + 'darkgoldenrod1': (255, 185, 15), + 'darkgoldenrod2': (238, 173, 14), + 'darkgoldenrod3': (205, 149, 12), + 'darkgoldenrod4': (139, 101, 8), + 'darkgreen': (0, 100, 0), + 'darkkhaki': (189, 183, 107), + 'darkolivegreen': (85, 107, 47), + 'darkolivegreen1': (202, 255, 112), + 'darkolivegreen2': (188, 238, 104), + 'darkolivegreen3': (162, 205, 90), + 'darkolivegreen4': (110, 139, 61), + 
'darkorange': (255, 140, 0), + 'darkorange1': (255, 127, 0), + 'darkorange2': (238, 118, 0), + 'darkorange3': (205, 102, 0), + 'darkorange4': (139, 69, 0), + 'darkorchid': (153, 50, 204), + 'darkorchid1': (191, 62, 255), + 'darkorchid2': (178, 58, 238), + 'darkorchid3': (154, 50, 205), + 'darkorchid4': (104, 34, 139), + 'darksalmon': (233, 150, 122), + 'darkseagreen': (143, 188, 143), + 'darkseagreen1': (193, 255, 193), + 'darkseagreen2': (180, 238, 180), + 'darkseagreen3': (155, 205, 155), + 'darkseagreen4': (105, 139, 105), + 'darkslateblue': (72, 61, 139), + 'darkslategray': (47, 79, 79), + 'darkslategray1': (151, 255, 255), + 'darkslategray2': (141, 238, 238), + 'darkslategray3': (121, 205, 205), + 'darkslategray4': (82, 139, 139), + 'darkslategrey': (47, 79, 79), + 'darkturquoise': (0, 206, 209), + 'darkviolet': (148, 0, 211), + 'deeppink': (255, 20, 147), + 'deeppink1': (255, 20, 147), + 'deeppink2': (238, 18, 137), + 'deeppink3': (205, 16, 118), + 'deeppink4': (139, 10, 80), + 'deepskyblue': (0, 191, 255), + 'deepskyblue1': (0, 191, 255), + 'deepskyblue2': (0, 178, 238), + 'deepskyblue3': (0, 154, 205), + 'deepskyblue4': (0, 104, 139), + 'dimgray': (105, 105, 105), + 'dimgrey': (105, 105, 105), + 'dodgerblue': (30, 144, 255), + 'dodgerblue1': (30, 144, 255), + 'dodgerblue2': (28, 134, 238), + 'dodgerblue3': (24, 116, 205), + 'dodgerblue4': (16, 78, 139), + 'firebrick': (178, 34, 34), + 'firebrick1': (255, 48, 48), + 'firebrick2': (238, 44, 44), + 'firebrick3': (205, 38, 38), + 'firebrick4': (139, 26, 26), + 'floralwhite': (255, 250, 240), + 'forestgreen': (34, 139, 34), + 'gainsboro': (220, 220, 220), + 'ghostwhite': (248, 248, 255), + 'gold': (255, 215, 0), + 'gold1': (255, 215, 0), + 'gold2': (238, 201, 0), + 'gold3': (205, 173, 0), + 'gold4': (139, 117, 0), + 'goldenrod': (218, 165, 32), + 'goldenrod1': (255, 193, 37), + 'goldenrod2': (238, 180, 34), + 'goldenrod3': (205, 155, 29), + 'goldenrod4': (139, 105, 20), + 'gray': (192, 192, 192), + 'gray0': (0, 
0, 0), + 'gray1': (3, 3, 3), + 'gray10': (26, 26, 26), + 'gray100': (255, 255, 255), + 'gray11': (28, 28, 28), + 'gray12': (31, 31, 31), + 'gray13': (33, 33, 33), + 'gray14': (36, 36, 36), + 'gray15': (38, 38, 38), + 'gray16': (41, 41, 41), + 'gray17': (43, 43, 43), + 'gray18': (46, 46, 46), + 'gray19': (48, 48, 48), + 'gray2': (5, 5, 5), + 'gray20': (51, 51, 51), + 'gray21': (54, 54, 54), + 'gray22': (56, 56, 56), + 'gray23': (59, 59, 59), + 'gray24': (61, 61, 61), + 'gray25': (64, 64, 64), + 'gray26': (66, 66, 66), + 'gray27': (69, 69, 69), + 'gray28': (71, 71, 71), + 'gray29': (74, 74, 74), + 'gray3': (8, 8, 8), + 'gray30': (77, 77, 77), + 'gray31': (79, 79, 79), + 'gray32': (82, 82, 82), + 'gray33': (84, 84, 84), + 'gray34': (87, 87, 87), + 'gray35': (89, 89, 89), + 'gray36': (92, 92, 92), + 'gray37': (94, 94, 94), + 'gray38': (97, 97, 97), + 'gray39': (99, 99, 99), + 'gray4': (10, 10, 10), + 'gray40': (102, 102, 102), + 'gray41': (105, 105, 105), + 'gray42': (107, 107, 107), + 'gray43': (110, 110, 110), + 'gray44': (112, 112, 112), + 'gray45': (115, 115, 115), + 'gray46': (117, 117, 117), + 'gray47': (120, 120, 120), + 'gray48': (122, 122, 122), + 'gray49': (125, 125, 125), + 'gray5': (13, 13, 13), + 'gray50': (127, 127, 127), + 'gray51': (130, 130, 130), + 'gray52': (133, 133, 133), + 'gray53': (135, 135, 135), + 'gray54': (138, 138, 138), + 'gray55': (140, 140, 140), + 'gray56': (143, 143, 143), + 'gray57': (145, 145, 145), + 'gray58': (148, 148, 148), + 'gray59': (150, 150, 150), + 'gray6': (15, 15, 15), + 'gray60': (153, 153, 153), + 'gray61': (156, 156, 156), + 'gray62': (158, 158, 158), + 'gray63': (161, 161, 161), + 'gray64': (163, 163, 163), + 'gray65': (166, 166, 166), + 'gray66': (168, 168, 168), + 'gray67': (171, 171, 171), + 'gray68': (173, 173, 173), + 'gray69': (176, 176, 176), + 'gray7': (18, 18, 18), + 'gray70': (179, 179, 179), + 'gray71': (181, 181, 181), + 'gray72': (184, 184, 184), + 'gray73': (186, 186, 186), + 'gray74': (189, 189, 189), + 
'gray75': (191, 191, 191), + 'gray76': (194, 194, 194), + 'gray77': (196, 196, 196), + 'gray78': (199, 199, 199), + 'gray79': (201, 201, 201), + 'gray8': (20, 20, 20), + 'gray80': (204, 204, 204), + 'gray81': (207, 207, 207), + 'gray82': (209, 209, 209), + 'gray83': (212, 212, 212), + 'gray84': (214, 214, 214), + 'gray85': (217, 217, 217), + 'gray86': (219, 219, 219), + 'gray87': (222, 222, 222), + 'gray88': (224, 224, 224), + 'gray89': (227, 227, 227), + 'gray9': (23, 23, 23), + 'gray90': (229, 229, 229), + 'gray91': (232, 232, 232), + 'gray92': (235, 235, 235), + 'gray93': (237, 237, 237), + 'gray94': (240, 240, 240), + 'gray95': (242, 242, 242), + 'gray96': (245, 245, 245), + 'gray97': (247, 247, 247), + 'gray98': (250, 250, 250), + 'gray99': (252, 252, 252), + 'green': (0, 255, 0), + 'green1': (0, 255, 0), + 'green2': (0, 238, 0), + 'green3': (0, 205, 0), + 'green4': (0, 139, 0), + 'greenyellow': (173, 255, 47), + 'grey': (192, 192, 192), + 'grey0': (0, 0, 0), + 'grey1': (3, 3, 3), + 'grey10': (26, 26, 26), + 'grey100': (255, 255, 255), + 'grey11': (28, 28, 28), + 'grey12': (31, 31, 31), + 'grey13': (33, 33, 33), + 'grey14': (36, 36, 36), + 'grey15': (38, 38, 38), + 'grey16': (41, 41, 41), + 'grey17': (43, 43, 43), + 'grey18': (46, 46, 46), + 'grey19': (48, 48, 48), + 'grey2': (5, 5, 5), + 'grey20': (51, 51, 51), + 'grey21': (54, 54, 54), + 'grey22': (56, 56, 56), + 'grey23': (59, 59, 59), + 'grey24': (61, 61, 61), + 'grey25': (64, 64, 64), + 'grey26': (66, 66, 66), + 'grey27': (69, 69, 69), + 'grey28': (71, 71, 71), + 'grey29': (74, 74, 74), + 'grey3': (8, 8, 8), + 'grey30': (77, 77, 77), + 'grey31': (79, 79, 79), + 'grey32': (82, 82, 82), + 'grey33': (84, 84, 84), + 'grey34': (87, 87, 87), + 'grey35': (89, 89, 89), + 'grey36': (92, 92, 92), + 'grey37': (94, 94, 94), + 'grey38': (97, 97, 97), + 'grey39': (99, 99, 99), + 'grey4': (10, 10, 10), + 'grey40': (102, 102, 102), + 'grey41': (105, 105, 105), + 'grey42': (107, 107, 107), + 'grey43': (110, 110, 110), + 
'grey44': (112, 112, 112), + 'grey45': (115, 115, 115), + 'grey46': (117, 117, 117), + 'grey47': (120, 120, 120), + 'grey48': (122, 122, 122), + 'grey49': (125, 125, 125), + 'grey5': (13, 13, 13), + 'grey50': (127, 127, 127), + 'grey51': (130, 130, 130), + 'grey52': (133, 133, 133), + 'grey53': (135, 135, 135), + 'grey54': (138, 138, 138), + 'grey55': (140, 140, 140), + 'grey56': (143, 143, 143), + 'grey57': (145, 145, 145), + 'grey58': (148, 148, 148), + 'grey59': (150, 150, 150), + 'grey6': (15, 15, 15), + 'grey60': (153, 153, 153), + 'grey61': (156, 156, 156), + 'grey62': (158, 158, 158), + 'grey63': (161, 161, 161), + 'grey64': (163, 163, 163), + 'grey65': (166, 166, 166), + 'grey66': (168, 168, 168), + 'grey67': (171, 171, 171), + 'grey68': (173, 173, 173), + 'grey69': (176, 176, 176), + 'grey7': (18, 18, 18), + 'grey70': (179, 179, 179), + 'grey71': (181, 181, 181), + 'grey72': (184, 184, 184), + 'grey73': (186, 186, 186), + 'grey74': (189, 189, 189), + 'grey75': (191, 191, 191), + 'grey76': (194, 194, 194), + 'grey77': (196, 196, 196), + 'grey78': (199, 199, 199), + 'grey79': (201, 201, 201), + 'grey8': (20, 20, 20), + 'grey80': (204, 204, 204), + 'grey81': (207, 207, 207), + 'grey82': (209, 209, 209), + 'grey83': (212, 212, 212), + 'grey84': (214, 214, 214), + 'grey85': (217, 217, 217), + 'grey86': (219, 219, 219), + 'grey87': (222, 222, 222), + 'grey88': (224, 224, 224), + 'grey89': (227, 227, 227), + 'grey9': (23, 23, 23), + 'grey90': (229, 229, 229), + 'grey91': (232, 232, 232), + 'grey92': (235, 235, 235), + 'grey93': (237, 237, 237), + 'grey94': (240, 240, 240), + 'grey95': (242, 242, 242), + 'grey96': (245, 245, 245), + 'grey97': (247, 247, 247), + 'grey98': (250, 250, 250), + 'grey99': (252, 252, 252), + 'honeydew': (240, 255, 240), + 'honeydew1': (240, 255, 240), + 'honeydew2': (224, 238, 224), + 'honeydew3': (193, 205, 193), + 'honeydew4': (131, 139, 131), + 'hotpink': (255, 105, 180), + 'hotpink1': (255, 110, 180), + 'hotpink2': (238, 106, 167), + 
'hotpink3': (205, 96, 144), + 'hotpink4': (139, 58, 98), + 'indianred': (205, 92, 92), + 'indianred1': (255, 106, 106), + 'indianred2': (238, 99, 99), + 'indianred3': (205, 85, 85), + 'indianred4': (139, 58, 58), + 'indigo': (75, 0, 130), + 'invis': (255, 255, 254), + 'ivory': (255, 255, 240), + 'ivory1': (255, 255, 240), + 'ivory2': (238, 238, 224), + 'ivory3': (205, 205, 193), + 'ivory4': (139, 139, 131), + 'khaki': (240, 230, 140), + 'khaki1': (255, 246, 143), + 'khaki2': (238, 230, 133), + 'khaki3': (205, 198, 115), + 'khaki4': (139, 134, 78), + 'lavender': (230, 230, 250), + 'lavenderblush': (255, 240, 245), + 'lavenderblush1': (255, 240, 245), + 'lavenderblush2': (238, 224, 229), + 'lavenderblush3': (205, 193, 197), + 'lavenderblush4': (139, 131, 134), + 'lawngreen': (124, 252, 0), + 'lemonchiffon': (255, 250, 205), + 'lemonchiffon1': (255, 250, 205), + 'lemonchiffon2': (238, 233, 191), + 'lemonchiffon3': (205, 201, 165), + 'lemonchiffon4': (139, 137, 112), + 'lightblue': (173, 216, 230), + 'lightblue1': (191, 239, 255), + 'lightblue2': (178, 223, 238), + 'lightblue3': (154, 192, 205), + 'lightblue4': (104, 131, 139), + 'lightcoral': (240, 128, 128), + 'lightcyan': (224, 255, 255), + 'lightcyan1': (224, 255, 255), + 'lightcyan2': (209, 238, 238), + 'lightcyan3': (180, 205, 205), + 'lightcyan4': (122, 139, 139), + 'lightgoldenrod': (238, 221, 130), + 'lightgoldenrod1': (255, 236, 139), + 'lightgoldenrod2': (238, 220, 130), + 'lightgoldenrod3': (205, 190, 112), + 'lightgoldenrod4': (139, 129, 76), + 'lightgoldenrodyellow': (250, 250, 210), + 'lightgray': (211, 211, 211), + 'lightgrey': (211, 211, 211), + 'lightpink': (255, 182, 193), + 'lightpink1': (255, 174, 185), + 'lightpink2': (238, 162, 173), + 'lightpink3': (205, 140, 149), + 'lightpink4': (139, 95, 101), + 'lightsalmon': (255, 160, 122), + 'lightsalmon1': (255, 160, 122), + 'lightsalmon2': (238, 149, 114), + 'lightsalmon3': (205, 129, 98), + 'lightsalmon4': (139, 87, 66), + 'lightseagreen': (32, 178, 
170), + 'lightskyblue': (135, 206, 250), + 'lightskyblue1': (176, 226, 255), + 'lightskyblue2': (164, 211, 238), + 'lightskyblue3': (141, 182, 205), + 'lightskyblue4': (96, 123, 139), + 'lightslateblue': (132, 112, 255), + 'lightslategray': (119, 136, 153), + 'lightslategrey': (119, 136, 153), + 'lightsteelblue': (176, 196, 222), + 'lightsteelblue1': (202, 225, 255), + 'lightsteelblue2': (188, 210, 238), + 'lightsteelblue3': (162, 181, 205), + 'lightsteelblue4': (110, 123, 139), + 'lightyellow': (255, 255, 224), + 'lightyellow1': (255, 255, 224), + 'lightyellow2': (238, 238, 209), + 'lightyellow3': (205, 205, 180), + 'lightyellow4': (139, 139, 122), + 'limegreen': (50, 205, 50), + 'linen': (250, 240, 230), + 'magenta': (255, 0, 255), + 'magenta1': (255, 0, 255), + 'magenta2': (238, 0, 238), + 'magenta3': (205, 0, 205), + 'magenta4': (139, 0, 139), + 'maroon': (176, 48, 96), + 'maroon1': (255, 52, 179), + 'maroon2': (238, 48, 167), + 'maroon3': (205, 41, 144), + 'maroon4': (139, 28, 98), + 'mediumaquamarine': (102, 205, 170), + 'mediumblue': (0, 0, 205), + 'mediumorchid': (186, 85, 211), + 'mediumorchid1': (224, 102, 255), + 'mediumorchid2': (209, 95, 238), + 'mediumorchid3': (180, 82, 205), + 'mediumorchid4': (122, 55, 139), + 'mediumpurple': (147, 112, 219), + 'mediumpurple1': (171, 130, 255), + 'mediumpurple2': (159, 121, 238), + 'mediumpurple3': (137, 104, 205), + 'mediumpurple4': (93, 71, 139), + 'mediumseagreen': (60, 179, 113), + 'mediumslateblue': (123, 104, 238), + 'mediumspringgreen': (0, 250, 154), + 'mediumturquoise': (72, 209, 204), + 'mediumvioletred': (199, 21, 133), + 'midnightblue': (25, 25, 112), + 'mintcream': (245, 255, 250), + 'mistyrose': (255, 228, 225), + 'mistyrose1': (255, 228, 225), + 'mistyrose2': (238, 213, 210), + 'mistyrose3': (205, 183, 181), + 'mistyrose4': (139, 125, 123), + 'moccasin': (255, 228, 181), + 'navajowhite': (255, 222, 173), + 'navajowhite1': (255, 222, 173), + 'navajowhite2': (238, 207, 161), + 'navajowhite3': (205, 
179, 139), + 'navajowhite4': (139, 121, 94), + 'navy': (0, 0, 128), + 'navyblue': (0, 0, 128), + 'none': (255, 255, 254), + 'oldlace': (253, 245, 230), + 'olivedrab': (107, 142, 35), + 'olivedrab1': (192, 255, 62), + 'olivedrab2': (179, 238, 58), + 'olivedrab3': (154, 205, 50), + 'olivedrab4': (105, 139, 34), + 'orange': (255, 165, 0), + 'orange1': (255, 165, 0), + 'orange2': (238, 154, 0), + 'orange3': (205, 133, 0), + 'orange4': (139, 90, 0), + 'orangered': (255, 69, 0), + 'orangered1': (255, 69, 0), + 'orangered2': (238, 64, 0), + 'orangered3': (205, 55, 0), + 'orangered4': (139, 37, 0), + 'orchid': (218, 112, 214), + 'orchid1': (255, 131, 250), + 'orchid2': (238, 122, 233), + 'orchid3': (205, 105, 201), + 'orchid4': (139, 71, 137), + 'palegoldenrod': (238, 232, 170), + 'palegreen': (152, 251, 152), + 'palegreen1': (154, 255, 154), + 'palegreen2': (144, 238, 144), + 'palegreen3': (124, 205, 124), + 'palegreen4': (84, 139, 84), + 'paleturquoise': (175, 238, 238), + 'paleturquoise1': (187, 255, 255), + 'paleturquoise2': (174, 238, 238), + 'paleturquoise3': (150, 205, 205), + 'paleturquoise4': (102, 139, 139), + 'palevioletred': (219, 112, 147), + 'palevioletred1': (255, 130, 171), + 'palevioletred2': (238, 121, 159), + 'palevioletred3': (205, 104, 137), + 'palevioletred4': (139, 71, 93), + 'papayawhip': (255, 239, 213), + 'peachpuff': (255, 218, 185), + 'peachpuff1': (255, 218, 185), + 'peachpuff2': (238, 203, 173), + 'peachpuff3': (205, 175, 149), + 'peachpuff4': (139, 119, 101), + 'peru': (205, 133, 63), + 'pink': (255, 192, 203), + 'pink1': (255, 181, 197), + 'pink2': (238, 169, 184), + 'pink3': (205, 145, 158), + 'pink4': (139, 99, 108), + 'plum': (221, 160, 221), + 'plum1': (255, 187, 255), + 'plum2': (238, 174, 238), + 'plum3': (205, 150, 205), + 'plum4': (139, 102, 139), + 'powderblue': (176, 224, 230), + 'purple': (160, 32, 240), + 'purple1': (155, 48, 255), + 'purple2': (145, 44, 238), + 'purple3': (125, 38, 205), + 'purple4': (85, 26, 139), + 'red': 
(255, 0, 0), + 'red1': (255, 0, 0), + 'red2': (238, 0, 0), + 'red3': (205, 0, 0), + 'red4': (139, 0, 0), + 'rosybrown': (188, 143, 143), + 'rosybrown1': (255, 193, 193), + 'rosybrown2': (238, 180, 180), + 'rosybrown3': (205, 155, 155), + 'rosybrown4': (139, 105, 105), + 'royalblue': (65, 105, 225), + 'royalblue1': (72, 118, 255), + 'royalblue2': (67, 110, 238), + 'royalblue3': (58, 95, 205), + 'royalblue4': (39, 64, 139), + 'saddlebrown': (139, 69, 19), + 'salmon': (250, 128, 114), + 'salmon1': (255, 140, 105), + 'salmon2': (238, 130, 98), + 'salmon3': (205, 112, 84), + 'salmon4': (139, 76, 57), + 'sandybrown': (244, 164, 96), + 'seagreen': (46, 139, 87), + 'seagreen1': (84, 255, 159), + 'seagreen2': (78, 238, 148), + 'seagreen3': (67, 205, 128), + 'seagreen4': (46, 139, 87), + 'seashell': (255, 245, 238), + 'seashell1': (255, 245, 238), + 'seashell2': (238, 229, 222), + 'seashell3': (205, 197, 191), + 'seashell4': (139, 134, 130), + 'sienna': (160, 82, 45), + 'sienna1': (255, 130, 71), + 'sienna2': (238, 121, 66), + 'sienna3': (205, 104, 57), + 'sienna4': (139, 71, 38), + 'skyblue': (135, 206, 235), + 'skyblue1': (135, 206, 255), + 'skyblue2': (126, 192, 238), + 'skyblue3': (108, 166, 205), + 'skyblue4': (74, 112, 139), + 'slateblue': (106, 90, 205), + 'slateblue1': (131, 111, 255), + 'slateblue2': (122, 103, 238), + 'slateblue3': (105, 89, 205), + 'slateblue4': (71, 60, 139), + 'slategray': (112, 128, 144), + 'slategray1': (198, 226, 255), + 'slategray2': (185, 211, 238), + 'slategray3': (159, 182, 205), + 'slategray4': (108, 123, 139), + 'slategrey': (112, 128, 144), + 'snow': (255, 250, 250), + 'snow1': (255, 250, 250), + 'snow2': (238, 233, 233), + 'snow3': (205, 201, 201), + 'snow4': (139, 137, 137), + 'springgreen': (0, 255, 127), + 'springgreen1': (0, 255, 127), + 'springgreen2': (0, 238, 118), + 'springgreen3': (0, 205, 102), + 'springgreen4': (0, 139, 69), + 'steelblue': (70, 130, 180), + 'steelblue1': (99, 184, 255), + 'steelblue2': (92, 172, 238), + 
'steelblue3': (79, 148, 205), + 'steelblue4': (54, 100, 139), + 'tan': (210, 180, 140), + 'tan1': (255, 165, 79), + 'tan2': (238, 154, 73), + 'tan3': (205, 133, 63), + 'tan4': (139, 90, 43), + 'thistle': (216, 191, 216), + 'thistle1': (255, 225, 255), + 'thistle2': (238, 210, 238), + 'thistle3': (205, 181, 205), + 'thistle4': (139, 123, 139), + 'tomato': (255, 99, 71), + 'tomato1': (255, 99, 71), + 'tomato2': (238, 92, 66), + 'tomato3': (205, 79, 57), + 'tomato4': (139, 54, 38), + 'transparent': (255, 255, 254), + 'turquoise': (64, 224, 208), + 'turquoise1': (0, 245, 255), + 'turquoise2': (0, 229, 238), + 'turquoise3': (0, 197, 205), + 'turquoise4': (0, 134, 139), + 'violet': (238, 130, 238), + 'violetred': (208, 32, 144), + 'violetred1': (255, 62, 150), + 'violetred2': (238, 58, 140), + 'violetred3': (205, 50, 120), + 'violetred4': (139, 34, 82), + 'wheat': (245, 222, 179), + 'wheat1': (255, 231, 186), + 'wheat2': (238, 216, 174), + 'wheat3': (205, 186, 150), + 'wheat4': (139, 126, 102), + 'white': (255, 255, 255), + 'whitesmoke': (245, 245, 245), + 'yellow': (255, 255, 0), + 'yellow1': (255, 255, 0), + 'yellow2': (238, 238, 0), + 'yellow3': (205, 205, 0), + 'yellow4': (139, 139, 0), + 'yellowgreen': (154, 205, 50), } re_nonword=re.compile(r'([^0-9a-zA-Z_.]+)') re_linewidth=re.compile(r'setlinewidth\((\d+(\.\d*)?|\.\d+)\)') diff --git a/lib_pypy/cffi.egg-info/PKG-INFO b/lib_pypy/cffi.egg-info/PKG-INFO --- a/lib_pypy/cffi.egg-info/PKG-INFO +++ b/lib_pypy/cffi.egg-info/PKG-INFO @@ -1,6 +1,6 @@ Metadata-Version: 1.1 Name: cffi -Version: 1.5.0 +Version: 1.5.1 Summary: Foreign Function Interface for Python calling C code. 
Home-page: http://cffi.readthedocs.org Author: Armin Rigo, Maciej Fijalkowski diff --git a/lib_pypy/cffi/__init__.py b/lib_pypy/cffi/__init__.py --- a/lib_pypy/cffi/__init__.py +++ b/lib_pypy/cffi/__init__.py @@ -4,8 +4,8 @@ from .api import FFI, CDefError, FFIError from .ffiplatform import VerificationError, VerificationMissing -__version__ = "1.5.0" -__version_info__ = (1, 5, 0) +__version__ = "1.5.1" +__version_info__ = (1, 5, 1) # The verifier module file names are based on the CRC32 of a string that # contains the following version number. It may be older than __version__ diff --git a/lib_pypy/cffi/_cffi_include.h b/lib_pypy/cffi/_cffi_include.h --- a/lib_pypy/cffi/_cffi_include.h +++ b/lib_pypy/cffi/_cffi_include.h @@ -231,6 +231,12 @@ ((got_nonpos) == (expected <= 0) && \ (got) == (unsigned long long)expected) +#ifdef MS_WIN32 +# define _cffi_stdcall __stdcall +#else +# define _cffi_stdcall /* nothing */ +#endif + #ifdef __cplusplus } #endif diff --git a/lib_pypy/cffi/_embedding.h b/lib_pypy/cffi/_embedding.h new file mode 100644 --- /dev/null +++ b/lib_pypy/cffi/_embedding.h @@ -0,0 +1,517 @@ + +/***** Support code for embedding *****/ + +#if defined(_MSC_VER) +# define CFFI_DLLEXPORT __declspec(dllexport) +#elif defined(__GNUC__) +# define CFFI_DLLEXPORT __attribute__((visibility("default"))) +#else +# define CFFI_DLLEXPORT /* nothing */ +#endif + + +/* There are two global variables of type _cffi_call_python_fnptr: + + * _cffi_call_python, which we declare just below, is the one called + by ``extern "Python"`` implementations. + + * _cffi_call_python_org, which on CPython is actually part of the + _cffi_exports[] array, is the function pointer copied from + _cffi_backend. + + After initialization is complete, both are equal. 
However, the + first one remains equal to &_cffi_start_and_call_python until the + very end of initialization, when we are (or should be) sure that + concurrent threads also see a completely initialized world, and + only then is it changed. +*/ +#undef _cffi_call_python +typedef void (*_cffi_call_python_fnptr)(struct _cffi_externpy_s *, char *); +static void _cffi_start_and_call_python(struct _cffi_externpy_s *, char *); +static _cffi_call_python_fnptr _cffi_call_python = &_cffi_start_and_call_python; + + +#ifndef _MSC_VER + /* --- Assuming a GCC not infinitely old --- */ +# define cffi_compare_and_swap(l,o,n) __sync_bool_compare_and_swap(l,o,n) +# define cffi_write_barrier() __sync_synchronize() +# if !defined(__amd64__) && !defined(__x86_64__) && \ + !defined(__i386__) && !defined(__i386) +# define cffi_read_barrier() __sync_synchronize() +# else +# define cffi_read_barrier() (void)0 +# endif +#else + /* --- Windows threads version --- */ +# include +# define cffi_compare_and_swap(l,o,n) \ + (InterlockedCompareExchangePointer(l,n,o) == (o)) +# define cffi_write_barrier() InterlockedCompareExchange(&_cffi_dummy,0,0) +# define cffi_read_barrier() (void)0 +static volatile LONG _cffi_dummy; +#endif + +#ifdef WITH_THREAD +# ifndef _MSC_VER +# include + static pthread_mutex_t _cffi_embed_startup_lock; +# else + static CRITICAL_SECTION _cffi_embed_startup_lock; +# endif + static char _cffi_embed_startup_lock_ready = 0; +#endif + +static void _cffi_acquire_reentrant_mutex(void) +{ + static void *volatile lock = NULL; + + while (!cffi_compare_and_swap(&lock, NULL, (void *)1)) { + /* should ideally do a spin loop instruction here, but + hard to do it portably and doesn't really matter I + think: pthread_mutex_init() should be very fast, and + this is only run at start-up anyway. 
*/ + } + +#ifdef WITH_THREAD + if (!_cffi_embed_startup_lock_ready) { +# ifndef _MSC_VER + pthread_mutexattr_t attr; + pthread_mutexattr_init(&attr); + pthread_mutexattr_settype(&attr, PTHREAD_MUTEX_RECURSIVE); + pthread_mutex_init(&_cffi_embed_startup_lock, &attr); +# else + InitializeCriticalSection(&_cffi_embed_startup_lock); +# endif + _cffi_embed_startup_lock_ready = 1; + } +#endif + + while (!cffi_compare_and_swap(&lock, (void *)1, NULL)) + ; + +#ifndef _MSC_VER + pthread_mutex_lock(&_cffi_embed_startup_lock); +#else + EnterCriticalSection(&_cffi_embed_startup_lock); +#endif +} + +static void _cffi_release_reentrant_mutex(void) +{ +#ifndef _MSC_VER + pthread_mutex_unlock(&_cffi_embed_startup_lock); +#else + LeaveCriticalSection(&_cffi_embed_startup_lock); +#endif +} + + +/********** CPython-specific section **********/ +#ifndef PYPY_VERSION + + +#define _cffi_call_python_org _cffi_exports[_CFFI_CPIDX] + +PyMODINIT_FUNC _CFFI_PYTHON_STARTUP_FUNC(void); /* forward */ + +static void _cffi_py_initialize(void) +{ + /* XXX use initsigs=0, which "skips initialization registration of + signal handlers, which might be useful when Python is + embedded" according to the Python docs. But review and think + if it should be a user-controllable setting. + + XXX we should also give a way to write errors to a buffer + instead of to stderr. + + XXX if importing 'site' fails, CPython (any version) calls + exit(). Should we try to work around this behavior here? + */ + Py_InitializeEx(0); +} + +static int _cffi_initialize_python(void) +{ + /* This initializes Python, imports _cffi_backend, and then the + present .dll/.so is set up as a CPython C extension module. + */ + int result; + PyGILState_STATE state; + PyObject *pycode=NULL, *global_dict=NULL, *x; + +#if PY_MAJOR_VERSION >= 3 + /* see comments in _cffi_carefully_make_gil() about the + Python2/Python3 difference + */ +#else + /* Acquire the GIL. We have no threadstate here. 
If Python is + already initialized, it is possible that there is already one + existing for this thread, but it is not made current now. + */ + PyEval_AcquireLock(); + + _cffi_py_initialize(); + + /* The Py_InitializeEx() sometimes made a threadstate for us, but + not always. Indeed Py_InitializeEx() could be called and do + nothing. So do we have a threadstate, or not? We don't know, + but we can replace it with NULL in all cases. + */ + (void)PyThreadState_Swap(NULL); + + /* Now we can release the GIL and re-acquire immediately using the + logic of PyGILState(), which handles making or installing the + correct threadstate. + */ + PyEval_ReleaseLock(); +#endif + state = PyGILState_Ensure(); + + /* Call the initxxx() function from the present module. It will + create and initialize us as a CPython extension module, instead + of letting the startup Python code do it---it might reimport + the same .dll/.so and get maybe confused on some platforms. + It might also have troubles locating the .dll/.so again for all + I know. + */ + (void)_CFFI_PYTHON_STARTUP_FUNC(); + if (PyErr_Occurred()) + goto error; + + /* Now run the Python code provided to ffi.embedding_init_code(). + */ + pycode = Py_CompileString(_CFFI_PYTHON_STARTUP_CODE, + "", + Py_file_input); + if (pycode == NULL) + goto error; + global_dict = PyDict_New(); + if (global_dict == NULL) + goto error; + if (PyDict_SetItemString(global_dict, "__builtins__", + PyThreadState_GET()->interp->builtins) < 0) + goto error; + x = PyEval_EvalCode( +#if PY_MAJOR_VERSION < 3 + (PyCodeObject *) +#endif + pycode, global_dict, global_dict); + if (x == NULL) + goto error; + Py_DECREF(x); + + /* Done! Now if we've been called from + _cffi_start_and_call_python() in an ``extern "Python"``, we can + only hope that the Python code did correctly set up the + corresponding @ffi.def_extern() function. 
Otherwise, the + general logic of ``extern "Python"`` functions (inside the + _cffi_backend module) will find that the reference is still + missing and print an error. + */ + result = 0; + done: + Py_XDECREF(pycode); + Py_XDECREF(global_dict); + PyGILState_Release(state); + return result; + + error:; + { + /* Print as much information as potentially useful. + Debugging load-time failures with embedding is not fun + */ + PyObject *exception, *v, *tb, *f, *modules, *mod; + PyErr_Fetch(&exception, &v, &tb); + if (exception != NULL) { + PyErr_NormalizeException(&exception, &v, &tb); + PyErr_Display(exception, v, tb); + } + Py_XDECREF(exception); + Py_XDECREF(v); + Py_XDECREF(tb); + + f = PySys_GetObject((char *)"stderr"); + if (f != NULL && f != Py_None) { + PyFile_WriteString("\nFrom: " _CFFI_MODULE_NAME + "\ncompiled with cffi version: 1.5.1" + "\n_cffi_backend module: ", f); + modules = PyImport_GetModuleDict(); + mod = PyDict_GetItemString(modules, "_cffi_backend"); + if (mod == NULL) { + PyFile_WriteString("not loaded", f); + } + else { + v = PyObject_GetAttrString(mod, "__file__"); + PyFile_WriteObject(v, f, 0); + Py_XDECREF(v); + } + PyFile_WriteString("\nsys.path: ", f); + PyFile_WriteObject(PySys_GetObject((char *)"path"), f, 0); + PyFile_WriteString("\n\n", f); + } + } + result = -1; + goto done; +} + +PyAPI_DATA(char *) _PyParser_TokenNames[]; /* from CPython */ + +static int _cffi_carefully_make_gil(void) +{ + /* This does the basic initialization of Python. It can be called + completely concurrently from unrelated threads. It assumes + that we don't hold the GIL before (if it exists), and we don't + hold it afterwards. + + What it really does is completely different in Python 2 and + Python 3. + + Python 2 + ======== + + Initialize the GIL, without initializing the rest of Python, + by calling PyEval_InitThreads(). + + PyEval_InitThreads() must not be called concurrently at all. + So we use a global variable as a simple spin lock. 
This global + variable must be from 'libpythonX.Y.so', not from this + cffi-based extension module, because it must be shared from + different cffi-based extension modules. We choose + _PyParser_TokenNames[0] as a completely arbitrary pointer value + that is never written to. The default is to point to the + string "ENDMARKER". We change it temporarily to point to the + next character in that string. (Yes, I know it's REALLY + obscure.) + + Python 3 + ======== + + In Python 3, PyEval_InitThreads() cannot be called before + Py_InitializeEx() any more. So this function calls + Py_InitializeEx() first. It uses the same obscure logic to + make sure we never call it concurrently. + + Arguably, this is less good on the spinlock, because + Py_InitializeEx() takes much longer to run than + PyEval_InitThreads(). But I didn't find a way around it. + */ + +#ifdef WITH_THREAD + char *volatile *lock = (char *volatile *)_PyParser_TokenNames; + char *old_value; + + while (1) { /* spin loop */ + old_value = *lock; + if (old_value[0] == 'E') { + assert(old_value[1] == 'N'); + if (cffi_compare_and_swap(lock, old_value, old_value + 1)) + break; + } + else { + assert(old_value[0] == 'N'); + /* should ideally do a spin loop instruction here, but + hard to do it portably and doesn't really matter I + think: PyEval_InitThreads() should be very fast, and + this is only run at start-up anyway. 
*/ + } + } +#endif + +#if PY_MAJOR_VERSION >= 3 + /* Python 3: call Py_InitializeEx() */ + { + PyGILState_STATE state = PyGILState_UNLOCKED; + if (!Py_IsInitialized()) + _cffi_py_initialize(); + else + state = PyGILState_Ensure(); + + PyEval_InitThreads(); + PyGILState_Release(state); + } +#else + /* Python 2: call PyEval_InitThreads() */ +# ifdef WITH_THREAD + if (!PyEval_ThreadsInitialized()) { + PyEval_InitThreads(); /* makes the GIL */ + PyEval_ReleaseLock(); /* then release it */ + } + /* else: there is already a GIL, but we still needed to do the + spinlock dance to make sure that we see it as fully ready */ +# endif +#endif + +#ifdef WITH_THREAD + /* release the lock */ + while (!cffi_compare_and_swap(lock, old_value + 1, old_value)) + ; +#endif + + return 0; +} + +/********** end CPython-specific section **********/ + + +#else + + +/********** PyPy-specific section **********/ + +PyMODINIT_FUNC _CFFI_PYTHON_STARTUP_FUNC(const void *[]); /* forward */ + +static struct _cffi_pypy_init_s { + const char *name; + void (*func)(const void *[]); + const char *code; +} _cffi_pypy_init = { + _CFFI_MODULE_NAME, + _CFFI_PYTHON_STARTUP_FUNC, + _CFFI_PYTHON_STARTUP_CODE, +}; + +extern int pypy_carefully_make_gil(const char *); +extern int pypy_init_embedded_cffi_module(int, struct _cffi_pypy_init_s *); + +static int _cffi_carefully_make_gil(void) +{ + return pypy_carefully_make_gil(_CFFI_MODULE_NAME); +} + +static int _cffi_initialize_python(void) +{ + return pypy_init_embedded_cffi_module(0xB011, &_cffi_pypy_init); +} + +/********** end PyPy-specific section **********/ + + +#endif + + +#ifdef __GNUC__ +__attribute__((noinline)) +#endif +static _cffi_call_python_fnptr _cffi_start_python(void) +{ + /* Delicate logic to initialize Python. This function can be + called multiple times concurrently, e.g. when the process calls + its first ``extern "Python"`` functions in multiple threads at + once. It can also be called recursively, in which case we must + ignore it. 
We also have to consider what occurs if several + different cffi-based extensions reach this code in parallel + threads---it is a different copy of the code, then, and we + can't have any shared global variable unless it comes from + 'libpythonX.Y.so'. + + Idea: + + * _cffi_carefully_make_gil(): "carefully" call + PyEval_InitThreads() (possibly with Py_InitializeEx() first). + + * then we use a (local) custom lock to make sure that a call to this + cffi-based extension will wait if another call to the *same* + extension is running the initialization in another thread. + It is reentrant, so that a recursive call will not block, but + only one from a different thread. + + * then we grab the GIL and (Python 2) we call Py_InitializeEx(). + At this point, concurrent calls to Py_InitializeEx() are not + possible: we have the GIL. + + * do the rest of the specific initialization, which may + temporarily release the GIL but not the custom lock. + Only release the custom lock when we are done. + */ + static char called = 0; + + if (_cffi_carefully_make_gil() != 0) + return NULL; + + _cffi_acquire_reentrant_mutex(); + + /* Here the GIL exists, but we don't have it. We're only protected + from concurrency by the reentrant mutex. */ + + /* This file only initializes the embedded module once, the first + time this is called, even if there are subinterpreters. */ + if (!called) { + called = 1; /* invoke _cffi_initialize_python() only once, + but don't set '_cffi_call_python' right now, + otherwise concurrent threads won't call + this function at all (we need them to wait) */ + if (_cffi_initialize_python() == 0) { + /* now initialization is finished. Switch to the fast-path. */ + + /* We would like nobody to see the new value of + '_cffi_call_python' without also seeing the rest of the + data initialized. However, this is not possible. But + the new value of '_cffi_call_python' is the function + 'cffi_call_python()' from _cffi_backend. 
So: */ + cffi_write_barrier(); + /* ^^^ we put a write barrier here, and a corresponding + read barrier at the start of cffi_call_python(). This + ensures that after that read barrier, we see everything + done here before the write barrier. + */ + + assert(_cffi_call_python_org != NULL); + _cffi_call_python = (_cffi_call_python_fnptr)_cffi_call_python_org; + } + else { + /* initialization failed. Reset this to NULL, even if it was + already set to some other value. Future calls to + _cffi_start_python() are still forced to occur, and will + always return NULL from now on. */ + _cffi_call_python_org = NULL; + } + } + + _cffi_release_reentrant_mutex(); + + return (_cffi_call_python_fnptr)_cffi_call_python_org; +} + +static +void _cffi_start_and_call_python(struct _cffi_externpy_s *externpy, char *args) +{ + _cffi_call_python_fnptr fnptr; + int current_err = errno; +#ifdef _MSC_VER + int current_lasterr = GetLastError(); +#endif + fnptr = _cffi_start_python(); + if (fnptr == NULL) { + fprintf(stderr, "function %s() called, but initialization code " + "failed. Returning 0.\n", externpy->name); + memset(args, 0, externpy->size_of_result); + } +#ifdef _MSC_VER + SetLastError(current_lasterr); +#endif + errno = current_err; + + if (fnptr != NULL) + fnptr(externpy, args); +} + + +/* The cffi_start_python() function makes sure Python is initialized + and our cffi module is set up. It can be called manually from the + user C code. The same effect is obtained automatically from any + dll-exported ``extern "Python"`` function. This function returns + -1 if initialization failed, 0 if all is OK. 
*/ +_CFFI_UNUSED_FN +static int cffi_start_python(void) +{ + if (_cffi_call_python == &_cffi_start_and_call_python) { + if (_cffi_start_python() == NULL) + return -1; + } + cffi_read_barrier(); + return 0; +} + +#undef cffi_compare_and_swap +#undef cffi_write_barrier +#undef cffi_read_barrier diff --git a/lib_pypy/cffi/api.py b/lib_pypy/cffi/api.py --- a/lib_pypy/cffi/api.py +++ b/lib_pypy/cffi/api.py @@ -1,4 +1,4 @@ -import sys, types +import sys, sysconfig, types from .lock import allocate_lock try: @@ -544,28 +544,32 @@ def _apply_embedding_fix(self, kwds): # must include an argument like "-lpython2.7" for the compiler + def ensure(key, value): + lst = kwds.setdefault(key, []) + if value not in lst: + lst.append(value) + # if '__pypy__' in sys.builtin_module_names: if hasattr(sys, 'prefix'): import os - libdir = os.path.join(sys.prefix, 'bin') - dirs = kwds.setdefault('library_dirs', []) - if libdir not in dirs: - dirs.append(libdir) + ensure('library_dirs', os.path.join(sys.prefix, 'bin')) pythonlib = "pypy-c" else: if sys.platform == "win32": template = "python%d%d" - if sys.flags.debug: - template = template + '_d' + if hasattr(sys, 'gettotalrefcount'): + template += '_d' else: template = "python%d.%d" + if sysconfig.get_config_var('DEBUG_EXT'): + template += sysconfig.get_config_var('DEBUG_EXT') pythonlib = (template % (sys.hexversion >> 24, (sys.hexversion >> 16) & 0xff)) if hasattr(sys, 'abiflags'): pythonlib += sys.abiflags - libraries = kwds.setdefault('libraries', []) - if pythonlib not in libraries: - libraries.append(pythonlib) + ensure('libraries', pythonlib) + if sys.platform == "win32": + ensure('extra_link_args', '/MANIFEST') def set_source(self, module_name, source, source_extension='.c', **kwds): if hasattr(self, '_assigned_source'): @@ -631,7 +635,7 @@ compiled DLL. Use '*' to force distutils' choice, suitable for regular CPython C API modules. 
Use a file name ending in '.*' to ask for the system's default extension for dynamic libraries - (.so/.dll). + (.so/.dll/.dylib). The default is '*' when building a non-embedded C API extension, and (module_name + '.*') when building an embedded library. @@ -695,6 +699,10 @@ # self._embedding = pysource + def def_extern(self, *args, **kwds): + raise ValueError("ffi.def_extern() is only available on API-mode FFI " + "objects") + def _load_backend_lib(backend, name, flags): if name is None: diff --git a/lib_pypy/cffi/cparser.py b/lib_pypy/cffi/cparser.py --- a/lib_pypy/cffi/cparser.py +++ b/lib_pypy/cffi/cparser.py @@ -220,7 +220,7 @@ self._included_declarations = set() self._anonymous_counter = 0 self._structnode2type = weakref.WeakKeyDictionary() - self._options = None + self._options = {} self._int_constants = {} self._recomplete = [] self._uses_new_feature = None @@ -374,7 +374,7 @@ def _declare_function(self, tp, quals, decl): tp = self._get_type_pointer(tp, quals) - if self._options['dllexport']: + if self._options.get('dllexport'): tag = 'dllexport_python ' elif self._inside_extern_python: tag = 'extern_python ' @@ -450,7 +450,7 @@ prevobj, prevquals = self._declarations[name] if prevobj is obj and prevquals == quals: return - if not self._options['override']: + if not self._options.get('override'): raise api.FFIError( "multiple declarations of %s (for interactive usage, " "try cdef(xx, override=True))" % (name,)) @@ -729,7 +729,7 @@ if isinstance(tp, model.StructType) and tp.partial: raise NotImplementedError("%s: using both bitfields and '...;'" % (tp,)) - tp.packed = self._options['packed'] + tp.packed = self._options.get('packed') if tp.completed: # must be re-completed: it is not opaque any more tp.completed = 0 self._recomplete.append(tp) diff --git a/lib_pypy/cffi/ffiplatform.py b/lib_pypy/cffi/ffiplatform.py --- a/lib_pypy/cffi/ffiplatform.py +++ b/lib_pypy/cffi/ffiplatform.py @@ -21,14 +21,12 @@ allsources.append(os.path.normpath(src)) return 
Extension(name=modname, sources=allsources, **kwds) -def compile(tmpdir, ext, compiler_verbose=0, target_extension=None, - embedding=False): +def compile(tmpdir, ext, compiler_verbose=0): """Compile a C extension module using distutils.""" saved_environ = os.environ.copy() try: - outputfilename = _build(tmpdir, ext, compiler_verbose, - target_extension, embedding) + outputfilename = _build(tmpdir, ext, compiler_verbose) outputfilename = os.path.abspath(outputfilename) finally: # workaround for a distutils bugs where some env vars can @@ -38,32 +36,7 @@ os.environ[key] = value return outputfilename -def _save_val(name): - import distutils.sysconfig - config_vars = distutils.sysconfig.get_config_vars() - return config_vars.get(name, Ellipsis) - -def _restore_val(name, value): - import distutils.sysconfig - config_vars = distutils.sysconfig.get_config_vars() - config_vars[name] = value - if value is Ellipsis: - del config_vars[name] - -def _win32_hack_for_embedding(): - from distutils.msvc9compiler import MSVCCompiler - if not hasattr(MSVCCompiler, '_remove_visual_c_ref_CFFI_BAK'): - MSVCCompiler._remove_visual_c_ref_CFFI_BAK = \ - MSVCCompiler._remove_visual_c_ref - MSVCCompiler._remove_visual_c_ref = lambda self,manifest_file: manifest_file - -def _win32_unhack_for_embedding(): - from distutils.msvc9compiler import MSVCCompiler - MSVCCompiler._remove_visual_c_ref = \ - MSVCCompiler._remove_visual_c_ref_CFFI_BAK - -def _build(tmpdir, ext, compiler_verbose=0, target_extension=None, - embedding=False): +def _build(tmpdir, ext, compiler_verbose=0): # XXX compact but horrible :-( from distutils.core import Distribution import distutils.errors, distutils.log @@ -76,25 +49,14 @@ options['build_temp'] = ('ffiplatform', tmpdir) # try: - if sys.platform == 'win32' and embedding: - _win32_hack_for_embedding() old_level = distutils.log.set_threshold(0) or 0 - old_SO = _save_val('SO') - old_EXT_SUFFIX = _save_val('EXT_SUFFIX') try: - if target_extension is not None: - 
_restore_val('SO', target_extension) - _restore_val('EXT_SUFFIX', target_extension) distutils.log.set_verbosity(compiler_verbose) dist.run_command('build_ext') cmd_obj = dist.get_command_obj('build_ext') [soname] = cmd_obj.get_outputs() finally: distutils.log.set_threshold(old_level) - _restore_val('SO', old_SO) - _restore_val('EXT_SUFFIX', old_EXT_SUFFIX) - if sys.platform == 'win32' and embedding: - _win32_unhack_for_embedding() except (distutils.errors.CompileError, distutils.errors.LinkError) as e: raise VerificationError('%s: %s' % (e.__class__.__name__, e)) diff --git a/lib_pypy/cffi/recompiler.py b/lib_pypy/cffi/recompiler.py --- a/lib_pypy/cffi/recompiler.py +++ b/lib_pypy/cffi/recompiler.py @@ -1170,6 +1170,8 @@ repr_arguments = ', '.join(arguments) repr_arguments = repr_arguments or 'void' name_and_arguments = '%s(%s)' % (name, repr_arguments) + if tp.abi == "__stdcall": + name_and_arguments = '_cffi_stdcall ' + name_and_arguments # def may_need_128_bits(tp): return (isinstance(tp, model.PrimitiveType) and @@ -1357,6 +1359,58 @@ parts[-1] += extension return os.path.join(outputdir, *parts), parts + +# Aaargh. Distutils is not tested at all for the purpose of compiling +# DLLs that are not extension modules. Here are some hacks to work +# around that, in the _patch_for_*() functions... + +def _patch_meth(patchlist, cls, name, new_meth): + old = getattr(cls, name) + patchlist.append((cls, name, old)) + setattr(cls, name, new_meth) + return old + +def _unpatch_meths(patchlist): + for cls, name, old_meth in reversed(patchlist): + setattr(cls, name, old_meth) + +def _patch_for_embedding(patchlist): + if sys.platform == 'win32': + # we must not remove the manifest when building for embedding! 
+ from distutils.msvc9compiler import MSVCCompiler + _patch_meth(patchlist, MSVCCompiler, '_remove_visual_c_ref', + lambda self, manifest_file: manifest_file) + + if sys.platform == 'darwin': + # we must not make a '-bundle', but a '-dynamiclib' instead + from distutils.ccompiler import CCompiler + def my_link_shared_object(self, *args, **kwds): + if '-bundle' in self.linker_so: + self.linker_so = list(self.linker_so) + i = self.linker_so.index('-bundle') + self.linker_so[i] = '-dynamiclib' + return old_link_shared_object(self, *args, **kwds) + old_link_shared_object = _patch_meth(patchlist, CCompiler, + 'link_shared_object', + my_link_shared_object) + +def _patch_for_target(patchlist, target): + from distutils.command.build_ext import build_ext + # if 'target' is different from '*', we need to patch some internal + # method to just return this 'target' value, instead of having it + # built from module_name + if target.endswith('.*'): + target = target[:-2] + if sys.platform == 'win32': + target += '.dll' + elif sys.platform == 'darwin': + target += '.dylib' + else: + target += '.so' + _patch_meth(patchlist, build_ext, 'get_ext_filename', + lambda self, ext_name: target) + + def recompile(ffi, module_name, preamble, tmpdir='.', call_c_compiler=True, c_file=None, source_extension='.c', extradir=None, compiler_verbose=1, target=None, **kwds): @@ -1382,36 +1436,22 @@ target = '%s.*' % module_name else: target = '*' - if target == '*': - target_module_name = module_name - target_extension = None # use default - else: - if target.endswith('.*'): - target = target[:-2] - if sys.platform == 'win32': - target += '.dll' - else: - target += '.so' - # split along the first '.' 
(not the last one, otherwise the - # preceeding dots are interpreted as splitting package names) - index = target.find('.') - if index < 0: - raise ValueError("target argument %r should be a file name " - "containing a '.'" % (target,)) - target_module_name = target[:index] - target_extension = target[index:] # - ext = ffiplatform.get_extension(ext_c_file, target_module_name, **kwds) + ext = ffiplatform.get_extension(ext_c_file, module_name, **kwds) updated = make_c_source(ffi, module_name, preamble, c_file) if call_c_compiler: + patchlist = [] cwd = os.getcwd() try: + if embedding: + _patch_for_embedding(patchlist) + if target != '*': + _patch_for_target(patchlist, target) os.chdir(tmpdir) - outputfilename = ffiplatform.compile('.', ext, compiler_verbose, - target_extension, - embedding=embedding) + outputfilename = ffiplatform.compile('.', ext, compiler_verbose) finally: os.chdir(cwd) + _unpatch_meths(patchlist) return outputfilename else: return ext, updated diff --git a/pypy/doc/whatsnew-head.rst b/pypy/doc/whatsnew-head.rst --- a/pypy/doc/whatsnew-head.rst +++ b/pypy/doc/whatsnew-head.rst @@ -140,6 +140,10 @@ .. branch: cpyext-bootstrap +.. branch: vmprof-newstack + +Refactor vmprof to work cross-operating-system. + .. branch: memop-simplify3 Further simplifying the backend operations malloc_cond_varsize and zero_array. 
diff --git a/pypy/module/_cffi_backend/__init__.py b/pypy/module/_cffi_backend/__init__.py --- a/pypy/module/_cffi_backend/__init__.py +++ b/pypy/module/_cffi_backend/__init__.py @@ -3,7 +3,7 @@ from rpython.rlib import rdynload, clibffi, entrypoint from rpython.rtyper.lltypesystem import rffi -VERSION = "1.5.0" +VERSION = "1.5.1" FFI_DEFAULT_ABI = clibffi.FFI_DEFAULT_ABI try: diff --git a/pypy/module/_cffi_backend/embedding.py b/pypy/module/_cffi_backend/embedding.py --- a/pypy/module/_cffi_backend/embedding.py +++ b/pypy/module/_cffi_backend/embedding.py @@ -84,11 +84,68 @@ return rffi.cast(rffi.INT, res) # ____________________________________________________________ +if os.name == 'nt': + do_startup = r''' +#include +#define WIN32_LEAN_AND_MEAN +#include +RPY_EXPORTED void rpython_startup_code(void); +RPY_EXPORTED int pypy_setup_home(char *, int); +static unsigned char _cffi_ready = 0; +static const char *volatile _cffi_module_name; -eci = ExternalCompilationInfo(separate_module_sources=[ -r""" -/* XXX Windows missing */ +static void _cffi_init_error(const char *msg, const char *extra) +{ + fprintf(stderr, + "\nPyPy initialization failure when loading module '%s':\n%s%s\n", + _cffi_module_name, msg, extra); +} + +BOOL CALLBACK _cffi_init(PINIT_ONCE InitOnce, PVOID Parameter, PVOID *lpContex) +{ + + HMODULE hModule; + TCHAR home[_MAX_PATH]; + rpython_startup_code(); + RPyGilAllocate(); + + GetModuleHandleEx(GET_MODULE_HANDLE_EX_FLAG_FROM_ADDRESS | + GET_MODULE_HANDLE_EX_FLAG_UNCHANGED_REFCOUNT, + (LPCTSTR)&_cffi_init, &hModule); + if (hModule == 0 ) { + /* TODO turn the int into a string with FormatMessage */ + + _cffi_init_error("dladdr() failed: ", ""); + return TRUE; + } + GetModuleFileName(hModule, home, _MAX_PATH); + if (pypy_setup_home(home, 1) != 0) { + _cffi_init_error("pypy_setup_home() failed", ""); + return TRUE; + } + _cffi_ready = 1; + fprintf(stderr, "startup succeeded, home %s\n", home); + return TRUE; +} + +RPY_EXPORTED +int 
pypy_carefully_make_gil(const char *name) +{ + /* For CFFI: this initializes the GIL and loads the home path. + It can be called completely concurrently from unrelated threads. + It assumes that we don't hold the GIL before (if it exists), and we + don't hold it afterwards. + */ + static INIT_ONCE s_init_once; + + _cffi_module_name = name; /* not really thread-safe, but better than + nothing */ + InitOnceExecuteOnce(&s_init_once, _cffi_init, NULL, NULL); + return (int)_cffi_ready - 1; +}''' +else: + do_startup = r""" #include #include #include @@ -141,6 +198,7 @@ pthread_once(&once_control, _cffi_init); return (int)_cffi_ready - 1; } -"""]) +""" +eci = ExternalCompilationInfo(separate_module_sources=[do_startup]) declare_c_function = rffi.llexternal_use_eci(eci) diff --git a/pypy/module/_cffi_backend/test/_backend_test_c.py b/pypy/module/_cffi_backend/test/_backend_test_c.py --- a/pypy/module/_cffi_backend/test/_backend_test_c.py +++ b/pypy/module/_cffi_backend/test/_backend_test_c.py @@ -1,7 +1,7 @@ # ____________________________________________________________ import sys -assert __version__ == "1.5.0", ("This test_c.py file is for testing a version" +assert __version__ == "1.5.1", ("This test_c.py file is for testing a version" " of cffi that differs from the one that we" " get from 'import _cffi_backend'") if sys.version_info < (3,): diff --git a/pypy/module/_socket/test/test_sock_app.py b/pypy/module/_socket/test/test_sock_app.py --- a/pypy/module/_socket/test/test_sock_app.py +++ b/pypy/module/_socket/test/test_sock_app.py @@ -102,7 +102,7 @@ fd = space.appexec([w_socket, space.wrap(orig_fd.fileno()), space.wrap(socket.AF_INET), space.wrap(socket.SOCK_STREAM), space.wrap(0)], - """(_socket, fd, family, type, proto): + """(_socket, fd, family, type, proto): return _socket.fromfd(fd, family, type, proto)""") assert space.unwrap(space.call_method(fd, 'fileno')) @@ -326,7 +326,7 @@ def test_ntoa_exception(self): import _socket - raises(_socket.error, 
_socket.inet_ntoa, "ab") + raises(_socket.error, _socket.inet_ntoa, b"ab") def test_aton_exceptions(self): import _socket @@ -418,7 +418,7 @@ # it if there is no connection. try: s.connect(("www.python.org", 80)) - except _socket.gaierror, ex: + except _socket.gaierror as ex: skip("GAIError - probably no connection: %s" % str(ex.args)) name = s.getpeername() # Will raise socket.error if not connected assert name[1] == 80 @@ -465,7 +465,7 @@ sizes = {socket.htonl: 32, socket.ntohl: 32, socket.htons: 16, socket.ntohs: 16} for func, size in sizes.items(): - mask = (1L<f_lineno = 48; /* Does not work with CPython */ @@ -51,6 +52,7 @@ Py_XDECREF(empty_string); Py_XDECREF(empty_tuple); Py_XDECREF(py_globals); + Py_XDECREF(py_locals); From pypy.commits at gmail.com Wed Feb 10 05:06:25 2016 From: pypy.commits at gmail.com (HawkOwl) Date: Wed, 10 Feb 2016 02:06:25 -0800 (PST) Subject: [pypy-commit] pypy default: Make the default filesystem encoding ASCII Message-ID: <56bb0ba1.034cc20a.9ac36.ffffbb4d@mx.google.com> Author: Amber Brown Branch: Changeset: r82138:3bd88741ae4e Date: 2016-02-10 12:29 +0800 http://bitbucket.org/pypy/pypy/changeset/3bd88741ae4e/ Log: Make the default filesystem encoding ASCII diff --git a/pypy/module/sys/interp_encoding.py b/pypy/module/sys/interp_encoding.py --- a/pypy/module/sys/interp_encoding.py +++ b/pypy/module/sys/interp_encoding.py @@ -34,7 +34,11 @@ elif sys.platform == "darwin": base_encoding = "utf-8" else: - base_encoding = None + # In CPython, the default base encoding is NULL. This is paired with a + # comment that says "If non-NULL, this is different than the default + # encoding for strings". Therefore, the default filesystem encoding is the + # default encoding for strings, which is ASCII. 
+ base_encoding = "ascii" def _getfilesystemencoding(space): encoding = base_encoding From pypy.commits at gmail.com Wed Feb 10 05:19:13 2016 From: pypy.commits at gmail.com (plan_rich) Date: Wed, 10 Feb 2016 02:19:13 -0800 (PST) Subject: [pypy-commit] pypy s390x-backend: ups, f1, f3, f5, f7 are volatiles, but where not added to the list of volatiles Message-ID: <56bb0ea1.cb371c0a.68152.fffff1b4@mx.google.com> Author: Richard Plangger Branch: s390x-backend Changeset: r82139:67c6fa68e2b9 Date: 2016-02-10 11:18 +0100 http://bitbucket.org/pypy/pypy/changeset/67c6fa68e2b9/ Log: ups, f1,f3,f5,f7 are volatiles, but where not added to the list of volatiles diff --git a/rpython/jit/backend/zarch/registers.py b/rpython/jit/backend/zarch/registers.py --- a/rpython/jit/backend/zarch/registers.py +++ b/rpython/jit/backend/zarch/registers.py @@ -27,7 +27,7 @@ FP_SCRATCH = f15 MANAGED_FP_REGS = fpregisters[:-1] -VOLATILES_FLOAT = [f0,f2,f4,f6] +VOLATILES_FLOAT = [f0,f1,f2,f3,f4,f5,f6,f7] # The JITFRAME_FIXED_SIZE is measured in words, and should be the # number of registers that need to be saved into the jitframe when From pypy.commits at gmail.com Wed Feb 10 06:44:43 2016 From: pypy.commits at gmail.com (plan_rich) Date: Wed, 10 Feb 2016 03:44:43 -0800 (PST) Subject: [pypy-commit] pypy s390x-backend: saving f8 through f15 before entering the jit and restoring it before exiting it. (ABI demands this) Message-ID: <56bb22ab.44e21c0a.a4518.19e5@mx.google.com> Author: Richard Plangger Branch: s390x-backend Changeset: r82140:6cf6a1b5353a Date: 2016-02-10 12:43 +0100 http://bitbucket.org/pypy/pypy/changeset/6cf6a1b5353a/ Log: saving f8 through f15 before entering the jit and restoring it before exiting it. 
(ABI demands this) diff --git a/rpython/jit/backend/zarch/arch.py b/rpython/jit/backend/zarch/arch.py --- a/rpython/jit/backend/zarch/arch.py +++ b/rpython/jit/backend/zarch/arch.py @@ -83,3 +83,9 @@ JUMPABS_TARGET_ADDR__POOL_OFFSET = 0 JUMPABS_POOL_ADDR_POOL_OFFSET = 8 + +# r8 through r15 are saved registers (= non volatile) +# thus when entering the jit, we do not know if those +# are overwritten in the jit. save them using some extra +# stack space! +JIT_ENTER_EXTRA_STACK_SPACE = 8*8 diff --git a/rpython/jit/backend/zarch/assembler.py b/rpython/jit/backend/zarch/assembler.py --- a/rpython/jit/backend/zarch/assembler.py +++ b/rpython/jit/backend/zarch/assembler.py @@ -17,7 +17,7 @@ STD_FRAME_SIZE_IN_BYTES, THREADLOCAL_ADDR_OFFSET, RECOVERY_GCMAP_POOL_OFFSET, RECOVERY_TARGET_POOL_OFFSET, JUMPABS_TARGET_ADDR__POOL_OFFSET, JUMPABS_POOL_ADDR_POOL_OFFSET, - THREADLOCAL_ON_ENTER_JIT) + THREADLOCAL_ON_ENTER_JIT, JIT_ENTER_EXTRA_STACK_SPACE) from rpython.jit.backend.zarch.opassembler import OpAssembler from rpython.jit.backend.zarch.regalloc import Regalloc from rpython.jit.codewriter.effectinfo import EffectInfo @@ -50,6 +50,7 @@ self.gcrootmap_retaddr_forced = 0 self.failure_recovery_code = [0, 0, 0, 0] self.wb_slowpath = [0,0,0,0,0] + self.pool = None def setup(self, looptoken): BaseAssembler.setup(self, looptoken) @@ -57,7 +58,8 @@ if we_are_translated(): self.debug = False self.current_clt = looptoken.compiled_loop_token - self.mc = InstrBuilder() + self.pool = LiteralPool() + self.mc = InstrBuilder(self.pool) self.pending_guard_tokens = [] self.pending_guard_tokens_recovered = 0 #assert self.datablockwrapper is None --- but obscure case @@ -68,7 +70,6 @@ self.mc.datablockwrapper = self.datablockwrapper self.target_tokens_currently_compiling = {} self.frame_depth_to_patch = [] - self.pool = LiteralPool() def teardown(self): self.pending_guard_tokens = None @@ -91,7 +92,7 @@ self.mc.BCR_rr(0xf, register.value) def _build_failure_recovery(self, exc, withfloats=False): 
- mc = InstrBuilder() + mc = InstrBuilder(self.pool) self.mc = mc # fill in the jf_descr and jf_gcmap fields of the frame according # to which failure we are resuming from. These are set before @@ -202,6 +203,7 @@ mc.LAY(r.SP, l.addr(-extra_stack_size, r.SP)) mc.STMG(r.r10, r.r12, l.addr(off, r.SP)) mc.STG(r.r2, l.addr(off+3*WORD, r.SP)) + # OK to use STD, because offset is not negative mc.STD(r.f0, l.addr(off+4*WORD, r.SP)) saved_regs = None saved_fp_regs = None @@ -1008,14 +1010,22 @@ def _call_header(self): # Build a new stackframe of size STD_FRAME_SIZE_IN_BYTES - self.mc.STMG(r.r6, r.r15, l.addr(6*WORD, r.SP)) + fpoff = JIT_ENTER_EXTRA_STACK_SPACE + self.mc.STMG(r.r6, r.r15, l.addr(-fpoff+6*WORD, r.SP)) self.mc.LARL(r.POOL, l.halfword(self.pool.pool_start - self.mc.get_relative_pos())) + # f8 through f15 are saved registers (= non volatile) + # TODO it would be good to detect if any float is used in the loop + # and to skip this push/pop whenever no float operation occurs + for i,reg in enumerate(range(8,16)): + off = -fpoff + STD_FRAME_SIZE_IN_BYTES + assert off > 0 + self.mc.STD_rx(reg, l.addr(off + i*8, r.SP)) # save r3, the second argument, to the thread local position self.mc.STG(r.r3, l.addr(THREADLOCAL_ON_ENTER_JIT, r.SP)) - # push a standard frame for any call - self.mc.push_std_frame() + # push a standard frame for any within the jit trace + self.mc.push_std_frame(fpoff) # move the first argument to SPP: the jitframe object self.mc.LGR(r.SPP, r.r2) @@ -1060,8 +1070,13 @@ if gcrootmap and gcrootmap.is_shadow_stack: self._call_footer_shadowstack(gcrootmap) + size = STD_FRAME_SIZE_IN_BYTES + # f8 through f15 are saved registers (= non volatile) + # TODO it would be good to detect if any float is used in the loop + # and to skip this push/pop whenever no float operation occurs + for i,reg in enumerate(range(8,16)): + self.mc.LD_rx(reg, l.addr(size + size + i*8, r.SP)) # restore registers r6-r15 - size = STD_FRAME_SIZE_IN_BYTES self.mc.LMG(r.r6, r.r15, 
l.addr(size+6*WORD, r.SP)) self.jmpto(r.r14) diff --git a/rpython/jit/backend/zarch/codebuilder.py b/rpython/jit/backend/zarch/codebuilder.py --- a/rpython/jit/backend/zarch/codebuilder.py +++ b/rpython/jit/backend/zarch/codebuilder.py @@ -69,9 +69,10 @@ RAW_CALL_REG = r.r14 - def __init__(self): + def __init__(self, pool=None): AbstractZARCHBuilder.__init__(self) self.init_block_builder() + self.pool = pool # # ResOperation --> offset in the assembly. # ops_offset[None] represents the beginning of the code after the last op @@ -173,6 +174,9 @@ elif -2**31 <= word <= 2**31-1: self.LGFI(dest_reg, l.imm(word)) else: + if self.pool and self.pool.contains_constant(word): + self.LG(dest_reg, l.pool(self.pool.get_direct_offset(word))) + return # this is not put into the constant pool, because it # is an immediate value that cannot easily be forseen self.IILF(dest_reg, l.imm(word & 0xFFFFffff)) diff --git a/rpython/jit/backend/zarch/pool.py b/rpython/jit/backend/zarch/pool.py --- a/rpython/jit/backend/zarch/pool.py +++ b/rpython/jit/backend/zarch/pool.py @@ -95,6 +95,9 @@ if arg.is_constant(): self.reserve_literal(8, arg) + def contains_constant(self, unique_val): + return unique_val in self.offset_map + def get_descr_offset(self, descr): return self.offset_descr[descr] @@ -105,6 +108,11 @@ assert self.offset_map[uvalue] >= 0 return self.offset_map[uvalue] + def get_direct_offset(self, unique_val): + """ Get the offset directly using a unique value, + use get_offset if you have a Const box """ + return self.offset_map[unique_val] + def unique_value(self, val): if val.type == FLOAT: if val.getfloat() == 0.0: @@ -170,6 +178,8 @@ self.pool_start = asm.mc.get_relative_pos() for op in operations: self.ensure_can_hold_constants(asm, op) + self.ensure_value(asm.cpu.pos_exc_value()) + # TODO add more values that are loaded with load_imm if self.size == 0: # no pool needed! 
return diff --git a/rpython/jit/backend/zarch/test/test_calling_convention.py b/rpython/jit/backend/zarch/test/test_calling_convention.py --- a/rpython/jit/backend/zarch/test/test_calling_convention.py +++ b/rpython/jit/backend/zarch/test/test_calling_convention.py @@ -5,7 +5,7 @@ import rpython.jit.backend.zarch.conditions as c -class TestPPCCallingConvention(CallingConvTests): +class TestZARCHCallingConvention(CallingConvTests): # ../../test/calling_convention_test.py def make_function_returning_stack_pointer(self): From pypy.commits at gmail.com Wed Feb 10 07:03:11 2016 From: pypy.commits at gmail.com (plan_rich) Date: Wed, 10 Feb 2016 04:03:11 -0800 (PST) Subject: [pypy-commit] pypy s390x-backend: putting thread local to the right position (after moving the whole frame down) Message-ID: <56bb26ff.48dcc20a.ffa8a.ffffed8d@mx.google.com> Author: Richard Plangger Branch: s390x-backend Changeset: r82141:5bae6a642fe5 Date: 2016-02-10 13:02 +0100 http://bitbucket.org/pypy/pypy/changeset/5bae6a642fe5/ Log: putting thread local to the right position (after moving the whole frame down) diff --git a/rpython/jit/backend/zarch/assembler.py b/rpython/jit/backend/zarch/assembler.py --- a/rpython/jit/backend/zarch/assembler.py +++ b/rpython/jit/backend/zarch/assembler.py @@ -1022,7 +1022,7 @@ self.mc.STD_rx(reg, l.addr(off + i*8, r.SP)) # save r3, the second argument, to the thread local position - self.mc.STG(r.r3, l.addr(THREADLOCAL_ON_ENTER_JIT, r.SP)) + self.mc.STG(r.r3, l.addr(-fpoff+THREADLOCAL_ON_ENTER_JIT, r.SP)) # push a standard frame for any within the jit trace self.mc.push_std_frame(fpoff) From pypy.commits at gmail.com Wed Feb 10 08:00:10 2016 From: pypy.commits at gmail.com (plan_rich) Date: Wed, 10 Feb 2016 05:00:10 -0800 (PST) Subject: [pypy-commit] buildbot default: merged default Message-ID: <56bb345a.0c2d1c0a.85e0e.ffff8c62@mx.google.com> Author: Richard Plangger Branch: Changeset: r986:928b30c7fc85 Date: 2016-02-10 13:59 +0100 
http://bitbucket.org/pypy/buildbot/changeset/928b30c7fc85/ Log: merged default diff --git a/bot2/pypybuildbot/builds.py b/bot2/pypybuildbot/builds.py --- a/bot2/pypybuildbot/builds.py +++ b/bot2/pypybuildbot/builds.py @@ -809,14 +809,18 @@ '--upload-executable', 'pypy-c' + postfix, '--upload-project', 'PyPy', '--revision', WithProperties('%(got_revision)s'), - '--branch', WithProperties('%(branch)s'), + # HACK: branches are not uploaded any more, so that + # codespeed will display it, even if not "default" + #'--branch', WithProperties('%(branch)s'), '--upload-urls', 'http://speed.pypy.org/', '--upload-baseline', '--upload-baseline-executable', 'pypy-c-jit' + postfix, '--upload-baseline-project', 'PyPy', '--upload-baseline-revision', WithProperties('%(got_revision)s'), - '--upload-baseline-branch', WithProperties('%(branch)s'), + # HACK: branches are not uploaded any more, so that + # codespeed will display it, even if not "default" + #'--upload-baseline-branch', WithProperties('%(branch)s'), '--upload-baseline-urls', 'http://speed.pypy.org/', ], workdir='./benchmarks', diff --git a/bot2/pypybuildbot/master.py b/bot2/pypybuildbot/master.py --- a/bot2/pypybuildbot/master.py +++ b/bot2/pypybuildbot/master.py @@ -273,7 +273,6 @@ # benchmarks # linux tests LINUX32, # on tannit32, uses all cores - LINUX64, # on speed-old, uses all cores JITLINUX32, # on tannit32, uses 1 core JITLINUX64, # on speed-old, uses 1 core #APPLVLLINUX32, # on tannit32, uses 1 core @@ -297,6 +296,7 @@ ], branch='s390x-backend', hour=2, minute=0), Nightly("nightly-1-00", [ + LINUX64, # on speed-old, uses all cores JITBENCH, # on tannit32, uses 1 core (in part exclusively) JITBENCH64, # on tannit64, uses 1 core (in part exclusively) JITBENCH64_NEW, # on speed64, uses 1 core (in part exclusively) From pypy.commits at gmail.com Wed Feb 10 08:00:08 2016 From: pypy.commits at gmail.com (plan_rich) Date: Wed, 10 Feb 2016 05:00:08 -0800 (PST) Subject: [pypy-commit] buildbot default: s390x app level and 
jit suite use python for building Message-ID: <56bb3458.53ad1c0a.ac3ba.ffffd224@mx.google.com> Author: Richard Plangger Branch: Changeset: r985:d4cc08767a0e Date: 2016-02-10 13:58 +0100 http://bitbucket.org/pypy/buildbot/changeset/d4cc08767a0e/ Log: s390x app level and jit suite use python for building diff --git a/bot2/pypybuildbot/master.py b/bot2/pypybuildbot/master.py --- a/bot2/pypybuildbot/master.py +++ b/bot2/pypybuildbot/master.py @@ -66,7 +66,8 @@ platform='linux64') pypyTranslatedAppLevelTestFactoryS390X = pypybuilds.Translated(lib_python=True, app_tests=True, - platform='s390x') + platform='s390x', + interpreter='python') # these are like the two above: the only difference is that they only run # lib-python tests,not -A tests @@ -156,6 +157,7 @@ lib_python=True, pypyjit=True, app_tests=True, + interpreter='python', # use cpython for now ) pypyJITBenchmarkFactory_tannit = pypybuilds.JITBenchmark(host='tannit') From pypy.commits at gmail.com Wed Feb 10 08:51:48 2016 From: pypy.commits at gmail.com (mjacob) Date: Wed, 10 Feb 2016 05:51:48 -0800 (PST) Subject: [pypy-commit] pypy py3.3: Skip this test, which fails on CPython 3.3 as well (but not on later versions). Message-ID: <56bb4074.a151c20a.e0759.1ac6@mx.google.com> Author: Manuel Jacob Branch: py3.3 Changeset: r82142:52c94a3c2dc9 Date: 2016-02-10 14:02 +0100 http://bitbucket.org/pypy/pypy/changeset/52c94a3c2dc9/ Log: Skip this test, which fails on CPython 3.3 as well (but not on later versions). 
diff --git a/pypy/module/imp/test/test_import.py b/pypy/module/imp/test/test_import.py --- a/pypy/module/imp/test/test_import.py +++ b/pypy/module/imp/test/test_import.py @@ -692,6 +692,7 @@ del sys.modules['itertools'] def test_invalid_pathname(self): + skip("This test fails on CPython 3.3, but passes on CPython 3.4+") import imp import pkg import os From pypy.commits at gmail.com Wed Feb 10 08:51:52 2016 From: pypy.commits at gmail.com (mjacob) Date: Wed, 10 Feb 2016 05:51:52 -0800 (PST) Subject: [pypy-commit] pypy py3.3: Remove test which calls imp._run_compiled_module(). This function was removed. Message-ID: <56bb4078.0ab81c0a.beb05.487e@mx.google.com> Author: Manuel Jacob Branch: py3.3 Changeset: r82144:7cd3ceac33eb Date: 2016-02-10 14:28 +0100 http://bitbucket.org/pypy/pypy/changeset/7cd3ceac33eb/ Log: Remove test which calls imp._run_compiled_module(). This function was removed. diff --git a/pypy/module/imp/test/test_import.py b/pypy/module/imp/test/test_import.py --- a/pypy/module/imp/test/test_import.py +++ b/pypy/module/imp/test/test_import.py @@ -1208,20 +1208,6 @@ sys.meta_path.pop() -class AppTestPyPyExtension(object): - spaceconfig = dict(usemodules=['imp', 'zipimport', '__pypy__']) - - def setup_class(cls): - cls.w_udir = cls.space.wrap(str(udir)) - - def test_run_compiled_module(self): - # XXX minimal test only - import imp, types - module = types.ModuleType('foobar') - raises(IOError, imp._run_compiled_module, - 'foobar', 'this_file_does_not_exist', None, module) - - class AppTestNoPycFile(object): spaceconfig = { "objspace.usepycfiles": False, From pypy.commits at gmail.com Wed Feb 10 08:51:53 2016 From: pypy.commits at gmail.com (mjacob) Date: Wed, 10 Feb 2016 05:51:53 -0800 (PST) Subject: [pypy-commit] pypy py3.3: Copy _r_long/_w_long helper functions from default's importing.py to this test file. Although these functions are not needed anymore in importing.py, some tests call them. 
Message-ID: <56bb4079.c615c20a.84ffe.1845@mx.google.com> Author: Manuel Jacob Branch: py3.3 Changeset: r82145:42d83e6431d8 Date: 2016-02-10 14:50 +0100 http://bitbucket.org/pypy/pypy/changeset/42d83e6431d8/ Log: Copy _r_long/_w_long helper functions from default's importing.py to this test file. Although these functions are not needed anymore in importing.py, some tests call them. diff --git a/pypy/module/imp/test/test_import.py b/pypy/module/imp/test/test_import.py --- a/pypy/module/imp/test/test_import.py +++ b/pypy/module/imp/test/test_import.py @@ -17,6 +17,30 @@ from pypy import conftest + +def _read_n(stream, n): + buf = '' + while len(buf) < n: + data = stream.read(n - len(buf)) + if not data: + raise streamio.StreamError("end of file") + buf += data + return buf + +def _r_long(stream): + s = _read_n(stream, 4) + return importing._get_long(s) + +def _w_long(stream, x): + a = x & 0xff + x >>= 8 + b = x & 0xff + x >>= 8 + c = x & 0xff + x >>= 8 + d = x & 0xff + stream.write(chr(a) + chr(b) + chr(c) + chr(d)) + def setuppkg(pkgname, **entries): p = udir.join('impsubdir') if pkgname: @@ -838,8 +862,8 @@ stream = streamio.open_file_as_stream(cpathname, "rb") try: w_mod = space.wrap(Module(space, w_modulename)) - magic = importing._r_long(stream) - timestamp = importing._r_long(stream) + magic = _r_long(stream) + timestamp = _r_long(stream) w_ret = importing.load_compiled_module(space, w_modulename, w_mod, @@ -863,8 +887,8 @@ stream = streamio.open_file_as_stream(cpathname, "rb") try: w_mod = space.wrap(Module(space, w_modulename)) - magic = importing._r_long(stream) - timestamp = importing._r_long(stream) + magic = _r_long(stream) + timestamp = _r_long(stream) w_ret = importing.load_compiled_module(space, w_modulename, w_mod, @@ -899,18 +923,18 @@ pathname = str(udir.join('test.dat')) stream = streamio.open_file_as_stream(pathname, "wb") try: - importing._w_long(stream, 42) - importing._w_long(stream, 12312) - importing._w_long(stream, 128397198) + 
_w_long(stream, 42) + _w_long(stream, 12312) + _w_long(stream, 128397198) finally: stream.close() stream = streamio.open_file_as_stream(pathname, "rb") try: - res = importing._r_long(stream) + res = _r_long(stream) assert res == 42 - res = importing._r_long(stream) + res = _r_long(stream) assert res == 12312 - res = importing._r_long(stream) + res = _r_long(stream) assert res == 128397198 finally: stream.close() @@ -937,8 +961,8 @@ stream = streamio.open_file_as_stream(cpathname, "rb") try: w_mod = space2.wrap(Module(space2, w_modulename)) - magic = importing._r_long(stream) - timestamp = importing._r_long(stream) + magic = _r_long(stream) + timestamp = _r_long(stream) space2.raises_w(space2.w_ImportError, importing.load_compiled_module, space2, From pypy.commits at gmail.com Wed Feb 10 08:51:50 2016 From: pypy.commits at gmail.com (mjacob) Date: Wed, 10 Feb 2016 05:51:50 -0800 (PST) Subject: [pypy-commit] pypy py3.3: Remove tests which call importing.load_source_module(). This function was removed. Message-ID: <56bb4076.8205c20a.fec93.19e5@mx.google.com> Author: Manuel Jacob Branch: py3.3 Changeset: r82143:7ce8c80e23e7 Date: 2016-02-10 14:18 +0100 http://bitbucket.org/pypy/pypy/changeset/7ce8c80e23e7/ Log: Remove tests which call importing.load_source_module(). This function was removed. 
diff --git a/pypy/module/imp/test/test_import.py b/pypy/module/imp/test/test_import.py --- a/pypy/module/imp/test/test_import.py +++ b/pypy/module/imp/test/test_import.py @@ -915,106 +915,6 @@ finally: stream.close() - def test_load_source_module(self): - space = self.space - w_modulename = space.wrap('somemodule') - w_mod = space.wrap(Module(space, w_modulename)) - pathname = _testfilesource() - stream = streamio.open_file_as_stream(pathname, "r") - try: - w_ret = importing.load_source_module( - space, w_modulename, w_mod, - pathname, stream.readall(), - stream.try_to_find_file_descriptor()) - finally: - stream.close() - assert w_mod is w_ret - w_ret = space.getattr(w_mod, space.wrap('x')) - ret = space.int_w(w_ret) - assert ret == 42 - - cpathname = udir.join('test.pyc') - assert cpathname.check() - cpathname.remove() - - def test_load_source_module_nowrite(self): - space = self.space - w_modulename = space.wrap('somemodule') - w_mod = space.wrap(Module(space, w_modulename)) - pathname = _testfilesource() - stream = streamio.open_file_as_stream(pathname, "r") - try: - w_ret = importing.load_source_module( - space, w_modulename, w_mod, - pathname, stream.readall(), - stream.try_to_find_file_descriptor(), - write_pyc=False) - finally: - stream.close() - cpathname = udir.join('test.pyc') - assert not cpathname.check() - - def test_load_source_module_dont_write_bytecode(self): - space = self.space - w_modulename = space.wrap('somemodule') - w_mod = space.wrap(Module(space, w_modulename)) - pathname = _testfilesource() - stream = streamio.open_file_as_stream(pathname, "r") - try: - space.setattr(space.sys, space.wrap('dont_write_bytecode'), - space.w_True) - w_ret = importing.load_source_module( - space, w_modulename, w_mod, - pathname, stream.readall(), - stream.try_to_find_file_descriptor()) - finally: - space.setattr(space.sys, space.wrap('dont_write_bytecode'), - space.w_False) - stream.close() - cpathname = udir.join('test.pyc') - assert not cpathname.check() - - 
def test_load_source_module_syntaxerror(self): - # No .pyc file on SyntaxError - space = self.space - w_modulename = space.wrap('somemodule') - w_mod = space.wrap(Module(space, w_modulename)) - pathname = _testfilesource(source="") - stream = streamio.open_file_as_stream(pathname, "r") - try: - w_ret = importing.load_source_module( - space, w_modulename, w_mod, - pathname, stream.readall(), - stream.try_to_find_file_descriptor()) - except OperationError: - # OperationError("Syntax Error") - pass - stream.close() - - cpathname = udir.join('test.pyc') - assert not cpathname.check() - - def test_load_source_module_importerror(self): - # the .pyc file is created before executing the module - space = self.space - w_modulename = space.wrap('somemodule') - w_mod = space.wrap(Module(space, w_modulename)) - pathname = _testfilesource(source="a = unknown_name") - stream = streamio.open_file_as_stream(pathname, "r") - try: - w_ret = importing.load_source_module( - space, w_modulename, w_mod, - pathname, stream.readall(), - stream.try_to_find_file_descriptor()) - except OperationError: - # OperationError("NameError", "global name 'unknown_name' is not defined") - pass - stream.close() - - # And the .pyc has been generated - cpathname = udir.join(importing.make_compiled_pathname('test.py')) - assert cpathname.check() - def test_pyc_magic_changes(self): py.test.skip("For now, PyPy generates only one kind of .pyc files") # test that the pyc files produced by a space are not reimportable From pypy.commits at gmail.com Wed Feb 10 09:10:32 2016 From: pypy.commits at gmail.com (arigo) Date: Wed, 10 Feb 2016 06:10:32 -0800 (PST) Subject: [pypy-commit] buildbot default: check in the changes applied locally on baroquesoftware.com Message-ID: <56bb44d8.0775c20a.329eb.25ca@mx.google.com> Author: Armin Rigo Branch: Changeset: r987:f460098a5737 Date: 2016-02-10 15:08 +0100 http://bitbucket.org/pypy/buildbot/changeset/f460098a5737/ Log: check in the changes applied locally on 
baroquesoftware.com diff --git a/README b/README --- a/README +++ b/README @@ -50,6 +50,10 @@ $ buildbot start +OR + +$ ./restart_buildmaster_when_not_running + To run a buildslave =================== Please refer to README_BUILDSLAVE diff --git a/bbhook/irc.py b/bbhook/irc.py --- a/bbhook/irc.py +++ b/bbhook/irc.py @@ -44,11 +44,13 @@ print message + '\n' else: from .main import app - return subprocess.call([ + args = [ app.config['BOT'], app.config['CHANNEL'], message, - ]) + ] + print args + return subprocess.call(args) def get_short_id(owner, repo, branch): """ diff --git a/bot2/pypybuildbot/master.py b/bot2/pypybuildbot/master.py --- a/bot2/pypybuildbot/master.py +++ b/bot2/pypybuildbot/master.py @@ -484,7 +484,7 @@ "category": 'mac32' }, {"name" : JITMACOSX64, - "slavenames": ["rebuy-de", "xerxes", "tosh", "osx-10.9-x64-dw"], + "slavenames": ["rebuy-de", "tosh", "osx-10.9-x64-dw"], # "xerxes" 'builddir' : JITMACOSX64, 'factory' : pypyJITTranslatedTestFactoryOSX64, 'category' : 'mac64', diff --git a/bot2/pypybuildbot/util.py b/bot2/pypybuildbot/util.py --- a/bot2/pypybuildbot/util.py +++ b/bot2/pypybuildbot/util.py @@ -2,7 +2,7 @@ import socket def we_are_debugging(): - return socket.gethostname() != 'cobra' + return socket.gethostname() != 'baroquesoftware' def load(name): mod = __import__(name, {}, {}, ['__all__']) From pypy.commits at gmail.com Wed Feb 10 09:40:18 2016 From: pypy.commits at gmail.com (arigo) Date: Wed, 10 Feb 2016 06:40:18 -0800 (PST) Subject: [pypy-commit] cffi default: Mark these three source files as deprecated Message-ID: <56bb4bd2.c711c30a.e9bd9.318d@mx.google.com> Author: Armin Rigo Branch: Changeset: r2622:1e856d51d384 Date: 2016-02-10 15:39 +0100 http://bitbucket.org/cffi/cffi/changeset/1e856d51d384/ Log: Mark these three source files as deprecated diff --git a/cffi/vengine_cpy.py b/cffi/vengine_cpy.py --- a/cffi/vengine_cpy.py +++ b/cffi/vengine_cpy.py @@ -1,3 +1,6 @@ +# +# DEPRECATED: implementation for ffi.verify() +# import sys, 
imp from . import model, ffiplatform diff --git a/cffi/vengine_gen.py b/cffi/vengine_gen.py --- a/cffi/vengine_gen.py +++ b/cffi/vengine_gen.py @@ -1,3 +1,6 @@ +# +# DEPRECATED: implementation for ffi.verify() +# import sys, os import types diff --git a/cffi/verifier.py b/cffi/verifier.py --- a/cffi/verifier.py +++ b/cffi/verifier.py @@ -1,3 +1,6 @@ +# +# DEPRECATED: implementation for ffi.verify() +# import sys, os, binascii, shutil, io from . import __version_verifier_modules__ from . import ffiplatform From pypy.commits at gmail.com Wed Feb 10 11:59:59 2016 From: pypy.commits at gmail.com (jbs) Date: Wed, 10 Feb 2016 08:59:59 -0800 (PST) Subject: [pypy-commit] pypy reorder-map-attributes: (cfbolz, jbs): fixed code duplications, add elidable, skip some tests Message-ID: <56bb6c8f.418f1c0a.a7559.ffff8d93@mx.google.com> Author: Jasper.Schulz Branch: reorder-map-attributes Changeset: r82147:fa7727d7db37 Date: 2016-02-10 15:47 +0000 http://bitbucket.org/pypy/pypy/changeset/fa7727d7db37/ Log: (cfbolz, jbs): fixed code duplications, add elidable, skip some tests diff --git a/pypy/objspace/std/mapdict.py b/pypy/objspace/std/mapdict.py --- a/pypy/objspace/std/mapdict.py +++ b/pypy/objspace/std/mapdict.py @@ -153,51 +153,51 @@ cache[name, index] = attr return attr + @jit.elidable + def _get_cache_attr(self, name, index): + key = name, index + # this method is not actually elidable, but it's fine anyway + if self.cache_attrs is not None: + return self.cache_attrs.get(key, None) + return None + @jit.look_inside_iff(lambda self, obj, name, index, w_value: jit.isconstant(self) and jit.isconstant(name) and jit.isconstant(index)) def add_attr(self, obj, name, index, w_value): reordered = self._try_reorder_and_add(obj, name, index, w_value) - if reordered != NOT_REORDERED: - return - self._add_attr_without_reordering(obj, name, index, w_value) - - def _add_attr_without_reordering(self, obj, name, index, w_value): - attr = self._get_new_attr(name, index) - oldattr = 
obj._get_mapdict_map() + if reordered == NOT_REORDERED: + self._add_attr_without_reordering(obj, name, index, w_value) if not jit.we_are_jitted(): + oldattr = self + attr = obj._get_mapdict_map() size_est = (oldattr._size_estimate + attr.size_estimate() - oldattr.size_estimate()) assert size_est >= (oldattr.length() * NUM_DIGITS_POW2) oldattr._size_estimate = size_est - if attr.length() > obj._mapdict_storage_length(): - # note that attr.size_estimate() is always at least attr.length() - new_storage = [None] * attr.size_estimate() + + def _add_attr_without_reordering(self, obj, name, index, w_value): + attr = self._get_new_attr(name, index) + attr._switch_map_and_write_storage(obj, w_value) + + def _switch_map_and_write_storage(self, obj, w_value): + if self.length() > obj._mapdict_storage_length(): + # note that self.size_estimate() is always at least self.length() + new_storage = [None] * self.size_estimate() for i in range(obj._mapdict_storage_length()): new_storage[i] = obj._mapdict_read_storage(i) - obj._set_mapdict_storage_and_map(new_storage, attr) + obj._set_mapdict_storage_and_map(new_storage, self) # the order is important here: first change the map, then the storage, # for the benefit of the special subclasses - obj._set_mapdict_map(attr) - obj._mapdict_write_storage(attr.storageindex, w_value) + obj._set_mapdict_map(self) + obj._mapdict_write_storage(self.storageindex, w_value) def _try_reorder_and_add(self, obj, name, index, w_value): - key = name, index - if self.cache_attrs is not None and key in self.cache_attrs: - attr = self.cache_attrs[key] - # xxx: remove duplicated code - - if attr.length() > obj._mapdict_storage_length(): - # note that attr.size_estimate() is always at least attr.length() - new_storage = [None] * attr.size_estimate() - for i in range(obj._mapdict_storage_length()): - new_storage[i] = obj._mapdict_read_storage(i) - obj._set_mapdict_storage_and_map(new_storage, attr) - - obj._set_mapdict_map(attr) - 
obj._mapdict_write_storage(attr.storageindex, w_value) + attr = self._get_cache_attr(name, index) + if attr is not None: + attr._switch_map_and_write_storage(obj, w_value) return JUST_REORDERED elif isinstance(self, PlainAttribute): diff --git a/pypy/objspace/std/test/test_mapdict.py b/pypy/objspace/std/test/test_mapdict.py --- a/pypy/objspace/std/test/test_mapdict.py +++ b/pypy/objspace/std/test/test_mapdict.py @@ -440,9 +440,15 @@ class TestMapDictImplementation(BaseTestRDictImplementation): StrategyClass = MapDictStrategy get_impl = get_impl + def test_setdefault_fast(self): + # mapdict can't pass this, which is fine + pass class TestDevolvedMapDictImplementation(BaseTestDevolvedDictImplementation): get_impl = get_impl StrategyClass = MapDictStrategy + def test_setdefault_fast(self): + # mapdict can't pass this, which is fine + pass # ___________________________________________________________ # tests that check the obj interface after the dict has devolved @@ -1213,3 +1219,7 @@ class TestMapDictImplementationUsingnewdict(BaseTestRDictImplementation): StrategyClass = MapDictStrategy # NB: the get_impl method is not overwritten here, as opposed to above + + def test_setdefault_fast(self): + # mapdict can't pass this, which is fine + pass \ No newline at end of file From pypy.commits at gmail.com Wed Feb 10 11:59:57 2016 From: pypy.commits at gmail.com (jbs) Date: Wed, 10 Feb 2016 08:59:57 -0800 (PST) Subject: [pypy-commit] pypy reorder-map-attributes: (cfbolz, jbs): Make sure that we end up with the same map if attributes are inserted in different orders Message-ID: <56bb6c8d.0cb81c0a.edae6.ffff9387@mx.google.com> Author: Jasper.Schulz Branch: reorder-map-attributes Changeset: r82146:a9ed2fa16365 Date: 2016-02-10 15:28 +0000 http://bitbucket.org/pypy/pypy/changeset/a9ed2fa16365/ Log: (cfbolz, jbs): Make sure that we end up with the same map if attributes are inserted in different orders diff --git a/pypy/objspace/std/mapdict.py b/pypy/objspace/std/mapdict.py --- 
a/pypy/objspace/std/mapdict.py +++ b/pypy/objspace/std/mapdict.py @@ -20,6 +20,8 @@ # note: we use "x * NUM_DIGITS_POW2" instead of "x << NUM_DIGITS" because # we want to propagate knowledge that the result cannot be negative +NOT_REORDERED, JUST_REORDERED, SOMEWHERE_REORDERED = range(3) + class AbstractAttribute(object): _immutable_fields_ = ['terminator'] cache_attrs = None @@ -156,7 +158,12 @@ jit.isconstant(name) and jit.isconstant(index)) def add_attr(self, obj, name, index, w_value): - # grumble, jit needs this + reordered = self._try_reorder_and_add(obj, name, index, w_value) + if reordered != NOT_REORDERED: + return + self._add_attr_without_reordering(obj, name, index, w_value) + + def _add_attr_without_reordering(self, obj, name, index, w_value): attr = self._get_new_attr(name, index) oldattr = obj._get_mapdict_map() if not jit.we_are_jitted(): @@ -176,6 +183,39 @@ obj._set_mapdict_map(attr) obj._mapdict_write_storage(attr.storageindex, w_value) + def _try_reorder_and_add(self, obj, name, index, w_value): + key = name, index + if self.cache_attrs is not None and key in self.cache_attrs: + attr = self.cache_attrs[key] + # xxx: remove duplicated code + + if attr.length() > obj._mapdict_storage_length(): + # note that attr.size_estimate() is always at least attr.length() + new_storage = [None] * attr.size_estimate() + for i in range(obj._mapdict_storage_length()): + new_storage[i] = obj._mapdict_read_storage(i) + obj._set_mapdict_storage_and_map(new_storage, attr) + + obj._set_mapdict_map(attr) + obj._mapdict_write_storage(attr.storageindex, w_value) + return JUST_REORDERED + + elif isinstance(self, PlainAttribute): + w_self_value = obj._mapdict_read_storage(self.storageindex) + reordered = self.back._try_reorder_and_add(obj, name, index, w_value) + if reordered == JUST_REORDERED: + obj._get_mapdict_map()._add_attr_without_reordering( + obj, self.name, self.index, w_self_value) + elif reordered == SOMEWHERE_REORDERED: + obj._get_mapdict_map().add_attr(obj, 
self.name, self.index, w_self_value) + else: + assert reordered == NOT_REORDERED + return NOT_REORDERED + return SOMEWHERE_REORDERED + else: + # we are terminator + return NOT_REORDERED + def materialize_r_dict(self, space, obj, dict_w): raise NotImplementedError("abstract base class") diff --git a/pypy/objspace/std/test/test_mapdict.py b/pypy/objspace/std/test/test_mapdict.py --- a/pypy/objspace/std/test/test_mapdict.py +++ b/pypy/objspace/std/test/test_mapdict.py @@ -107,6 +107,87 @@ assert obj2.getdictvalue(space, "b") == 60 assert obj2.map is obj.map +def test_insert_different_orders(): + cls = Class() + obj = cls.instantiate() + obj.setdictvalue(space, "a", 10) + obj.setdictvalue(space, "b", 20) + + obj2 = cls.instantiate() + obj2.setdictvalue(space, "b", 30) + obj2.setdictvalue(space, "a", 40) + + assert obj.map is obj2.map + +def test_insert_different_orders_2(): + cls = Class() + obj = cls.instantiate() + obj2 = cls.instantiate() + + obj.setdictvalue(space, "a", 10) + + obj2.setdictvalue(space, "b", 20) + obj2.setdictvalue(space, "a", 30) + + obj.setdictvalue(space, "b", 40) + assert obj.map is obj2.map + +def test_insert_different_orders_3(): + cls = Class() + obj = cls.instantiate() + obj2 = cls.instantiate() + obj3 = cls.instantiate() + obj4 = cls.instantiate() + obj5 = cls.instantiate() + obj6 = cls.instantiate() + + obj.setdictvalue(space, "a", 10) + obj.setdictvalue(space, "b", 20) + obj.setdictvalue(space, "c", 30) + + obj2.setdictvalue(space, "a", 30) + obj2.setdictvalue(space, "c", 40) + obj2.setdictvalue(space, "b", 50) + + obj3.setdictvalue(space, "c", 30) + obj3.setdictvalue(space, "a", 40) + obj3.setdictvalue(space, "b", 50) + + obj4.setdictvalue(space, "c", 30) + obj4.setdictvalue(space, "b", 40) + obj4.setdictvalue(space, "a", 50) + + obj5.setdictvalue(space, "b", 30) + obj5.setdictvalue(space, "a", 40) + obj5.setdictvalue(space, "c", 50) + + obj6.setdictvalue(space, "b", 30) + obj6.setdictvalue(space, "c", 40) + obj6.setdictvalue(space, "a", 
50) + + assert obj.map is obj2.map + assert obj.map is obj3.map + assert obj.map is obj4.map + assert obj.map is obj5.map + assert obj.map is obj6.map + + +def test_insert_different_orders_perm(): + from itertools import permutations + cls = Class() + seen_maps = {} + for i, attributes in enumerate(permutations("abcdef")): + obj = cls.instantiate() + key = "" + for j, attr in enumerate(attributes): + obj.setdictvalue(space, attr, i*10+j) + key = "".join(sorted(key+attr)) + if key in seen_maps: + assert obj.map is seen_maps[key] + else: + seen_maps[key] = obj.map + print len(seen_maps) + def test_attr_immutability(monkeypatch): cls = Class() obj = cls.instantiate() From pypy.commits at gmail.com Wed Feb 10 12:07:56 2016 From: pypy.commits at gmail.com (plan_rich) Date: Wed, 10 Feb 2016 09:07:56 -0800 (PST) Subject: [pypy-commit] pypy s390x-backend: translation issue Message-ID: <56bb6e6c.077bc20a.f4074.622c@mx.google.com> Author: Richard Plangger Branch: s390x-backend Changeset: r82148:8ca0c94c91ac Date: 2016-02-10 13:21 +0100 http://bitbucket.org/pypy/pypy/changeset/8ca0c94c91ac/ Log: translation issue diff --git a/rpython/jit/backend/zarch/assembler.py b/rpython/jit/backend/zarch/assembler.py --- a/rpython/jit/backend/zarch/assembler.py +++ b/rpython/jit/backend/zarch/assembler.py @@ -1016,10 +1016,11 @@ # f8 through f15 are saved registers (= non volatile) # TODO it would be good to detect if any float is used in the loop # and to skip this push/pop whenever no float operation occurs - for i,reg in enumerate(range(8,16)): + for i,reg in enumerate([r.f8, r.f9, r.f10, r.f11, + r.f12, r.f13, r.f14, r.f15]): off = -fpoff + STD_FRAME_SIZE_IN_BYTES assert off > 0 - self.mc.STD_rx(reg, l.addr(off + i*8, r.SP)) + self.mc.STD(reg, l.addr(off + i*8, r.SP)) # save r3, the second argument, to the thread local position self.mc.STG(r.r3, l.addr(-fpoff+THREADLOCAL_ON_ENTER_JIT, r.SP)) @@ -1074,8 +1075,9 @@ # f8 through f15 are saved registers (= non volatile) # TODO it would be 
good to detect if any float is used in the loop # and to skip this push/pop whenever no float operation occurs - for i,reg in enumerate(range(8,16)): - self.mc.LD_rx(reg, l.addr(size + size + i*8, r.SP)) + for i,reg in enumerate([r.f8, r.f9, r.f10, r.f11, + r.f12, r.f13, r.f14, r.f15]): + self.mc.LD(reg, l.addr(size + size + i*8, r.SP)) # restore registers r6-r15 self.mc.LMG(r.r6, r.r15, l.addr(size+6*WORD, r.SP)) self.jmpto(r.r14) From pypy.commits at gmail.com Wed Feb 10 12:07:57 2016 From: pypy.commits at gmail.com (plan_rich) Date: Wed, 10 Feb 2016 09:07:57 -0800 (PST) Subject: [pypy-commit] pypy s390x-backend: _rawffi bigendian issue in callbacks, callback writes narrow integer directly to MSB of 64 bit value on s390x (wrong when value is passed along) Message-ID: <56bb6e6d.e7bec20a.39b65.68d7@mx.google.com> Author: Richard Plangger Branch: s390x-backend Changeset: r82149:74ebd8669f96 Date: 2016-02-10 18:07 +0100 http://bitbucket.org/pypy/pypy/changeset/74ebd8669f96/ Log: _rawffi bigendian issue in callbacks, callback writes narrow integer directly to MSB of 64 bit value on s390x (wrong when value is passed along) diff --git a/pypy/module/_rawffi/callback.py b/pypy/module/_rawffi/callback.py --- a/pypy/module/_rawffi/callback.py +++ b/pypy/module/_rawffi/callback.py @@ -1,17 +1,23 @@ - +import sys from pypy.interpreter.gateway import interp2app, unwrap_spec from pypy.interpreter.typedef import TypeDef, GetSetProperty from rpython.rtyper.lltypesystem import lltype, rffi from pypy.module._rawffi.interp_rawffi import write_ptr from pypy.module._rawffi.structure import W_Structure from pypy.module._rawffi.interp_rawffi import (W_DataInstance, letter2tp, - unwrap_value, unpack_argshapes, got_libffi_error) + unwrap_value, unpack_argshapes, got_libffi_error, is_narrow_integer_type, + LL_TYPEMAP, NARROW_INTEGER_TYPES) from rpython.rlib.clibffi import USERDATA_P, CallbackFuncPtr, FUNCFLAG_CDECL from rpython.rlib.clibffi import ffi_type_void, LibFFIError from 
rpython.rlib import rweakref from pypy.module._rawffi.tracker import tracker from pypy.interpreter.error import OperationError from pypy.interpreter import gateway +from rpython.rlib.unroll import unrolling_iterable + +BIGENDIAN = sys.byteorder == 'big' + +unroll_narrow_integer_types = unrolling_iterable(NARROW_INTEGER_TYPES) app = gateway.applevel(''' def tbprint(tb, err): @@ -42,8 +48,17 @@ args_w[i] = space.wrap(rffi.cast(rffi.ULONG, ll_args[i])) w_res = space.call(w_callable, space.newtuple(args_w)) if callback_ptr.result is not None: # don't return void - unwrap_value(space, write_ptr, ll_res, 0, - callback_ptr.result, w_res) + ptr = ll_res + letter = callback_ptr.result + if BIGENDIAN: + # take care of narrow integers! + for int_type in unroll_narrow_integer_types: + if int_type == letter: + T = LL_TYPEMAP[int_type] + n = rffi.sizeof(lltype.Signed) - rffi.sizeof(T) + ptr = rffi.ptradd(ptr, n) + break + unwrap_value(space, write_ptr, ptr, 0, letter, w_res) except OperationError, e: tbprint(space, space.wrap(e.get_traceback()), space.wrap(e.errorstr(space))) diff --git a/pypy/module/_rawffi/interp_rawffi.py b/pypy/module/_rawffi/interp_rawffi.py --- a/pypy/module/_rawffi/interp_rawffi.py +++ b/pypy/module/_rawffi/interp_rawffi.py @@ -440,6 +440,10 @@ space.wrap("cannot directly read value")) wrap_value._annspecialcase_ = 'specialize:arg(1)' +NARROW_INTEGER_TYPES = 'cbhiBIH?' 
+ +def is_narrow_integer_type(letter): + return letter in NARROW_INTEGER_TYPES class W_FuncPtr(W_Root): def __init__(self, space, ptr, argshapes, resshape): @@ -448,7 +452,7 @@ self.resshape = resshape self.narrow_integer = False if resshape is not None: - self.narrow_integer = resshape.itemcode.lower() in ('c','h','i') + self.narrow_integer = is_narrow_integer_type(resshape.itemcode.lower()) def getbuffer(self, space): return space.wrap(rffi.cast(lltype.Unsigned, self.ptr.funcsym)) @@ -512,7 +516,6 @@ # we get a 8 byte value in big endian n = rffi.sizeof(lltype.Signed) - result.shape.size result.buffer_advance(n) - return space.wrap(result) else: self.ptr.call(args_ll, lltype.nullptr(rffi.VOIDP.TO)) From pypy.commits at gmail.com Wed Feb 10 15:04:05 2016 From: pypy.commits at gmail.com (mattip) Date: Wed, 10 Feb 2016 12:04:05 -0800 (PST) Subject: [pypy-commit] pypy seperate-strucmember_h: fix test Message-ID: <56bb97b5.8e301c0a.b4751.74e0@mx.google.com> Author: mattip Branch: seperate-strucmember_h Changeset: r82150:8b1eeaf21f9e Date: 2016-02-10 21:52 +0200 http://bitbucket.org/pypy/pypy/changeset/8b1eeaf21f9e/ Log: fix test diff --git a/pypy/module/cpyext/test/test_intobject.py b/pypy/module/cpyext/test/test_intobject.py --- a/pypy/module/cpyext/test/test_intobject.py +++ b/pypy/module/cpyext/test/test_intobject.py @@ -99,6 +99,7 @@ """), ], prologue=""" + #include "structmember.h" typedef struct { PyObject_HEAD From pypy.commits at gmail.com Wed Feb 10 15:04:07 2016 From: pypy.commits at gmail.com (mattip) Date: Wed, 10 Feb 2016 12:04:07 -0800 (PST) Subject: [pypy-commit] pypy seperate-strucmember_h: minimize differences to cpython's version, remove unneeded #define s Message-ID: <56bb97b7.a3f6c20a.6ba4d.ffffab00@mx.google.com> Author: mattip Branch: seperate-strucmember_h Changeset: r82151:3556cafcadd9 Date: 2016-02-10 21:58 +0200 http://bitbucket.org/pypy/pypy/changeset/3556cafcadd9/ Log: minimize differences to cpython's version, remove unneeded #define s 
diff --git a/pypy/module/cpyext/include/structmember.h b/pypy/module/cpyext/include/structmember.h --- a/pypy/module/cpyext/include/structmember.h +++ b/pypy/module/cpyext/include/structmember.h @@ -4,64 +4,85 @@ extern "C" { #endif + +/* Interface to map C struct members to Python object attributes */ + #include /* For offsetof */ + +/* The offsetof() macro calculates the offset of a structure member + in its structure. Unfortunately this cannot be written down + portably, hence it is provided by a Standard C header file. + For pre-Standard C compilers, here is a version that usually works + (but watch out!): */ + #ifndef offsetof #define offsetof(type, member) ( (int) & ((type*)0) -> member ) #endif +/* An array of memberlist structures defines the name, type and offset + of selected members of a C structure. These can be read by + PyMember_Get() and set by PyMember_Set() (except if their READONLY flag + is set). The array must be terminated with an entry whose name + pointer is NULL. */ + + typedef struct PyMemberDef { - /* Current version, use this */ - char *name; - int type; - Py_ssize_t offset; - int flags; - char *doc; + /* Current version, use this */ + char *name; + int type; + Py_ssize_t offset; + int flags; + char *doc; } PyMemberDef; +/* Types */ +#define T_SHORT 0 +#define T_INT 1 +#define T_LONG 2 +#define T_FLOAT 3 +#define T_DOUBLE 4 +#define T_STRING 5 +#define T_OBJECT 6 +/* XXX the ordering here is weird for binary compatibility */ +#define T_CHAR 7 /* 1-character string */ +#define T_BYTE 8 /* 8-bit signed int */ +/* unsigned variants: */ +#define T_UBYTE 9 +#define T_USHORT 10 +#define T_UINT 11 +#define T_ULONG 12 -/* Types. These constants are also in structmemberdefs.py. 
*/ -#define T_SHORT 0 -#define T_INT 1 -#define T_LONG 2 -#define T_FLOAT 3 -#define T_DOUBLE 4 -#define T_STRING 5 -#define T_OBJECT 6 -#define T_CHAR 7 /* 1-character string */ -#define T_BYTE 8 /* 8-bit signed int */ -#define T_UBYTE 9 -#define T_USHORT 10 -#define T_UINT 11 -#define T_ULONG 12 -#define T_STRING_INPLACE 13 /* Strings contained in the structure */ -#define T_BOOL 14 -#define T_OBJECT_EX 16 /* Like T_OBJECT, but raises AttributeError - when the value is NULL, instead of - converting to None. */ -#define T_LONGLONG 17 -#define T_ULONGLONG 18 -#define T_PYSSIZET 19 +/* Added by Jack: strings contained in the structure */ +#define T_STRING_INPLACE 13 + +/* Added by Lillo: bools contained in the structure (assumed char) */ +#define T_BOOL 14 + +#define T_OBJECT_EX 16 /* Like T_OBJECT, but raises AttributeError + when the value is NULL, instead of + converting to None. */ +#ifdef HAVE_LONG_LONG +#define T_LONGLONG 17 +#define T_ULONGLONG 18 +#endif /* HAVE_LONG_LONG */ + +#define T_PYSSIZET 19 /* Py_ssize_t */ /* Flags. These constants are also in structmemberdefs.py. */ -#define READONLY 1 -#define RO READONLY /* Shorthand */ +#define READONLY 1 +#define RO READONLY /* Shorthand */ #define READ_RESTRICTED 2 #define PY_WRITE_RESTRICTED 4 -#define RESTRICTED (READ_RESTRICTED | PY_WRITE_RESTRICTED) +#define RESTRICTED (READ_RESTRICTED | PY_WRITE_RESTRICTED) -#define Signed long /* xxx temporary fix */ -#define Unsigned unsigned long /* xxx temporary fix */ /* API functions. 
*/ #include "pypy_structmember_decl.h" -#undef Signed /* xxx temporary fix */ -#undef Unsigned /* xxx temporary fix */ #ifdef __cplusplus } #endif #endif /* !Py_STRUCTMEMBER_H */ - From pypy.commits at gmail.com Wed Feb 10 17:09:43 2016 From: pypy.commits at gmail.com (mattip) Date: Wed, 10 Feb 2016 14:09:43 -0800 (PST) Subject: [pypy-commit] pypy seperate-strucmember_h: close branch to be merged Message-ID: <56bbb527.0357c20a.82efa.ffffcd2c@mx.google.com> Author: mattip Branch: seperate-strucmember_h Changeset: r82152:951beb8c1607 Date: 2016-02-11 00:04 +0200 http://bitbucket.org/pypy/pypy/changeset/951beb8c1607/ Log: close branch to be merged From pypy.commits at gmail.com Wed Feb 10 17:09:45 2016 From: pypy.commits at gmail.com (mattip) Date: Wed, 10 Feb 2016 14:09:45 -0800 (PST) Subject: [pypy-commit] pypy default: merge seperate-strucmember_h which moves structmember.h out of Python.h Message-ID: <56bbb529.a3abc20a.90efe.ffffc8bc@mx.google.com> Author: mattip Branch: Changeset: r82153:c9a273a1a16d Date: 2016-02-11 00:05 +0200 http://bitbucket.org/pypy/pypy/changeset/c9a273a1a16d/ Log: merge seperate-strucmember_h which moves structmember.h out of Python.h diff --git a/pypy/module/cpyext/api.py b/pypy/module/cpyext/api.py --- a/pypy/module/cpyext/api.py +++ b/pypy/module/cpyext/api.py @@ -59,7 +59,7 @@ class CConfig: _compilation_info_ = ExternalCompilationInfo( include_dirs=include_dirs, - includes=['Python.h', 'stdarg.h'], + includes=['Python.h', 'stdarg.h', 'structmember.h'], compile_extra=['-DPy_BUILD_CORE'], ) @@ -129,6 +129,7 @@ for name in constant_names: setattr(CConfig_constants, name, rffi_platform.ConstantInteger(name)) udir.join('pypy_decl.h').write("/* Will be filled later */\n") +udir.join('pypy_structmember_decl.h').write("/* Will be filled later */\n") udir.join('pypy_macros.h').write("/* Will be filled later */\n") globals().update(rffi_platform.configure(CConfig_constants)) @@ -147,7 +148,7 @@ # XXX: 20 lines of code to recursively copy a 
directory, really?? assert dstdir.check(dir=True) headers = include_dir.listdir('*.h') + include_dir.listdir('*.inl') - for name in ("pypy_decl.h", "pypy_macros.h"): + for name in ("pypy_decl.h", "pypy_macros.h", "pypy_structmember_decl.h"): headers.append(udir.join(name)) _copy_header_files(headers, dstdir) @@ -232,7 +233,7 @@ wrapper.c_name = cpyext_namespace.uniquename(self.c_name) return wrapper -def cpython_api(argtypes, restype, error=_NOT_SPECIFIED, external=True, +def cpython_api(argtypes, restype, error=_NOT_SPECIFIED, header='pypy_decl.h', gil=None): """ Declares a function to be exported. @@ -241,8 +242,8 @@ special value 'CANNOT_FAIL' (also when restype is Void) turns an eventual exception into a wrapped SystemError. Unwrapped exceptions also cause a SytemError. - - set `external` to False to get a C function pointer, but not exported by - the API headers. + - `header` is the header file to export the function in, Set to None to get + a C function pointer, but not exported by the API headers. - set `gil` to "acquire", "release" or "around" to acquire the GIL, release the GIL, or both """ @@ -263,7 +264,7 @@ def decorate(func): func_name = func.func_name - if external: + if header is not None: c_name = None else: c_name = func_name @@ -271,7 +272,7 @@ c_name=c_name, gil=gil) func.api_func = api_function - if external: + if header is not None: assert func_name not in FUNCTIONS, ( "%s already registered" % func_name) @@ -363,8 +364,9 @@ unwrapper_catch = make_unwrapper(True) unwrapper_raise = make_unwrapper(False) - if external: + if header is not None: FUNCTIONS[func_name] = api_function + FUNCTIONS_BY_HEADER.setdefault(header, {})[func_name] = api_function INTERPLEVEL_API[func_name] = unwrapper_catch # used in tests return unwrapper_raise # used in 'normal' RPython code. 
return decorate @@ -383,6 +385,7 @@ INTERPLEVEL_API = {} FUNCTIONS = {} +FUNCTIONS_BY_HEADER = {} # These are C symbols which cpyext will export, but which are defined in .c # files somewhere in the implementation of cpyext (rather than being defined in @@ -811,6 +814,7 @@ global_code = '\n'.join(global_objects) prologue = ("#include \n" + "#include \n" "#include \n") code = (prologue + struct_declaration_code + @@ -960,7 +964,8 @@ "NOT_RPYTHON" # implement function callbacks and generate function decls functions = [] - pypy_decls = [] + decls = {} + pypy_decls = decls['pypy_decl.h'] = [] pypy_decls.append("#ifndef _PYPY_PYPY_DECL_H\n") pypy_decls.append("#define _PYPY_PYPY_DECL_H\n") pypy_decls.append("#ifndef PYPY_STANDALONE\n") @@ -973,17 +978,23 @@ for decl in FORWARD_DECLS: pypy_decls.append("%s;" % (decl,)) - for name, func in sorted(FUNCTIONS.iteritems()): - restype, args = c_function_signature(db, func) - pypy_decls.append("PyAPI_FUNC(%s) %s(%s);" % (restype, name, args)) - if api_struct: - callargs = ', '.join('arg%d' % (i,) - for i in range(len(func.argtypes))) - if func.restype is lltype.Void: - body = "{ _pypyAPI.%s(%s); }" % (name, callargs) - else: - body = "{ return _pypyAPI.%s(%s); }" % (name, callargs) - functions.append('%s %s(%s)\n%s' % (restype, name, args, body)) + for header_name, header_functions in FUNCTIONS_BY_HEADER.iteritems(): + if header_name not in decls: + header = decls[header_name] = [] + else: + header = decls[header_name] + + for name, func in sorted(header_functions.iteritems()): + restype, args = c_function_signature(db, func) + header.append("PyAPI_FUNC(%s) %s(%s);" % (restype, name, args)) + if api_struct: + callargs = ', '.join('arg%d' % (i,) + for i in range(len(func.argtypes))) + if func.restype is lltype.Void: + body = "{ _pypyAPI.%s(%s); }" % (name, callargs) + else: + body = "{ return _pypyAPI.%s(%s); }" % (name, callargs) + functions.append('%s %s(%s)\n%s' % (restype, name, args, body)) for name in VA_TP_LIST: 
name_no_star = process_va_name(name) header = ('%s pypy_va_get_%s(va_list* vp)' % @@ -1007,8 +1018,9 @@ pypy_decls.append("#endif /*PYPY_STANDALONE*/\n") pypy_decls.append("#endif /*_PYPY_PYPY_DECL_H*/\n") - pypy_decl_h = udir.join('pypy_decl.h') - pypy_decl_h.write('\n'.join(pypy_decls)) + for header_name, header_decls in decls.iteritems(): + decl_h = udir.join(header_name) + decl_h.write('\n'.join(header_decls)) return functions separate_module_files = [source_dir / "varargwrapper.c", diff --git a/pypy/module/cpyext/bufferobject.py b/pypy/module/cpyext/bufferobject.py --- a/pypy/module/cpyext/bufferobject.py +++ b/pypy/module/cpyext/bufferobject.py @@ -73,7 +73,7 @@ "Don't know how to realize a buffer")) - at cpython_api([PyObject], lltype.Void, external=False) + at cpython_api([PyObject], lltype.Void, header=None) def buffer_dealloc(space, py_obj): py_buf = rffi.cast(PyBufferObject, py_obj) if py_buf.c_b_base: diff --git a/pypy/module/cpyext/frameobject.py b/pypy/module/cpyext/frameobject.py --- a/pypy/module/cpyext/frameobject.py +++ b/pypy/module/cpyext/frameobject.py @@ -39,7 +39,7 @@ py_frame.c_f_locals = make_ref(space, frame.get_w_locals()) rffi.setintfield(py_frame, 'c_f_lineno', frame.getorcreatedebug().f_lineno) - at cpython_api([PyObject], lltype.Void, external=False) + at cpython_api([PyObject], lltype.Void, header=None) def frame_dealloc(space, py_obj): py_frame = rffi.cast(PyFrameObject, py_obj) py_code = rffi.cast(PyObject, py_frame.c_f_code) diff --git a/pypy/module/cpyext/funcobject.py b/pypy/module/cpyext/funcobject.py --- a/pypy/module/cpyext/funcobject.py +++ b/pypy/module/cpyext/funcobject.py @@ -56,7 +56,7 @@ assert isinstance(w_obj, Function) py_func.c_func_name = make_ref(space, space.wrap(w_obj.name)) - at cpython_api([PyObject], lltype.Void, external=False) + at cpython_api([PyObject], lltype.Void, header=None) def function_dealloc(space, py_obj): py_func = rffi.cast(PyFunctionObject, py_obj) Py_DecRef(space, py_func.c_func_name) @@ 
-75,7 +75,7 @@ rffi.setintfield(py_code, 'c_co_flags', co_flags) rffi.setintfield(py_code, 'c_co_argcount', w_obj.co_argcount) - at cpython_api([PyObject], lltype.Void, external=False) + at cpython_api([PyObject], lltype.Void, header=None) def code_dealloc(space, py_obj): py_code = rffi.cast(PyCodeObject, py_obj) Py_DecRef(space, py_code.c_co_name) diff --git a/pypy/module/cpyext/include/Python.h b/pypy/module/cpyext/include/Python.h --- a/pypy/module/cpyext/include/Python.h +++ b/pypy/module/cpyext/include/Python.h @@ -132,9 +132,6 @@ /* Missing definitions */ #include "missing.h" -// XXX This shouldn't be included here -#include "structmember.h" - #include /* Define macros for inline documentation. */ diff --git a/pypy/module/cpyext/include/structmember.h b/pypy/module/cpyext/include/structmember.h --- a/pypy/module/cpyext/include/structmember.h +++ b/pypy/module/cpyext/include/structmember.h @@ -4,54 +4,85 @@ extern "C" { #endif + +/* Interface to map C struct members to Python object attributes */ + #include /* For offsetof */ + +/* The offsetof() macro calculates the offset of a structure member + in its structure. Unfortunately this cannot be written down + portably, hence it is provided by a Standard C header file. + For pre-Standard C compilers, here is a version that usually works + (but watch out!): */ + #ifndef offsetof #define offsetof(type, member) ( (int) & ((type*)0) -> member ) #endif +/* An array of memberlist structures defines the name, type and offset + of selected members of a C structure. These can be read by + PyMember_Get() and set by PyMember_Set() (except if their READONLY flag + is set). The array must be terminated with an entry whose name + pointer is NULL. 
*/ + + typedef struct PyMemberDef { - /* Current version, use this */ - char *name; - int type; - Py_ssize_t offset; - int flags; - char *doc; + /* Current version, use this */ + char *name; + int type; + Py_ssize_t offset; + int flags; + char *doc; } PyMemberDef; +/* Types */ +#define T_SHORT 0 +#define T_INT 1 +#define T_LONG 2 +#define T_FLOAT 3 +#define T_DOUBLE 4 +#define T_STRING 5 +#define T_OBJECT 6 +/* XXX the ordering here is weird for binary compatibility */ +#define T_CHAR 7 /* 1-character string */ +#define T_BYTE 8 /* 8-bit signed int */ +/* unsigned variants: */ +#define T_UBYTE 9 +#define T_USHORT 10 +#define T_UINT 11 +#define T_ULONG 12 -/* Types. These constants are also in structmemberdefs.py. */ -#define T_SHORT 0 -#define T_INT 1 -#define T_LONG 2 -#define T_FLOAT 3 -#define T_DOUBLE 4 -#define T_STRING 5 -#define T_OBJECT 6 -#define T_CHAR 7 /* 1-character string */ -#define T_BYTE 8 /* 8-bit signed int */ -#define T_UBYTE 9 -#define T_USHORT 10 -#define T_UINT 11 -#define T_ULONG 12 -#define T_STRING_INPLACE 13 /* Strings contained in the structure */ -#define T_BOOL 14 -#define T_OBJECT_EX 16 /* Like T_OBJECT, but raises AttributeError - when the value is NULL, instead of - converting to None. */ -#define T_LONGLONG 17 -#define T_ULONGLONG 18 -#define T_PYSSIZET 19 +/* Added by Jack: strings contained in the structure */ +#define T_STRING_INPLACE 13 + +/* Added by Lillo: bools contained in the structure (assumed char) */ +#define T_BOOL 14 + +#define T_OBJECT_EX 16 /* Like T_OBJECT, but raises AttributeError + when the value is NULL, instead of + converting to None. */ +#ifdef HAVE_LONG_LONG +#define T_LONGLONG 17 +#define T_ULONGLONG 18 +#endif /* HAVE_LONG_LONG */ + +#define T_PYSSIZET 19 /* Py_ssize_t */ /* Flags. These constants are also in structmemberdefs.py. 
*/ -#define READONLY 1 -#define RO READONLY /* Shorthand */ +#define READONLY 1 +#define RO READONLY /* Shorthand */ #define READ_RESTRICTED 2 #define PY_WRITE_RESTRICTED 4 -#define RESTRICTED (READ_RESTRICTED | PY_WRITE_RESTRICTED) +#define RESTRICTED (READ_RESTRICTED | PY_WRITE_RESTRICTED) + + +/* API functions. */ +#include "pypy_structmember_decl.h" #ifdef __cplusplus } #endif #endif /* !Py_STRUCTMEMBER_H */ + diff --git a/pypy/module/cpyext/methodobject.py b/pypy/module/cpyext/methodobject.py --- a/pypy/module/cpyext/methodobject.py +++ b/pypy/module/cpyext/methodobject.py @@ -50,7 +50,7 @@ py_func.c_m_self = make_ref(space, w_obj.w_self) py_func.c_m_module = make_ref(space, w_obj.w_module) - at cpython_api([PyObject], lltype.Void, external=False) + at cpython_api([PyObject], lltype.Void, header=None) def cfunction_dealloc(space, py_obj): py_func = rffi.cast(PyCFunctionObject, py_obj) Py_DecRef(space, py_func.c_m_self) diff --git a/pypy/module/cpyext/pyobject.py b/pypy/module/cpyext/pyobject.py --- a/pypy/module/cpyext/pyobject.py +++ b/pypy/module/cpyext/pyobject.py @@ -70,7 +70,7 @@ alloc : allocate and basic initialization of a raw PyObject attach : Function called to tie a raw structure to a pypy object realize : Function called to create a pypy object from a raw struct - dealloc : a cpython_api(external=False), similar to PyObject_dealloc + dealloc : a cpython_api(header=None), similar to PyObject_dealloc """ tp_basestruct = kw.pop('basestruct', PyObject.TO) diff --git a/pypy/module/cpyext/pytraceback.py b/pypy/module/cpyext/pytraceback.py --- a/pypy/module/cpyext/pytraceback.py +++ b/pypy/module/cpyext/pytraceback.py @@ -41,7 +41,7 @@ rffi.setintfield(py_traceback, 'c_tb_lasti', traceback.lasti) rffi.setintfield(py_traceback, 'c_tb_lineno',traceback.get_lineno()) - at cpython_api([PyObject], lltype.Void, external=False) + at cpython_api([PyObject], lltype.Void, header=None) def traceback_dealloc(space, py_obj): py_traceback = rffi.cast(PyTracebackObject, 
py_obj) Py_DecRef(space, rffi.cast(PyObject, py_traceback.c_tb_next)) diff --git a/pypy/module/cpyext/sliceobject.py b/pypy/module/cpyext/sliceobject.py --- a/pypy/module/cpyext/sliceobject.py +++ b/pypy/module/cpyext/sliceobject.py @@ -36,7 +36,7 @@ py_slice.c_stop = make_ref(space, w_obj.w_stop) py_slice.c_step = make_ref(space, w_obj.w_step) - at cpython_api([PyObject], lltype.Void, external=False) + at cpython_api([PyObject], lltype.Void, header=None) def slice_dealloc(space, py_obj): """Frees allocated PyStringObject resources. """ diff --git a/pypy/module/cpyext/slotdefs.py b/pypy/module/cpyext/slotdefs.py --- a/pypy/module/cpyext/slotdefs.py +++ b/pypy/module/cpyext/slotdefs.py @@ -309,7 +309,7 @@ return space.wrap(generic_cpy_call(space, func_target, w_self, w_other)) - at cpython_api([PyTypeObjectPtr, PyObject, PyObject], PyObject, external=False) + at cpython_api([PyTypeObjectPtr, PyObject, PyObject], PyObject, header=None) def slot_tp_new(space, type, w_args, w_kwds): from pypy.module.cpyext.tupleobject import PyTuple_Check pyo = rffi.cast(PyObject, type) @@ -320,30 +320,30 @@ w_args_new = space.newtuple(args_w) return space.call(w_func, w_args_new, w_kwds) - at cpython_api([PyObject, PyObject, PyObject], rffi.INT_real, error=-1, external=False) + at cpython_api([PyObject, PyObject, PyObject], rffi.INT_real, error=-1, header=None) def slot_tp_init(space, w_self, w_args, w_kwds): w_descr = space.lookup(w_self, '__init__') args = Arguments.frompacked(space, w_args, w_kwds) space.get_and_call_args(w_descr, w_self, args) return 0 - at cpython_api([PyObject, PyObject, PyObject], PyObject, external=False) + at cpython_api([PyObject, PyObject, PyObject], PyObject, header=None) def slot_tp_call(space, w_self, w_args, w_kwds): return space.call(w_self, w_args, w_kwds) - at cpython_api([PyObject], PyObject, external=False) + at cpython_api([PyObject], PyObject, header=None) def slot_tp_str(space, w_self): return space.str(w_self) - at cpython_api([PyObject], 
PyObject, external=False) + at cpython_api([PyObject], PyObject, header=None) def slot_nb_int(space, w_self): return space.int(w_self) - at cpython_api([PyObject], PyObject, external=False) + at cpython_api([PyObject], PyObject, header=None) def slot_tp_iter(space, w_self): return space.iter(w_self) - at cpython_api([PyObject], PyObject, external=False) + at cpython_api([PyObject], PyObject, header=None) def slot_tp_iternext(space, w_self): return space.next(w_self) @@ -371,7 +371,7 @@ return @cpython_api([PyObject, PyObject, PyObject], rffi.INT_real, - error=-1, external=True) # XXX should not be exported + error=-1) # XXX should be header=None @func_renamer("cpyext_tp_setattro_%s" % (typedef.name,)) def slot_tp_setattro(space, w_self, w_name, w_value): if w_value is not None: @@ -385,8 +385,7 @@ if getattr_fn is None: return - @cpython_api([PyObject, PyObject], PyObject, - external=True) + @cpython_api([PyObject, PyObject], PyObject) @func_renamer("cpyext_tp_getattro_%s" % (typedef.name,)) def slot_tp_getattro(space, w_self, w_name): return space.call_function(getattr_fn, w_self, w_name) diff --git a/pypy/module/cpyext/stringobject.py b/pypy/module/cpyext/stringobject.py --- a/pypy/module/cpyext/stringobject.py +++ b/pypy/module/cpyext/stringobject.py @@ -103,7 +103,7 @@ track_reference(space, py_obj, w_obj) return w_obj - at cpython_api([PyObject], lltype.Void, external=False) + at cpython_api([PyObject], lltype.Void, header=None) def string_dealloc(space, py_obj): """Frees allocated PyStringObject resources. 
""" diff --git a/pypy/module/cpyext/structmember.py b/pypy/module/cpyext/structmember.py --- a/pypy/module/cpyext/structmember.py +++ b/pypy/module/cpyext/structmember.py @@ -31,8 +31,10 @@ (T_PYSSIZET, rffi.SSIZE_T, PyLong_AsSsize_t), ]) +_HEADER = 'pypy_structmember_decl.h' - at cpython_api([PyObject, lltype.Ptr(PyMemberDef)], PyObject) + + at cpython_api([PyObject, lltype.Ptr(PyMemberDef)], PyObject, header=_HEADER) def PyMember_GetOne(space, obj, w_member): addr = rffi.cast(ADDR, obj) addr += w_member.c_offset @@ -83,7 +85,8 @@ return w_result - at cpython_api([PyObject, lltype.Ptr(PyMemberDef), PyObject], rffi.INT_real, error=-1) + at cpython_api([PyObject, lltype.Ptr(PyMemberDef), PyObject], rffi.INT_real, + error=-1, header=_HEADER) def PyMember_SetOne(space, obj, w_member, w_value): addr = rffi.cast(ADDR, obj) addr += w_member.c_offset diff --git a/pypy/module/cpyext/test/test_cpyext.py b/pypy/module/cpyext/test/test_cpyext.py --- a/pypy/module/cpyext/test/test_cpyext.py +++ b/pypy/module/cpyext/test/test_cpyext.py @@ -863,3 +863,15 @@ os.unlink('_imported_already') except OSError: pass + + def test_no_structmember(self): + """structmember.h should not be included by default.""" + mod = self.import_extension('foo', [ + ('bar', 'METH_NOARGS', + ''' + /* reuse a name that is #defined in structmember.h */ + int RO; + Py_RETURN_NONE; + ''' + ), + ]) diff --git a/pypy/module/cpyext/test/test_intobject.py b/pypy/module/cpyext/test/test_intobject.py --- a/pypy/module/cpyext/test/test_intobject.py +++ b/pypy/module/cpyext/test/test_intobject.py @@ -99,6 +99,7 @@ """), ], prologue=""" + #include "structmember.h" typedef struct { PyObject_HEAD diff --git a/pypy/module/cpyext/test/test_translate.py b/pypy/module/cpyext/test/test_translate.py --- a/pypy/module/cpyext/test/test_translate.py +++ b/pypy/module/cpyext/test/test_translate.py @@ -19,7 +19,7 @@ @specialize.memo() def get_tp_function(space, typedef): - @cpython_api([], lltype.Signed, error=-1, external=False) 
+ @cpython_api([], lltype.Signed, error=-1, header=None) def slot_tp_function(space): return typedef.value diff --git a/pypy/module/cpyext/typeobject.py b/pypy/module/cpyext/typeobject.py --- a/pypy/module/cpyext/typeobject.py +++ b/pypy/module/cpyext/typeobject.py @@ -183,7 +183,7 @@ if pto.c_tp_new: add_tp_new_wrapper(space, dict_w, pto) - at cpython_api([PyObject, PyObject, PyObject], PyObject, external=False) + at cpython_api([PyObject, PyObject, PyObject], PyObject, header=None) def tp_new_wrapper(space, self, w_args, w_kwds): tp_new = rffi.cast(PyTypeObjectPtr, self).c_tp_new @@ -311,7 +311,7 @@ dealloc=type_dealloc) - at cpython_api([PyObject], lltype.Void, external=False) + at cpython_api([PyObject], lltype.Void, header=None) def subtype_dealloc(space, obj): pto = obj.c_ob_type base = pto @@ -327,7 +327,7 @@ # hopefully this does not clash with the memory model assumed in # extension modules - at cpython_api([PyObject, Py_ssize_tP], lltype.Signed, external=False, + at cpython_api([PyObject, Py_ssize_tP], lltype.Signed, header=None, error=CANNOT_FAIL) def str_segcount(space, w_obj, ref): if ref: @@ -335,7 +335,7 @@ return 1 @cpython_api([PyObject, Py_ssize_t, rffi.VOIDPP], lltype.Signed, - external=False, error=-1) + header=None, error=-1) def str_getreadbuffer(space, w_str, segment, ref): from pypy.module.cpyext.stringobject import PyString_AsString if segment != 0: @@ -348,7 +348,7 @@ return space.len_w(w_str) @cpython_api([PyObject, Py_ssize_t, rffi.CCHARPP], lltype.Signed, - external=False, error=-1) + header=None, error=-1) def str_getcharbuffer(space, w_str, segment, ref): from pypy.module.cpyext.stringobject import PyString_AsString if segment != 0: @@ -361,7 +361,7 @@ return space.len_w(w_str) @cpython_api([PyObject, Py_ssize_t, rffi.VOIDPP], lltype.Signed, - external=False, error=-1) + header=None, error=-1) def buf_getreadbuffer(space, pyref, segment, ref): from pypy.module.cpyext.bufferobject import PyBufferObject if segment != 0: @@ -393,7 +393,7 
@@ buf_getreadbuffer.api_func.get_wrapper(space)) pto.c_tp_as_buffer = c_buf - at cpython_api([PyObject], lltype.Void, external=False) + at cpython_api([PyObject], lltype.Void, header=None) def type_dealloc(space, obj): from pypy.module.cpyext.object import PyObject_dealloc obj_pto = rffi.cast(PyTypeObjectPtr, obj) diff --git a/pypy/module/cpyext/unicodeobject.py b/pypy/module/cpyext/unicodeobject.py --- a/pypy/module/cpyext/unicodeobject.py +++ b/pypy/module/cpyext/unicodeobject.py @@ -75,7 +75,7 @@ track_reference(space, py_obj, w_obj) return w_obj - at cpython_api([PyObject], lltype.Void, external=False) + at cpython_api([PyObject], lltype.Void, header=None) def unicode_dealloc(space, py_obj): py_unicode = rffi.cast(PyUnicodeObject, py_obj) if py_unicode.c_buffer: From pypy.commits at gmail.com Wed Feb 10 17:09:47 2016 From: pypy.commits at gmail.com (mattip) Date: Wed, 10 Feb 2016 14:09:47 -0800 (PST) Subject: [pypy-commit] pypy default: document merged branch Message-ID: <56bbb52b.034cc20a.9ac36.ffffcd7d@mx.google.com> Author: mattip Branch: Changeset: r82154:4ff7977d1071 Date: 2016-02-11 00:08 +0200 http://bitbucket.org/pypy/pypy/changeset/4ff7977d1071/ Log: document merged branch diff --git a/pypy/doc/whatsnew-head.rst b/pypy/doc/whatsnew-head.rst --- a/pypy/doc/whatsnew-head.rst +++ b/pypy/doc/whatsnew-head.rst @@ -142,4 +142,9 @@ .. branch: vmprof-newstack -Refactor vmprof to work cross-operating-system. \ No newline at end of file +Refactor vmprof to work cross-operating-system. + +.. 
branch: seperate-strucmember_h + +Seperate structmember.h from Python.h Also enhance creating api functions +to specify which header file they appear in (previously only pypy_decl.h) From pypy.commits at gmail.com Thu Feb 11 03:16:15 2016 From: pypy.commits at gmail.com (plan_rich) Date: Thu, 11 Feb 2016 00:16:15 -0800 (PST) Subject: [pypy-commit] pypy s390x-backend: added assert, fixed test (big endian issue with unions) Message-ID: <56bc434f.c13fc20a.8e7a6.4589@mx.google.com> Author: Richard Plangger Branch: s390x-backend Changeset: r82155:9638a3d105fa Date: 2016-02-11 09:15 +0100 http://bitbucket.org/pypy/pypy/changeset/9638a3d105fa/ Log: added assert, fixed test (big endian issue with unions) diff --git a/pypy/module/test_lib_pypy/ctypes_tests/test_unions.py b/pypy/module/test_lib_pypy/ctypes_tests/test_unions.py --- a/pypy/module/test_lib_pypy/ctypes_tests/test_unions.py +++ b/pypy/module/test_lib_pypy/ctypes_tests/test_unions.py @@ -1,4 +1,4 @@ - +import sys from ctypes import * from support import BaseCTypesTestChecker @@ -8,8 +8,11 @@ _fields_ = [('x', c_char), ('y', c_int)] stuff = Stuff() - stuff.y = ord('x') - assert stuff.x == 'x' + stuff.y = ord('x') | (ord('z') << 24) + if sys.byteorder == 'little': + assert stuff.x == 'x' + else: + assert stuff.x == 'z' def test_union_of_structures(self): class Stuff(Structure): diff --git a/rpython/jit/backend/zarch/regalloc.py b/rpython/jit/backend/zarch/regalloc.py --- a/rpython/jit/backend/zarch/regalloc.py +++ b/rpython/jit/backend/zarch/regalloc.py @@ -90,6 +90,7 @@ tmp = TempVar() self.temp_boxes.append(tmp) reg = self.force_allocate_reg(tmp) + assert poolloc.displace > 0 self.assembler.mc.LD(reg, poolloc) return reg return poolloc From pypy.commits at gmail.com Thu Feb 11 03:31:48 2016 From: pypy.commits at gmail.com (plan_rich) Date: Thu, 11 Feb 2016 00:31:48 -0800 (PST) Subject: [pypy-commit] pypy s390x-backend: fixed two tests where the endian could not match the sequence of operations Message-ID: 
<56bc46f4.d62d1c0a.82f8b.7d74@mx.google.com> Author: Richard Plangger Branch: s390x-backend Changeset: r82156:5040efcfecd0 Date: 2016-02-11 09:31 +0100 http://bitbucket.org/pypy/pypy/changeset/5040efcfecd0/ Log: fixed two tests where the endian could not match the sequence of operations diff --git a/pypy/module/pypyjit/test_pypy_c/test_struct.py b/pypy/module/pypyjit/test_pypy_c/test_struct.py --- a/pypy/module/pypyjit/test_pypy_c/test_struct.py +++ b/pypy/module/pypyjit/test_pypy_c/test_struct.py @@ -19,8 +19,8 @@ import struct i = 1 while i < n: - buf = struct.pack("i", i) # ID: pack - x = struct.unpack("i", buf)[0] # ID: unpack + buf = struct.pack(" Author: Richard Plangger Branch: s390x-backend Changeset: r82157:c3ae24173bca Date: 2016-02-11 10:20 +0100 http://bitbucket.org/pypy/pypy/changeset/c3ae24173bca/ Log: misaligned is fine for s390x (rawstorage) diff --git a/rpython/rlib/rawstorage.py b/rpython/rlib/rawstorage.py --- a/rpython/rlib/rawstorage.py +++ b/rpython/rlib/rawstorage.py @@ -46,7 +46,10 @@ from rpython.jit.backend import detect_cpu try: - misaligned_is_fine = detect_cpu.autodetect().startswith('x86') + cpuname = detect_cpu.autodetect() + misaligned_is_fine = cpuname.startswith('x86') or \ + cpuname.startswith('s390x') + del cpuname except detect_cpu.ProcessorAutodetectError: misaligned_is_fine = False From pypy.commits at gmail.com Thu Feb 11 05:35:42 2016 From: pypy.commits at gmail.com (arigo) Date: Thu, 11 Feb 2016 02:35:42 -0800 (PST) Subject: [pypy-commit] pypy.org extradoc: update the values Message-ID: <56bc63fe.4b921c0a.f1b2b.ffffb2f2@mx.google.com> Author: Armin Rigo Branch: extradoc Changeset: r701:26ecf707b33d Date: 2016-02-11 11:35 +0100 http://bitbucket.org/pypy/pypy.org/changeset/26ecf707b33d/ Log: update the values diff --git a/don3.html b/don3.html --- a/don3.html +++ b/don3.html @@ -9,13 +9,13 @@ - $53152 of $60000 (88.6%) + $53247 of $60000 (88.7%)
    @@ -23,7 +23,7 @@
  • From pypy.commits at gmail.com Thu Feb 11 08:57:20 2016 From: pypy.commits at gmail.com (arigo) Date: Thu, 11 Feb 2016 05:57:20 -0800 (PST) Subject: [pypy-commit] cffi embedding-pypy-win32: I finally managed to run the tests on Windows, but only without Message-ID: <56bc9340.45cec20a.e669e.ffffcf20@mx.google.com> Author: Armin Rigo Branch: embedding-pypy-win32 Changeset: r2623:4383aaecf96d Date: 2016-02-11 14:57 +0100 http://bitbucket.org/cffi/cffi/changeset/4383aaecf96d/ Log: I finally managed to run the tests on Windows, but only without changes in that file and by copying the file "libpypy-c.lib" from translation to a subdirectory "bin" of "sys.prefix" diff --git a/cffi/api.py b/cffi/api.py --- a/cffi/api.py +++ b/cffi/api.py @@ -549,25 +549,24 @@ if value not in lst: lst.append(value) # - if sys.platform == "win32": - # XXX pypy should not reuse the same import library name - template = "python%d%d" - if hasattr(sys, 'gettotalrefcount'): - template += '_d' + if '__pypy__' in sys.builtin_module_names: + if hasattr(sys, 'prefix'): + import os + ensure('library_dirs', os.path.join(sys.prefix, 'bin')) + pythonlib = "pypy-c" else: - if '__pypy__' in sys.builtin_module_names: - if hasattr(sys, 'prefix'): - import os - ensure('library_dirs', os.path.join(sys.prefix, 'bin')) - pythonlib = "pypy-c" + if sys.platform == "win32": + template = "python%d%d" + if hasattr(sys, 'gettotalrefcount'): + template += '_d' else: template = "python%d.%d" if sysconfig.get_config_var('DEBUG_EXT'): template += sysconfig.get_config_var('DEBUG_EXT') - pythonlib = (template % - (sys.hexversion >> 24, (sys.hexversion >> 16) & 0xff)) - if hasattr(sys, 'abiflags'): - pythonlib += sys.abiflags + pythonlib = (template % + (sys.hexversion >> 24, (sys.hexversion >> 16) & 0xff)) + if hasattr(sys, 'abiflags'): + pythonlib += sys.abiflags ensure('libraries', pythonlib) if sys.platform == "win32": ensure('extra_link_args', '/MANIFEST') From pypy.commits at gmail.com Thu Feb 11 09:18:18 2016 From: 
pypy.commits at gmail.com (arigo) Date: Thu, 11 Feb 2016 06:18:18 -0800 (PST) Subject: [pypy-commit] cffi embedding-pypy-win32: Windows need the file 'libpypy-c.lib', which (unless people disagree) I Message-ID: <56bc982a.162f1c0a.a65d5.056b@mx.google.com> Author: Armin Rigo Branch: embedding-pypy-win32 Changeset: r2624:cd622fb7a0d3 Date: 2016-02-11 15:15 +0100 http://bitbucket.org/cffi/cffi/changeset/cd622fb7a0d3/ Log: Windows need the file 'libpypy-c.lib', which (unless people disagree) I will add to pypy distributions next diff --git a/cffi/api.py b/cffi/api.py --- a/cffi/api.py +++ b/cffi/api.py @@ -550,10 +550,19 @@ lst.append(value) # if '__pypy__' in sys.builtin_module_names: - if hasattr(sys, 'prefix'): - import os - ensure('library_dirs', os.path.join(sys.prefix, 'bin')) - pythonlib = "pypy-c" + if sys.platform == "win32": + # we need 'libpypy-c.lib' (included with recent pypy distrib) + # in addition to the runtime 'libpypy-c.dll' + pythonlib = "libpypy-c" + if hasattr(sys, 'prefix'): + ensure('library_dirs', sys.prefix) + else: + # we need 'libpypy-c.{so,dylib}', which should be by + # default located in 'sys.prefix/bin' + pythonlib = "pypy-c" + if hasattr(sys, 'prefix'): + import os + ensure('library_dirs', os.path.join(sys.prefix, 'bin')) else: if sys.platform == "win32": template = "python%d%d" From pypy.commits at gmail.com Thu Feb 11 09:22:01 2016 From: pypy.commits at gmail.com (arigo) Date: Thu, 11 Feb 2016 06:22:01 -0800 (PST) Subject: [pypy-commit] cffi embedding-pypy-win32: ready to merge Message-ID: <56bc9909.42711c0a.1011e.0b16@mx.google.com> Author: Armin Rigo Branch: embedding-pypy-win32 Changeset: r2625:daa3d70b58a9 Date: 2016-02-11 15:21 +0100 http://bitbucket.org/cffi/cffi/changeset/daa3d70b58a9/ Log: ready to merge From pypy.commits at gmail.com Thu Feb 11 09:22:04 2016 From: pypy.commits at gmail.com (arigo) Date: Thu, 11 Feb 2016 06:22:04 -0800 (PST) Subject: [pypy-commit] cffi default: hg merge embedding-pypy-win32 Message-ID: 
<56bc990c.0ab81c0a.beb05.067a@mx.google.com> Author: Armin Rigo Branch: Changeset: r2626:6e18e1a827d0 Date: 2016-02-11 15:21 +0100 http://bitbucket.org/cffi/cffi/changeset/6e18e1a827d0/ Log: hg merge embedding-pypy-win32 Thanks matti for the initial work! diff --git a/cffi/api.py b/cffi/api.py --- a/cffi/api.py +++ b/cffi/api.py @@ -550,10 +550,19 @@ lst.append(value) # if '__pypy__' in sys.builtin_module_names: - if hasattr(sys, 'prefix'): - import os - ensure('library_dirs', os.path.join(sys.prefix, 'bin')) - pythonlib = "pypy-c" + if sys.platform == "win32": + # we need 'libpypy-c.lib' (included with recent pypy distrib) + # in addition to the runtime 'libpypy-c.dll' + pythonlib = "libpypy-c" + if hasattr(sys, 'prefix'): + ensure('library_dirs', sys.prefix) + else: + # we need 'libpypy-c.{so,dylib}', which should be by + # default located in 'sys.prefix/bin' + pythonlib = "pypy-c" + if hasattr(sys, 'prefix'): + import os + ensure('library_dirs', os.path.join(sys.prefix, 'bin')) else: if sys.platform == "win32": template = "python%d%d" diff --git a/testing/embedding/test_basic.py b/testing/embedding/test_basic.py --- a/testing/embedding/test_basic.py +++ b/testing/embedding/test_basic.py @@ -118,12 +118,18 @@ def execute(self, name): path = self.get_path() env_extra = {'PYTHONPATH': prefix_pythonpath()} - libpath = os.environ.get('LD_LIBRARY_PATH') - if libpath: - libpath = path + ':' + libpath + if sys.platform == 'win32': + _path = os.environ.get('PATH') + # for libpypy-c.dll or Python27.dll + _path = os.path.split(sys.executable)[0] + ';' + _path + env_extra['PATH'] = _path else: - libpath = path - env_extra['LD_LIBRARY_PATH'] = libpath + libpath = os.environ.get('LD_LIBRARY_PATH') + if libpath: + libpath = path + ':' + libpath + else: + libpath = path + env_extra['LD_LIBRARY_PATH'] = libpath print('running %r in %r' % (name, path)) executable_name = name if sys.platform == 'win32': From pypy.commits at gmail.com Thu Feb 11 09:23:05 2016 From: pypy.commits at 
gmail.com (arigo) Date: Thu, 11 Feb 2016 06:23:05 -0800 (PST) Subject: [pypy-commit] pypy cffi-embedding-win32: Add libpypy-c.lib to the archive on windows Message-ID: <56bc9949.284cc20a.511fd.ffffd7dc@mx.google.com> Author: Armin Rigo Branch: cffi-embedding-win32 Changeset: r82159:8f955811f19a Date: 2016-02-11 15:18 +0100 http://bitbucket.org/pypy/pypy/changeset/8f955811f19a/ Log: Add libpypy-c.lib to the archive on windows diff --git a/pypy/tool/release/package.py b/pypy/tool/release/package.py --- a/pypy/tool/release/package.py +++ b/pypy/tool/release/package.py @@ -129,6 +129,9 @@ win_extras = ['libpypy-c.dll', 'sqlite3.dll'] if not options.no_tk: win_extras += ['tcl85.dll', 'tk85.dll'] + # add the .lib too, which is convenient to compile other programs + # that use the .dll (and for cffi's embedding mode) + win_extras.append('libpypy-c.lib') for extra in win_extras: p = pypy_c.dirpath().join(extra) From pypy.commits at gmail.com Thu Feb 11 10:00:03 2016 From: pypy.commits at gmail.com (arigo) Date: Thu, 11 Feb 2016 07:00:03 -0800 (PST) Subject: [pypy-commit] pypy cffi-embedding-win32: follow-up for e1b9c0216be7: move it outside pypy_setup_home(), just in Message-ID: <56bca1f3.512f1c0a.2c4f0.ffffa3e2@mx.google.com> Author: Armin Rigo Branch: cffi-embedding-win32 Changeset: r82160:fcdb779aa03c Date: 2016-02-11 15:59 +0100 http://bitbucket.org/pypy/pypy/changeset/fcdb779aa03c/ Log: follow-up for e1b9c0216be7: move it outside pypy_setup_home(), just in case, but keep the logic. Add a comment that describes the problem. 
diff --git a/pypy/goal/targetpypystandalone.py b/pypy/goal/targetpypystandalone.py --- a/pypy/goal/targetpypystandalone.py +++ b/pypy/goal/targetpypystandalone.py @@ -105,10 +105,6 @@ space.appexec([w_path], """(path): import sys sys.path[:] = path - import os - sys.stdin = sys.__stdin__ = os.fdopen(0, 'rb', 0) - sys.stdout = sys.__stdout__ = os.fdopen(1, 'wb', 0) - sys.stderr = sys.__stderr__ = os.fdopen(2, 'wb', 0) """) # import site try: diff --git a/pypy/module/_cffi_backend/embedding.py b/pypy/module/_cffi_backend/embedding.py --- a/pypy/module/_cffi_backend/embedding.py +++ b/pypy/module/_cffi_backend/embedding.py @@ -56,6 +56,24 @@ must_leave = False try: must_leave = space.threadlocals.try_enter_thread(space) + + # Annoying: CPython would just use the C-level + # std{in,out,err} as configured by the main application, + # for example in binary mode on Windows or with buffering + # turned off. We can't easily do the same. Instead, go + # for the safest bet (but possibly bad for performance) + # and open sys.std{in,out,err} unbuffered. On Windows I + # guess binary mode is a better default choice. + # + # XXX if needed, we could add support for a flag passed to + # this function. 
+ space.appexec([], """(): + import os + sys.stdin = sys.__stdin__ = os.fdopen(0, 'rb', 0) + sys.stdout = sys.__stdout__ = os.fdopen(1, 'wb', 0) + sys.stderr = sys.__stderr__ = os.fdopen(2, 'wb', 0) + """) + load_embedded_cffi_module(space, version, init_struct) res = 0 except OperationError, operr: From pypy.commits at gmail.com Thu Feb 11 10:12:22 2016 From: pypy.commits at gmail.com (arigo) Date: Thu, 11 Feb 2016 07:12:22 -0800 (PST) Subject: [pypy-commit] pypy cffi-embedding-win32: Do this patching only once Message-ID: <56bca4d6.2815c20a.8df73.ffffebb4@mx.google.com> Author: Armin Rigo Branch: cffi-embedding-win32 Changeset: r82161:bea8f2df95f8 Date: 2016-02-11 16:02 +0100 http://bitbucket.org/pypy/pypy/changeset/bea8f2df95f8/ Log: Do this patching only once diff --git a/pypy/module/_cffi_backend/__init__.py b/pypy/module/_cffi_backend/__init__.py --- a/pypy/module/_cffi_backend/__init__.py +++ b/pypy/module/_cffi_backend/__init__.py @@ -69,6 +69,7 @@ def startup(self, space): from pypy.module._cffi_backend import embedding embedding.glob.space = space + embedding.glob.patched_sys = False def get_dict_rtld_constants(): diff --git a/pypy/module/_cffi_backend/embedding.py b/pypy/module/_cffi_backend/embedding.py --- a/pypy/module/_cffi_backend/embedding.py +++ b/pypy/module/_cffi_backend/embedding.py @@ -45,6 +45,26 @@ pass glob = Global() +def patch_sys(space): + # Annoying: CPython would just use the C-level std{in,out,err} as + # configured by the main application, for example in binary mode + # on Windows or with buffering turned off. We can't easily do the + # same. Instead, go for the safest bet (but possibly bad for + # performance) and open sys.std{in,out,err} unbuffered. On + # Windows I guess binary mode is a better default choice. + # + # XXX if needed, we could add support for a flag passed to + # pypy_init_embedded_cffi_module(). 
+ if not glob.patched_sys: + space.appexec([], """(): + import os + sys.stdin = sys.__stdin__ = os.fdopen(0, 'rb', 0) + sys.stdout = sys.__stdout__ = os.fdopen(1, 'wb', 0) + sys.stderr = sys.__stderr__ = os.fdopen(2, 'wb', 0) + """) + glob.patched_sys = True + + def pypy_init_embedded_cffi_module(version, init_struct): # called from __init__.py name = "?" @@ -56,24 +76,7 @@ must_leave = False try: must_leave = space.threadlocals.try_enter_thread(space) - - # Annoying: CPython would just use the C-level - # std{in,out,err} as configured by the main application, - # for example in binary mode on Windows or with buffering - # turned off. We can't easily do the same. Instead, go - # for the safest bet (but possibly bad for performance) - # and open sys.std{in,out,err} unbuffered. On Windows I - # guess binary mode is a better default choice. - # - # XXX if needed, we could add support for a flag passed to - # this function. - space.appexec([], """(): - import os - sys.stdin = sys.__stdin__ = os.fdopen(0, 'rb', 0) - sys.stdout = sys.__stdout__ = os.fdopen(1, 'wb', 0) - sys.stderr = sys.__stderr__ = os.fdopen(2, 'wb', 0) - """) - + patch_sys(space) load_embedded_cffi_module(space, version, init_struct) res = 0 except OperationError, operr: From pypy.commits at gmail.com Thu Feb 11 10:12:24 2016 From: pypy.commits at gmail.com (arigo) Date: Thu, 11 Feb 2016 07:12:24 -0800 (PST) Subject: [pypy-commit] pypy cffi-embedding-win32: ready for merge Message-ID: <56bca4d8.07811c0a.20305.14b9@mx.google.com> Author: Armin Rigo Branch: cffi-embedding-win32 Changeset: r82162:4c93a9cebb3e Date: 2016-02-11 16:06 +0100 http://bitbucket.org/pypy/pypy/changeset/4c93a9cebb3e/ Log: ready for merge From pypy.commits at gmail.com Thu Feb 11 10:12:26 2016 From: pypy.commits at gmail.com (arigo) Date: Thu, 11 Feb 2016 07:12:26 -0800 (PST) Subject: [pypy-commit] pypy default: hg backout 338d32e89981 Message-ID: <56bca4da.44e21c0a.4d285.fffff29f@mx.google.com> Author: Armin Rigo Branch: 
Changeset: r82163:6c6109b79b23 Date: 2016-02-11 16:08 +0100 http://bitbucket.org/pypy/pypy/changeset/6c6109b79b23/ Log: hg backout 338d32e89981 will be fixed by the following merge diff --git a/pypy/module/_cffi_backend/embedding.py b/pypy/module/_cffi_backend/embedding.py --- a/pypy/module/_cffi_backend/embedding.py +++ b/pypy/module/_cffi_backend/embedding.py @@ -84,68 +84,11 @@ return rffi.cast(rffi.INT, res) # ____________________________________________________________ -if os.name == 'nt': - do_startup = r''' -#include -#define WIN32_LEAN_AND_MEAN -#include -RPY_EXPORTED void rpython_startup_code(void); -RPY_EXPORTED int pypy_setup_home(char *, int); -static unsigned char _cffi_ready = 0; -static const char *volatile _cffi_module_name; -static void _cffi_init_error(const char *msg, const char *extra) -{ - fprintf(stderr, - "\nPyPy initialization failure when loading module '%s':\n%s%s\n", - _cffi_module_name, msg, extra); -} - -BOOL CALLBACK _cffi_init(PINIT_ONCE InitOnce, PVOID Parameter, PVOID *lpContex) -{ - - HMODULE hModule; - TCHAR home[_MAX_PATH]; - rpython_startup_code(); - RPyGilAllocate(); - - GetModuleHandleEx(GET_MODULE_HANDLE_EX_FLAG_FROM_ADDRESS | - GET_MODULE_HANDLE_EX_FLAG_UNCHANGED_REFCOUNT, - (LPCTSTR)&_cffi_init, &hModule); - if (hModule == 0 ) { - /* TODO turn the int into a string with FormatMessage */ - - _cffi_init_error("dladdr() failed: ", ""); - return TRUE; - } - GetModuleFileName(hModule, home, _MAX_PATH); - if (pypy_setup_home(home, 1) != 0) { - _cffi_init_error("pypy_setup_home() failed", ""); - return TRUE; - } - _cffi_ready = 1; - fprintf(stderr, "startup succeeded, home %s\n", home); - return TRUE; -} - -RPY_EXPORTED -int pypy_carefully_make_gil(const char *name) -{ - /* For CFFI: this initializes the GIL and loads the home path. - It can be called completely concurrently from unrelated threads. - It assumes that we don't hold the GIL before (if it exists), and we - don't hold it afterwards. 
- */ - static INIT_ONCE s_init_once; - - _cffi_module_name = name; /* not really thread-safe, but better than - nothing */ - InitOnceExecuteOnce(&s_init_once, _cffi_init, NULL, NULL); - return (int)_cffi_ready - 1; -}''' -else: - do_startup = r""" +eci = ExternalCompilationInfo(separate_module_sources=[ +r""" +/* XXX Windows missing */ #include #include #include @@ -198,7 +141,6 @@ pthread_once(&once_control, _cffi_init); return (int)_cffi_ready - 1; } -""" -eci = ExternalCompilationInfo(separate_module_sources=[do_startup]) +"""]) declare_c_function = rffi.llexternal_use_eci(eci) From pypy.commits at gmail.com Thu Feb 11 10:12:27 2016 From: pypy.commits at gmail.com (arigo) Date: Thu, 11 Feb 2016 07:12:27 -0800 (PST) Subject: [pypy-commit] pypy default: hg merge cffi-embedding-win32 Message-ID: <56bca4db.d62d1c0a.82f8b.1a85@mx.google.com> Author: Armin Rigo Branch: Changeset: r82164:450c1a29e5d3 Date: 2016-02-11 16:09 +0100 http://bitbucket.org/pypy/pypy/changeset/450c1a29e5d3/ Log: hg merge cffi-embedding-win32 Support for cffi embedding on Windows. Thanks matti for the initial work diff --git a/pypy/module/_cffi_backend/__init__.py b/pypy/module/_cffi_backend/__init__.py --- a/pypy/module/_cffi_backend/__init__.py +++ b/pypy/module/_cffi_backend/__init__.py @@ -69,6 +69,7 @@ def startup(self, space): from pypy.module._cffi_backend import embedding embedding.glob.space = space + embedding.glob.patched_sys = False def get_dict_rtld_constants(): diff --git a/pypy/module/_cffi_backend/embedding.py b/pypy/module/_cffi_backend/embedding.py --- a/pypy/module/_cffi_backend/embedding.py +++ b/pypy/module/_cffi_backend/embedding.py @@ -45,6 +45,26 @@ pass glob = Global() +def patch_sys(space): + # Annoying: CPython would just use the C-level std{in,out,err} as + # configured by the main application, for example in binary mode + # on Windows or with buffering turned off. We can't easily do the + # same. 
Instead, go for the safest bet (but possibly bad for + # performance) and open sys.std{in,out,err} unbuffered. On + # Windows I guess binary mode is a better default choice. + # + # XXX if needed, we could add support for a flag passed to + # pypy_init_embedded_cffi_module(). + if not glob.patched_sys: + space.appexec([], """(): + import os + sys.stdin = sys.__stdin__ = os.fdopen(0, 'rb', 0) + sys.stdout = sys.__stdout__ = os.fdopen(1, 'wb', 0) + sys.stderr = sys.__stderr__ = os.fdopen(2, 'wb', 0) + """) + glob.patched_sys = True + + def pypy_init_embedded_cffi_module(version, init_struct): # called from __init__.py name = "?" @@ -56,6 +76,7 @@ must_leave = False try: must_leave = space.threadlocals.try_enter_thread(space) + patch_sys(space) load_embedded_cffi_module(space, version, init_struct) res = 0 except OperationError, operr: @@ -85,14 +106,86 @@ # ____________________________________________________________ +if os.name == 'nt': -eci = ExternalCompilationInfo(separate_module_sources=[ -r""" -/* XXX Windows missing */ -#include + do_includes = r""" +#define _WIN32_WINNT 0x0501 +#include + +#define CFFI_INIT_HOME_PATH_MAX _MAX_PATH +static void _cffi_init(void); +static void _cffi_init_error(const char *msg, const char *extra); + +static int _cffi_init_home(char *output_home_path) +{ + HMODULE hModule = 0; + DWORD res; + + GetModuleHandleEx(GET_MODULE_HANDLE_EX_FLAG_FROM_ADDRESS | + GET_MODULE_HANDLE_EX_FLAG_UNCHANGED_REFCOUNT, + (LPCTSTR)&_cffi_init, &hModule); + + if (hModule == 0 ) { + _cffi_init_error("GetModuleHandleEx() failed", ""); + return -1; + } + res = GetModuleFileName(hModule, output_home_path, CFFI_INIT_HOME_PATH_MAX); + if (res >= CFFI_INIT_HOME_PATH_MAX) { + return -1; + } + return 0; +} + +static void _cffi_init_once(void) +{ + static LONG volatile lock = 0; + static int _init_called = 0; + + while (InterlockedCompareExchange(&lock, 1, 0) != 0) { + SwitchToThread(); /* spin loop */ + } + if (!_init_called) { + _cffi_init(); + _init_called = 
1; + } + InterlockedCompareExchange(&lock, 0, 1); +} +""" + +else: + + do_includes = r""" #include #include +#define CFFI_INIT_HOME_PATH_MAX PATH_MAX +static void _cffi_init(void); +static void _cffi_init_error(const char *msg, const char *extra); + +static int _cffi_init_home(char *output_home_path) +{ + Dl_info info; + dlerror(); /* reset */ + if (dladdr(&_cffi_init, &info) == 0) { + _cffi_init_error("dladdr() failed: ", dlerror()); + return -1; + } + if (realpath(info.dli_fname, output_home_path) == NULL) { + perror("realpath() failed"); + _cffi_init_error("realpath() failed", ""); + return -1; + } + return 0; +} + +static void _cffi_init_once(void) +{ + static pthread_once_t once_control = PTHREAD_ONCE_INIT; + pthread_once(&once_control, _cffi_init); +} +""" + +do_startup = do_includes + r""" RPY_EXPORTED void rpython_startup_code(void); RPY_EXPORTED int pypy_setup_home(char *, int); @@ -108,17 +201,13 @@ static void _cffi_init(void) { - Dl_info info; - char *home; + char home[CFFI_INIT_HOME_PATH_MAX + 1]; rpython_startup_code(); RPyGilAllocate(); - if (dladdr(&_cffi_init, &info) == 0) { - _cffi_init_error("dladdr() failed: ", dlerror()); + if (_cffi_init_home(home) != 0) return; - } - home = realpath(info.dli_fname, NULL); if (pypy_setup_home(home, 1) != 0) { _cffi_init_error("pypy_setup_home() failed", ""); return; @@ -134,13 +223,12 @@ It assumes that we don't hold the GIL before (if it exists), and we don't hold it afterwards. 
*/ - static pthread_once_t once_control = PTHREAD_ONCE_INIT; - _cffi_module_name = name; /* not really thread-safe, but better than nothing */ - pthread_once(&once_control, _cffi_init); + _cffi_init_once(); return (int)_cffi_ready - 1; } -"""]) +""" +eci = ExternalCompilationInfo(separate_module_sources=[do_startup]) declare_c_function = rffi.llexternal_use_eci(eci) diff --git a/pypy/module/_cffi_backend/test/test_ztranslation.py b/pypy/module/_cffi_backend/test/test_ztranslation.py --- a/pypy/module/_cffi_backend/test/test_ztranslation.py +++ b/pypy/module/_cffi_backend/test/test_ztranslation.py @@ -4,15 +4,18 @@ # side-effect: FORMAT_LONGDOUBLE must be built before test_checkmodule() from pypy.module._cffi_backend import misc -from pypy.module._cffi_backend import cffi1_module +from pypy.module._cffi_backend import cffi1_module, embedding def test_checkmodule(): # prepare_file_argument() is not working without translating the _file # module too def dummy_prepare_file_argument(space, fileobj): - # call load_cffi1_module() too, from a random place like here - cffi1_module.load_cffi1_module(space, "foo", "foo", 42) + # call pypy_init_embedded_cffi_module() from a random place like here + # --- this calls load_cffi1_module(), too + embedding.pypy_init_embedded_cffi_module( + rffi.cast(rffi.INT, embedding.EMBED_VERSION_MIN), + 42) return lltype.nullptr(rffi.CCHARP.TO) old = ctypeptr.prepare_file_argument try: diff --git a/pypy/objspace/fake/objspace.py b/pypy/objspace/fake/objspace.py --- a/pypy/objspace/fake/objspace.py +++ b/pypy/objspace/fake/objspace.py @@ -397,9 +397,14 @@ space.wrap(value) class FakeCompiler(object): - pass + def compile(self, code, name, mode, flags): + return FakePyCode() FakeObjSpace.default_compiler = FakeCompiler() +class FakePyCode(W_Root): + def exec_code(self, space, w_globals, w_locals): + return W_Root() + class FakeModule(W_Root): def __init__(self): diff --git a/pypy/tool/release/package.py b/pypy/tool/release/package.py --- 
a/pypy/tool/release/package.py +++ b/pypy/tool/release/package.py @@ -129,6 +129,9 @@ win_extras = ['libpypy-c.dll', 'sqlite3.dll'] if not options.no_tk: win_extras += ['tcl85.dll', 'tk85.dll'] + # add the .lib too, which is convenient to compile other programs + # that use the .dll (and for cffi's embedding mode) + win_extras.append('libpypy-c.lib') for extra in win_extras: p = pypy_c.dirpath().join(extra) From pypy.commits at gmail.com Thu Feb 11 10:12:29 2016 From: pypy.commits at gmail.com (arigo) Date: Thu, 11 Feb 2016 07:12:29 -0800 (PST) Subject: [pypy-commit] pypy default: kill unused import Message-ID: <56bca4dd.8916c20a.fb86b.ffffe8ad@mx.google.com> Author: Armin Rigo Branch: Changeset: r82165:8dd4fb6cbbc4 Date: 2016-02-11 16:10 +0100 http://bitbucket.org/pypy/pypy/changeset/8dd4fb6cbbc4/ Log: kill unused import diff --git a/pypy/module/_cffi_backend/test/test_ztranslation.py b/pypy/module/_cffi_backend/test/test_ztranslation.py --- a/pypy/module/_cffi_backend/test/test_ztranslation.py +++ b/pypy/module/_cffi_backend/test/test_ztranslation.py @@ -4,7 +4,7 @@ # side-effect: FORMAT_LONGDOUBLE must be built before test_checkmodule() from pypy.module._cffi_backend import misc -from pypy.module._cffi_backend import cffi1_module, embedding +from pypy.module._cffi_backend import embedding def test_checkmodule(): From pypy.commits at gmail.com Thu Feb 11 10:55:19 2016 From: pypy.commits at gmail.com (plan_rich) Date: Thu, 11 Feb 2016 07:55:19 -0800 (PST) Subject: [pypy-commit] pypy s390x-backend: using gc_load_r for reading a constptr since index scale and offset are constant and added to the index in rewrite, ndarray test that would fail on little endian (did not distinct this case) Message-ID: <56bcaee7.9a6f1c0a.b0f77.ffffc352@mx.google.com> Author: Richard Plangger Branch: s390x-backend Changeset: r82166:f535e775dd54 Date: 2016-02-11 16:54 +0100 http://bitbucket.org/pypy/pypy/changeset/f535e775dd54/ Log: using gc_load_r for reading a constptr since index scale 
and offset are constant and added to the index in rewrite, ndarray test that would fail on little endian (did not distinct this case) diff --git a/pypy/module/micronumpy/test/test_ndarray.py b/pypy/module/micronumpy/test/test_ndarray.py --- a/pypy/module/micronumpy/test/test_ndarray.py +++ b/pypy/module/micronumpy/test/test_ndarray.py @@ -1840,8 +1840,12 @@ assert y[0] == 513 == 0x0201 assert y.dtype == dtype('int16') y[0] = 670 - assert x[0] == 2 - assert x[1] == -98 + if sys.byteorder == 'little': + assert x[0] == -98 + assert x[1] == 2 + else: + assert x[0] == 2 + assert x[1] == -98 f = array([1000, -1234], dtype='i4') nnp = self.non_native_prefix d = f.view(dtype=nnp + 'i4') diff --git a/rpython/jit/backend/llsupport/gc.py b/rpython/jit/backend/llsupport/gc.py --- a/rpython/jit/backend/llsupport/gc.py +++ b/rpython/jit/backend/llsupport/gc.py @@ -168,10 +168,8 @@ array_index = array_index * factor + offset args = [moving_obj_tracker.const_ptr_gcref_array, ConstInt(array_index), - ConstInt(1), # already multiplied to array_index - ConstInt(0), # already added ConstInt(size)] - load_op = ResOperation(rop.GC_LOAD_INDEXED_R, args) + load_op = ResOperation(rop.GC_LOAD_R, args) newops.append(load_op) op.setarg(arg_i, load_op) # From pypy.commits at gmail.com Thu Feb 11 15:27:14 2016 From: pypy.commits at gmail.com (mattip) Date: Thu, 11 Feb 2016 12:27:14 -0800 (PST) Subject: [pypy-commit] pypy default: Merged in devin.jeanpierre/pypy-headers (pull request #348) Message-ID: <56bceea2.8abb1c0a.bb55c.64df@mx.google.com> Author: mattip Branch: Changeset: r82168:8d87f427ab24 Date: 2016-02-11 22:26 +0200 http://bitbucket.org/pypy/pypy/changeset/8d87f427ab24/ Log: Merged in devin.jeanpierre/pypy-headers (pull request #348) Add float-related C macro definitions to cpyext. 
diff --git a/pypy/module/cpyext/include/Python.h b/pypy/module/cpyext/include/Python.h --- a/pypy/module/cpyext/include/Python.h +++ b/pypy/module/cpyext/include/Python.h @@ -84,6 +84,7 @@ #include "pyconfig.h" #include "object.h" +#include "pymath.h" #include "pyport.h" #include "warnings.h" @@ -115,7 +116,6 @@ #include "compile.h" #include "frameobject.h" #include "eval.h" -#include "pymath.h" #include "pymem.h" #include "pycobject.h" #include "pycapsule.h" diff --git a/pypy/module/cpyext/include/floatobject.h b/pypy/module/cpyext/include/floatobject.h --- a/pypy/module/cpyext/include/floatobject.h +++ b/pypy/module/cpyext/include/floatobject.h @@ -7,6 +7,18 @@ extern "C" { #endif +#define PyFloat_STR_PRECISION 12 + +#ifdef Py_NAN +#define Py_RETURN_NAN return PyFloat_FromDouble(Py_NAN) +#endif + +#define Py_RETURN_INF(sign) do \ + if (copysign(1., sign) == 1.) { \ + return PyFloat_FromDouble(Py_HUGE_VAL); \ + } else { \ + return PyFloat_FromDouble(-Py_HUGE_VAL); \ + } while(0) #ifdef __cplusplus } diff --git a/pypy/module/cpyext/include/pymath.h b/pypy/module/cpyext/include/pymath.h --- a/pypy/module/cpyext/include/pymath.h +++ b/pypy/module/cpyext/include/pymath.h @@ -17,4 +17,35 @@ #define Py_HUGE_VAL HUGE_VAL #endif +/* Py_NAN + * A value that evaluates to a NaN. On IEEE 754 platforms INF*0 or + * INF/INF works. Define Py_NO_NAN in pyconfig.h if your platform + * doesn't support NaNs. + */ +#if !defined(Py_NAN) && !defined(Py_NO_NAN) +#if !defined(__INTEL_COMPILER) + #define Py_NAN (Py_HUGE_VAL * 0.) 
+#else /* __INTEL_COMPILER */ + #if defined(ICC_NAN_STRICT) + #pragma float_control(push) + #pragma float_control(precise, on) + #pragma float_control(except, on) + #if defined(_MSC_VER) + __declspec(noinline) + #else /* Linux */ + __attribute__((noinline)) + #endif /* _MSC_VER */ + static double __icc_nan() + { + return sqrt(-1.0); + } + #pragma float_control (pop) + #define Py_NAN __icc_nan() + #else /* ICC_NAN_RELAXED as default for Intel Compiler */ + static union { unsigned char buf[8]; double __icc_nan; } __nan_store = {0,0,0,0,0,0,0xf8,0x7f}; + #define Py_NAN (__nan_store.__icc_nan) + #endif /* ICC_NAN_STRICT */ +#endif /* __INTEL_COMPILER */ +#endif + #endif /* Py_PYMATH_H */ diff --git a/pypy/module/cpyext/test/test_floatobject.py b/pypy/module/cpyext/test/test_floatobject.py --- a/pypy/module/cpyext/test/test_floatobject.py +++ b/pypy/module/cpyext/test/test_floatobject.py @@ -45,3 +45,35 @@ ]) assert module.from_string() == 1234.56 assert type(module.from_string()) is float + +class AppTestFloatMacros(AppTestCpythonExtensionBase): + def test_return_nan(self): + import math + + module = self.import_extension('foo', [ + ("return_nan", "METH_NOARGS", + "Py_RETURN_NAN;"), + ]) + assert math.isnan(module.return_nan()) + + def test_return_inf(self): + import math + + module = self.import_extension('foo', [ + ("return_inf", "METH_NOARGS", + "Py_RETURN_INF(10);"), + ]) + inf = module.return_inf() + assert inf > 0 + assert math.isinf(inf) + + def test_return_inf_negative(self): + import math + + module = self.import_extension('foo', [ + ("return_neginf", "METH_NOARGS", + "Py_RETURN_INF(-10);"), + ]) + neginf = module.return_neginf() + assert neginf < 0 + assert math.isinf(neginf) From pypy.commits at gmail.com Thu Feb 11 15:27:19 2016 From: pypy.commits at gmail.com (devin.jeanpierre) Date: Thu, 11 Feb 2016 12:27:19 -0800 (PST) Subject: [pypy-commit] pypy default: Add float-related C macro definitions to cpyext. 
Message-ID: <56bceea7.6507c20a.b9d0b.5bf2@mx.google.com> Author: Devin Jeanpierre Branch: Changeset: r82167:db58dbf2e616 Date: 2015-10-21 21:11 -0700 http://bitbucket.org/pypy/pypy/changeset/db58dbf2e616/ Log: Add float-related C macro definitions to cpyext. diff --git a/pypy/module/cpyext/include/Python.h b/pypy/module/cpyext/include/Python.h --- a/pypy/module/cpyext/include/Python.h +++ b/pypy/module/cpyext/include/Python.h @@ -84,6 +84,7 @@ #include "pyconfig.h" #include "object.h" +#include "pymath.h" #include "pyport.h" #include "warnings.h" @@ -114,7 +115,6 @@ #include "compile.h" #include "frameobject.h" #include "eval.h" -#include "pymath.h" #include "pymem.h" #include "pycobject.h" #include "pycapsule.h" diff --git a/pypy/module/cpyext/include/floatobject.h b/pypy/module/cpyext/include/floatobject.h --- a/pypy/module/cpyext/include/floatobject.h +++ b/pypy/module/cpyext/include/floatobject.h @@ -7,6 +7,18 @@ extern "C" { #endif +#define PyFloat_STR_PRECISION 12 + +#ifdef Py_NAN +#define Py_RETURN_NAN return PyFloat_FromDouble(Py_NAN) +#endif + +#define Py_RETURN_INF(sign) do \ + if (copysign(1., sign) == 1.) { \ + return PyFloat_FromDouble(Py_HUGE_VAL); \ + } else { \ + return PyFloat_FromDouble(-Py_HUGE_VAL); \ + } while(0) #ifdef __cplusplus } diff --git a/pypy/module/cpyext/include/pymath.h b/pypy/module/cpyext/include/pymath.h --- a/pypy/module/cpyext/include/pymath.h +++ b/pypy/module/cpyext/include/pymath.h @@ -17,4 +17,35 @@ #define Py_HUGE_VAL HUGE_VAL #endif +/* Py_NAN + * A value that evaluates to a NaN. On IEEE 754 platforms INF*0 or + * INF/INF works. Define Py_NO_NAN in pyconfig.h if your platform + * doesn't support NaNs. + */ +#if !defined(Py_NAN) && !defined(Py_NO_NAN) +#if !defined(__INTEL_COMPILER) + #define Py_NAN (Py_HUGE_VAL * 0.) 
+#else /* __INTEL_COMPILER */ + #if defined(ICC_NAN_STRICT) + #pragma float_control(push) + #pragma float_control(precise, on) + #pragma float_control(except, on) + #if defined(_MSC_VER) + __declspec(noinline) + #else /* Linux */ + __attribute__((noinline)) + #endif /* _MSC_VER */ + static double __icc_nan() + { + return sqrt(-1.0); + } + #pragma float_control (pop) + #define Py_NAN __icc_nan() + #else /* ICC_NAN_RELAXED as default for Intel Compiler */ + static union { unsigned char buf[8]; double __icc_nan; } __nan_store = {0,0,0,0,0,0,0xf8,0x7f}; + #define Py_NAN (__nan_store.__icc_nan) + #endif /* ICC_NAN_STRICT */ +#endif /* __INTEL_COMPILER */ +#endif + #endif /* Py_PYMATH_H */ diff --git a/pypy/module/cpyext/test/test_floatobject.py b/pypy/module/cpyext/test/test_floatobject.py --- a/pypy/module/cpyext/test/test_floatobject.py +++ b/pypy/module/cpyext/test/test_floatobject.py @@ -45,3 +45,35 @@ ]) assert module.from_string() == 1234.56 assert type(module.from_string()) is float + +class AppTestFloatMacros(AppTestCpythonExtensionBase): + def test_return_nan(self): + import math + + module = self.import_extension('foo', [ + ("return_nan", "METH_NOARGS", + "Py_RETURN_NAN;"), + ]) + assert math.isnan(module.return_nan()) + + def test_return_inf(self): + import math + + module = self.import_extension('foo', [ + ("return_inf", "METH_NOARGS", + "Py_RETURN_INF(10);"), + ]) + inf = module.return_inf() + assert inf > 0 + assert math.isinf(inf) + + def test_return_inf_negative(self): + import math + + module = self.import_extension('foo', [ + ("return_neginf", "METH_NOARGS", + "Py_RETURN_INF(-10);"), + ]) + neginf = module.return_neginf() + assert neginf < 0 + assert math.isinf(neginf) From pypy.commits at gmail.com Fri Feb 12 03:30:44 2016 From: pypy.commits at gmail.com (plan_rich) Date: Fri, 12 Feb 2016 00:30:44 -0800 (PST) Subject: [pypy-commit] pypy s390x-backend: fixed test. 
emitting gc_load_r instead of indexed while loading a constant pointer Message-ID: <56bd9834.29cec20a.949f1.fffff6f0@mx.google.com> Author: Richard Plangger Branch: s390x-backend Changeset: r82169:0274e5b9e6d2 Date: 2016-02-12 09:29 +0100 http://bitbucket.org/pypy/pypy/changeset/0274e5b9e6d2/ Log: fixed test. emitting gc_load_r instead of indexed while loading a constant pointer LD has only 12 bit unsigned offset, LDY needed diff --git a/rpython/jit/backend/llsupport/gc.py b/rpython/jit/backend/llsupport/gc.py --- a/rpython/jit/backend/llsupport/gc.py +++ b/rpython/jit/backend/llsupport/gc.py @@ -164,8 +164,7 @@ array_index = moving_obj_tracker.get_array_index(v) size, offset, _ = unpack_arraydescr(moving_obj_tracker.ptr_array_descr) - factor = size - array_index = array_index * factor + offset + array_index = array_index * size + offset args = [moving_obj_tracker.const_ptr_gcref_array, ConstInt(array_index), ConstInt(size)] diff --git a/rpython/jit/backend/llsupport/test/test_pinned_object_rewrite.py b/rpython/jit/backend/llsupport/test/test_pinned_object_rewrite.py --- a/rpython/jit/backend/llsupport/test/test_pinned_object_rewrite.py +++ b/rpython/jit/backend/llsupport/test/test_pinned_object_rewrite.py @@ -140,10 +140,10 @@ i2 = getfield_gc_i(ConstPtr(pinned_obj_gcref), descr=pinned_obj_my_int_descr) """, """ [] - p1 = gc_load_indexed_r(ConstPtr(ptr_array_gcref), %(0 * ptr_array_descr.itemsize + 1)s, 1, 0, %(ptr_array_descr.itemsize)s) + p1 = gc_load_r(ConstPtr(ptr_array_gcref), %(0 * ptr_array_descr.itemsize + 1)s, %(ptr_array_descr.itemsize)s) i0 = gc_load_i(p1, 0, -%(pinned_obj_my_int_descr.field_size)s) i1 = gc_load_i(ConstPtr(notpinned_obj_gcref), 0, -%(notpinned_obj_my_int_descr.field_size)s) - p2 = gc_load_indexed_r(ConstPtr(ptr_array_gcref), %(1 * ptr_array_descr.itemsize + 1)s, 1, 0, %(ptr_array_descr.itemsize)s) + p2 = gc_load_r(ConstPtr(ptr_array_gcref), %(1 * ptr_array_descr.itemsize + 1)s, %(ptr_array_descr.itemsize)s) i2 = gc_load_i(p2, 0, 
-%(pinned_obj_my_int_descr.field_size)s) """) assert len(self.gc_ll_descr.last_moving_obj_tracker._indexes) == 2 diff --git a/rpython/jit/backend/zarch/opassembler.py b/rpython/jit/backend/zarch/opassembler.py --- a/rpython/jit/backend/zarch/opassembler.py +++ b/rpython/jit/backend/zarch/opassembler.py @@ -880,7 +880,7 @@ # res, base_loc, ofs, size and signed are all locations if size == 8: if result_loc.is_fp_reg(): - self.mc.LD(result_loc, source_loc) + self.mc.LDY(result_loc, source_loc) else: self.mc.LG(result_loc, source_loc) elif size == 4: From pypy.commits at gmail.com Fri Feb 12 06:35:08 2016 From: pypy.commits at gmail.com (cfbolz) Date: Fri, 12 Feb 2016 03:35:08 -0800 (PST) Subject: [pypy-commit] pypy statistics-maps: merge default Message-ID: <56bdc36c.2aacc20a.40ae0.3bd4@mx.google.com> Author: Carl Friedrich Bolz Branch: statistics-maps Changeset: r82171:dae74bd53702 Date: 2016-02-12 12:34 +0100 http://bitbucket.org/pypy/pypy/changeset/dae74bd53702/ Log: merge default diff too long, truncating to 2000 out of 9916 lines diff --git a/.hgignore b/.hgignore --- a/.hgignore +++ b/.hgignore @@ -75,6 +75,7 @@ ^lib_pypy/__pycache__$ ^lib_pypy/ctypes_config_cache/_.+_cache\.py$ ^lib_pypy/ctypes_config_cache/_.+_.+_\.py$ +^lib_pypy/_libmpdec/.+.o$ ^rpython/translator/cli/query-descriptions$ ^pypy/doc/discussion/.+\.html$ ^include/.+\.h$ diff --git a/dotviewer/drawgraph.py b/dotviewer/drawgraph.py --- a/dotviewer/drawgraph.py +++ b/dotviewer/drawgraph.py @@ -14,12 +14,661 @@ FONT = os.path.join(this_dir, 'font', 'DroidSans.ttf') FIXEDFONT = os.path.join(this_dir, 'font', 'DroidSansMono.ttf') COLOR = { - 'black': (0,0,0), - 'white': (255,255,255), - 'red': (255,0,0), - 'green': (0,255,0), - 'blue': (0,0,255), - 'yellow': (255,255,0), + 'aliceblue': (240, 248, 255), + 'antiquewhite': (250, 235, 215), + 'antiquewhite1': (255, 239, 219), + 'antiquewhite2': (238, 223, 204), + 'antiquewhite3': (205, 192, 176), + 'antiquewhite4': (139, 131, 120), + 'aquamarine': (127, 
255, 212), + 'aquamarine1': (127, 255, 212), + 'aquamarine2': (118, 238, 198), + 'aquamarine3': (102, 205, 170), + 'aquamarine4': (69, 139, 116), + 'azure': (240, 255, 255), + 'azure1': (240, 255, 255), + 'azure2': (224, 238, 238), + 'azure3': (193, 205, 205), + 'azure4': (131, 139, 139), + 'beige': (245, 245, 220), + 'bisque': (255, 228, 196), + 'bisque1': (255, 228, 196), + 'bisque2': (238, 213, 183), + 'bisque3': (205, 183, 158), + 'bisque4': (139, 125, 107), + 'black': (0, 0, 0), + 'blanchedalmond': (255, 235, 205), + 'blue': (0, 0, 255), + 'blue1': (0, 0, 255), + 'blue2': (0, 0, 238), + 'blue3': (0, 0, 205), + 'blue4': (0, 0, 139), + 'blueviolet': (138, 43, 226), + 'brown': (165, 42, 42), + 'brown1': (255, 64, 64), + 'brown2': (238, 59, 59), + 'brown3': (205, 51, 51), + 'brown4': (139, 35, 35), + 'burlywood': (222, 184, 135), + 'burlywood1': (255, 211, 155), + 'burlywood2': (238, 197, 145), + 'burlywood3': (205, 170, 125), + 'burlywood4': (139, 115, 85), + 'cadetblue': (95, 158, 160), + 'cadetblue1': (152, 245, 255), + 'cadetblue2': (142, 229, 238), + 'cadetblue3': (122, 197, 205), + 'cadetblue4': (83, 134, 139), + 'chartreuse': (127, 255, 0), + 'chartreuse1': (127, 255, 0), + 'chartreuse2': (118, 238, 0), + 'chartreuse3': (102, 205, 0), + 'chartreuse4': (69, 139, 0), + 'chocolate': (210, 105, 30), + 'chocolate1': (255, 127, 36), + 'chocolate2': (238, 118, 33), + 'chocolate3': (205, 102, 29), + 'chocolate4': (139, 69, 19), + 'coral': (255, 127, 80), + 'coral1': (255, 114, 86), + 'coral2': (238, 106, 80), + 'coral3': (205, 91, 69), + 'coral4': (139, 62, 47), + 'cornflowerblue': (100, 149, 237), + 'cornsilk': (255, 248, 220), + 'cornsilk1': (255, 248, 220), + 'cornsilk2': (238, 232, 205), + 'cornsilk3': (205, 200, 177), + 'cornsilk4': (139, 136, 120), + 'crimson': (220, 20, 60), + 'cyan': (0, 255, 255), + 'cyan1': (0, 255, 255), + 'cyan2': (0, 238, 238), + 'cyan3': (0, 205, 205), + 'cyan4': (0, 139, 139), + 'darkgoldenrod': (184, 134, 11), + 'darkgoldenrod1': 
(255, 185, 15), + 'darkgoldenrod2': (238, 173, 14), + 'darkgoldenrod3': (205, 149, 12), + 'darkgoldenrod4': (139, 101, 8), + 'darkgreen': (0, 100, 0), + 'darkkhaki': (189, 183, 107), + 'darkolivegreen': (85, 107, 47), + 'darkolivegreen1': (202, 255, 112), + 'darkolivegreen2': (188, 238, 104), + 'darkolivegreen3': (162, 205, 90), + 'darkolivegreen4': (110, 139, 61), + 'darkorange': (255, 140, 0), + 'darkorange1': (255, 127, 0), + 'darkorange2': (238, 118, 0), + 'darkorange3': (205, 102, 0), + 'darkorange4': (139, 69, 0), + 'darkorchid': (153, 50, 204), + 'darkorchid1': (191, 62, 255), + 'darkorchid2': (178, 58, 238), + 'darkorchid3': (154, 50, 205), + 'darkorchid4': (104, 34, 139), + 'darksalmon': (233, 150, 122), + 'darkseagreen': (143, 188, 143), + 'darkseagreen1': (193, 255, 193), + 'darkseagreen2': (180, 238, 180), + 'darkseagreen3': (155, 205, 155), + 'darkseagreen4': (105, 139, 105), + 'darkslateblue': (72, 61, 139), + 'darkslategray': (47, 79, 79), + 'darkslategray1': (151, 255, 255), + 'darkslategray2': (141, 238, 238), + 'darkslategray3': (121, 205, 205), + 'darkslategray4': (82, 139, 139), + 'darkslategrey': (47, 79, 79), + 'darkturquoise': (0, 206, 209), + 'darkviolet': (148, 0, 211), + 'deeppink': (255, 20, 147), + 'deeppink1': (255, 20, 147), + 'deeppink2': (238, 18, 137), + 'deeppink3': (205, 16, 118), + 'deeppink4': (139, 10, 80), + 'deepskyblue': (0, 191, 255), + 'deepskyblue1': (0, 191, 255), + 'deepskyblue2': (0, 178, 238), + 'deepskyblue3': (0, 154, 205), + 'deepskyblue4': (0, 104, 139), + 'dimgray': (105, 105, 105), + 'dimgrey': (105, 105, 105), + 'dodgerblue': (30, 144, 255), + 'dodgerblue1': (30, 144, 255), + 'dodgerblue2': (28, 134, 238), + 'dodgerblue3': (24, 116, 205), + 'dodgerblue4': (16, 78, 139), + 'firebrick': (178, 34, 34), + 'firebrick1': (255, 48, 48), + 'firebrick2': (238, 44, 44), + 'firebrick3': (205, 38, 38), + 'firebrick4': (139, 26, 26), + 'floralwhite': (255, 250, 240), + 'forestgreen': (34, 139, 34), + 'gainsboro': (220, 220, 
220), + 'ghostwhite': (248, 248, 255), + 'gold': (255, 215, 0), + 'gold1': (255, 215, 0), + 'gold2': (238, 201, 0), + 'gold3': (205, 173, 0), + 'gold4': (139, 117, 0), + 'goldenrod': (218, 165, 32), + 'goldenrod1': (255, 193, 37), + 'goldenrod2': (238, 180, 34), + 'goldenrod3': (205, 155, 29), + 'goldenrod4': (139, 105, 20), + 'gray': (192, 192, 192), + 'gray0': (0, 0, 0), + 'gray1': (3, 3, 3), + 'gray10': (26, 26, 26), + 'gray100': (255, 255, 255), + 'gray11': (28, 28, 28), + 'gray12': (31, 31, 31), + 'gray13': (33, 33, 33), + 'gray14': (36, 36, 36), + 'gray15': (38, 38, 38), + 'gray16': (41, 41, 41), + 'gray17': (43, 43, 43), + 'gray18': (46, 46, 46), + 'gray19': (48, 48, 48), + 'gray2': (5, 5, 5), + 'gray20': (51, 51, 51), + 'gray21': (54, 54, 54), + 'gray22': (56, 56, 56), + 'gray23': (59, 59, 59), + 'gray24': (61, 61, 61), + 'gray25': (64, 64, 64), + 'gray26': (66, 66, 66), + 'gray27': (69, 69, 69), + 'gray28': (71, 71, 71), + 'gray29': (74, 74, 74), + 'gray3': (8, 8, 8), + 'gray30': (77, 77, 77), + 'gray31': (79, 79, 79), + 'gray32': (82, 82, 82), + 'gray33': (84, 84, 84), + 'gray34': (87, 87, 87), + 'gray35': (89, 89, 89), + 'gray36': (92, 92, 92), + 'gray37': (94, 94, 94), + 'gray38': (97, 97, 97), + 'gray39': (99, 99, 99), + 'gray4': (10, 10, 10), + 'gray40': (102, 102, 102), + 'gray41': (105, 105, 105), + 'gray42': (107, 107, 107), + 'gray43': (110, 110, 110), + 'gray44': (112, 112, 112), + 'gray45': (115, 115, 115), + 'gray46': (117, 117, 117), + 'gray47': (120, 120, 120), + 'gray48': (122, 122, 122), + 'gray49': (125, 125, 125), + 'gray5': (13, 13, 13), + 'gray50': (127, 127, 127), + 'gray51': (130, 130, 130), + 'gray52': (133, 133, 133), + 'gray53': (135, 135, 135), + 'gray54': (138, 138, 138), + 'gray55': (140, 140, 140), + 'gray56': (143, 143, 143), + 'gray57': (145, 145, 145), + 'gray58': (148, 148, 148), + 'gray59': (150, 150, 150), + 'gray6': (15, 15, 15), + 'gray60': (153, 153, 153), + 'gray61': (156, 156, 156), + 'gray62': (158, 158, 158), + 
'gray63': (161, 161, 161), + 'gray64': (163, 163, 163), + 'gray65': (166, 166, 166), + 'gray66': (168, 168, 168), + 'gray67': (171, 171, 171), + 'gray68': (173, 173, 173), + 'gray69': (176, 176, 176), + 'gray7': (18, 18, 18), + 'gray70': (179, 179, 179), + 'gray71': (181, 181, 181), + 'gray72': (184, 184, 184), + 'gray73': (186, 186, 186), + 'gray74': (189, 189, 189), + 'gray75': (191, 191, 191), + 'gray76': (194, 194, 194), + 'gray77': (196, 196, 196), + 'gray78': (199, 199, 199), + 'gray79': (201, 201, 201), + 'gray8': (20, 20, 20), + 'gray80': (204, 204, 204), + 'gray81': (207, 207, 207), + 'gray82': (209, 209, 209), + 'gray83': (212, 212, 212), + 'gray84': (214, 214, 214), + 'gray85': (217, 217, 217), + 'gray86': (219, 219, 219), + 'gray87': (222, 222, 222), + 'gray88': (224, 224, 224), + 'gray89': (227, 227, 227), + 'gray9': (23, 23, 23), + 'gray90': (229, 229, 229), + 'gray91': (232, 232, 232), + 'gray92': (235, 235, 235), + 'gray93': (237, 237, 237), + 'gray94': (240, 240, 240), + 'gray95': (242, 242, 242), + 'gray96': (245, 245, 245), + 'gray97': (247, 247, 247), + 'gray98': (250, 250, 250), + 'gray99': (252, 252, 252), + 'green': (0, 255, 0), + 'green1': (0, 255, 0), + 'green2': (0, 238, 0), + 'green3': (0, 205, 0), + 'green4': (0, 139, 0), + 'greenyellow': (173, 255, 47), + 'grey': (192, 192, 192), + 'grey0': (0, 0, 0), + 'grey1': (3, 3, 3), + 'grey10': (26, 26, 26), + 'grey100': (255, 255, 255), + 'grey11': (28, 28, 28), + 'grey12': (31, 31, 31), + 'grey13': (33, 33, 33), + 'grey14': (36, 36, 36), + 'grey15': (38, 38, 38), + 'grey16': (41, 41, 41), + 'grey17': (43, 43, 43), + 'grey18': (46, 46, 46), + 'grey19': (48, 48, 48), + 'grey2': (5, 5, 5), + 'grey20': (51, 51, 51), + 'grey21': (54, 54, 54), + 'grey22': (56, 56, 56), + 'grey23': (59, 59, 59), + 'grey24': (61, 61, 61), + 'grey25': (64, 64, 64), + 'grey26': (66, 66, 66), + 'grey27': (69, 69, 69), + 'grey28': (71, 71, 71), + 'grey29': (74, 74, 74), + 'grey3': (8, 8, 8), + 'grey30': (77, 77, 77), + 
'grey31': (79, 79, 79), + 'grey32': (82, 82, 82), + 'grey33': (84, 84, 84), + 'grey34': (87, 87, 87), + 'grey35': (89, 89, 89), + 'grey36': (92, 92, 92), + 'grey37': (94, 94, 94), + 'grey38': (97, 97, 97), + 'grey39': (99, 99, 99), + 'grey4': (10, 10, 10), + 'grey40': (102, 102, 102), + 'grey41': (105, 105, 105), + 'grey42': (107, 107, 107), + 'grey43': (110, 110, 110), + 'grey44': (112, 112, 112), + 'grey45': (115, 115, 115), + 'grey46': (117, 117, 117), + 'grey47': (120, 120, 120), + 'grey48': (122, 122, 122), + 'grey49': (125, 125, 125), + 'grey5': (13, 13, 13), + 'grey50': (127, 127, 127), + 'grey51': (130, 130, 130), + 'grey52': (133, 133, 133), + 'grey53': (135, 135, 135), + 'grey54': (138, 138, 138), + 'grey55': (140, 140, 140), + 'grey56': (143, 143, 143), + 'grey57': (145, 145, 145), + 'grey58': (148, 148, 148), + 'grey59': (150, 150, 150), + 'grey6': (15, 15, 15), + 'grey60': (153, 153, 153), + 'grey61': (156, 156, 156), + 'grey62': (158, 158, 158), + 'grey63': (161, 161, 161), + 'grey64': (163, 163, 163), + 'grey65': (166, 166, 166), + 'grey66': (168, 168, 168), + 'grey67': (171, 171, 171), + 'grey68': (173, 173, 173), + 'grey69': (176, 176, 176), + 'grey7': (18, 18, 18), + 'grey70': (179, 179, 179), + 'grey71': (181, 181, 181), + 'grey72': (184, 184, 184), + 'grey73': (186, 186, 186), + 'grey74': (189, 189, 189), + 'grey75': (191, 191, 191), + 'grey76': (194, 194, 194), + 'grey77': (196, 196, 196), + 'grey78': (199, 199, 199), + 'grey79': (201, 201, 201), + 'grey8': (20, 20, 20), + 'grey80': (204, 204, 204), + 'grey81': (207, 207, 207), + 'grey82': (209, 209, 209), + 'grey83': (212, 212, 212), + 'grey84': (214, 214, 214), + 'grey85': (217, 217, 217), + 'grey86': (219, 219, 219), + 'grey87': (222, 222, 222), + 'grey88': (224, 224, 224), + 'grey89': (227, 227, 227), + 'grey9': (23, 23, 23), + 'grey90': (229, 229, 229), + 'grey91': (232, 232, 232), + 'grey92': (235, 235, 235), + 'grey93': (237, 237, 237), + 'grey94': (240, 240, 240), + 'grey95': (242, 242, 
242), + 'grey96': (245, 245, 245), + 'grey97': (247, 247, 247), + 'grey98': (250, 250, 250), + 'grey99': (252, 252, 252), + 'honeydew': (240, 255, 240), + 'honeydew1': (240, 255, 240), + 'honeydew2': (224, 238, 224), + 'honeydew3': (193, 205, 193), + 'honeydew4': (131, 139, 131), + 'hotpink': (255, 105, 180), + 'hotpink1': (255, 110, 180), + 'hotpink2': (238, 106, 167), + 'hotpink3': (205, 96, 144), + 'hotpink4': (139, 58, 98), + 'indianred': (205, 92, 92), + 'indianred1': (255, 106, 106), + 'indianred2': (238, 99, 99), + 'indianred3': (205, 85, 85), + 'indianred4': (139, 58, 58), + 'indigo': (75, 0, 130), + 'invis': (255, 255, 254), + 'ivory': (255, 255, 240), + 'ivory1': (255, 255, 240), + 'ivory2': (238, 238, 224), + 'ivory3': (205, 205, 193), + 'ivory4': (139, 139, 131), + 'khaki': (240, 230, 140), + 'khaki1': (255, 246, 143), + 'khaki2': (238, 230, 133), + 'khaki3': (205, 198, 115), + 'khaki4': (139, 134, 78), + 'lavender': (230, 230, 250), + 'lavenderblush': (255, 240, 245), + 'lavenderblush1': (255, 240, 245), + 'lavenderblush2': (238, 224, 229), + 'lavenderblush3': (205, 193, 197), + 'lavenderblush4': (139, 131, 134), + 'lawngreen': (124, 252, 0), + 'lemonchiffon': (255, 250, 205), + 'lemonchiffon1': (255, 250, 205), + 'lemonchiffon2': (238, 233, 191), + 'lemonchiffon3': (205, 201, 165), + 'lemonchiffon4': (139, 137, 112), + 'lightblue': (173, 216, 230), + 'lightblue1': (191, 239, 255), + 'lightblue2': (178, 223, 238), + 'lightblue3': (154, 192, 205), + 'lightblue4': (104, 131, 139), + 'lightcoral': (240, 128, 128), + 'lightcyan': (224, 255, 255), + 'lightcyan1': (224, 255, 255), + 'lightcyan2': (209, 238, 238), + 'lightcyan3': (180, 205, 205), + 'lightcyan4': (122, 139, 139), + 'lightgoldenrod': (238, 221, 130), + 'lightgoldenrod1': (255, 236, 139), + 'lightgoldenrod2': (238, 220, 130), + 'lightgoldenrod3': (205, 190, 112), + 'lightgoldenrod4': (139, 129, 76), + 'lightgoldenrodyellow': (250, 250, 210), + 'lightgray': (211, 211, 211), + 'lightgrey': (211, 
211, 211), + 'lightpink': (255, 182, 193), + 'lightpink1': (255, 174, 185), + 'lightpink2': (238, 162, 173), + 'lightpink3': (205, 140, 149), + 'lightpink4': (139, 95, 101), + 'lightsalmon': (255, 160, 122), + 'lightsalmon1': (255, 160, 122), + 'lightsalmon2': (238, 149, 114), + 'lightsalmon3': (205, 129, 98), + 'lightsalmon4': (139, 87, 66), + 'lightseagreen': (32, 178, 170), + 'lightskyblue': (135, 206, 250), + 'lightskyblue1': (176, 226, 255), + 'lightskyblue2': (164, 211, 238), + 'lightskyblue3': (141, 182, 205), + 'lightskyblue4': (96, 123, 139), + 'lightslateblue': (132, 112, 255), + 'lightslategray': (119, 136, 153), + 'lightslategrey': (119, 136, 153), + 'lightsteelblue': (176, 196, 222), + 'lightsteelblue1': (202, 225, 255), + 'lightsteelblue2': (188, 210, 238), + 'lightsteelblue3': (162, 181, 205), + 'lightsteelblue4': (110, 123, 139), + 'lightyellow': (255, 255, 224), + 'lightyellow1': (255, 255, 224), + 'lightyellow2': (238, 238, 209), + 'lightyellow3': (205, 205, 180), + 'lightyellow4': (139, 139, 122), + 'limegreen': (50, 205, 50), + 'linen': (250, 240, 230), + 'magenta': (255, 0, 255), + 'magenta1': (255, 0, 255), + 'magenta2': (238, 0, 238), + 'magenta3': (205, 0, 205), + 'magenta4': (139, 0, 139), + 'maroon': (176, 48, 96), + 'maroon1': (255, 52, 179), + 'maroon2': (238, 48, 167), + 'maroon3': (205, 41, 144), + 'maroon4': (139, 28, 98), + 'mediumaquamarine': (102, 205, 170), + 'mediumblue': (0, 0, 205), + 'mediumorchid': (186, 85, 211), + 'mediumorchid1': (224, 102, 255), + 'mediumorchid2': (209, 95, 238), + 'mediumorchid3': (180, 82, 205), + 'mediumorchid4': (122, 55, 139), + 'mediumpurple': (147, 112, 219), + 'mediumpurple1': (171, 130, 255), + 'mediumpurple2': (159, 121, 238), + 'mediumpurple3': (137, 104, 205), + 'mediumpurple4': (93, 71, 139), + 'mediumseagreen': (60, 179, 113), + 'mediumslateblue': (123, 104, 238), + 'mediumspringgreen': (0, 250, 154), + 'mediumturquoise': (72, 209, 204), + 'mediumvioletred': (199, 21, 133), + 'midnightblue': 
(25, 25, 112), + 'mintcream': (245, 255, 250), + 'mistyrose': (255, 228, 225), + 'mistyrose1': (255, 228, 225), + 'mistyrose2': (238, 213, 210), + 'mistyrose3': (205, 183, 181), + 'mistyrose4': (139, 125, 123), + 'moccasin': (255, 228, 181), + 'navajowhite': (255, 222, 173), + 'navajowhite1': (255, 222, 173), + 'navajowhite2': (238, 207, 161), + 'navajowhite3': (205, 179, 139), + 'navajowhite4': (139, 121, 94), + 'navy': (0, 0, 128), + 'navyblue': (0, 0, 128), + 'none': (255, 255, 254), + 'oldlace': (253, 245, 230), + 'olivedrab': (107, 142, 35), + 'olivedrab1': (192, 255, 62), + 'olivedrab2': (179, 238, 58), + 'olivedrab3': (154, 205, 50), + 'olivedrab4': (105, 139, 34), + 'orange': (255, 165, 0), + 'orange1': (255, 165, 0), + 'orange2': (238, 154, 0), + 'orange3': (205, 133, 0), + 'orange4': (139, 90, 0), + 'orangered': (255, 69, 0), + 'orangered1': (255, 69, 0), + 'orangered2': (238, 64, 0), + 'orangered3': (205, 55, 0), + 'orangered4': (139, 37, 0), + 'orchid': (218, 112, 214), + 'orchid1': (255, 131, 250), + 'orchid2': (238, 122, 233), + 'orchid3': (205, 105, 201), + 'orchid4': (139, 71, 137), + 'palegoldenrod': (238, 232, 170), + 'palegreen': (152, 251, 152), + 'palegreen1': (154, 255, 154), + 'palegreen2': (144, 238, 144), + 'palegreen3': (124, 205, 124), + 'palegreen4': (84, 139, 84), + 'paleturquoise': (175, 238, 238), + 'paleturquoise1': (187, 255, 255), + 'paleturquoise2': (174, 238, 238), + 'paleturquoise3': (150, 205, 205), + 'paleturquoise4': (102, 139, 139), + 'palevioletred': (219, 112, 147), + 'palevioletred1': (255, 130, 171), + 'palevioletred2': (238, 121, 159), + 'palevioletred3': (205, 104, 137), + 'palevioletred4': (139, 71, 93), + 'papayawhip': (255, 239, 213), + 'peachpuff': (255, 218, 185), + 'peachpuff1': (255, 218, 185), + 'peachpuff2': (238, 203, 173), + 'peachpuff3': (205, 175, 149), + 'peachpuff4': (139, 119, 101), + 'peru': (205, 133, 63), + 'pink': (255, 192, 203), + 'pink1': (255, 181, 197), + 'pink2': (238, 169, 184), + 'pink3': 
(205, 145, 158), + 'pink4': (139, 99, 108), + 'plum': (221, 160, 221), + 'plum1': (255, 187, 255), + 'plum2': (238, 174, 238), + 'plum3': (205, 150, 205), + 'plum4': (139, 102, 139), + 'powderblue': (176, 224, 230), + 'purple': (160, 32, 240), + 'purple1': (155, 48, 255), + 'purple2': (145, 44, 238), + 'purple3': (125, 38, 205), + 'purple4': (85, 26, 139), + 'red': (255, 0, 0), + 'red1': (255, 0, 0), + 'red2': (238, 0, 0), + 'red3': (205, 0, 0), + 'red4': (139, 0, 0), + 'rosybrown': (188, 143, 143), + 'rosybrown1': (255, 193, 193), + 'rosybrown2': (238, 180, 180), + 'rosybrown3': (205, 155, 155), + 'rosybrown4': (139, 105, 105), + 'royalblue': (65, 105, 225), + 'royalblue1': (72, 118, 255), + 'royalblue2': (67, 110, 238), + 'royalblue3': (58, 95, 205), + 'royalblue4': (39, 64, 139), + 'saddlebrown': (139, 69, 19), + 'salmon': (250, 128, 114), + 'salmon1': (255, 140, 105), + 'salmon2': (238, 130, 98), + 'salmon3': (205, 112, 84), + 'salmon4': (139, 76, 57), + 'sandybrown': (244, 164, 96), + 'seagreen': (46, 139, 87), + 'seagreen1': (84, 255, 159), + 'seagreen2': (78, 238, 148), + 'seagreen3': (67, 205, 128), + 'seagreen4': (46, 139, 87), + 'seashell': (255, 245, 238), + 'seashell1': (255, 245, 238), + 'seashell2': (238, 229, 222), + 'seashell3': (205, 197, 191), + 'seashell4': (139, 134, 130), + 'sienna': (160, 82, 45), + 'sienna1': (255, 130, 71), + 'sienna2': (238, 121, 66), + 'sienna3': (205, 104, 57), + 'sienna4': (139, 71, 38), + 'skyblue': (135, 206, 235), + 'skyblue1': (135, 206, 255), + 'skyblue2': (126, 192, 238), + 'skyblue3': (108, 166, 205), + 'skyblue4': (74, 112, 139), + 'slateblue': (106, 90, 205), + 'slateblue1': (131, 111, 255), + 'slateblue2': (122, 103, 238), + 'slateblue3': (105, 89, 205), + 'slateblue4': (71, 60, 139), + 'slategray': (112, 128, 144), + 'slategray1': (198, 226, 255), + 'slategray2': (185, 211, 238), + 'slategray3': (159, 182, 205), + 'slategray4': (108, 123, 139), + 'slategrey': (112, 128, 144), + 'snow': (255, 250, 250), + 
'snow1': (255, 250, 250), + 'snow2': (238, 233, 233), + 'snow3': (205, 201, 201), + 'snow4': (139, 137, 137), + 'springgreen': (0, 255, 127), + 'springgreen1': (0, 255, 127), + 'springgreen2': (0, 238, 118), + 'springgreen3': (0, 205, 102), + 'springgreen4': (0, 139, 69), + 'steelblue': (70, 130, 180), + 'steelblue1': (99, 184, 255), + 'steelblue2': (92, 172, 238), + 'steelblue3': (79, 148, 205), + 'steelblue4': (54, 100, 139), + 'tan': (210, 180, 140), + 'tan1': (255, 165, 79), + 'tan2': (238, 154, 73), + 'tan3': (205, 133, 63), + 'tan4': (139, 90, 43), + 'thistle': (216, 191, 216), + 'thistle1': (255, 225, 255), + 'thistle2': (238, 210, 238), + 'thistle3': (205, 181, 205), + 'thistle4': (139, 123, 139), + 'tomato': (255, 99, 71), + 'tomato1': (255, 99, 71), + 'tomato2': (238, 92, 66), + 'tomato3': (205, 79, 57), + 'tomato4': (139, 54, 38), + 'transparent': (255, 255, 254), + 'turquoise': (64, 224, 208), + 'turquoise1': (0, 245, 255), + 'turquoise2': (0, 229, 238), + 'turquoise3': (0, 197, 205), + 'turquoise4': (0, 134, 139), + 'violet': (238, 130, 238), + 'violetred': (208, 32, 144), + 'violetred1': (255, 62, 150), + 'violetred2': (238, 58, 140), + 'violetred3': (205, 50, 120), + 'violetred4': (139, 34, 82), + 'wheat': (245, 222, 179), + 'wheat1': (255, 231, 186), + 'wheat2': (238, 216, 174), + 'wheat3': (205, 186, 150), + 'wheat4': (139, 126, 102), + 'white': (255, 255, 255), + 'whitesmoke': (245, 245, 245), + 'yellow': (255, 255, 0), + 'yellow1': (255, 255, 0), + 'yellow2': (238, 238, 0), + 'yellow3': (205, 205, 0), + 'yellow4': (139, 139, 0), + 'yellowgreen': (154, 205, 50), } re_nonword=re.compile(r'([^0-9a-zA-Z_.]+)') re_linewidth=re.compile(r'setlinewidth\((\d+(\.\d*)?|\.\d+)\)') diff --git a/lib_pypy/cffi.egg-info/PKG-INFO b/lib_pypy/cffi.egg-info/PKG-INFO --- a/lib_pypy/cffi.egg-info/PKG-INFO +++ b/lib_pypy/cffi.egg-info/PKG-INFO @@ -1,6 +1,6 @@ Metadata-Version: 1.1 Name: cffi -Version: 1.5.0 +Version: 1.5.1 Summary: Foreign Function Interface for Python 
calling C code. Home-page: http://cffi.readthedocs.org Author: Armin Rigo, Maciej Fijalkowski diff --git a/lib_pypy/cffi/__init__.py b/lib_pypy/cffi/__init__.py --- a/lib_pypy/cffi/__init__.py +++ b/lib_pypy/cffi/__init__.py @@ -4,8 +4,8 @@ from .api import FFI, CDefError, FFIError from .ffiplatform import VerificationError, VerificationMissing -__version__ = "1.5.0" -__version_info__ = (1, 5, 0) +__version__ = "1.5.1" +__version_info__ = (1, 5, 1) # The verifier module file names are based on the CRC32 of a string that # contains the following version number. It may be older than __version__ diff --git a/lib_pypy/cffi/_cffi_include.h b/lib_pypy/cffi/_cffi_include.h --- a/lib_pypy/cffi/_cffi_include.h +++ b/lib_pypy/cffi/_cffi_include.h @@ -231,6 +231,12 @@ ((got_nonpos) == (expected <= 0) && \ (got) == (unsigned long long)expected) +#ifdef MS_WIN32 +# define _cffi_stdcall __stdcall +#else +# define _cffi_stdcall /* nothing */ +#endif + #ifdef __cplusplus } #endif diff --git a/lib_pypy/cffi/_embedding.h b/lib_pypy/cffi/_embedding.h new file mode 100644 --- /dev/null +++ b/lib_pypy/cffi/_embedding.h @@ -0,0 +1,517 @@ + +/***** Support code for embedding *****/ + +#if defined(_MSC_VER) +# define CFFI_DLLEXPORT __declspec(dllexport) +#elif defined(__GNUC__) +# define CFFI_DLLEXPORT __attribute__((visibility("default"))) +#else +# define CFFI_DLLEXPORT /* nothing */ +#endif + + +/* There are two global variables of type _cffi_call_python_fnptr: + + * _cffi_call_python, which we declare just below, is the one called + by ``extern "Python"`` implementations. + + * _cffi_call_python_org, which on CPython is actually part of the + _cffi_exports[] array, is the function pointer copied from + _cffi_backend. + + After initialization is complete, both are equal. 
However, the + first one remains equal to &_cffi_start_and_call_python until the + very end of initialization, when we are (or should be) sure that + concurrent threads also see a completely initialized world, and + only then is it changed. +*/ +#undef _cffi_call_python +typedef void (*_cffi_call_python_fnptr)(struct _cffi_externpy_s *, char *); +static void _cffi_start_and_call_python(struct _cffi_externpy_s *, char *); +static _cffi_call_python_fnptr _cffi_call_python = &_cffi_start_and_call_python; + + +#ifndef _MSC_VER + /* --- Assuming a GCC not infinitely old --- */ +# define cffi_compare_and_swap(l,o,n) __sync_bool_compare_and_swap(l,o,n) +# define cffi_write_barrier() __sync_synchronize() +# if !defined(__amd64__) && !defined(__x86_64__) && \ + !defined(__i386__) && !defined(__i386) +# define cffi_read_barrier() __sync_synchronize() +# else +# define cffi_read_barrier() (void)0 +# endif +#else + /* --- Windows threads version --- */ +# include +# define cffi_compare_and_swap(l,o,n) \ + (InterlockedCompareExchangePointer(l,n,o) == (o)) +# define cffi_write_barrier() InterlockedCompareExchange(&_cffi_dummy,0,0) +# define cffi_read_barrier() (void)0 +static volatile LONG _cffi_dummy; +#endif + +#ifdef WITH_THREAD +# ifndef _MSC_VER +# include + static pthread_mutex_t _cffi_embed_startup_lock; +# else + static CRITICAL_SECTION _cffi_embed_startup_lock; +# endif + static char _cffi_embed_startup_lock_ready = 0; +#endif + +static void _cffi_acquire_reentrant_mutex(void) +{ + static void *volatile lock = NULL; + + while (!cffi_compare_and_swap(&lock, NULL, (void *)1)) { + /* should ideally do a spin loop instruction here, but + hard to do it portably and doesn't really matter I + think: pthread_mutex_init() should be very fast, and + this is only run at start-up anyway. 
*/ + } + +#ifdef WITH_THREAD + if (!_cffi_embed_startup_lock_ready) { +# ifndef _MSC_VER + pthread_mutexattr_t attr; + pthread_mutexattr_init(&attr); + pthread_mutexattr_settype(&attr, PTHREAD_MUTEX_RECURSIVE); + pthread_mutex_init(&_cffi_embed_startup_lock, &attr); +# else + InitializeCriticalSection(&_cffi_embed_startup_lock); +# endif + _cffi_embed_startup_lock_ready = 1; + } +#endif + + while (!cffi_compare_and_swap(&lock, (void *)1, NULL)) + ; + +#ifndef _MSC_VER + pthread_mutex_lock(&_cffi_embed_startup_lock); +#else + EnterCriticalSection(&_cffi_embed_startup_lock); +#endif +} + +static void _cffi_release_reentrant_mutex(void) +{ +#ifndef _MSC_VER + pthread_mutex_unlock(&_cffi_embed_startup_lock); +#else + LeaveCriticalSection(&_cffi_embed_startup_lock); +#endif +} + + +/********** CPython-specific section **********/ +#ifndef PYPY_VERSION + + +#define _cffi_call_python_org _cffi_exports[_CFFI_CPIDX] + +PyMODINIT_FUNC _CFFI_PYTHON_STARTUP_FUNC(void); /* forward */ + +static void _cffi_py_initialize(void) +{ + /* XXX use initsigs=0, which "skips initialization registration of + signal handlers, which might be useful when Python is + embedded" according to the Python docs. But review and think + if it should be a user-controllable setting. + + XXX we should also give a way to write errors to a buffer + instead of to stderr. + + XXX if importing 'site' fails, CPython (any version) calls + exit(). Should we try to work around this behavior here? + */ + Py_InitializeEx(0); +} + +static int _cffi_initialize_python(void) +{ + /* This initializes Python, imports _cffi_backend, and then the + present .dll/.so is set up as a CPython C extension module. + */ + int result; + PyGILState_STATE state; + PyObject *pycode=NULL, *global_dict=NULL, *x; + +#if PY_MAJOR_VERSION >= 3 + /* see comments in _cffi_carefully_make_gil() about the + Python2/Python3 difference + */ +#else + /* Acquire the GIL. We have no threadstate here. 
If Python is + already initialized, it is possible that there is already one + existing for this thread, but it is not made current now. + */ + PyEval_AcquireLock(); + + _cffi_py_initialize(); + + /* The Py_InitializeEx() sometimes made a threadstate for us, but + not always. Indeed Py_InitializeEx() could be called and do + nothing. So do we have a threadstate, or not? We don't know, + but we can replace it with NULL in all cases. + */ + (void)PyThreadState_Swap(NULL); + + /* Now we can release the GIL and re-acquire immediately using the + logic of PyGILState(), which handles making or installing the + correct threadstate. + */ + PyEval_ReleaseLock(); +#endif + state = PyGILState_Ensure(); + + /* Call the initxxx() function from the present module. It will + create and initialize us as a CPython extension module, instead + of letting the startup Python code do it---it might reimport + the same .dll/.so and get maybe confused on some platforms. + It might also have troubles locating the .dll/.so again for all + I know. + */ + (void)_CFFI_PYTHON_STARTUP_FUNC(); + if (PyErr_Occurred()) + goto error; + + /* Now run the Python code provided to ffi.embedding_init_code(). + */ + pycode = Py_CompileString(_CFFI_PYTHON_STARTUP_CODE, + "", + Py_file_input); + if (pycode == NULL) + goto error; + global_dict = PyDict_New(); + if (global_dict == NULL) + goto error; + if (PyDict_SetItemString(global_dict, "__builtins__", + PyThreadState_GET()->interp->builtins) < 0) + goto error; + x = PyEval_EvalCode( +#if PY_MAJOR_VERSION < 3 + (PyCodeObject *) +#endif + pycode, global_dict, global_dict); + if (x == NULL) + goto error; + Py_DECREF(x); + + /* Done! Now if we've been called from + _cffi_start_and_call_python() in an ``extern "Python"``, we can + only hope that the Python code did correctly set up the + corresponding @ffi.def_extern() function. 
Otherwise, the + general logic of ``extern "Python"`` functions (inside the + _cffi_backend module) will find that the reference is still + missing and print an error. + */ + result = 0; + done: + Py_XDECREF(pycode); + Py_XDECREF(global_dict); + PyGILState_Release(state); + return result; + + error:; + { + /* Print as much information as potentially useful. + Debugging load-time failures with embedding is not fun + */ + PyObject *exception, *v, *tb, *f, *modules, *mod; + PyErr_Fetch(&exception, &v, &tb); + if (exception != NULL) { + PyErr_NormalizeException(&exception, &v, &tb); + PyErr_Display(exception, v, tb); + } + Py_XDECREF(exception); + Py_XDECREF(v); + Py_XDECREF(tb); + + f = PySys_GetObject((char *)"stderr"); + if (f != NULL && f != Py_None) { + PyFile_WriteString("\nFrom: " _CFFI_MODULE_NAME + "\ncompiled with cffi version: 1.5.1" + "\n_cffi_backend module: ", f); + modules = PyImport_GetModuleDict(); + mod = PyDict_GetItemString(modules, "_cffi_backend"); + if (mod == NULL) { + PyFile_WriteString("not loaded", f); + } + else { + v = PyObject_GetAttrString(mod, "__file__"); + PyFile_WriteObject(v, f, 0); + Py_XDECREF(v); + } + PyFile_WriteString("\nsys.path: ", f); + PyFile_WriteObject(PySys_GetObject((char *)"path"), f, 0); + PyFile_WriteString("\n\n", f); + } + } + result = -1; + goto done; +} + +PyAPI_DATA(char *) _PyParser_TokenNames[]; /* from CPython */ + +static int _cffi_carefully_make_gil(void) +{ + /* This does the basic initialization of Python. It can be called + completely concurrently from unrelated threads. It assumes + that we don't hold the GIL before (if it exists), and we don't + hold it afterwards. + + What it really does is completely different in Python 2 and + Python 3. + + Python 2 + ======== + + Initialize the GIL, without initializing the rest of Python, + by calling PyEval_InitThreads(). + + PyEval_InitThreads() must not be called concurrently at all. + So we use a global variable as a simple spin lock. 
This global + variable must be from 'libpythonX.Y.so', not from this + cffi-based extension module, because it must be shared from + different cffi-based extension modules. We choose + _PyParser_TokenNames[0] as a completely arbitrary pointer value + that is never written to. The default is to point to the + string "ENDMARKER". We change it temporarily to point to the + next character in that string. (Yes, I know it's REALLY + obscure.) + + Python 3 + ======== + + In Python 3, PyEval_InitThreads() cannot be called before + Py_InitializeEx() any more. So this function calls + Py_InitializeEx() first. It uses the same obscure logic to + make sure we never call it concurrently. + + Arguably, this is less good on the spinlock, because + Py_InitializeEx() takes much longer to run than + PyEval_InitThreads(). But I didn't find a way around it. + */ + +#ifdef WITH_THREAD + char *volatile *lock = (char *volatile *)_PyParser_TokenNames; + char *old_value; + + while (1) { /* spin loop */ + old_value = *lock; + if (old_value[0] == 'E') { + assert(old_value[1] == 'N'); + if (cffi_compare_and_swap(lock, old_value, old_value + 1)) + break; + } + else { + assert(old_value[0] == 'N'); + /* should ideally do a spin loop instruction here, but + hard to do it portably and doesn't really matter I + think: PyEval_InitThreads() should be very fast, and + this is only run at start-up anyway. 
*/ + } + } +#endif + +#if PY_MAJOR_VERSION >= 3 + /* Python 3: call Py_InitializeEx() */ + { + PyGILState_STATE state = PyGILState_UNLOCKED; + if (!Py_IsInitialized()) + _cffi_py_initialize(); + else + state = PyGILState_Ensure(); + + PyEval_InitThreads(); + PyGILState_Release(state); + } +#else + /* Python 2: call PyEval_InitThreads() */ +# ifdef WITH_THREAD + if (!PyEval_ThreadsInitialized()) { + PyEval_InitThreads(); /* makes the GIL */ + PyEval_ReleaseLock(); /* then release it */ + } + /* else: there is already a GIL, but we still needed to do the + spinlock dance to make sure that we see it as fully ready */ +# endif +#endif + +#ifdef WITH_THREAD + /* release the lock */ + while (!cffi_compare_and_swap(lock, old_value + 1, old_value)) + ; +#endif + + return 0; +} + +/********** end CPython-specific section **********/ + + +#else + + +/********** PyPy-specific section **********/ + +PyMODINIT_FUNC _CFFI_PYTHON_STARTUP_FUNC(const void *[]); /* forward */ + +static struct _cffi_pypy_init_s { + const char *name; + void (*func)(const void *[]); + const char *code; +} _cffi_pypy_init = { + _CFFI_MODULE_NAME, + _CFFI_PYTHON_STARTUP_FUNC, + _CFFI_PYTHON_STARTUP_CODE, +}; + +extern int pypy_carefully_make_gil(const char *); +extern int pypy_init_embedded_cffi_module(int, struct _cffi_pypy_init_s *); + +static int _cffi_carefully_make_gil(void) +{ + return pypy_carefully_make_gil(_CFFI_MODULE_NAME); +} + +static int _cffi_initialize_python(void) +{ + return pypy_init_embedded_cffi_module(0xB011, &_cffi_pypy_init); +} + +/********** end PyPy-specific section **********/ + + +#endif + + +#ifdef __GNUC__ +__attribute__((noinline)) +#endif +static _cffi_call_python_fnptr _cffi_start_python(void) +{ + /* Delicate logic to initialize Python. This function can be + called multiple times concurrently, e.g. when the process calls + its first ``extern "Python"`` functions in multiple threads at + once. It can also be called recursively, in which case we must + ignore it. 
We also have to consider what occurs if several + different cffi-based extensions reach this code in parallel + threads---it is a different copy of the code, then, and we + can't have any shared global variable unless it comes from + 'libpythonX.Y.so'. + + Idea: + + * _cffi_carefully_make_gil(): "carefully" call + PyEval_InitThreads() (possibly with Py_InitializeEx() first). + + * then we use a (local) custom lock to make sure that a call to this + cffi-based extension will wait if another call to the *same* + extension is running the initialization in another thread. + It is reentrant, so that a recursive call will not block, but + only one from a different thread. + + * then we grab the GIL and (Python 2) we call Py_InitializeEx(). + At this point, concurrent calls to Py_InitializeEx() are not + possible: we have the GIL. + + * do the rest of the specific initialization, which may + temporarily release the GIL but not the custom lock. + Only release the custom lock when we are done. + */ + static char called = 0; + + if (_cffi_carefully_make_gil() != 0) + return NULL; + + _cffi_acquire_reentrant_mutex(); + + /* Here the GIL exists, but we don't have it. We're only protected + from concurrency by the reentrant mutex. */ + + /* This file only initializes the embedded module once, the first + time this is called, even if there are subinterpreters. */ + if (!called) { + called = 1; /* invoke _cffi_initialize_python() only once, + but don't set '_cffi_call_python' right now, + otherwise concurrent threads won't call + this function at all (we need them to wait) */ + if (_cffi_initialize_python() == 0) { + /* now initialization is finished. Switch to the fast-path. */ + + /* We would like nobody to see the new value of + '_cffi_call_python' without also seeing the rest of the + data initialized. However, this is not possible. But + the new value of '_cffi_call_python' is the function + 'cffi_call_python()' from _cffi_backend. 
So: */ + cffi_write_barrier(); + /* ^^^ we put a write barrier here, and a corresponding + read barrier at the start of cffi_call_python(). This + ensures that after that read barrier, we see everything + done here before the write barrier. + */ + + assert(_cffi_call_python_org != NULL); + _cffi_call_python = (_cffi_call_python_fnptr)_cffi_call_python_org; + } + else { + /* initialization failed. Reset this to NULL, even if it was + already set to some other value. Future calls to + _cffi_start_python() are still forced to occur, and will + always return NULL from now on. */ + _cffi_call_python_org = NULL; + } + } + + _cffi_release_reentrant_mutex(); + + return (_cffi_call_python_fnptr)_cffi_call_python_org; +} + +static +void _cffi_start_and_call_python(struct _cffi_externpy_s *externpy, char *args) +{ + _cffi_call_python_fnptr fnptr; + int current_err = errno; +#ifdef _MSC_VER + int current_lasterr = GetLastError(); +#endif + fnptr = _cffi_start_python(); + if (fnptr == NULL) { + fprintf(stderr, "function %s() called, but initialization code " + "failed. Returning 0.\n", externpy->name); + memset(args, 0, externpy->size_of_result); + } +#ifdef _MSC_VER + SetLastError(current_lasterr); +#endif + errno = current_err; + + if (fnptr != NULL) + fnptr(externpy, args); +} + + +/* The cffi_start_python() function makes sure Python is initialized + and our cffi module is set up. It can be called manually from the + user C code. The same effect is obtained automatically from any + dll-exported ``extern "Python"`` function. This function returns + -1 if initialization failed, 0 if all is OK. 
*/ +_CFFI_UNUSED_FN +static int cffi_start_python(void) +{ + if (_cffi_call_python == &_cffi_start_and_call_python) { + if (_cffi_start_python() == NULL) + return -1; + } + cffi_read_barrier(); + return 0; +} + +#undef cffi_compare_and_swap +#undef cffi_write_barrier +#undef cffi_read_barrier diff --git a/lib_pypy/cffi/api.py b/lib_pypy/cffi/api.py --- a/lib_pypy/cffi/api.py +++ b/lib_pypy/cffi/api.py @@ -1,4 +1,4 @@ -import sys, types +import sys, sysconfig, types from .lock import allocate_lock try: @@ -544,28 +544,32 @@ def _apply_embedding_fix(self, kwds): # must include an argument like "-lpython2.7" for the compiler + def ensure(key, value): + lst = kwds.setdefault(key, []) + if value not in lst: + lst.append(value) + # if '__pypy__' in sys.builtin_module_names: if hasattr(sys, 'prefix'): import os - libdir = os.path.join(sys.prefix, 'bin') - dirs = kwds.setdefault('library_dirs', []) - if libdir not in dirs: - dirs.append(libdir) + ensure('library_dirs', os.path.join(sys.prefix, 'bin')) pythonlib = "pypy-c" else: if sys.platform == "win32": template = "python%d%d" - if sys.flags.debug: - template = template + '_d' + if hasattr(sys, 'gettotalrefcount'): + template += '_d' else: template = "python%d.%d" + if sysconfig.get_config_var('DEBUG_EXT'): + template += sysconfig.get_config_var('DEBUG_EXT') pythonlib = (template % (sys.hexversion >> 24, (sys.hexversion >> 16) & 0xff)) if hasattr(sys, 'abiflags'): pythonlib += sys.abiflags - libraries = kwds.setdefault('libraries', []) - if pythonlib not in libraries: - libraries.append(pythonlib) + ensure('libraries', pythonlib) + if sys.platform == "win32": + ensure('extra_link_args', '/MANIFEST') def set_source(self, module_name, source, source_extension='.c', **kwds): if hasattr(self, '_assigned_source'): @@ -631,7 +635,7 @@ compiled DLL. Use '*' to force distutils' choice, suitable for regular CPython C API modules. 
Use a file name ending in '.*' to ask for the system's default extension for dynamic libraries - (.so/.dll). + (.so/.dll/.dylib). The default is '*' when building a non-embedded C API extension, and (module_name + '.*') when building an embedded library. @@ -695,6 +699,10 @@ # self._embedding = pysource + def def_extern(self, *args, **kwds): + raise ValueError("ffi.def_extern() is only available on API-mode FFI " + "objects") + def _load_backend_lib(backend, name, flags): if name is None: diff --git a/lib_pypy/cffi/cparser.py b/lib_pypy/cffi/cparser.py --- a/lib_pypy/cffi/cparser.py +++ b/lib_pypy/cffi/cparser.py @@ -220,7 +220,7 @@ self._included_declarations = set() self._anonymous_counter = 0 self._structnode2type = weakref.WeakKeyDictionary() - self._options = None + self._options = {} self._int_constants = {} self._recomplete = [] self._uses_new_feature = None @@ -374,7 +374,7 @@ def _declare_function(self, tp, quals, decl): tp = self._get_type_pointer(tp, quals) - if self._options['dllexport']: + if self._options.get('dllexport'): tag = 'dllexport_python ' elif self._inside_extern_python: tag = 'extern_python ' @@ -450,7 +450,7 @@ prevobj, prevquals = self._declarations[name] if prevobj is obj and prevquals == quals: return - if not self._options['override']: + if not self._options.get('override'): raise api.FFIError( "multiple declarations of %s (for interactive usage, " "try cdef(xx, override=True))" % (name,)) @@ -729,7 +729,7 @@ if isinstance(tp, model.StructType) and tp.partial: raise NotImplementedError("%s: using both bitfields and '...;'" % (tp,)) - tp.packed = self._options['packed'] + tp.packed = self._options.get('packed') if tp.completed: # must be re-completed: it is not opaque any more tp.completed = 0 self._recomplete.append(tp) diff --git a/lib_pypy/cffi/ffiplatform.py b/lib_pypy/cffi/ffiplatform.py --- a/lib_pypy/cffi/ffiplatform.py +++ b/lib_pypy/cffi/ffiplatform.py @@ -21,14 +21,12 @@ allsources.append(os.path.normpath(src)) return 
Extension(name=modname, sources=allsources, **kwds) -def compile(tmpdir, ext, compiler_verbose=0, target_extension=None, - embedding=False): +def compile(tmpdir, ext, compiler_verbose=0): """Compile a C extension module using distutils.""" saved_environ = os.environ.copy() try: - outputfilename = _build(tmpdir, ext, compiler_verbose, - target_extension, embedding) + outputfilename = _build(tmpdir, ext, compiler_verbose) outputfilename = os.path.abspath(outputfilename) finally: # workaround for a distutils bugs where some env vars can @@ -38,32 +36,7 @@ os.environ[key] = value return outputfilename -def _save_val(name): - import distutils.sysconfig - config_vars = distutils.sysconfig.get_config_vars() - return config_vars.get(name, Ellipsis) - -def _restore_val(name, value): - import distutils.sysconfig - config_vars = distutils.sysconfig.get_config_vars() - config_vars[name] = value - if value is Ellipsis: - del config_vars[name] - -def _win32_hack_for_embedding(): - from distutils.msvc9compiler import MSVCCompiler - if not hasattr(MSVCCompiler, '_remove_visual_c_ref_CFFI_BAK'): - MSVCCompiler._remove_visual_c_ref_CFFI_BAK = \ - MSVCCompiler._remove_visual_c_ref - MSVCCompiler._remove_visual_c_ref = lambda self,manifest_file: manifest_file - -def _win32_unhack_for_embedding(): - from distutils.msvc9compiler import MSVCCompiler - MSVCCompiler._remove_visual_c_ref = \ - MSVCCompiler._remove_visual_c_ref_CFFI_BAK - -def _build(tmpdir, ext, compiler_verbose=0, target_extension=None, - embedding=False): +def _build(tmpdir, ext, compiler_verbose=0): # XXX compact but horrible :-( from distutils.core import Distribution import distutils.errors, distutils.log @@ -76,25 +49,14 @@ options['build_temp'] = ('ffiplatform', tmpdir) # try: - if sys.platform == 'win32' and embedding: - _win32_hack_for_embedding() old_level = distutils.log.set_threshold(0) or 0 - old_SO = _save_val('SO') - old_EXT_SUFFIX = _save_val('EXT_SUFFIX') try: - if target_extension is not None: - 
_restore_val('SO', target_extension) - _restore_val('EXT_SUFFIX', target_extension) distutils.log.set_verbosity(compiler_verbose) dist.run_command('build_ext') cmd_obj = dist.get_command_obj('build_ext') [soname] = cmd_obj.get_outputs() finally: distutils.log.set_threshold(old_level) - _restore_val('SO', old_SO) - _restore_val('EXT_SUFFIX', old_EXT_SUFFIX) - if sys.platform == 'win32' and embedding: - _win32_unhack_for_embedding() except (distutils.errors.CompileError, distutils.errors.LinkError) as e: raise VerificationError('%s: %s' % (e.__class__.__name__, e)) diff --git a/lib_pypy/cffi/recompiler.py b/lib_pypy/cffi/recompiler.py --- a/lib_pypy/cffi/recompiler.py +++ b/lib_pypy/cffi/recompiler.py @@ -1170,6 +1170,8 @@ repr_arguments = ', '.join(arguments) repr_arguments = repr_arguments or 'void' name_and_arguments = '%s(%s)' % (name, repr_arguments) + if tp.abi == "__stdcall": + name_and_arguments = '_cffi_stdcall ' + name_and_arguments # def may_need_128_bits(tp): return (isinstance(tp, model.PrimitiveType) and @@ -1357,6 +1359,58 @@ parts[-1] += extension return os.path.join(outputdir, *parts), parts + +# Aaargh. Distutils is not tested at all for the purpose of compiling +# DLLs that are not extension modules. Here are some hacks to work +# around that, in the _patch_for_*() functions... + +def _patch_meth(patchlist, cls, name, new_meth): + old = getattr(cls, name) + patchlist.append((cls, name, old)) + setattr(cls, name, new_meth) + return old + +def _unpatch_meths(patchlist): + for cls, name, old_meth in reversed(patchlist): + setattr(cls, name, old_meth) + +def _patch_for_embedding(patchlist): + if sys.platform == 'win32': + # we must not remove the manifest when building for embedding! 
+ from distutils.msvc9compiler import MSVCCompiler + _patch_meth(patchlist, MSVCCompiler, '_remove_visual_c_ref', + lambda self, manifest_file: manifest_file) + + if sys.platform == 'darwin': + # we must not make a '-bundle', but a '-dynamiclib' instead + from distutils.ccompiler import CCompiler + def my_link_shared_object(self, *args, **kwds): + if '-bundle' in self.linker_so: + self.linker_so = list(self.linker_so) + i = self.linker_so.index('-bundle') + self.linker_so[i] = '-dynamiclib' + return old_link_shared_object(self, *args, **kwds) + old_link_shared_object = _patch_meth(patchlist, CCompiler, + 'link_shared_object', + my_link_shared_object) + +def _patch_for_target(patchlist, target): + from distutils.command.build_ext import build_ext + # if 'target' is different from '*', we need to patch some internal + # method to just return this 'target' value, instead of having it + # built from module_name + if target.endswith('.*'): + target = target[:-2] + if sys.platform == 'win32': + target += '.dll' + elif sys.platform == 'darwin': + target += '.dylib' + else: + target += '.so' + _patch_meth(patchlist, build_ext, 'get_ext_filename', + lambda self, ext_name: target) + + def recompile(ffi, module_name, preamble, tmpdir='.', call_c_compiler=True, c_file=None, source_extension='.c', extradir=None, compiler_verbose=1, target=None, **kwds): @@ -1382,36 +1436,22 @@ target = '%s.*' % module_name else: target = '*' - if target == '*': - target_module_name = module_name - target_extension = None # use default - else: - if target.endswith('.*'): - target = target[:-2] - if sys.platform == 'win32': - target += '.dll' - else: - target += '.so' - # split along the first '.' 
(not the last one, otherwise the - # preceeding dots are interpreted as splitting package names) - index = target.find('.') - if index < 0: - raise ValueError("target argument %r should be a file name " - "containing a '.'" % (target,)) - target_module_name = target[:index] - target_extension = target[index:] # - ext = ffiplatform.get_extension(ext_c_file, target_module_name, **kwds) + ext = ffiplatform.get_extension(ext_c_file, module_name, **kwds) updated = make_c_source(ffi, module_name, preamble, c_file) if call_c_compiler: + patchlist = [] cwd = os.getcwd() try: + if embedding: + _patch_for_embedding(patchlist) + if target != '*': + _patch_for_target(patchlist, target) os.chdir(tmpdir) - outputfilename = ffiplatform.compile('.', ext, compiler_verbose, - target_extension, - embedding=embedding) + outputfilename = ffiplatform.compile('.', ext, compiler_verbose) finally: os.chdir(cwd) + _unpatch_meths(patchlist) return outputfilename else: return ext, updated diff --git a/pypy/doc/faq.rst b/pypy/doc/faq.rst --- a/pypy/doc/faq.rst +++ b/pypy/doc/faq.rst @@ -54,7 +54,8 @@ It is quite common nowadays that xyz is available on PyPI_ and installable with ``pip install xyz``. The simplest solution is to `use virtualenv (as documented here)`_. Then enter (activate) the virtualenv -and type: ``pip install xyz``. +and type: ``pip install xyz``. If you don't know or don't want virtualenv, +you can also install ``pip`` globally by saying ``pypy -m ensurepip``. If you get errors from the C compiler, the module is a CPython C Extension module using unsupported features. `See below.`_ diff --git a/pypy/doc/whatsnew-head.rst b/pypy/doc/whatsnew-head.rst --- a/pypy/doc/whatsnew-head.rst +++ b/pypy/doc/whatsnew-head.rst @@ -123,3 +123,28 @@ .. branch: fix-cpython-ssl-tests-2.7 Fix SSL tests by importing cpython's patch + +.. branch: remove-getfield-pure + +Remove pure variants of ``getfield_gc_*`` operations from the JIT. 
Relevant +optimizations instead consult the field descriptor to determine the purity of +the operation. Additionally, pure ``getfield`` operations are now handled +entirely by `rpython/jit/metainterp/optimizeopt/heap.py` rather than +`rpython/jit/metainterp/optimizeopt/pure.py`, which can result in better codegen +for traces containing a large number of pure getfield operations. + +.. branch: exctrans + +Try to ensure that no new functions get annotated during the 'source_c' phase. +Refactor sandboxing to operate at a higher level. + +.. branch: cpyext-bootstrap + +.. branch: vmprof-newstack + +Refactor vmprof to work cross-operating-system. + +.. branch: seperate-strucmember_h + +Seperate structmember.h from Python.h Also enhance creating api functions +to specify which header file they appear in (previously only pypy_decl.h) diff --git a/pypy/module/_cffi_backend/__init__.py b/pypy/module/_cffi_backend/__init__.py --- a/pypy/module/_cffi_backend/__init__.py +++ b/pypy/module/_cffi_backend/__init__.py @@ -3,7 +3,7 @@ from rpython.rlib import rdynload, clibffi, entrypoint from rpython.rtyper.lltypesystem import rffi -VERSION = "1.5.0" +VERSION = "1.5.1" FFI_DEFAULT_ABI = clibffi.FFI_DEFAULT_ABI try: @@ -69,6 +69,7 @@ def startup(self, space): from pypy.module._cffi_backend import embedding embedding.glob.space = space + embedding.glob.patched_sys = False def get_dict_rtld_constants(): diff --git a/pypy/module/_cffi_backend/embedding.py b/pypy/module/_cffi_backend/embedding.py --- a/pypy/module/_cffi_backend/embedding.py +++ b/pypy/module/_cffi_backend/embedding.py @@ -45,6 +45,26 @@ pass glob = Global() +def patch_sys(space): + # Annoying: CPython would just use the C-level std{in,out,err} as + # configured by the main application, for example in binary mode + # on Windows or with buffering turned off. We can't easily do the + # same. Instead, go for the safest bet (but possibly bad for + # performance) and open sys.std{in,out,err} unbuffered. 
On + # Windows I guess binary mode is a better default choice. + # + # XXX if needed, we could add support for a flag passed to + # pypy_init_embedded_cffi_module(). + if not glob.patched_sys: + space.appexec([], """(): + import os + sys.stdin = sys.__stdin__ = os.fdopen(0, 'rb', 0) + sys.stdout = sys.__stdout__ = os.fdopen(1, 'wb', 0) + sys.stderr = sys.__stderr__ = os.fdopen(2, 'wb', 0) + """) + glob.patched_sys = True + + def pypy_init_embedded_cffi_module(version, init_struct): # called from __init__.py name = "?" @@ -56,6 +76,7 @@ must_leave = False try: must_leave = space.threadlocals.try_enter_thread(space) + patch_sys(space) load_embedded_cffi_module(space, version, init_struct) res = 0 except OperationError, operr: @@ -85,14 +106,86 @@ # ____________________________________________________________ +if os.name == 'nt': -eci = ExternalCompilationInfo(separate_module_sources=[ -r""" -/* XXX Windows missing */ -#include + do_includes = r""" +#define _WIN32_WINNT 0x0501 +#include + +#define CFFI_INIT_HOME_PATH_MAX _MAX_PATH +static void _cffi_init(void); +static void _cffi_init_error(const char *msg, const char *extra); + +static int _cffi_init_home(char *output_home_path) +{ + HMODULE hModule = 0; + DWORD res; + + GetModuleHandleEx(GET_MODULE_HANDLE_EX_FLAG_FROM_ADDRESS | + GET_MODULE_HANDLE_EX_FLAG_UNCHANGED_REFCOUNT, + (LPCTSTR)&_cffi_init, &hModule); + + if (hModule == 0 ) { + _cffi_init_error("GetModuleHandleEx() failed", ""); + return -1; + } + res = GetModuleFileName(hModule, output_home_path, CFFI_INIT_HOME_PATH_MAX); + if (res >= CFFI_INIT_HOME_PATH_MAX) { + return -1; + } + return 0; +} + +static void _cffi_init_once(void) +{ + static LONG volatile lock = 0; + static int _init_called = 0; + + while (InterlockedCompareExchange(&lock, 1, 0) != 0) { + SwitchToThread(); /* spin loop */ + } + if (!_init_called) { + _cffi_init(); + _init_called = 1; + } + InterlockedCompareExchange(&lock, 0, 1); +} +""" + +else: + + do_includes = r""" #include #include 
+#define CFFI_INIT_HOME_PATH_MAX PATH_MAX +static void _cffi_init(void); +static void _cffi_init_error(const char *msg, const char *extra); + +static int _cffi_init_home(char *output_home_path) +{ + Dl_info info; + dlerror(); /* reset */ + if (dladdr(&_cffi_init, &info) == 0) { + _cffi_init_error("dladdr() failed: ", dlerror()); + return -1; + } + if (realpath(info.dli_fname, output_home_path) == NULL) { + perror("realpath() failed"); + _cffi_init_error("realpath() failed", ""); + return -1; + } + return 0; +} + +static void _cffi_init_once(void) +{ + static pthread_once_t once_control = PTHREAD_ONCE_INIT; + pthread_once(&once_control, _cffi_init); +} +""" + +do_startup = do_includes + r""" RPY_EXPORTED void rpython_startup_code(void); RPY_EXPORTED int pypy_setup_home(char *, int); @@ -108,17 +201,13 @@ static void _cffi_init(void) { - Dl_info info; - char *home; + char home[CFFI_INIT_HOME_PATH_MAX + 1]; rpython_startup_code(); RPyGilAllocate(); - if (dladdr(&_cffi_init, &info) == 0) { - _cffi_init_error("dladdr() failed: ", dlerror()); + if (_cffi_init_home(home) != 0) return; - } - home = realpath(info.dli_fname, NULL); if (pypy_setup_home(home, 1) != 0) { _cffi_init_error("pypy_setup_home() failed", ""); return; @@ -134,13 +223,12 @@ It assumes that we don't hold the GIL before (if it exists), and we don't hold it afterwards. 
*/ - static pthread_once_t once_control = PTHREAD_ONCE_INIT; - _cffi_module_name = name; /* not really thread-safe, but better than nothing */ - pthread_once(&once_control, _cffi_init); + _cffi_init_once(); return (int)_cffi_ready - 1; } -"""]) +""" +eci = ExternalCompilationInfo(separate_module_sources=[do_startup]) declare_c_function = rffi.llexternal_use_eci(eci) diff --git a/pypy/module/_cffi_backend/test/_backend_test_c.py b/pypy/module/_cffi_backend/test/_backend_test_c.py --- a/pypy/module/_cffi_backend/test/_backend_test_c.py +++ b/pypy/module/_cffi_backend/test/_backend_test_c.py @@ -1,7 +1,7 @@ # ____________________________________________________________ import sys -assert __version__ == "1.5.0", ("This test_c.py file is for testing a version" +assert __version__ == "1.5.1", ("This test_c.py file is for testing a version" " of cffi that differs from the one that we" " get from 'import _cffi_backend'") if sys.version_info < (3,): diff --git a/pypy/module/_cffi_backend/test/test_ztranslation.py b/pypy/module/_cffi_backend/test/test_ztranslation.py --- a/pypy/module/_cffi_backend/test/test_ztranslation.py +++ b/pypy/module/_cffi_backend/test/test_ztranslation.py @@ -4,15 +4,18 @@ # side-effect: FORMAT_LONGDOUBLE must be built before test_checkmodule() from pypy.module._cffi_backend import misc -from pypy.module._cffi_backend import cffi1_module +from pypy.module._cffi_backend import embedding def test_checkmodule(): # prepare_file_argument() is not working without translating the _file # module too def dummy_prepare_file_argument(space, fileobj): - # call load_cffi1_module() too, from a random place like here - cffi1_module.load_cffi1_module(space, "foo", "foo", 42) + # call pypy_init_embedded_cffi_module() from a random place like here + # --- this calls load_cffi1_module(), too + embedding.pypy_init_embedded_cffi_module( + rffi.cast(rffi.INT, embedding.EMBED_VERSION_MIN), + 42) return lltype.nullptr(rffi.CCHARP.TO) old = ctypeptr.prepare_file_argument 
try: diff --git a/pypy/module/_socket/test/test_sock_app.py b/pypy/module/_socket/test/test_sock_app.py --- a/pypy/module/_socket/test/test_sock_app.py +++ b/pypy/module/_socket/test/test_sock_app.py @@ -102,7 +102,7 @@ fd = space.appexec([w_socket, space.wrap(orig_fd.fileno()), space.wrap(socket.AF_INET), space.wrap(socket.SOCK_STREAM), space.wrap(0)], - """(_socket, fd, family, type, proto): + """(_socket, fd, family, type, proto): return _socket.fromfd(fd, family, type, proto)""") assert space.unwrap(space.call_method(fd, 'fileno')) @@ -326,7 +326,7 @@ def test_ntoa_exception(self): import _socket - raises(_socket.error, _socket.inet_ntoa, "ab") + raises(_socket.error, _socket.inet_ntoa, b"ab") def test_aton_exceptions(self): import _socket @@ -418,7 +418,7 @@ # it if there is no connection. try: s.connect(("www.python.org", 80)) - except _socket.gaierror, ex: + except _socket.gaierror as ex: skip("GAIError - probably no connection: %s" % str(ex.args)) name = s.getpeername() # Will raise socket.error if not connected assert name[1] == 80 @@ -465,7 +465,7 @@ sizes = {socket.htonl: 32, socket.ntohl: 32, socket.htons: 16, socket.ntohs: 16} for func, size in sizes.items(): - mask = (1L< Author: Carl Friedrich Bolz Branch: statistics-maps Changeset: r82170:c5af13624160 Date: 2016-02-12 12:32 +0100 http://bitbucket.org/pypy/pypy/changeset/c5af13624160/ Log: weed out some fake transitions, log size_estimate, fix some corner cases, draw back arrow diff --git a/pypy/objspace/std/mapdict.py b/pypy/objspace/std/mapdict.py --- a/pypy/objspace/std/mapdict.py +++ b/pypy/objspace/std/mapdict.py @@ -93,6 +93,7 @@ lines = ["{"] lines.append(_print_line('type', self.__class__.__name__, 1)) lines.append(_print_line('id', str(objectmodel.compute_unique_id(self)), 1)) + lines.append(_print_line('size_estimate', str(self.size_estimate()), 1)) lines.append(_print_line('instances', self._number_instantiated, 1)) if isinstance(self, PlainAttribute): lines.append(_print_line('back', 
str(objectmodel.compute_unique_id(self.back)), 1)) @@ -170,7 +171,8 @@ attr = self.find_map_attr(name, index) if attr is None: return self.terminator._write_terminator(obj, name, index, w_value) - attr._count_write(name, index, w_value) + if type(obj) is not Object: + attr._count_write(name, index, w_value) # if the write is not necessary, the storage is already filled from the # time we did the map transition. Therefore, if the value profiler says # so, we can not do the write @@ -304,7 +306,8 @@ def add_attr(self, obj, name, index, w_value): # grumble, jit needs this attr = self._get_new_attr(name, index) - attr._count_write(name, index, w_value) + if type(obj) is not Object: + attr._count_write(name, index, w_value) oldattr = obj._get_mapdict_map() if not jit.we_are_jitted(): size_est = (oldattr._size_estimate + attr.size_estimate() @@ -601,7 +604,8 @@ return jit.promote(self.map) def _set_mapdict_map(self, map): old = self.map - if old is not map and map: + # don't count Object, it's just an intermediate + if old is not map and map and type(self) is not Object: old._count_transition(map) self.map = map # _____________________________________________ diff --git a/pypy/tool/mapstatsdot.py b/pypy/tool/mapstatsdot.py --- a/pypy/tool/mapstatsdot.py +++ b/pypy/tool/mapstatsdot.py @@ -60,6 +60,8 @@ return seen.add(self) if hasattr(self, 'back'): + if self not in self.back.transitions: + output.edge(self.back.id, self.id, dir="none") self.back.dot(output, seen) if not self.instances: return @@ -107,7 +109,7 @@ if writes: for tup, count in writes.iteritems(): key, index, cls = tup.strip('()').split(', ') - if key.startswith('"'): + if key.startswith(('"', "'")): key = eval(key) assert key == self.name assert int(index) == self.nametype @@ -120,7 +122,7 @@ assert len(reads) == 1 for tup, count in reads.iteritems(): key, index = tup.strip('()').split(', ') - if key.startswith('"'): + if key.startswith(('"', "'")): key = eval(key) assert key == self.name assert int(index) 
== self.nametype From pypy.commits at gmail.com Fri Feb 12 06:35:42 2016 From: pypy.commits at gmail.com (jbs) Date: Fri, 12 Feb 2016 03:35:42 -0800 (PST) Subject: [pypy-commit] pypy reorder-map-attributes: (cfbolz, jbs): turned revursive algorithm into iterative one to eliminate stack overflow Message-ID: <56bdc38e.6217c20a.bbcb1.36a6@mx.google.com> Author: Jasper.Schulz Branch: reorder-map-attributes Changeset: r82172:835464e0677c Date: 2016-02-11 15:55 +0000 http://bitbucket.org/pypy/pypy/changeset/835464e0677c/ Log: (cfbolz, jbs): turned revursive algorithm into iterative one to eliminate stack overflow diff --git a/pypy/objspace/std/mapdict.py b/pypy/objspace/std/mapdict.py --- a/pypy/objspace/std/mapdict.py +++ b/pypy/objspace/std/mapdict.py @@ -160,15 +160,9 @@ if self.cache_attrs is not None: return self.cache_attrs.get(key, None) return None - - @jit.look_inside_iff(lambda self, obj, name, index, w_value: - jit.isconstant(self) and - jit.isconstant(name) and - jit.isconstant(index)) + def add_attr(self, obj, name, index, w_value): - reordered = self._try_reorder_and_add(obj, name, index, w_value) - if reordered == NOT_REORDERED: - self._add_attr_without_reordering(obj, name, index, w_value) + self._reorder_and_add(obj, name, index, w_value) if not jit.we_are_jitted(): oldattr = self attr = obj._get_mapdict_map() @@ -181,6 +175,7 @@ attr = self._get_new_attr(name, index) attr._switch_map_and_write_storage(obj, w_value) + @jit.unroll_safe def _switch_map_and_write_storage(self, obj, w_value): if self.length() > obj._mapdict_storage_length(): # note that self.size_estimate() is always at least self.length() @@ -194,27 +189,51 @@ obj._set_mapdict_map(self) obj._mapdict_write_storage(self.storageindex, w_value) - def _try_reorder_and_add(self, obj, name, index, w_value): - attr = self._get_cache_attr(name, index) - if attr is not None: - attr._switch_map_and_write_storage(obj, w_value) - return JUST_REORDERED + @jit.look_inside_iff(lambda self, obj, name, 
index, w_value: + jit.isconstant(self) and + jit.isconstant(name) and + jit.isconstant(index)) + def _reorder_and_add(self, obj, name, index, w_value): + stack = [] + while True: + current = self + localstack = [] + while True: + attr = current._get_cache_attr(name, index) + if attr is None: + # if not found in all ancestors + if not isinstance(current, PlainAttribute): + self._add_attr_without_reordering(obj, name, index, w_value) + break - elif isinstance(self, PlainAttribute): - w_self_value = obj._mapdict_read_storage(self.storageindex) - reordered = self.back._try_reorder_and_add(obj, name, index, w_value) - if reordered == JUST_REORDERED: - obj._get_mapdict_map()._add_attr_without_reordering( - obj, self.name, self.index, w_self_value) - elif reordered == SOMEWHERE_REORDERED: - obj._get_mapdict_map().add_attr(obj, self.name, self.index, w_self_value) - else: - assert reordered == NOT_REORDERED - return NOT_REORDERED - return SOMEWHERE_REORDERED - else: - # we are terminator - return NOT_REORDERED + # if not found try parent + else: + w_self_value = obj._mapdict_read_storage(current.storageindex) + localstack.append((current, w_self_value)) + current = current.back + else: + attr._switch_map_and_write_storage(obj, w_value) + stack.extend(localstack) + break + + if not stack: + return + + # add the first attribute of the stack without reordering + # to prevent an endless loop + next_map, w_value = stack.pop() + obj._get_mapdict_map()._add_attr_without_reordering( + obj, next_map.name, next_map.index, w_value) + + if not stack: + return + + # readd all other values from the stack (with reordering) + # the last element of the stack will be the new current + next_map, w_value = stack.pop() + name = next_map.name + index = next_map.index + self = obj._get_mapdict_map() def materialize_r_dict(self, space, obj, dict_w): raise NotImplementedError("abstract base class") diff --git a/pypy/objspace/std/test/test_mapdict.py b/pypy/objspace/std/test/test_mapdict.py --- 
a/pypy/objspace/std/test/test_mapdict.py +++ b/pypy/objspace/std/test/test_mapdict.py @@ -171,6 +171,12 @@ assert obj.map is obj5.map assert obj.map is obj6.map +def test_bug_stack_overflow_insert_attributes(): + cls = Class() + obj = cls.instantiate() + + for i in range(1000): + obj.setdictvalue(space, str(i), i) def test_insert_different_orders_perm(): from itertools import permutations From pypy.commits at gmail.com Fri Feb 12 06:35:44 2016 From: pypy.commits at gmail.com (jbs) Date: Fri, 12 Feb 2016 03:35:44 -0800 (PST) Subject: [pypy-commit] pypy reorder-map-attributes: (cfbolz, jbs): optimized stacks Message-ID: <56bdc390.25fac20a.7006c.3123@mx.google.com> Author: Jasper.Schulz Branch: reorder-map-attributes Changeset: r82173:ece37fb4ad10 Date: 2016-02-11 16:08 +0000 http://bitbucket.org/pypy/pypy/changeset/ece37fb4ad10/ Log: (cfbolz, jbs): optimized stacks diff --git a/pypy/objspace/std/mapdict.py b/pypy/objspace/std/mapdict.py --- a/pypy/objspace/std/mapdict.py +++ b/pypy/objspace/std/mapdict.py @@ -194,10 +194,12 @@ jit.isconstant(name) and jit.isconstant(index)) def _reorder_and_add(self, obj, name, index, w_value): - stack = [] + stack_maps = None + stack_values = None + stack_index = 0 while True: current = self - localstack = [] + localstack_index = stack_index while True: attr = current._get_cache_attr(name, index) if attr is None: @@ -209,28 +211,37 @@ # if not found try parent else: w_self_value = obj._mapdict_read_storage(current.storageindex) - localstack.append((current, w_self_value)) + if stack_maps is None: + stack_maps = [None] * self.length() + stack_values = [None] * self.length() + stack_maps[localstack_index] = current + stack_values[localstack_index] = w_self_value + localstack_index += 1 current = current.back else: attr._switch_map_and_write_storage(obj, w_value) - stack.extend(localstack) + stack_index = localstack_index break - if not stack: + if not stack_index: return # add the first attribute of the stack without reordering # to 
prevent an endless loop - next_map, w_value = stack.pop() + stack_index += -1 + next_map = stack_maps[stack_index] + w_value = stack_values[stack_index] obj._get_mapdict_map()._add_attr_without_reordering( obj, next_map.name, next_map.index, w_value) - if not stack: + if not stack_index: return # readd all other values from the stack (with reordering) # the last element of the stack will be the new current - next_map, w_value = stack.pop() + stack_index += -1 + next_map = stack_maps[stack_index] + w_value = stack_values[stack_index] name = next_map.name index = next_map.index self = obj._get_mapdict_map() From pypy.commits at gmail.com Fri Feb 12 06:39:50 2016 From: pypy.commits at gmail.com (plan_rich) Date: Fri, 12 Feb 2016 03:39:50 -0800 (PST) Subject: [pypy-commit] pypy s390x-backend: replaced arith left shift with logical (other backends do not use arith shift there!), Message-ID: <56bdc486.6217c20a.bbcb1.3831@mx.google.com> Author: Richard Plangger Branch: s390x-backend Changeset: r82174:2b50cfaee409 Date: 2016-02-12 12:39 +0100 http://bitbucket.org/pypy/pypy/changeset/2b50cfaee409/ Log: replaced arith left shift with logical (other backends do not use arith shift there!), do not call memcpy for strings anymore, s390x has a dedicated instr. to do just that. 
this removes the overhead for calling simplifications to the register allocator and assembler some small testsn diff --git a/rpython/jit/backend/llsupport/test/test_pinned_object_rewrite.py b/rpython/jit/backend/llsupport/test/test_pinned_object_rewrite.py --- a/rpython/jit/backend/llsupport/test/test_pinned_object_rewrite.py +++ b/rpython/jit/backend/llsupport/test/test_pinned_object_rewrite.py @@ -127,7 +127,7 @@ i0 = getfield_gc_i(ConstPtr(pinned_obj_gcref), descr=pinned_obj_my_int_descr) """, """ [] - p1 = gc_load_indexed_r(ConstPtr(ptr_array_gcref), %(0 * ptr_array_descr.itemsize + 1)s, 1, 0, %(ptr_array_descr.itemsize)s) + p1 = gc_load_r(ConstPtr(ptr_array_gcref), %(0 * ptr_array_descr.itemsize + 1)s, %(ptr_array_descr.itemsize)s) i0 = gc_load_i(p1, 0, -%(pinned_obj_my_int_descr.field_size)s) """) assert len(self.gc_ll_descr.last_moving_obj_tracker._indexes) == 1 diff --git a/rpython/jit/backend/zarch/opassembler.py b/rpython/jit/backend/zarch/opassembler.py --- a/rpython/jit/backend/zarch/opassembler.py +++ b/rpython/jit/backend/zarch/opassembler.py @@ -589,7 +589,7 @@ # set SCRATCH2 to 1 << r1 mc.LGHI(r.SCRATCH2, l.imm(1)) - mc.SLAG(r.SCRATCH2, r.SCRATCH2, l.addr(0,r.SCRATCH)) + mc.SLLG(r.SCRATCH2, r.SCRATCH2, l.addr(0,r.SCRATCH)) # set this bit inside the byte of interest @@ -1002,20 +1002,17 @@ if src_ofs.is_imm(): value = src_ofs.value << scale if check_imm_value(value): - if dst is not src_ptr: - self.mc.LGR(dst, src_ptr) - if value != 0: - self.mc.AGHI(dst, l.imm(value)) + self.mc.AGHIK(dst, src_ptr, l.imm(value)) else: - self.mc.load_imm(dst, value) - self.mc.AGR(dst, src_ptr) + # it is fine to use r1 here, because it will + # only hold a value before invoking the memory copy + self.mc.load_imm(r.SCRATCH, value) + self.mc.AGRK(dst, src_ptr, r.SCRATCH) elif scale == 0: - if dst is not src_ptr: - self.mc.LGR(dst, src_ptr) - self.mc.AGR(dst, src_ofs) + self.mc.AGRK(dst, src_ptr, src_ofs) else: - self.mc.SLLG(dst, src_ofs, l.addr(scale)) - 
self.mc.AGR(dst, src_ptr) + self.mc.SLLG(r.SCRATCH, src_ofs, l.addr(scale)) + self.mc.AGRK(dst, src_ptr, r.SCRATCH) def _emit_copycontent(self, arglocs, is_unicode): [src_ptr_loc, dst_ptr_loc, @@ -1033,34 +1030,40 @@ assert itemsize == 1 scale = 0 - self._emit_load_for_copycontent(r.SCRATCH, src_ptr_loc, src_ofs_loc, scale) - self._emit_load_for_copycontent(r.SCRATCH2, dst_ptr_loc, dst_ofs_loc, scale) - # - # DO NOT USE r2-r6 before this line! - # either of the parameter (e.g. str_ptr_loc, ...) locations might be allocated + # src and src_len are tmp registers + src = src_ptr_loc + src_len = r.odd_reg(src) + dst = r.r0 + dst_len = r.r1 + self._emit_load_for_copycontent(src, src_ptr_loc, src_ofs_loc, scale) + self._emit_load_for_copycontent(dst, dst_ptr_loc, dst_ofs_loc, scale) if length_loc.is_imm(): length = length_loc.getint() - self.mc.load_imm(r.r4, length << scale) + self.mc.load_imm(dst_len, length << scale) else: if scale > 0: - self.mc.SLAG(r.r4, length_loc, l.addr(scale)) - elif length_loc is not r.r4: - self.mc.LGR(r.r4, length_loc) + self.mc.SLLG(dst_len, length_loc, l.addr(scale)) + else: + self.mc.LGR(dst_len, length_loc) + # ensure that src_len is as long as dst_len, otherwise + # padding bytes are written to dst + self.mc.LGR(src_len, dst_len) - self.mc.LGR(r.r3, r.SCRATCH) - self.mc.LGR(r.r2, r.SCRATCH2) - if basesize != 0: - self.mc.AGHI(r.r3, l.imm(basesize)) - if basesize != 0: - self.mc.AGHI(r.r2, l.imm(basesize)) + self.mc.AGHI(src, l.imm(basesize)) + self.mc.AGHI(dst, l.imm(basesize)) - self.mc.load_imm(self.mc.RAW_CALL_REG, self.memcpy_addr) - self.mc.raw_call() + # s390x has memset directly as a hardware instruction!! + # 0xB8 means we might reference dst later + self.mc.MVCLE(dst, src, l.addr(0xB8)) + # NOTE this instruction can (determined by the cpu), just + # quit the movement any time, thus it is looped until all bytes + # are copied! 
+ self.mc.BRC(c.OF, l.imm(-self.mc.MVCLE_byte_count)) def emit_zero_array(self, op, arglocs, regalloc): base_loc, startindex_loc, length_loc, \ - ofs_loc, itemsize_loc, pad_byte_loc = arglocs + ofs_loc, itemsize_loc = arglocs if ofs_loc.is_imm(): assert check_imm_value(ofs_loc.value) @@ -1073,24 +1076,21 @@ else: self.mc.AGR(base_loc, startindex_loc) assert not length_loc.is_imm() - self.mc.XGR(pad_byte_loc, pad_byte_loc) - pad_plus = r.odd_reg(pad_byte_loc) - self.mc.XGR(pad_plus, pad_plus) - self.mc.XGR(r.SCRATCH, r.SCRATCH) + # contents of r0 do not matter because r1 is zero, so + # no copying takes place + self.mc.XGR(r.r1, r.r1) + + assert base_loc.is_even() + assert length_loc.value == base_loc.value + 1 + # s390x has memset directly as a hardware instruction!! # it needs 5 registers allocated - # dst = rX, length = rX+1 (ensured by the regalloc) - # pad_byte is rY to rY+1 - # scratch register holds the value written to dst - assert pad_byte_loc.is_even() - assert pad_plus.value == pad_byte_loc.value + 1 - assert base_loc.is_even() - assert length_loc.value == base_loc.value + 1 - assert base_loc.value != pad_byte_loc.value + # dst = rX, dst len = rX+1 (ensured by the regalloc) + # src = r0, src len = r1 + self.mc.MVCLE(base_loc, r.r0, l.addr(0)) # NOTE this instruction can (determined by the cpu), just # quit the movement any time, thus it is looped until all bytes # are copied! - self.mc.MVCLE(base_loc, pad_byte_loc, l.addr(0, r.SCRATCH)) self.mc.BRC(c.OF, l.imm(-self.mc.MVCLE_byte_count)) diff --git a/rpython/jit/backend/zarch/regalloc.py b/rpython/jit/backend/zarch/regalloc.py --- a/rpython/jit/backend/zarch/regalloc.py +++ b/rpython/jit/backend/zarch/regalloc.py @@ -161,6 +161,14 @@ def ensure_even_odd_pair(self, var, bindvar, bind_first=True, must_exist=True, load_loc_odd=True, move_regs=True): + """ Allocates two registers that can be used by the instruction. 
+ var: is the original register holding the value + bindvar: is the variable that will be bound + (= self.reg_bindings[bindvar] = new register) + bind_first: the even register will be bound to bindvar, + if bind_first == False: the odd register will + be bound + """ self._check_type(var) prev_loc = self.loc(var, must_exist=must_exist) var2 = TempVar() @@ -592,13 +600,23 @@ return imm(box.getint()) return self.rm.ensure_reg(box, force_in_reg=True, selected_reg=selected_reg) - def ensure_reg_or_any_imm(self, box): + def ensure_reg_or_20bit_imm(self, box, selected_reg=None): if box.type == FLOAT: return self.fprm.ensure_reg(box, True) else: + if helper.check_imm20(box): + return imm(box.getint()) + return self.rm.ensure_reg(box, force_in_reg=True, selected_reg=selected_reg) + + def ensure_reg_or_any_imm(self, box, selected_reg=None): + if box.type == FLOAT: + return self.fprm.ensure_reg(box, True, + selected_reg=selected_reg) + else: if isinstance(box, Const): return imm(box.getint()) - return self.rm.ensure_reg(box, force_in_reg=True) + return self.rm.ensure_reg(box, force_in_reg=True, + selected_reg=selected_reg) def get_scratch_reg(self, type, selected_reg=None): if type == FLOAT: @@ -798,7 +816,7 @@ def _prepare_gc_load(self, op): base_loc = self.ensure_reg(op.getarg(0), force_in_reg=True) - index_loc = self.ensure_reg_or_any_imm(op.getarg(1)) + index_loc = self.ensure_reg_or_20bit_imm(op.getarg(1)) size_box = op.getarg(2) assert isinstance(size_box, ConstInt) size = abs(size_box.value) @@ -815,7 +833,7 @@ def _prepare_gc_load_indexed(self, op): base_loc = self.ensure_reg(op.getarg(0), force_in_reg=True) - index_loc = self.ensure_reg(op.getarg(1), force_in_reg=True) + index_loc = self.ensure_reg_or_20bit_imm(op.getarg(1)) scale_box = op.getarg(2) offset_box = op.getarg(3) size_box = op.getarg(4) @@ -841,7 +859,7 @@ def prepare_gc_store(self, op): base_loc = self.ensure_reg(op.getarg(0), force_in_reg=True) - index_loc = self.ensure_reg_or_any_imm(op.getarg(1)) + 
index_loc = self.ensure_reg_or_20bit_imm(op.getarg(1)) value_loc = self.ensure_reg(op.getarg(2)) size_box = op.getarg(3) assert isinstance(size_box, ConstInt) @@ -852,7 +870,7 @@ def prepare_gc_store_indexed(self, op): args = op.getarglist() base_loc = self.ensure_reg(op.getarg(0), force_in_reg=True) - index_loc = self.ensure_reg_or_any_imm(op.getarg(1)) + index_loc = self.ensure_reg_or_20bit_imm(op.getarg(1)) value_loc = self.ensure_reg(op.getarg(2)) scale_box = op.getarg(3) offset_box = op.getarg(4) @@ -953,21 +971,20 @@ return self._prepare_call_default(op) def prepare_zero_array(self, op): + # args: base, start, len, scale_start, scale_len itemsize, ofs, _ = unpack_arraydescr(op.getdescr()) startindex_loc = self.ensure_reg_or_16bit_imm(op.getarg(1)) tempvar = TempInt() self.rm.temp_boxes.append(tempvar) ofs_loc = self.ensure_reg_or_16bit_imm(ConstInt(ofs)) - pad_byte, _ = self.rm.ensure_even_odd_pair(tempvar, tempvar, - bind_first=True, must_exist=False, move_regs=False) - base_loc, length_loc = self.rm.ensure_even_odd_pair(op.getarg(0), op, + base_loc, length_loc = self.rm.ensure_even_odd_pair(op.getarg(0), tempvar, bind_first=True, must_exist=False, load_loc_odd=False) length_box = op.getarg(2) ll = self.rm.loc(length_box) if length_loc is not ll: self.assembler.regalloc_mov(ll, length_loc) - return [base_loc, startindex_loc, length_loc, ofs_loc, imm(itemsize), pad_byte] + return [base_loc, startindex_loc, length_loc, ofs_loc, imm(itemsize)] def prepare_cond_call(self, op): self.load_condition_into_cc(op.getarg(0)) @@ -1102,12 +1119,25 @@ return [loc0, loc1] def prepare_copystrcontent(self, op): - src_ptr_loc = self.ensure_reg(op.getarg(0), force_in_reg=True) + """ this function needs five registers. + src & src_len: are allocated using ensure_even_odd_pair. + note that these are tmp registers, thus the actual variable + value is not modified. + src_len: when entering the assembler, src_ofs_loc's value is contained + in src_len register. 
+ """ + src_tmp = TempVar() + src_ptr_loc, _ = \ + self.rm.ensure_even_odd_pair(op.getarg(0), + src_tmp, bind_first=True, + must_exist=False, load_loc_odd=False) + src_ofs_loc = self.ensure_reg_or_any_imm(op.getarg(2)) + self.rm.temp_boxes.append(src_tmp) dst_ptr_loc = self.ensure_reg(op.getarg(1), force_in_reg=True) - src_ofs_loc = self.ensure_reg_or_any_imm(op.getarg(2)) dst_ofs_loc = self.ensure_reg_or_any_imm(op.getarg(3)) length_loc = self.ensure_reg_or_any_imm(op.getarg(4)) - self._spill_before_call(save_all_regs=False) + # no need to spill, we do not call memcpy, but we use s390x's + # hardware instruction to copy memory return [src_ptr_loc, dst_ptr_loc, src_ofs_loc, dst_ofs_loc, length_loc] diff --git a/rpython/jit/backend/zarch/test/test_assembler.py b/rpython/jit/backend/zarch/test/test_assembler.py --- a/rpython/jit/backend/zarch/test/test_assembler.py +++ b/rpython/jit/backend/zarch/test/test_assembler.py @@ -226,6 +226,35 @@ self.a.mc.BCR(con.ANY, r.r14) assert run_asm(self.a) == 15 + def test_shift_same_register(self): + self.a.mc.load_imm(r.r3, 0x1) + self.a.mc.SLLG(r.r2, r.r3, loc.addr(1)) + self.a.mc.BCR(con.ANY, r.r14) + assert run_asm(self.a) == 2 + + def test_shift_arith(self): + self.a.mc.load_imm(r.r2, -14) + self.a.mc.SLAG(r.r2, r.r2, loc.addr(1)) + self.a.mc.BCR(con.ANY, r.r14) + assert run_asm(self.a) == -28 + + def test_shift_negative_logical(self): + self.a.mc.load_imm(r.r2, -14) + self.a.mc.SLLG(r.r2, r.r2, loc.addr(1)) + self.a.mc.BCR(con.ANY, r.r14) + assert run_asm(self.a) == -28 + + def test_shift_negative_logical_2(self): + self.a.mc.load_imm(r.r2, -2) + self.a.mc.SLLG(r.r2, r.r2, loc.addr(63)) + self.a.mc.BCR(con.ANY, r.r14) + assert run_asm(self.a) == 0 + + def test_shift_negative_logical_3(self): + self.a.mc.load_imm(r.r2, -2) + self.a.mc.SLLG(r.r3, r.r2, loc.addr(1)) + self.a.mc.BCR(con.ANY, r.r14) + assert run_asm(self.a) == -2 def test_load_small_int_to_reg(self): self.a.mc.LGHI(r.r2, loc.imm(123)) From pypy.commits at 
gmail.com Fri Feb 12 06:43:34 2016 From: pypy.commits at gmail.com (fijal) Date: Fri, 12 Feb 2016 03:43:34 -0800 (PST) Subject: [pypy-commit] pypy windows-vmprof-support: an attempt to finalize windows Message-ID: <56bdc566.44e01c0a.f7760.ffffaa1c@mx.google.com> Author: fijal Branch: windows-vmprof-support Changeset: r82175:4ff8e1c7f441 Date: 2016-02-12 12:42 +0100 http://bitbucket.org/pypy/pypy/changeset/4ff8e1c7f441/ Log: an attempt to finalize windows diff --git a/rpython/rlib/rvmprof/cintf.py b/rpython/rlib/rvmprof/cintf.py --- a/rpython/rlib/rvmprof/cintf.py +++ b/rpython/rlib/rvmprof/cintf.py @@ -89,11 +89,9 @@ s.c_next = vmprof_tl_stack.get_or_make_raw() s.c_value = unique_id s.c_kind = VMPROF_CODE_TAG - print s vmprof_tl_stack.setraw(s) return s def leave_code(s): vmprof_tl_stack.setraw(s.c_next) - print "pop" lltype.free(s, flavor='raw') diff --git a/rpython/rlib/rvmprof/src/vmprof_common.h b/rpython/rlib/rvmprof/src/vmprof_common.h --- a/rpython/rlib/rvmprof/src/vmprof_common.h +++ b/rpython/rlib/rvmprof/src/vmprof_common.h @@ -71,6 +71,43 @@ return _write_all((char*)&header, 5 * sizeof(long) + 4 + namelen); } +/* ************************************************************* + * functions to dump the stack trace + * ************************************************************* + */ + + +static int get_stack_trace(vmprof_stack_t* stack, intptr_t *result, int max_depth, intptr_t pc) +{ + int n = 0; + intptr_t addr = 0; + int bottom_jitted = 0; + // check if the pc is in JIT +#ifdef PYPY_JIT_CODEMAP + if (pypy_find_codemap_at_addr((intptr_t)pc, &addr)) { + // the bottom part is jitted, means we can fill up the first part + // from the JIT + n = vmprof_write_header_for_jit_addr(result, n, pc, max_depth); + stack = stack->next; // skip the first item as it contains garbage + } +#endif + while (n < max_depth - 1 && stack) { + if (stack->kind == VMPROF_CODE_TAG) { + result[n] = stack->kind; + result[n + 1] = stack->value; + n += 2; + } +#ifdef PYPY_JIT_CODEMAP + 
else if (stack->kind == VMPROF_JITTED_TAG) { + pc = ((intptr_t*)(stack->value - sizeof(intptr_t)))[0]; + n = vmprof_write_header_for_jit_addr(result, n, pc, max_depth); + } +#endif + stack = stack->next; + } + return n; +} + #ifndef RPYTHON_LL2CTYPES static vmprof_stack_t *get_vmprof_stack(void) { diff --git a/rpython/rlib/rvmprof/src/vmprof_main.h b/rpython/rlib/rvmprof/src/vmprof_main.h --- a/rpython/rlib/rvmprof/src/vmprof_main.h +++ b/rpython/rlib/rvmprof/src/vmprof_main.h @@ -80,43 +80,6 @@ #include "vmprof_get_custom_offset.h" -/* ************************************************************* - * functions to dump the stack trace - * ************************************************************* - */ - - -static int get_stack_trace(intptr_t *result, int max_depth, intptr_t pc, ucontext_t *ucontext) -{ - vmprof_stack_t* stack = get_vmprof_stack(); - int n = 0; - intptr_t addr = 0; - int bottom_jitted = 0; - // check if the pc is in JIT -#ifdef PYPY_JIT_CODEMAP - if (pypy_find_codemap_at_addr((intptr_t)pc, &addr)) { - // the bottom part is jitted, means we can fill up the first part - // from the JIT - n = vmprof_write_header_for_jit_addr(result, n, pc, max_depth); - stack = stack->next; // skip the first item as it contains garbage - } -#endif - while (n < max_depth - 1 && stack) { - if (stack->kind == VMPROF_CODE_TAG) { - result[n] = stack->kind; - result[n + 1] = stack->value; - n += 2; - } -#ifdef PYPY_JIT_CODEMAP - else if (stack->kind == VMPROF_JITTED_TAG) { - pc = ((intptr_t*)(stack->value - sizeof(intptr_t)))[0]; - n = vmprof_write_header_for_jit_addr(result, n, pc, max_depth); - } -#endif - stack = stack->next; - } - return n; -} static intptr_t get_current_thread_id(void) { @@ -194,8 +157,8 @@ struct prof_stacktrace_s *st = (struct prof_stacktrace_s *)p->data; st->marker = MARKER_STACKTRACE; st->count = 1; - depth = get_stack_trace(st->stack, - MAX_STACK_DEPTH-2, GetPC((ucontext_t*)ucontext), ucontext); + depth = get_stack_trace(get_vmprof_stack(), 
st->stack, + MAX_STACK_DEPTH-2, GetPC((ucontext_t*)ucontext)); st->depth = depth; st->stack[depth++] = get_current_thread_id(); p->data_offset = offsetof(struct prof_stacktrace_s, marker); diff --git a/rpython/rlib/rvmprof/src/vmprof_main_win32.h b/rpython/rlib/rvmprof/src/vmprof_main_win32.h --- a/rpython/rlib/rvmprof/src/vmprof_main_win32.h +++ b/rpython/rlib/rvmprof/src/vmprof_main_win32.h @@ -71,82 +71,72 @@ return 0; } -int vmprof_snapshot_thread(prof_stacktrace_s *stack) +int vmprof_snapshot_thread(struct pypy_threadlocal_s *p, prof_stacktrace_s *stack) { - void *addr; - vmprof_stack_t *cur; - long tid; - HANDLE hThread; + void *addr; + vmprof_stack_t *cur; + long tid; + HANDLE hThread; + long depth; + DWORD result; + CONTEXT ctx; #ifdef RPYTHON_LL2CTYPES - return 0; // not much we can do + return 0; // not much we can do #else - OP_THREADLOCALREF_ADDR(addr); -#ifdef RPY_TLOFS_thread_ident // compiled with threads - tid = *(long*)((char*)addr + RPY_TLOFS_thread_ident); - hThread = OpenThread(THREAD_ALL_ACCESS, FALSE, tid); - if (!hThread) { - return -1; - } - result = SuspendThread(hThread); - if(result == 0xffffffff) - return -1; // possible, e.g. attached debugger or thread alread suspended - if (*(long*)((char*)addr + RPY_TLOFS_thread_ident) != tid) { - // swapped threads, bail - ResumeThread(hThread); - return -1; - } -#endif - cur = *(vmprof_stack_t**)((char*)addr + RPY_TLOFS_vmprof_tl_stack); - if (cur) { - printf("%p\n", cur->kind); - } else { - printf("null\n"); - } -#ifdef RPY_TLOFS_thread_ident - ResumeThread(hThread); -#endif - /* HRESULT result; - HANDLE hThread = OpenThread(THREAD_ALL_ACCESS, FALSE, thread_id); - int depth; +#ifndef RPY_TLOFS_thread_ident + return 0; // we can't freeze threads, unsafe +#else + hThread = OpenThread(THREAD_ALL_ACCESS, FALSE, p->thread_ident); if (!hThread) { return -1; } result = SuspendThread(hThread); if(result == 0xffffffff) return -1; // possible, e.g. 
attached debugger or thread alread suspended - // find the correct thread - depth = read_trace_from_cpy_frame(tstate->frame, stack->stack, - MAX_STACK_DEPTH); + ctx.ContextFlags = CONTEXT_FULL; + if (!GetThreadContext(hThread, &ctx)) + return -1; + depth = get_stack_trace(p->vmprof_tl_stack, + stack->stack, MAX_STACK_DEPTH-2, ctx.Eip); stack->depth = depth; - stack->stack[depth++] = (void*)thread_id; + stack->stack[depth++] = (void*)p->thread_ident; stack->count = 1; stack->marker = MARKER_STACKTRACE; ResumeThread(hThread); - return depth;*/ - return 0; + return depth; +#endif #endif } long __stdcall vmprof_mainloop(void *arg) { + struct pypy_threadlocal_s *p; prof_stacktrace_s *stack = (prof_stacktrace_s*)malloc(SINGLE_BUF_SIZE); - HANDLE hThreadSnap = INVALID_HANDLE_VALUE; int depth; while (1) { - //Sleep(profile_interval_usec * 1000); - Sleep(10); + //Sleep(profile_interval_usec * 1000); + Sleep(10); if (!enabled) { continue; } - depth = vmprof_snapshot_thread(stack); - if (depth > 0) { - _write_all((char*)stack + offsetof(prof_stacktrace_s, marker), - depth * sizeof(void *) + - sizeof(struct prof_stacktrace_s) - - offsetof(struct prof_stacktrace_s, marker)); + _RPython_ThreadLocals_Acquire(); + p = _RPython_ThreadLocals_Head(); // the first one is one behind head + p = _RPython_ThreadLocals_Enum(p); + while (p) { + if (p->ready == 42) { + depth = vmprof_snapshot_thread(p, stack); + if (depth > 0) { + _write_all((char*)stack + offsetof(prof_stacktrace_s, marker), + depth * sizeof(void *) + + sizeof(struct prof_stacktrace_s) - + offsetof(struct prof_stacktrace_s, marker)); + } + } + p = _RPython_ThreadLocals_Enum(p); } + _RPython_ThreadLocals_Release(); } } diff --git a/rpython/rlib/rvmprof/test/test_ztranslation.py b/rpython/rlib/rvmprof/test/test_ztranslation.py --- a/rpython/rlib/rvmprof/test/test_ztranslation.py +++ b/rpython/rlib/rvmprof/test/test_ztranslation.py @@ -3,11 +3,10 @@ sys.path += ['../../../..'] # for subprocess in test_interpreted import py 
from rpython.tool.udir import udir -from rpython.rlib import rvmprof +from rpython.rlib import rvmprof, rthread from rpython.translator.c.test.test_genc import compile from rpython.rlib.nonconst import NonConstant - class MyCode: def __init__(self, count): self.count = count @@ -39,6 +38,7 @@ PROF_FILE = str(udir.join('test_ztranslation.prof')) def main(argv=[]): + rthread.get_ident() # force TLOFS_thread_ident if NonConstant(False): # Hack to give os.open() the correct annotation os.open('foo', 1, 1) diff --git a/rpython/translator/c/src/threadlocal.c b/rpython/translator/c/src/threadlocal.c --- a/rpython/translator/c/src/threadlocal.c +++ b/rpython/translator/c/src/threadlocal.c @@ -85,6 +85,11 @@ return prev->next; } +struct pypy_threadlocal_s *_RPython_ThreadLocals_Head(void) +{ + return &linkedlist_head; +} + static void _RPy_ThreadLocals_Init(void *p) { struct pypy_threadlocal_s *tls = (struct pypy_threadlocal_s *)p; diff --git a/rpython/translator/c/src/threadlocal.h b/rpython/translator/c/src/threadlocal.h --- a/rpython/translator/c/src/threadlocal.h +++ b/rpython/translator/c/src/threadlocal.h @@ -27,6 +27,9 @@ RPY_EXTERN struct pypy_threadlocal_s * _RPython_ThreadLocals_Enum(struct pypy_threadlocal_s *prev); +/* will return the head of the list */ +RPY_EXTERN struct pypy_threadlocal_s *_RPython_ThreadLocals_Head(); + #define OP_THREADLOCALREF_ACQUIRE(r) _RPython_ThreadLocals_Acquire() #define OP_THREADLOCALREF_RELEASE(r) _RPython_ThreadLocals_Release() #define OP_THREADLOCALREF_ENUM(p, r) r = _RPython_ThreadLocals_Enum(p) From pypy.commits at gmail.com Fri Feb 12 07:01:20 2016 From: pypy.commits at gmail.com (fijal) Date: Fri, 12 Feb 2016 04:01:20 -0800 (PST) Subject: [pypy-commit] pypy windows-vmprof-support: unskip that test Message-ID: <56bdc990.2aacc20a.40ae0.456b@mx.google.com> Author: fijal Branch: windows-vmprof-support Changeset: r82176:5ba08dba8e90 Date: 2016-02-12 13:00 +0100 http://bitbucket.org/pypy/pypy/changeset/5ba08dba8e90/ Log: unskip 
that test diff --git a/rpython/jit/backend/llsupport/test/zrpy_vmprof_test.py b/rpython/jit/backend/llsupport/test/zrpy_vmprof_test.py --- a/rpython/jit/backend/llsupport/test/zrpy_vmprof_test.py +++ b/rpython/jit/backend/llsupport/test/zrpy_vmprof_test.py @@ -9,10 +9,6 @@ class CompiledVmprofTest(CCompiledMixin): CPUClass = getcpuclass() - def setup(self): - if self.CPUClass.backend_name != 'x86_64': - py.test.skip("vmprof only supports x86-64 CPUs at the moment") - def _get_TranslationContext(self): t = TranslationContext() t.config.translation.gc = 'incminimark' From pypy.commits at gmail.com Fri Feb 12 07:40:06 2016 From: pypy.commits at gmail.com (jbs) Date: Fri, 12 Feb 2016 04:40:06 -0800 (PST) Subject: [pypy-commit] pypy reorder-map-attributes: (cfbolz, jbs): fixed error with inserting two attributes without reordering Message-ID: <56bdd2a6.07811c0a.89504.ffffc078@mx.google.com> Author: Jasper.Schulz Branch: reorder-map-attributes Changeset: r82177:bf4e8b1d6198 Date: 2016-02-12 12:39 +0000 http://bitbucket.org/pypy/pypy/changeset/bf4e8b1d6198/ Log: (cfbolz, jbs): fixed error with inserting two attributes without reordering diff --git a/pypy/objspace/std/mapdict.py b/pypy/objspace/std/mapdict.py --- a/pypy/objspace/std/mapdict.py +++ b/pypy/objspace/std/mapdict.py @@ -20,7 +20,6 @@ # note: we use "x * NUM_DIGITS_POW2" instead of "x << NUM_DIGITS" because # we want to propagate knowledge that the result cannot be negative -NOT_REORDERED, JUST_REORDERED, SOMEWHERE_REORDERED = range(3) class AbstractAttribute(object): _immutable_fields_ = ['terminator'] @@ -221,21 +220,22 @@ else: attr._switch_map_and_write_storage(obj, w_value) stack_index = localstack_index + + if not stack_index: + return + + # add the first attribute of the stack without reordering + # to prevent an endless loop + stack_index += -1 + next_map = stack_maps[stack_index] + w_value = stack_values[stack_index] + obj._get_mapdict_map()._add_attr_without_reordering( + obj, next_map.name, 
next_map.index, w_value) + break if not stack_index: return - - # add the first attribute of the stack without reordering - # to prevent an endless loop - stack_index += -1 - next_map = stack_maps[stack_index] - w_value = stack_values[stack_index] - obj._get_mapdict_map()._add_attr_without_reordering( - obj, next_map.name, next_map.index, w_value) - - if not stack_index: - return # readd all other values from the stack (with reordering) # the last element of the stack will be the new current diff --git a/pypy/objspace/std/test/test_mapdict.py b/pypy/objspace/std/test/test_mapdict.py --- a/pypy/objspace/std/test/test_mapdict.py +++ b/pypy/objspace/std/test/test_mapdict.py @@ -171,6 +171,23 @@ assert obj.map is obj5.map assert obj.map is obj6.map +def test_insert_different_orders_4(): + cls = Class() + obj = cls.instantiate() + obj2 = cls.instantiate() + + obj.setdictvalue(space, "a", 10) + obj.setdictvalue(space, "b", 20) + obj.setdictvalue(space, "c", 30) + obj.setdictvalue(space, "d", 40) + + obj2.setdictvalue(space, "d", 50) + obj2.setdictvalue(space, "c", 50) + obj2.setdictvalue(space, "b", 50) + obj2.setdictvalue(space, "a", 50) + + assert obj.map is obj2.map + def test_bug_stack_overflow_insert_attributes(): cls = Class() obj = cls.instantiate() @@ -182,16 +199,20 @@ from itertools import permutations cls = Class() seen_maps = {} - for i, attributes in enumerate(permutations("abcdef")): - obj = cls.instantiate() - key = "" - for j, attr in enumerate(attributes): - obj.setdictvalue(space, attr, i*10+j) - key = "".join(sorted(key+attr)) - if key in seen_maps: - assert obj.map is seen_maps[key] - else: - seen_maps[key] = obj.map + for preexisting in ['', 'x', 'xy']: + for i, attributes in enumerate(permutations("abcdef")): + obj = cls.instantiate() + for i, attr in enumerate(preexisting): + obj.setdictvalue(space, attr, i*1000) + key = preexisting + for j, attr in enumerate(attributes): + obj.setdictvalue(space, attr, i*10+j) + key = "".join(sorted(key+attr)) + 
if key in seen_maps: + assert obj.map is seen_maps[key] + else: + seen_maps[key] = obj.map + print len(seen_maps) def test_attr_immutability(monkeypatch): From pypy.commits at gmail.com Fri Feb 12 09:31:56 2016 From: pypy.commits at gmail.com (fijal) Date: Fri, 12 Feb 2016 06:31:56 -0800 (PST) Subject: [pypy-commit] pypy windows-vmprof-support: fix includes Message-ID: <56bdecdc.e853c20a.82641.1622@mx.google.com> Author: fijal Branch: windows-vmprof-support Changeset: r82178:fc3b344814fa Date: 2016-02-12 15:30 +0100 http://bitbucket.org/pypy/pypy/changeset/fc3b344814fa/ Log: fix includes diff --git a/rpython/jit/backend/llsupport/test/zrpy_vmprof_test.py b/rpython/jit/backend/llsupport/test/zrpy_vmprof_test.py --- a/rpython/jit/backend/llsupport/test/zrpy_vmprof_test.py +++ b/rpython/jit/backend/llsupport/test/zrpy_vmprof_test.py @@ -3,6 +3,7 @@ from rpython.jit.backend.test.support import CCompiledMixin from rpython.rlib.jit import JitDriver from rpython.tool.udir import udir +from rpython.rlib import rthread from rpython.translator.translator import TranslationContext from rpython.jit.backend.detect_cpu import getcpuclass @@ -58,6 +59,7 @@ tmpfilename = str(udir.join('test_rvmprof')) def f(num): + rthread.get_ident() # register TLOFS_thread_ident code = MyCode("py:x:foo:3") rvmprof.register_code(code, get_name) fd = os.open(tmpfilename, os.O_WRONLY | os.O_CREAT, 0666) diff --git a/rpython/rlib/rvmprof/src/vmprof_main.h b/rpython/rlib/rvmprof/src/vmprof_main.h --- a/rpython/rlib/rvmprof/src/vmprof_main.h +++ b/rpython/rlib/rvmprof/src/vmprof_main.h @@ -35,6 +35,7 @@ #include "vmprof_stack.h" #include "vmprof_getpc.h" #include "vmprof_mt.h" +#include "vmprof_get_custom_offset.h" #include "vmprof_common.h" /************************************************************/ @@ -78,9 +79,6 @@ static char atfork_hook_installed = 0; -#include "vmprof_get_custom_offset.h" - - static intptr_t get_current_thread_id(void) { /* xxx This function is a hack on two fronts: diff 
--git a/rpython/rlib/rvmprof/src/vmprof_main_win32.h b/rpython/rlib/rvmprof/src/vmprof_main_win32.h --- a/rpython/rlib/rvmprof/src/vmprof_main_win32.h +++ b/rpython/rlib/rvmprof/src/vmprof_main_win32.h @@ -25,6 +25,7 @@ #include #include #include "vmprof_stack.h" +#include "vmprof_get_custom_offset.h" #include "vmprof_common.h" #include From pypy.commits at gmail.com Fri Feb 12 09:44:38 2016 From: pypy.commits at gmail.com (fijal) Date: Fri, 12 Feb 2016 06:44:38 -0800 (PST) Subject: [pypy-commit] pypy windows-vmprof-support: vmprof works on most supported platforms Message-ID: <56bdefd6.418f1c0a.ff4be.ffffecbd@mx.google.com> Author: fijal Branch: windows-vmprof-support Changeset: r82179:dd2e39de0ed3 Date: 2016-02-12 15:43 +0100 http://bitbucket.org/pypy/pypy/changeset/dd2e39de0ed3/ Log: vmprof works on most supported platforms diff --git a/pypy/config/pypyoption.py b/pypy/config/pypyoption.py --- a/pypy/config/pypyoption.py +++ b/pypy/config/pypyoption.py @@ -36,13 +36,13 @@ "cStringIO", "thread", "itertools", "pyexpat", "_ssl", "cpyext", "array", "binascii", "_multiprocessing", '_warnings', "_collections", "_multibytecodec", "micronumpy", "_continuation", "_cffi_backend", - "_csv", "cppyy", "_pypyjson" + "_csv", "cppyy", "_pypyjson", "_vmprof", ]) -if ((sys.platform.startswith('linux') or sys.platform == 'darwin') - and os.uname()[4] == 'x86_64' and sys.maxint > 2**32): +#if ((sys.platform.startswith('linux') or sys.platform == 'darwin') +# and os.uname()[4] == 'x86_64' and sys.maxint > 2**32): # it's not enough that we get x86_64 - working_modules.add('_vmprof') +# working_modules.add('_vmprof') translation_modules = default_modules.copy() translation_modules.update([ From pypy.commits at gmail.com Fri Feb 12 10:06:55 2016 From: pypy.commits at gmail.com (fijal) Date: Fri, 12 Feb 2016 07:06:55 -0800 (PST) Subject: [pypy-commit] pypy windows-vmprof-support: try Message-ID: <56bdf50f.8ab71c0a.aacf4.fffff4ed@mx.google.com> Author: fijal Branch: 
windows-vmprof-support Changeset: r82180:4fd53f064ff3 Date: 2016-02-12 16:06 +0100 http://bitbucket.org/pypy/pypy/changeset/4fd53f064ff3/ Log: try diff --git a/rpython/rlib/rvmprof/src/vmprof_main_win32.h b/rpython/rlib/rvmprof/src/vmprof_main_win32.h --- a/rpython/rlib/rvmprof/src/vmprof_main_win32.h +++ b/rpython/rlib/rvmprof/src/vmprof_main_win32.h @@ -27,6 +27,7 @@ #include "vmprof_stack.h" #include "vmprof_get_custom_offset.h" #include "vmprof_common.h" +#include "threadlocal.h" #include // This file has been inspired (but not copied from since the LICENSE From pypy.commits at gmail.com Fri Feb 12 10:10:07 2016 From: pypy.commits at gmail.com (fijal) Date: Fri, 12 Feb 2016 07:10:07 -0800 (PST) Subject: [pypy-commit] pypy windows-vmprof-support: like this? Message-ID: <56bdf5cf.85b01c0a.a6c75.fffff657@mx.google.com> Author: fijal Branch: windows-vmprof-support Changeset: r82181:b5e79fce3295 Date: 2016-02-12 16:09 +0100 http://bitbucket.org/pypy/pypy/changeset/b5e79fce3295/ Log: like this? 
diff --git a/rpython/rlib/rvmprof/src/vmprof_main_win32.h b/rpython/rlib/rvmprof/src/vmprof_main_win32.h --- a/rpython/rlib/rvmprof/src/vmprof_main_win32.h +++ b/rpython/rlib/rvmprof/src/vmprof_main_win32.h @@ -24,10 +24,10 @@ #include #include #include +#include "src/threadlocal.h" #include "vmprof_stack.h" #include "vmprof_get_custom_offset.h" #include "vmprof_common.h" -#include "threadlocal.h" #include // This file has been inspired (but not copied from since the LICENSE From pypy.commits at gmail.com Fri Feb 12 10:25:26 2016 From: pypy.commits at gmail.com (fijal) Date: Fri, 12 Feb 2016 07:25:26 -0800 (PST) Subject: [pypy-commit] pypy windows-vmprof-support: definitely not like this Message-ID: <56bdf966.8673c20a.a2e1b.ffff8802@mx.google.com> Author: fijal Branch: windows-vmprof-support Changeset: r82182:51d875e08f23 Date: 2016-02-12 16:24 +0100 http://bitbucket.org/pypy/pypy/changeset/51d875e08f23/ Log: definitely not like this diff --git a/rpython/rlib/rvmprof/src/vmprof_main_win32.h b/rpython/rlib/rvmprof/src/vmprof_main_win32.h --- a/rpython/rlib/rvmprof/src/vmprof_main_win32.h +++ b/rpython/rlib/rvmprof/src/vmprof_main_win32.h @@ -24,7 +24,6 @@ #include #include #include -#include "src/threadlocal.h" #include "vmprof_stack.h" #include "vmprof_get_custom_offset.h" #include "vmprof_common.h" From pypy.commits at gmail.com Fri Feb 12 10:29:23 2016 From: pypy.commits at gmail.com (fijal) Date: Fri, 12 Feb 2016 07:29:23 -0800 (PST) Subject: [pypy-commit] pypy windows-vmprof-support: another try Message-ID: <56bdfa53.890bc30a.7fea6.ffff88b6@mx.google.com> Author: fijal Branch: windows-vmprof-support Changeset: r82183:d2da9855e450 Date: 2016-02-12 16:28 +0100 http://bitbucket.org/pypy/pypy/changeset/d2da9855e450/ Log: another try diff --git a/rpython/rlib/rvmprof/src/vmprof_main_win32.h b/rpython/rlib/rvmprof/src/vmprof_main_win32.h --- a/rpython/rlib/rvmprof/src/vmprof_main_win32.h +++ b/rpython/rlib/rvmprof/src/vmprof_main_win32.h @@ -24,6 +24,7 @@ #include 
#include #include +#include "structdef.h" #include "vmprof_stack.h" #include "vmprof_get_custom_offset.h" #include "vmprof_common.h" From pypy.commits at gmail.com Fri Feb 12 10:59:19 2016 From: pypy.commits at gmail.com (fijal) Date: Fri, 12 Feb 2016 07:59:19 -0800 (PST) Subject: [pypy-commit] pypy windows-vmprof-support: I don't want to care for now Message-ID: <56be0157.4d0d1c0a.622cd.0803@mx.google.com> Author: fijal Branch: windows-vmprof-support Changeset: r82184:21558c2ed75a Date: 2016-02-12 16:58 +0100 http://bitbucket.org/pypy/pypy/changeset/21558c2ed75a/ Log: I don't want to care for now diff --git a/rpython/rlib/rvmprof/src/vmprof_main_win32.h b/rpython/rlib/rvmprof/src/vmprof_main_win32.h --- a/rpython/rlib/rvmprof/src/vmprof_main_win32.h +++ b/rpython/rlib/rvmprof/src/vmprof_main_win32.h @@ -113,6 +113,7 @@ long __stdcall vmprof_mainloop(void *arg) { +#ifndef RPYTHON_LL2CTYPES struct pypy_threadlocal_s *p; prof_stacktrace_s *stack = (prof_stacktrace_s*)malloc(SINGLE_BUF_SIZE); int depth; @@ -140,6 +141,7 @@ } _RPython_ThreadLocals_Release(); } +#endif } RPY_EXTERN From pypy.commits at gmail.com Fri Feb 12 11:03:08 2016 From: pypy.commits at gmail.com (fijal) Date: Fri, 12 Feb 2016 08:03:08 -0800 (PST) Subject: [pypy-commit] pypy windows-vmprof-support: we don't have structdef just yet Message-ID: <56be023c.284cc20a.511fd.ffff998f@mx.google.com> Author: fijal Branch: windows-vmprof-support Changeset: r82185:1b32334f76eb Date: 2016-02-12 17:02 +0100 http://bitbucket.org/pypy/pypy/changeset/1b32334f76eb/ Log: we don't have structdef just yet diff --git a/rpython/rlib/rvmprof/src/vmprof_main_win32.h b/rpython/rlib/rvmprof/src/vmprof_main_win32.h --- a/rpython/rlib/rvmprof/src/vmprof_main_win32.h +++ b/rpython/rlib/rvmprof/src/vmprof_main_win32.h @@ -24,7 +24,6 @@ #include #include #include -#include "structdef.h" #include "vmprof_stack.h" #include "vmprof_get_custom_offset.h" #include "vmprof_common.h" From pypy.commits at gmail.com Fri Feb 12 11:34:23 
2016 From: pypy.commits at gmail.com (NZinov) Date: Fri, 12 Feb 2016 08:34:23 -0800 (PST) Subject: [pypy-commit] pypy default: Fix copy-paste typo in rlib/objectmodel doc Message-ID: <56be098f.44e21c0a.4d285.ffff90c2@mx.google.com> Author: Nikolay Zinov Branch: Changeset: r82186:80f2fd7e17c3 Date: 2016-02-12 19:20 +0300 http://bitbucket.org/pypy/pypy/changeset/80f2fd7e17c3/ Log: Fix copy-paste typo in rlib/objectmodel doc diff --git a/rpython/doc/rlib.rst b/rpython/doc/rlib.rst --- a/rpython/doc/rlib.rst +++ b/rpython/doc/rlib.rst @@ -52,7 +52,7 @@ backend emits code, the function is called to determine the value. ``CDefinedIntSymbolic``: - Instances of ``ComputedIntSymbolic`` are also treated like integers of + Instances of ``CDefinedIntSymbolic`` are also treated like integers of unknown value by the annotator. When C code is emitted they will be represented by the attribute ``expr`` of the symbolic (which is also the first argument of the constructor). From pypy.commits at gmail.com Fri Feb 12 11:34:25 2016 From: pypy.commits at gmail.com (NZinov) Date: Fri, 12 Feb 2016 08:34:25 -0800 (PST) Subject: [pypy-commit] pypy default: merge default Message-ID: <56be0991.e213c20a.eec43.536e@mx.google.com> Author: Nikolay Zinov Branch: Changeset: r82187:e418c04f44ad Date: 2016-02-12 19:28 +0300 http://bitbucket.org/pypy/pypy/changeset/e418c04f44ad/ Log: merge default diff too long, truncating to 2000 out of 2205 lines diff --git a/lib_pypy/cffi.egg-info/PKG-INFO b/lib_pypy/cffi.egg-info/PKG-INFO --- a/lib_pypy/cffi.egg-info/PKG-INFO +++ b/lib_pypy/cffi.egg-info/PKG-INFO @@ -1,6 +1,6 @@ Metadata-Version: 1.1 Name: cffi -Version: 1.5.0 +Version: 1.5.1 Summary: Foreign Function Interface for Python calling C code. 
Home-page: http://cffi.readthedocs.org Author: Armin Rigo, Maciej Fijalkowski diff --git a/lib_pypy/cffi/__init__.py b/lib_pypy/cffi/__init__.py --- a/lib_pypy/cffi/__init__.py +++ b/lib_pypy/cffi/__init__.py @@ -4,8 +4,8 @@ from .api import FFI, CDefError, FFIError from .ffiplatform import VerificationError, VerificationMissing -__version__ = "1.5.0" -__version_info__ = (1, 5, 0) +__version__ = "1.5.1" +__version_info__ = (1, 5, 1) # The verifier module file names are based on the CRC32 of a string that # contains the following version number. It may be older than __version__ diff --git a/lib_pypy/cffi/_cffi_include.h b/lib_pypy/cffi/_cffi_include.h --- a/lib_pypy/cffi/_cffi_include.h +++ b/lib_pypy/cffi/_cffi_include.h @@ -231,6 +231,12 @@ ((got_nonpos) == (expected <= 0) && \ (got) == (unsigned long long)expected) +#ifdef MS_WIN32 +# define _cffi_stdcall __stdcall +#else +# define _cffi_stdcall /* nothing */ +#endif + #ifdef __cplusplus } #endif diff --git a/lib_pypy/cffi/_embedding.h b/lib_pypy/cffi/_embedding.h new file mode 100644 --- /dev/null +++ b/lib_pypy/cffi/_embedding.h @@ -0,0 +1,517 @@ + +/***** Support code for embedding *****/ + +#if defined(_MSC_VER) +# define CFFI_DLLEXPORT __declspec(dllexport) +#elif defined(__GNUC__) +# define CFFI_DLLEXPORT __attribute__((visibility("default"))) +#else +# define CFFI_DLLEXPORT /* nothing */ +#endif + + +/* There are two global variables of type _cffi_call_python_fnptr: + + * _cffi_call_python, which we declare just below, is the one called + by ``extern "Python"`` implementations. + + * _cffi_call_python_org, which on CPython is actually part of the + _cffi_exports[] array, is the function pointer copied from + _cffi_backend. + + After initialization is complete, both are equal. 
However, the + first one remains equal to &_cffi_start_and_call_python until the + very end of initialization, when we are (or should be) sure that + concurrent threads also see a completely initialized world, and + only then is it changed. +*/ +#undef _cffi_call_python +typedef void (*_cffi_call_python_fnptr)(struct _cffi_externpy_s *, char *); +static void _cffi_start_and_call_python(struct _cffi_externpy_s *, char *); +static _cffi_call_python_fnptr _cffi_call_python = &_cffi_start_and_call_python; + + +#ifndef _MSC_VER + /* --- Assuming a GCC not infinitely old --- */ +# define cffi_compare_and_swap(l,o,n) __sync_bool_compare_and_swap(l,o,n) +# define cffi_write_barrier() __sync_synchronize() +# if !defined(__amd64__) && !defined(__x86_64__) && \ + !defined(__i386__) && !defined(__i386) +# define cffi_read_barrier() __sync_synchronize() +# else +# define cffi_read_barrier() (void)0 +# endif +#else + /* --- Windows threads version --- */ +# include +# define cffi_compare_and_swap(l,o,n) \ + (InterlockedCompareExchangePointer(l,n,o) == (o)) +# define cffi_write_barrier() InterlockedCompareExchange(&_cffi_dummy,0,0) +# define cffi_read_barrier() (void)0 +static volatile LONG _cffi_dummy; +#endif + +#ifdef WITH_THREAD +# ifndef _MSC_VER +# include + static pthread_mutex_t _cffi_embed_startup_lock; +# else + static CRITICAL_SECTION _cffi_embed_startup_lock; +# endif + static char _cffi_embed_startup_lock_ready = 0; +#endif + +static void _cffi_acquire_reentrant_mutex(void) +{ + static void *volatile lock = NULL; + + while (!cffi_compare_and_swap(&lock, NULL, (void *)1)) { + /* should ideally do a spin loop instruction here, but + hard to do it portably and doesn't really matter I + think: pthread_mutex_init() should be very fast, and + this is only run at start-up anyway. 
*/ + } + +#ifdef WITH_THREAD + if (!_cffi_embed_startup_lock_ready) { +# ifndef _MSC_VER + pthread_mutexattr_t attr; + pthread_mutexattr_init(&attr); + pthread_mutexattr_settype(&attr, PTHREAD_MUTEX_RECURSIVE); + pthread_mutex_init(&_cffi_embed_startup_lock, &attr); +# else + InitializeCriticalSection(&_cffi_embed_startup_lock); +# endif + _cffi_embed_startup_lock_ready = 1; + } +#endif + + while (!cffi_compare_and_swap(&lock, (void *)1, NULL)) + ; + +#ifndef _MSC_VER + pthread_mutex_lock(&_cffi_embed_startup_lock); +#else + EnterCriticalSection(&_cffi_embed_startup_lock); +#endif +} + +static void _cffi_release_reentrant_mutex(void) +{ +#ifndef _MSC_VER + pthread_mutex_unlock(&_cffi_embed_startup_lock); +#else + LeaveCriticalSection(&_cffi_embed_startup_lock); +#endif +} + + +/********** CPython-specific section **********/ +#ifndef PYPY_VERSION + + +#define _cffi_call_python_org _cffi_exports[_CFFI_CPIDX] + +PyMODINIT_FUNC _CFFI_PYTHON_STARTUP_FUNC(void); /* forward */ + +static void _cffi_py_initialize(void) +{ + /* XXX use initsigs=0, which "skips initialization registration of + signal handlers, which might be useful when Python is + embedded" according to the Python docs. But review and think + if it should be a user-controllable setting. + + XXX we should also give a way to write errors to a buffer + instead of to stderr. + + XXX if importing 'site' fails, CPython (any version) calls + exit(). Should we try to work around this behavior here? + */ + Py_InitializeEx(0); +} + +static int _cffi_initialize_python(void) +{ + /* This initializes Python, imports _cffi_backend, and then the + present .dll/.so is set up as a CPython C extension module. + */ + int result; + PyGILState_STATE state; + PyObject *pycode=NULL, *global_dict=NULL, *x; + +#if PY_MAJOR_VERSION >= 3 + /* see comments in _cffi_carefully_make_gil() about the + Python2/Python3 difference + */ +#else + /* Acquire the GIL. We have no threadstate here. 
If Python is + already initialized, it is possible that there is already one + existing for this thread, but it is not made current now. + */ + PyEval_AcquireLock(); + + _cffi_py_initialize(); + + /* The Py_InitializeEx() sometimes made a threadstate for us, but + not always. Indeed Py_InitializeEx() could be called and do + nothing. So do we have a threadstate, or not? We don't know, + but we can replace it with NULL in all cases. + */ + (void)PyThreadState_Swap(NULL); + + /* Now we can release the GIL and re-acquire immediately using the + logic of PyGILState(), which handles making or installing the + correct threadstate. + */ + PyEval_ReleaseLock(); +#endif + state = PyGILState_Ensure(); + + /* Call the initxxx() function from the present module. It will + create and initialize us as a CPython extension module, instead + of letting the startup Python code do it---it might reimport + the same .dll/.so and get maybe confused on some platforms. + It might also have troubles locating the .dll/.so again for all + I know. + */ + (void)_CFFI_PYTHON_STARTUP_FUNC(); + if (PyErr_Occurred()) + goto error; + + /* Now run the Python code provided to ffi.embedding_init_code(). + */ + pycode = Py_CompileString(_CFFI_PYTHON_STARTUP_CODE, + "", + Py_file_input); + if (pycode == NULL) + goto error; + global_dict = PyDict_New(); + if (global_dict == NULL) + goto error; + if (PyDict_SetItemString(global_dict, "__builtins__", + PyThreadState_GET()->interp->builtins) < 0) + goto error; + x = PyEval_EvalCode( +#if PY_MAJOR_VERSION < 3 + (PyCodeObject *) +#endif + pycode, global_dict, global_dict); + if (x == NULL) + goto error; + Py_DECREF(x); + + /* Done! Now if we've been called from + _cffi_start_and_call_python() in an ``extern "Python"``, we can + only hope that the Python code did correctly set up the + corresponding @ffi.def_extern() function. 
Otherwise, the + general logic of ``extern "Python"`` functions (inside the + _cffi_backend module) will find that the reference is still + missing and print an error. + */ + result = 0; + done: + Py_XDECREF(pycode); + Py_XDECREF(global_dict); + PyGILState_Release(state); + return result; + + error:; + { + /* Print as much information as potentially useful. + Debugging load-time failures with embedding is not fun + */ + PyObject *exception, *v, *tb, *f, *modules, *mod; + PyErr_Fetch(&exception, &v, &tb); + if (exception != NULL) { + PyErr_NormalizeException(&exception, &v, &tb); + PyErr_Display(exception, v, tb); + } + Py_XDECREF(exception); + Py_XDECREF(v); + Py_XDECREF(tb); + + f = PySys_GetObject((char *)"stderr"); + if (f != NULL && f != Py_None) { + PyFile_WriteString("\nFrom: " _CFFI_MODULE_NAME + "\ncompiled with cffi version: 1.5.1" + "\n_cffi_backend module: ", f); + modules = PyImport_GetModuleDict(); + mod = PyDict_GetItemString(modules, "_cffi_backend"); + if (mod == NULL) { + PyFile_WriteString("not loaded", f); + } + else { + v = PyObject_GetAttrString(mod, "__file__"); + PyFile_WriteObject(v, f, 0); + Py_XDECREF(v); + } + PyFile_WriteString("\nsys.path: ", f); + PyFile_WriteObject(PySys_GetObject((char *)"path"), f, 0); + PyFile_WriteString("\n\n", f); + } + } + result = -1; + goto done; +} + +PyAPI_DATA(char *) _PyParser_TokenNames[]; /* from CPython */ + +static int _cffi_carefully_make_gil(void) +{ + /* This does the basic initialization of Python. It can be called + completely concurrently from unrelated threads. It assumes + that we don't hold the GIL before (if it exists), and we don't + hold it afterwards. + + What it really does is completely different in Python 2 and + Python 3. + + Python 2 + ======== + + Initialize the GIL, without initializing the rest of Python, + by calling PyEval_InitThreads(). + + PyEval_InitThreads() must not be called concurrently at all. + So we use a global variable as a simple spin lock. 
This global + variable must be from 'libpythonX.Y.so', not from this + cffi-based extension module, because it must be shared from + different cffi-based extension modules. We choose + _PyParser_TokenNames[0] as a completely arbitrary pointer value + that is never written to. The default is to point to the + string "ENDMARKER". We change it temporarily to point to the + next character in that string. (Yes, I know it's REALLY + obscure.) + + Python 3 + ======== + + In Python 3, PyEval_InitThreads() cannot be called before + Py_InitializeEx() any more. So this function calls + Py_InitializeEx() first. It uses the same obscure logic to + make sure we never call it concurrently. + + Arguably, this is less good on the spinlock, because + Py_InitializeEx() takes much longer to run than + PyEval_InitThreads(). But I didn't find a way around it. + */ + +#ifdef WITH_THREAD + char *volatile *lock = (char *volatile *)_PyParser_TokenNames; + char *old_value; + + while (1) { /* spin loop */ + old_value = *lock; + if (old_value[0] == 'E') { + assert(old_value[1] == 'N'); + if (cffi_compare_and_swap(lock, old_value, old_value + 1)) + break; + } + else { + assert(old_value[0] == 'N'); + /* should ideally do a spin loop instruction here, but + hard to do it portably and doesn't really matter I + think: PyEval_InitThreads() should be very fast, and + this is only run at start-up anyway. 
*/ + } + } +#endif + +#if PY_MAJOR_VERSION >= 3 + /* Python 3: call Py_InitializeEx() */ + { + PyGILState_STATE state = PyGILState_UNLOCKED; + if (!Py_IsInitialized()) + _cffi_py_initialize(); + else + state = PyGILState_Ensure(); + + PyEval_InitThreads(); + PyGILState_Release(state); + } +#else + /* Python 2: call PyEval_InitThreads() */ +# ifdef WITH_THREAD + if (!PyEval_ThreadsInitialized()) { + PyEval_InitThreads(); /* makes the GIL */ + PyEval_ReleaseLock(); /* then release it */ + } + /* else: there is already a GIL, but we still needed to do the + spinlock dance to make sure that we see it as fully ready */ +# endif +#endif + +#ifdef WITH_THREAD + /* release the lock */ + while (!cffi_compare_and_swap(lock, old_value + 1, old_value)) + ; +#endif + + return 0; +} + +/********** end CPython-specific section **********/ + + +#else + + +/********** PyPy-specific section **********/ + +PyMODINIT_FUNC _CFFI_PYTHON_STARTUP_FUNC(const void *[]); /* forward */ + +static struct _cffi_pypy_init_s { + const char *name; + void (*func)(const void *[]); + const char *code; +} _cffi_pypy_init = { + _CFFI_MODULE_NAME, + _CFFI_PYTHON_STARTUP_FUNC, + _CFFI_PYTHON_STARTUP_CODE, +}; + +extern int pypy_carefully_make_gil(const char *); +extern int pypy_init_embedded_cffi_module(int, struct _cffi_pypy_init_s *); + +static int _cffi_carefully_make_gil(void) +{ + return pypy_carefully_make_gil(_CFFI_MODULE_NAME); +} + +static int _cffi_initialize_python(void) +{ + return pypy_init_embedded_cffi_module(0xB011, &_cffi_pypy_init); +} + +/********** end PyPy-specific section **********/ + + +#endif + + +#ifdef __GNUC__ +__attribute__((noinline)) +#endif +static _cffi_call_python_fnptr _cffi_start_python(void) +{ + /* Delicate logic to initialize Python. This function can be + called multiple times concurrently, e.g. when the process calls + its first ``extern "Python"`` functions in multiple threads at + once. It can also be called recursively, in which case we must + ignore it. 
We also have to consider what occurs if several + different cffi-based extensions reach this code in parallel + threads---it is a different copy of the code, then, and we + can't have any shared global variable unless it comes from + 'libpythonX.Y.so'. + + Idea: + + * _cffi_carefully_make_gil(): "carefully" call + PyEval_InitThreads() (possibly with Py_InitializeEx() first). + + * then we use a (local) custom lock to make sure that a call to this + cffi-based extension will wait if another call to the *same* + extension is running the initialization in another thread. + It is reentrant, so that a recursive call will not block, but + only one from a different thread. + + * then we grab the GIL and (Python 2) we call Py_InitializeEx(). + At this point, concurrent calls to Py_InitializeEx() are not + possible: we have the GIL. + + * do the rest of the specific initialization, which may + temporarily release the GIL but not the custom lock. + Only release the custom lock when we are done. + */ + static char called = 0; + + if (_cffi_carefully_make_gil() != 0) + return NULL; + + _cffi_acquire_reentrant_mutex(); + + /* Here the GIL exists, but we don't have it. We're only protected + from concurrency by the reentrant mutex. */ + + /* This file only initializes the embedded module once, the first + time this is called, even if there are subinterpreters. */ + if (!called) { + called = 1; /* invoke _cffi_initialize_python() only once, + but don't set '_cffi_call_python' right now, + otherwise concurrent threads won't call + this function at all (we need them to wait) */ + if (_cffi_initialize_python() == 0) { + /* now initialization is finished. Switch to the fast-path. */ + + /* We would like nobody to see the new value of + '_cffi_call_python' without also seeing the rest of the + data initialized. However, this is not possible. But + the new value of '_cffi_call_python' is the function + 'cffi_call_python()' from _cffi_backend. 
So: */ + cffi_write_barrier(); + /* ^^^ we put a write barrier here, and a corresponding + read barrier at the start of cffi_call_python(). This + ensures that after that read barrier, we see everything + done here before the write barrier. + */ + + assert(_cffi_call_python_org != NULL); + _cffi_call_python = (_cffi_call_python_fnptr)_cffi_call_python_org; + } + else { + /* initialization failed. Reset this to NULL, even if it was + already set to some other value. Future calls to + _cffi_start_python() are still forced to occur, and will + always return NULL from now on. */ + _cffi_call_python_org = NULL; + } + } + + _cffi_release_reentrant_mutex(); + + return (_cffi_call_python_fnptr)_cffi_call_python_org; +} + +static +void _cffi_start_and_call_python(struct _cffi_externpy_s *externpy, char *args) +{ + _cffi_call_python_fnptr fnptr; + int current_err = errno; +#ifdef _MSC_VER + int current_lasterr = GetLastError(); +#endif + fnptr = _cffi_start_python(); + if (fnptr == NULL) { + fprintf(stderr, "function %s() called, but initialization code " + "failed. Returning 0.\n", externpy->name); + memset(args, 0, externpy->size_of_result); + } +#ifdef _MSC_VER + SetLastError(current_lasterr); +#endif + errno = current_err; + + if (fnptr != NULL) + fnptr(externpy, args); +} + + +/* The cffi_start_python() function makes sure Python is initialized + and our cffi module is set up. It can be called manually from the + user C code. The same effect is obtained automatically from any + dll-exported ``extern "Python"`` function. This function returns + -1 if initialization failed, 0 if all is OK. 
*/ +_CFFI_UNUSED_FN +static int cffi_start_python(void) +{ + if (_cffi_call_python == &_cffi_start_and_call_python) { + if (_cffi_start_python() == NULL) + return -1; + } + cffi_read_barrier(); + return 0; +} + +#undef cffi_compare_and_swap +#undef cffi_write_barrier +#undef cffi_read_barrier diff --git a/lib_pypy/cffi/api.py b/lib_pypy/cffi/api.py --- a/lib_pypy/cffi/api.py +++ b/lib_pypy/cffi/api.py @@ -1,4 +1,4 @@ -import sys, types +import sys, sysconfig, types from .lock import allocate_lock try: @@ -544,28 +544,32 @@ def _apply_embedding_fix(self, kwds): # must include an argument like "-lpython2.7" for the compiler + def ensure(key, value): + lst = kwds.setdefault(key, []) + if value not in lst: + lst.append(value) + # if '__pypy__' in sys.builtin_module_names: if hasattr(sys, 'prefix'): import os - libdir = os.path.join(sys.prefix, 'bin') - dirs = kwds.setdefault('library_dirs', []) - if libdir not in dirs: - dirs.append(libdir) + ensure('library_dirs', os.path.join(sys.prefix, 'bin')) pythonlib = "pypy-c" else: if sys.platform == "win32": template = "python%d%d" - if sys.flags.debug: - template = template + '_d' + if hasattr(sys, 'gettotalrefcount'): + template += '_d' else: template = "python%d.%d" + if sysconfig.get_config_var('DEBUG_EXT'): + template += sysconfig.get_config_var('DEBUG_EXT') pythonlib = (template % (sys.hexversion >> 24, (sys.hexversion >> 16) & 0xff)) if hasattr(sys, 'abiflags'): pythonlib += sys.abiflags - libraries = kwds.setdefault('libraries', []) - if pythonlib not in libraries: - libraries.append(pythonlib) + ensure('libraries', pythonlib) + if sys.platform == "win32": + ensure('extra_link_args', '/MANIFEST') def set_source(self, module_name, source, source_extension='.c', **kwds): if hasattr(self, '_assigned_source'): @@ -631,7 +635,7 @@ compiled DLL. Use '*' to force distutils' choice, suitable for regular CPython C API modules. 
Use a file name ending in '.*' to ask for the system's default extension for dynamic libraries - (.so/.dll). + (.so/.dll/.dylib). The default is '*' when building a non-embedded C API extension, and (module_name + '.*') when building an embedded library. @@ -695,6 +699,10 @@ # self._embedding = pysource + def def_extern(self, *args, **kwds): + raise ValueError("ffi.def_extern() is only available on API-mode FFI " + "objects") + def _load_backend_lib(backend, name, flags): if name is None: diff --git a/lib_pypy/cffi/cparser.py b/lib_pypy/cffi/cparser.py --- a/lib_pypy/cffi/cparser.py +++ b/lib_pypy/cffi/cparser.py @@ -220,7 +220,7 @@ self._included_declarations = set() self._anonymous_counter = 0 self._structnode2type = weakref.WeakKeyDictionary() - self._options = None + self._options = {} self._int_constants = {} self._recomplete = [] self._uses_new_feature = None @@ -374,7 +374,7 @@ def _declare_function(self, tp, quals, decl): tp = self._get_type_pointer(tp, quals) - if self._options['dllexport']: + if self._options.get('dllexport'): tag = 'dllexport_python ' elif self._inside_extern_python: tag = 'extern_python ' @@ -450,7 +450,7 @@ prevobj, prevquals = self._declarations[name] if prevobj is obj and prevquals == quals: return - if not self._options['override']: + if not self._options.get('override'): raise api.FFIError( "multiple declarations of %s (for interactive usage, " "try cdef(xx, override=True))" % (name,)) @@ -729,7 +729,7 @@ if isinstance(tp, model.StructType) and tp.partial: raise NotImplementedError("%s: using both bitfields and '...;'" % (tp,)) - tp.packed = self._options['packed'] + tp.packed = self._options.get('packed') if tp.completed: # must be re-completed: it is not opaque any more tp.completed = 0 self._recomplete.append(tp) diff --git a/lib_pypy/cffi/ffiplatform.py b/lib_pypy/cffi/ffiplatform.py --- a/lib_pypy/cffi/ffiplatform.py +++ b/lib_pypy/cffi/ffiplatform.py @@ -21,14 +21,12 @@ allsources.append(os.path.normpath(src)) return 
Extension(name=modname, sources=allsources, **kwds) -def compile(tmpdir, ext, compiler_verbose=0, target_extension=None, - embedding=False): +def compile(tmpdir, ext, compiler_verbose=0): """Compile a C extension module using distutils.""" saved_environ = os.environ.copy() try: - outputfilename = _build(tmpdir, ext, compiler_verbose, - target_extension, embedding) + outputfilename = _build(tmpdir, ext, compiler_verbose) outputfilename = os.path.abspath(outputfilename) finally: # workaround for a distutils bugs where some env vars can @@ -38,32 +36,7 @@ os.environ[key] = value return outputfilename -def _save_val(name): - import distutils.sysconfig - config_vars = distutils.sysconfig.get_config_vars() - return config_vars.get(name, Ellipsis) - -def _restore_val(name, value): - import distutils.sysconfig - config_vars = distutils.sysconfig.get_config_vars() - config_vars[name] = value - if value is Ellipsis: - del config_vars[name] - -def _win32_hack_for_embedding(): - from distutils.msvc9compiler import MSVCCompiler - if not hasattr(MSVCCompiler, '_remove_visual_c_ref_CFFI_BAK'): - MSVCCompiler._remove_visual_c_ref_CFFI_BAK = \ - MSVCCompiler._remove_visual_c_ref - MSVCCompiler._remove_visual_c_ref = lambda self,manifest_file: manifest_file - -def _win32_unhack_for_embedding(): - from distutils.msvc9compiler import MSVCCompiler - MSVCCompiler._remove_visual_c_ref = \ - MSVCCompiler._remove_visual_c_ref_CFFI_BAK - -def _build(tmpdir, ext, compiler_verbose=0, target_extension=None, - embedding=False): +def _build(tmpdir, ext, compiler_verbose=0): # XXX compact but horrible :-( from distutils.core import Distribution import distutils.errors, distutils.log @@ -76,25 +49,14 @@ options['build_temp'] = ('ffiplatform', tmpdir) # try: - if sys.platform == 'win32' and embedding: - _win32_hack_for_embedding() old_level = distutils.log.set_threshold(0) or 0 - old_SO = _save_val('SO') - old_EXT_SUFFIX = _save_val('EXT_SUFFIX') try: - if target_extension is not None: - 
_restore_val('SO', target_extension) - _restore_val('EXT_SUFFIX', target_extension) distutils.log.set_verbosity(compiler_verbose) dist.run_command('build_ext') cmd_obj = dist.get_command_obj('build_ext') [soname] = cmd_obj.get_outputs() finally: distutils.log.set_threshold(old_level) - _restore_val('SO', old_SO) - _restore_val('EXT_SUFFIX', old_EXT_SUFFIX) - if sys.platform == 'win32' and embedding: - _win32_unhack_for_embedding() except (distutils.errors.CompileError, distutils.errors.LinkError) as e: raise VerificationError('%s: %s' % (e.__class__.__name__, e)) diff --git a/lib_pypy/cffi/recompiler.py b/lib_pypy/cffi/recompiler.py --- a/lib_pypy/cffi/recompiler.py +++ b/lib_pypy/cffi/recompiler.py @@ -1170,6 +1170,8 @@ repr_arguments = ', '.join(arguments) repr_arguments = repr_arguments or 'void' name_and_arguments = '%s(%s)' % (name, repr_arguments) + if tp.abi == "__stdcall": + name_and_arguments = '_cffi_stdcall ' + name_and_arguments # def may_need_128_bits(tp): return (isinstance(tp, model.PrimitiveType) and @@ -1357,6 +1359,58 @@ parts[-1] += extension return os.path.join(outputdir, *parts), parts + +# Aaargh. Distutils is not tested at all for the purpose of compiling +# DLLs that are not extension modules. Here are some hacks to work +# around that, in the _patch_for_*() functions... + +def _patch_meth(patchlist, cls, name, new_meth): + old = getattr(cls, name) + patchlist.append((cls, name, old)) + setattr(cls, name, new_meth) + return old + +def _unpatch_meths(patchlist): + for cls, name, old_meth in reversed(patchlist): + setattr(cls, name, old_meth) + +def _patch_for_embedding(patchlist): + if sys.platform == 'win32': + # we must not remove the manifest when building for embedding! 
+ from distutils.msvc9compiler import MSVCCompiler + _patch_meth(patchlist, MSVCCompiler, '_remove_visual_c_ref', + lambda self, manifest_file: manifest_file) + + if sys.platform == 'darwin': + # we must not make a '-bundle', but a '-dynamiclib' instead + from distutils.ccompiler import CCompiler + def my_link_shared_object(self, *args, **kwds): + if '-bundle' in self.linker_so: + self.linker_so = list(self.linker_so) + i = self.linker_so.index('-bundle') + self.linker_so[i] = '-dynamiclib' + return old_link_shared_object(self, *args, **kwds) + old_link_shared_object = _patch_meth(patchlist, CCompiler, + 'link_shared_object', + my_link_shared_object) + +def _patch_for_target(patchlist, target): + from distutils.command.build_ext import build_ext + # if 'target' is different from '*', we need to patch some internal + # method to just return this 'target' value, instead of having it + # built from module_name + if target.endswith('.*'): + target = target[:-2] + if sys.platform == 'win32': + target += '.dll' + elif sys.platform == 'darwin': + target += '.dylib' + else: + target += '.so' + _patch_meth(patchlist, build_ext, 'get_ext_filename', + lambda self, ext_name: target) + + def recompile(ffi, module_name, preamble, tmpdir='.', call_c_compiler=True, c_file=None, source_extension='.c', extradir=None, compiler_verbose=1, target=None, **kwds): @@ -1382,36 +1436,22 @@ target = '%s.*' % module_name else: target = '*' - if target == '*': - target_module_name = module_name - target_extension = None # use default - else: - if target.endswith('.*'): - target = target[:-2] - if sys.platform == 'win32': - target += '.dll' - else: - target += '.so' - # split along the first '.' 
(not the last one, otherwise the - # preceeding dots are interpreted as splitting package names) - index = target.find('.') - if index < 0: - raise ValueError("target argument %r should be a file name " - "containing a '.'" % (target,)) - target_module_name = target[:index] - target_extension = target[index:] # - ext = ffiplatform.get_extension(ext_c_file, target_module_name, **kwds) + ext = ffiplatform.get_extension(ext_c_file, module_name, **kwds) updated = make_c_source(ffi, module_name, preamble, c_file) if call_c_compiler: + patchlist = [] cwd = os.getcwd() try: + if embedding: + _patch_for_embedding(patchlist) + if target != '*': + _patch_for_target(patchlist, target) os.chdir(tmpdir) - outputfilename = ffiplatform.compile('.', ext, compiler_verbose, - target_extension, - embedding=embedding) + outputfilename = ffiplatform.compile('.', ext, compiler_verbose) finally: os.chdir(cwd) + _unpatch_meths(patchlist) return outputfilename else: return ext, updated diff --git a/pypy/doc/whatsnew-head.rst b/pypy/doc/whatsnew-head.rst --- a/pypy/doc/whatsnew-head.rst +++ b/pypy/doc/whatsnew-head.rst @@ -142,4 +142,9 @@ .. branch: vmprof-newstack -Refactor vmprof to work cross-operating-system. \ No newline at end of file +Refactor vmprof to work cross-operating-system. + +.. 
branch: seperate-strucmember_h + +Seperate structmember.h from Python.h Also enhance creating api functions +to specify which header file they appear in (previously only pypy_decl.h) diff --git a/pypy/module/_cffi_backend/__init__.py b/pypy/module/_cffi_backend/__init__.py --- a/pypy/module/_cffi_backend/__init__.py +++ b/pypy/module/_cffi_backend/__init__.py @@ -3,7 +3,7 @@ from rpython.rlib import rdynload, clibffi, entrypoint from rpython.rtyper.lltypesystem import rffi -VERSION = "1.5.0" +VERSION = "1.5.1" FFI_DEFAULT_ABI = clibffi.FFI_DEFAULT_ABI try: @@ -69,6 +69,7 @@ def startup(self, space): from pypy.module._cffi_backend import embedding embedding.glob.space = space + embedding.glob.patched_sys = False def get_dict_rtld_constants(): diff --git a/pypy/module/_cffi_backend/embedding.py b/pypy/module/_cffi_backend/embedding.py --- a/pypy/module/_cffi_backend/embedding.py +++ b/pypy/module/_cffi_backend/embedding.py @@ -45,6 +45,26 @@ pass glob = Global() +def patch_sys(space): + # Annoying: CPython would just use the C-level std{in,out,err} as + # configured by the main application, for example in binary mode + # on Windows or with buffering turned off. We can't easily do the + # same. Instead, go for the safest bet (but possibly bad for + # performance) and open sys.std{in,out,err} unbuffered. On + # Windows I guess binary mode is a better default choice. + # + # XXX if needed, we could add support for a flag passed to + # pypy_init_embedded_cffi_module(). + if not glob.patched_sys: + space.appexec([], """(): + import os + sys.stdin = sys.__stdin__ = os.fdopen(0, 'rb', 0) + sys.stdout = sys.__stdout__ = os.fdopen(1, 'wb', 0) + sys.stderr = sys.__stderr__ = os.fdopen(2, 'wb', 0) + """) + glob.patched_sys = True + + def pypy_init_embedded_cffi_module(version, init_struct): # called from __init__.py name = "?" 
@@ -56,6 +76,7 @@ must_leave = False try: must_leave = space.threadlocals.try_enter_thread(space) + patch_sys(space) load_embedded_cffi_module(space, version, init_struct) res = 0 except OperationError, operr: @@ -84,72 +105,87 @@ return rffi.cast(rffi.INT, res) # ____________________________________________________________ + if os.name == 'nt': - do_startup = r''' -#include -#define WIN32_LEAN_AND_MEAN + + do_includes = r""" +#define _WIN32_WINNT 0x0501 #include -RPY_EXPORTED void rpython_startup_code(void); -RPY_EXPORTED int pypy_setup_home(char *, int); -static unsigned char _cffi_ready = 0; -static const char *volatile _cffi_module_name; +#define CFFI_INIT_HOME_PATH_MAX _MAX_PATH +static void _cffi_init(void); +static void _cffi_init_error(const char *msg, const char *extra); -static void _cffi_init_error(const char *msg, const char *extra) +static int _cffi_init_home(char *output_home_path) { - fprintf(stderr, - "\nPyPy initialization failure when loading module '%s':\n%s%s\n", - _cffi_module_name, msg, extra); -} - -BOOL CALLBACK _cffi_init(PINIT_ONCE InitOnce, PVOID Parameter, PVOID *lpContex) -{ - - HMODULE hModule; - TCHAR home[_MAX_PATH]; - rpython_startup_code(); - RPyGilAllocate(); + HMODULE hModule = 0; + DWORD res; GetModuleHandleEx(GET_MODULE_HANDLE_EX_FLAG_FROM_ADDRESS | GET_MODULE_HANDLE_EX_FLAG_UNCHANGED_REFCOUNT, (LPCTSTR)&_cffi_init, &hModule); + if (hModule == 0 ) { - /* TODO turn the int into a string with FormatMessage */ - - _cffi_init_error("dladdr() failed: ", ""); - return TRUE; + _cffi_init_error("GetModuleHandleEx() failed", ""); + return -1; } - GetModuleFileName(hModule, home, _MAX_PATH); - if (pypy_setup_home(home, 1) != 0) { - _cffi_init_error("pypy_setup_home() failed", ""); - return TRUE; + res = GetModuleFileName(hModule, output_home_path, CFFI_INIT_HOME_PATH_MAX); + if (res >= CFFI_INIT_HOME_PATH_MAX) { + return -1; } - _cffi_ready = 1; - fprintf(stderr, "startup succeeded, home %s\n", home); - return TRUE; + return 0; } 
-RPY_EXPORTED -int pypy_carefully_make_gil(const char *name) +static void _cffi_init_once(void) { - /* For CFFI: this initializes the GIL and loads the home path. - It can be called completely concurrently from unrelated threads. - It assumes that we don't hold the GIL before (if it exists), and we - don't hold it afterwards. - */ - static INIT_ONCE s_init_once; + static LONG volatile lock = 0; + static int _init_called = 0; - _cffi_module_name = name; /* not really thread-safe, but better than - nothing */ - InitOnceExecuteOnce(&s_init_once, _cffi_init, NULL, NULL); - return (int)_cffi_ready - 1; -}''' + while (InterlockedCompareExchange(&lock, 1, 0) != 0) { + SwitchToThread(); /* spin loop */ + } + if (!_init_called) { + _cffi_init(); + _init_called = 1; + } + InterlockedCompareExchange(&lock, 0, 1); +} +""" + else: - do_startup = r""" -#include + + do_includes = r""" #include #include +#define CFFI_INIT_HOME_PATH_MAX PATH_MAX +static void _cffi_init(void); +static void _cffi_init_error(const char *msg, const char *extra); + +static int _cffi_init_home(char *output_home_path) +{ + Dl_info info; + dlerror(); /* reset */ + if (dladdr(&_cffi_init, &info) == 0) { + _cffi_init_error("dladdr() failed: ", dlerror()); + return -1; + } + if (realpath(info.dli_fname, output_home_path) == NULL) { + perror("realpath() failed"); + _cffi_init_error("realpath() failed", ""); + return -1; + } + return 0; +} + +static void _cffi_init_once(void) +{ + static pthread_once_t once_control = PTHREAD_ONCE_INIT; + pthread_once(&once_control, _cffi_init); +} +""" + +do_startup = do_includes + r""" RPY_EXPORTED void rpython_startup_code(void); RPY_EXPORTED int pypy_setup_home(char *, int); @@ -165,17 +201,13 @@ static void _cffi_init(void) { - Dl_info info; - char *home; + char home[CFFI_INIT_HOME_PATH_MAX + 1]; rpython_startup_code(); RPyGilAllocate(); - if (dladdr(&_cffi_init, &info) == 0) { - _cffi_init_error("dladdr() failed: ", dlerror()); + if (_cffi_init_home(home) != 0) return; - } 
- home = realpath(info.dli_fname, NULL); if (pypy_setup_home(home, 1) != 0) { _cffi_init_error("pypy_setup_home() failed", ""); return; @@ -191,11 +223,9 @@ It assumes that we don't hold the GIL before (if it exists), and we don't hold it afterwards. */ - static pthread_once_t once_control = PTHREAD_ONCE_INIT; - _cffi_module_name = name; /* not really thread-safe, but better than nothing */ - pthread_once(&once_control, _cffi_init); + _cffi_init_once(); return (int)_cffi_ready - 1; } """ diff --git a/pypy/module/_cffi_backend/test/_backend_test_c.py b/pypy/module/_cffi_backend/test/_backend_test_c.py --- a/pypy/module/_cffi_backend/test/_backend_test_c.py +++ b/pypy/module/_cffi_backend/test/_backend_test_c.py @@ -1,7 +1,7 @@ # ____________________________________________________________ import sys -assert __version__ == "1.5.0", ("This test_c.py file is for testing a version" +assert __version__ == "1.5.1", ("This test_c.py file is for testing a version" " of cffi that differs from the one that we" " get from 'import _cffi_backend'") if sys.version_info < (3,): diff --git a/pypy/module/_cffi_backend/test/test_ztranslation.py b/pypy/module/_cffi_backend/test/test_ztranslation.py --- a/pypy/module/_cffi_backend/test/test_ztranslation.py +++ b/pypy/module/_cffi_backend/test/test_ztranslation.py @@ -4,15 +4,18 @@ # side-effect: FORMAT_LONGDOUBLE must be built before test_checkmodule() from pypy.module._cffi_backend import misc -from pypy.module._cffi_backend import cffi1_module +from pypy.module._cffi_backend import embedding def test_checkmodule(): # prepare_file_argument() is not working without translating the _file # module too def dummy_prepare_file_argument(space, fileobj): - # call load_cffi1_module() too, from a random place like here - cffi1_module.load_cffi1_module(space, "foo", "foo", 42) + # call pypy_init_embedded_cffi_module() from a random place like here + # --- this calls load_cffi1_module(), too + embedding.pypy_init_embedded_cffi_module( + 
rffi.cast(rffi.INT, embedding.EMBED_VERSION_MIN), + 42) return lltype.nullptr(rffi.CCHARP.TO) old = ctypeptr.prepare_file_argument try: diff --git a/pypy/module/cpyext/api.py b/pypy/module/cpyext/api.py --- a/pypy/module/cpyext/api.py +++ b/pypy/module/cpyext/api.py @@ -59,7 +59,7 @@ class CConfig: _compilation_info_ = ExternalCompilationInfo( include_dirs=include_dirs, - includes=['Python.h', 'stdarg.h'], + includes=['Python.h', 'stdarg.h', 'structmember.h'], compile_extra=['-DPy_BUILD_CORE'], ) @@ -129,6 +129,7 @@ for name in constant_names: setattr(CConfig_constants, name, rffi_platform.ConstantInteger(name)) udir.join('pypy_decl.h').write("/* Will be filled later */\n") +udir.join('pypy_structmember_decl.h').write("/* Will be filled later */\n") udir.join('pypy_macros.h').write("/* Will be filled later */\n") globals().update(rffi_platform.configure(CConfig_constants)) @@ -147,7 +148,7 @@ # XXX: 20 lines of code to recursively copy a directory, really?? assert dstdir.check(dir=True) headers = include_dir.listdir('*.h') + include_dir.listdir('*.inl') - for name in ("pypy_decl.h", "pypy_macros.h"): + for name in ("pypy_decl.h", "pypy_macros.h", "pypy_structmember_decl.h"): headers.append(udir.join(name)) _copy_header_files(headers, dstdir) @@ -232,7 +233,7 @@ wrapper.c_name = cpyext_namespace.uniquename(self.c_name) return wrapper -def cpython_api(argtypes, restype, error=_NOT_SPECIFIED, external=True, +def cpython_api(argtypes, restype, error=_NOT_SPECIFIED, header='pypy_decl.h', gil=None): """ Declares a function to be exported. @@ -241,8 +242,8 @@ special value 'CANNOT_FAIL' (also when restype is Void) turns an eventual exception into a wrapped SystemError. Unwrapped exceptions also cause a SytemError. - - set `external` to False to get a C function pointer, but not exported by - the API headers. + - `header` is the header file to export the function in, Set to None to get + a C function pointer, but not exported by the API headers. 
- set `gil` to "acquire", "release" or "around" to acquire the GIL, release the GIL, or both """ @@ -263,7 +264,7 @@ def decorate(func): func_name = func.func_name - if external: + if header is not None: c_name = None else: c_name = func_name @@ -271,7 +272,7 @@ c_name=c_name, gil=gil) func.api_func = api_function - if external: + if header is not None: assert func_name not in FUNCTIONS, ( "%s already registered" % func_name) @@ -363,8 +364,9 @@ unwrapper_catch = make_unwrapper(True) unwrapper_raise = make_unwrapper(False) - if external: + if header is not None: FUNCTIONS[func_name] = api_function + FUNCTIONS_BY_HEADER.setdefault(header, {})[func_name] = api_function INTERPLEVEL_API[func_name] = unwrapper_catch # used in tests return unwrapper_raise # used in 'normal' RPython code. return decorate @@ -383,6 +385,7 @@ INTERPLEVEL_API = {} FUNCTIONS = {} +FUNCTIONS_BY_HEADER = {} # These are C symbols which cpyext will export, but which are defined in .c # files somewhere in the implementation of cpyext (rather than being defined in @@ -811,6 +814,7 @@ global_code = '\n'.join(global_objects) prologue = ("#include \n" + "#include \n" "#include \n") code = (prologue + struct_declaration_code + @@ -960,7 +964,8 @@ "NOT_RPYTHON" # implement function callbacks and generate function decls functions = [] - pypy_decls = [] + decls = {} + pypy_decls = decls['pypy_decl.h'] = [] pypy_decls.append("#ifndef _PYPY_PYPY_DECL_H\n") pypy_decls.append("#define _PYPY_PYPY_DECL_H\n") pypy_decls.append("#ifndef PYPY_STANDALONE\n") @@ -973,17 +978,23 @@ for decl in FORWARD_DECLS: pypy_decls.append("%s;" % (decl,)) - for name, func in sorted(FUNCTIONS.iteritems()): - restype, args = c_function_signature(db, func) - pypy_decls.append("PyAPI_FUNC(%s) %s(%s);" % (restype, name, args)) - if api_struct: - callargs = ', '.join('arg%d' % (i,) - for i in range(len(func.argtypes))) - if func.restype is lltype.Void: - body = "{ _pypyAPI.%s(%s); }" % (name, callargs) - else: - body = "{ return 
_pypyAPI.%s(%s); }" % (name, callargs) - functions.append('%s %s(%s)\n%s' % (restype, name, args, body)) + for header_name, header_functions in FUNCTIONS_BY_HEADER.iteritems(): + if header_name not in decls: + header = decls[header_name] = [] + else: + header = decls[header_name] + + for name, func in sorted(header_functions.iteritems()): + restype, args = c_function_signature(db, func) + header.append("PyAPI_FUNC(%s) %s(%s);" % (restype, name, args)) + if api_struct: + callargs = ', '.join('arg%d' % (i,) + for i in range(len(func.argtypes))) + if func.restype is lltype.Void: + body = "{ _pypyAPI.%s(%s); }" % (name, callargs) + else: + body = "{ return _pypyAPI.%s(%s); }" % (name, callargs) + functions.append('%s %s(%s)\n%s' % (restype, name, args, body)) for name in VA_TP_LIST: name_no_star = process_va_name(name) header = ('%s pypy_va_get_%s(va_list* vp)' % @@ -1007,8 +1018,9 @@ pypy_decls.append("#endif /*PYPY_STANDALONE*/\n") pypy_decls.append("#endif /*_PYPY_PYPY_DECL_H*/\n") - pypy_decl_h = udir.join('pypy_decl.h') - pypy_decl_h.write('\n'.join(pypy_decls)) + for header_name, header_decls in decls.iteritems(): + decl_h = udir.join(header_name) + decl_h.write('\n'.join(header_decls)) return functions separate_module_files = [source_dir / "varargwrapper.c", diff --git a/pypy/module/cpyext/bufferobject.py b/pypy/module/cpyext/bufferobject.py --- a/pypy/module/cpyext/bufferobject.py +++ b/pypy/module/cpyext/bufferobject.py @@ -73,7 +73,7 @@ "Don't know how to realize a buffer")) - at cpython_api([PyObject], lltype.Void, external=False) + at cpython_api([PyObject], lltype.Void, header=None) def buffer_dealloc(space, py_obj): py_buf = rffi.cast(PyBufferObject, py_obj) if py_buf.c_b_base: diff --git a/pypy/module/cpyext/frameobject.py b/pypy/module/cpyext/frameobject.py --- a/pypy/module/cpyext/frameobject.py +++ b/pypy/module/cpyext/frameobject.py @@ -39,7 +39,7 @@ py_frame.c_f_locals = make_ref(space, frame.get_w_locals()) rffi.setintfield(py_frame, 'c_f_lineno', 
frame.getorcreatedebug().f_lineno) - at cpython_api([PyObject], lltype.Void, external=False) + at cpython_api([PyObject], lltype.Void, header=None) def frame_dealloc(space, py_obj): py_frame = rffi.cast(PyFrameObject, py_obj) py_code = rffi.cast(PyObject, py_frame.c_f_code) diff --git a/pypy/module/cpyext/funcobject.py b/pypy/module/cpyext/funcobject.py --- a/pypy/module/cpyext/funcobject.py +++ b/pypy/module/cpyext/funcobject.py @@ -56,7 +56,7 @@ assert isinstance(w_obj, Function) py_func.c_func_name = make_ref(space, space.wrap(w_obj.name)) - at cpython_api([PyObject], lltype.Void, external=False) + at cpython_api([PyObject], lltype.Void, header=None) def function_dealloc(space, py_obj): py_func = rffi.cast(PyFunctionObject, py_obj) Py_DecRef(space, py_func.c_func_name) @@ -75,7 +75,7 @@ rffi.setintfield(py_code, 'c_co_flags', co_flags) rffi.setintfield(py_code, 'c_co_argcount', w_obj.co_argcount) - at cpython_api([PyObject], lltype.Void, external=False) + at cpython_api([PyObject], lltype.Void, header=None) def code_dealloc(space, py_obj): py_code = rffi.cast(PyCodeObject, py_obj) Py_DecRef(space, py_code.c_co_name) diff --git a/pypy/module/cpyext/include/Python.h b/pypy/module/cpyext/include/Python.h --- a/pypy/module/cpyext/include/Python.h +++ b/pypy/module/cpyext/include/Python.h @@ -84,6 +84,7 @@ #include "pyconfig.h" #include "object.h" +#include "pymath.h" #include "pyport.h" #include "warnings.h" @@ -115,7 +116,6 @@ #include "compile.h" #include "frameobject.h" #include "eval.h" -#include "pymath.h" #include "pymem.h" #include "pycobject.h" #include "pycapsule.h" @@ -132,9 +132,6 @@ /* Missing definitions */ #include "missing.h" -// XXX This shouldn't be included here -#include "structmember.h" - #include /* Define macros for inline documentation. 
*/ diff --git a/pypy/module/cpyext/include/floatobject.h b/pypy/module/cpyext/include/floatobject.h --- a/pypy/module/cpyext/include/floatobject.h +++ b/pypy/module/cpyext/include/floatobject.h @@ -7,6 +7,18 @@ extern "C" { #endif +#define PyFloat_STR_PRECISION 12 + +#ifdef Py_NAN +#define Py_RETURN_NAN return PyFloat_FromDouble(Py_NAN) +#endif + +#define Py_RETURN_INF(sign) do \ + if (copysign(1., sign) == 1.) { \ + return PyFloat_FromDouble(Py_HUGE_VAL); \ + } else { \ + return PyFloat_FromDouble(-Py_HUGE_VAL); \ + } while(0) #ifdef __cplusplus } diff --git a/pypy/module/cpyext/include/pymath.h b/pypy/module/cpyext/include/pymath.h --- a/pypy/module/cpyext/include/pymath.h +++ b/pypy/module/cpyext/include/pymath.h @@ -17,4 +17,35 @@ #define Py_HUGE_VAL HUGE_VAL #endif +/* Py_NAN + * A value that evaluates to a NaN. On IEEE 754 platforms INF*0 or + * INF/INF works. Define Py_NO_NAN in pyconfig.h if your platform + * doesn't support NaNs. + */ +#if !defined(Py_NAN) && !defined(Py_NO_NAN) +#if !defined(__INTEL_COMPILER) + #define Py_NAN (Py_HUGE_VAL * 0.) 
+#else /* __INTEL_COMPILER */ + #if defined(ICC_NAN_STRICT) + #pragma float_control(push) + #pragma float_control(precise, on) + #pragma float_control(except, on) + #if defined(_MSC_VER) + __declspec(noinline) + #else /* Linux */ + __attribute__((noinline)) + #endif /* _MSC_VER */ + static double __icc_nan() + { + return sqrt(-1.0); + } + #pragma float_control (pop) + #define Py_NAN __icc_nan() + #else /* ICC_NAN_RELAXED as default for Intel Compiler */ + static union { unsigned char buf[8]; double __icc_nan; } __nan_store = {0,0,0,0,0,0,0xf8,0x7f}; + #define Py_NAN (__nan_store.__icc_nan) + #endif /* ICC_NAN_STRICT */ +#endif /* __INTEL_COMPILER */ +#endif + #endif /* Py_PYMATH_H */ diff --git a/pypy/module/cpyext/include/structmember.h b/pypy/module/cpyext/include/structmember.h --- a/pypy/module/cpyext/include/structmember.h +++ b/pypy/module/cpyext/include/structmember.h @@ -4,54 +4,85 @@ extern "C" { #endif + +/* Interface to map C struct members to Python object attributes */ + #include /* For offsetof */ + +/* The offsetof() macro calculates the offset of a structure member + in its structure. Unfortunately this cannot be written down + portably, hence it is provided by a Standard C header file. + For pre-Standard C compilers, here is a version that usually works + (but watch out!): */ + #ifndef offsetof #define offsetof(type, member) ( (int) & ((type*)0) -> member ) #endif +/* An array of memberlist structures defines the name, type and offset + of selected members of a C structure. These can be read by + PyMember_Get() and set by PyMember_Set() (except if their READONLY flag + is set). The array must be terminated with an entry whose name + pointer is NULL. 
*/ + + typedef struct PyMemberDef { - /* Current version, use this */ - char *name; - int type; - Py_ssize_t offset; - int flags; - char *doc; + /* Current version, use this */ + char *name; + int type; + Py_ssize_t offset; + int flags; + char *doc; } PyMemberDef; +/* Types */ +#define T_SHORT 0 +#define T_INT 1 +#define T_LONG 2 +#define T_FLOAT 3 +#define T_DOUBLE 4 +#define T_STRING 5 +#define T_OBJECT 6 +/* XXX the ordering here is weird for binary compatibility */ +#define T_CHAR 7 /* 1-character string */ +#define T_BYTE 8 /* 8-bit signed int */ +/* unsigned variants: */ +#define T_UBYTE 9 +#define T_USHORT 10 +#define T_UINT 11 +#define T_ULONG 12 -/* Types. These constants are also in structmemberdefs.py. */ -#define T_SHORT 0 -#define T_INT 1 -#define T_LONG 2 -#define T_FLOAT 3 -#define T_DOUBLE 4 -#define T_STRING 5 -#define T_OBJECT 6 -#define T_CHAR 7 /* 1-character string */ -#define T_BYTE 8 /* 8-bit signed int */ -#define T_UBYTE 9 -#define T_USHORT 10 -#define T_UINT 11 -#define T_ULONG 12 -#define T_STRING_INPLACE 13 /* Strings contained in the structure */ -#define T_BOOL 14 -#define T_OBJECT_EX 16 /* Like T_OBJECT, but raises AttributeError - when the value is NULL, instead of - converting to None. */ -#define T_LONGLONG 17 -#define T_ULONGLONG 18 -#define T_PYSSIZET 19 +/* Added by Jack: strings contained in the structure */ +#define T_STRING_INPLACE 13 + +/* Added by Lillo: bools contained in the structure (assumed char) */ +#define T_BOOL 14 + +#define T_OBJECT_EX 16 /* Like T_OBJECT, but raises AttributeError + when the value is NULL, instead of + converting to None. */ +#ifdef HAVE_LONG_LONG +#define T_LONGLONG 17 +#define T_ULONGLONG 18 +#endif /* HAVE_LONG_LONG */ + +#define T_PYSSIZET 19 /* Py_ssize_t */ /* Flags. These constants are also in structmemberdefs.py. 
*/ -#define READONLY 1 -#define RO READONLY /* Shorthand */ +#define READONLY 1 +#define RO READONLY /* Shorthand */ #define READ_RESTRICTED 2 #define PY_WRITE_RESTRICTED 4 -#define RESTRICTED (READ_RESTRICTED | PY_WRITE_RESTRICTED) +#define RESTRICTED (READ_RESTRICTED | PY_WRITE_RESTRICTED) + + +/* API functions. */ +#include "pypy_structmember_decl.h" #ifdef __cplusplus } #endif #endif /* !Py_STRUCTMEMBER_H */ + diff --git a/pypy/module/cpyext/methodobject.py b/pypy/module/cpyext/methodobject.py --- a/pypy/module/cpyext/methodobject.py +++ b/pypy/module/cpyext/methodobject.py @@ -50,7 +50,7 @@ py_func.c_m_self = make_ref(space, w_obj.w_self) py_func.c_m_module = make_ref(space, w_obj.w_module) - at cpython_api([PyObject], lltype.Void, external=False) + at cpython_api([PyObject], lltype.Void, header=None) def cfunction_dealloc(space, py_obj): py_func = rffi.cast(PyCFunctionObject, py_obj) Py_DecRef(space, py_func.c_m_self) diff --git a/pypy/module/cpyext/pyobject.py b/pypy/module/cpyext/pyobject.py --- a/pypy/module/cpyext/pyobject.py +++ b/pypy/module/cpyext/pyobject.py @@ -70,7 +70,7 @@ alloc : allocate and basic initialization of a raw PyObject attach : Function called to tie a raw structure to a pypy object realize : Function called to create a pypy object from a raw struct - dealloc : a cpython_api(external=False), similar to PyObject_dealloc + dealloc : a cpython_api(header=None), similar to PyObject_dealloc """ tp_basestruct = kw.pop('basestruct', PyObject.TO) diff --git a/pypy/module/cpyext/pytraceback.py b/pypy/module/cpyext/pytraceback.py --- a/pypy/module/cpyext/pytraceback.py +++ b/pypy/module/cpyext/pytraceback.py @@ -41,7 +41,7 @@ rffi.setintfield(py_traceback, 'c_tb_lasti', traceback.lasti) rffi.setintfield(py_traceback, 'c_tb_lineno',traceback.get_lineno()) - at cpython_api([PyObject], lltype.Void, external=False) + at cpython_api([PyObject], lltype.Void, header=None) def traceback_dealloc(space, py_obj): py_traceback = rffi.cast(PyTracebackObject, 
py_obj) Py_DecRef(space, rffi.cast(PyObject, py_traceback.c_tb_next)) diff --git a/pypy/module/cpyext/sliceobject.py b/pypy/module/cpyext/sliceobject.py --- a/pypy/module/cpyext/sliceobject.py +++ b/pypy/module/cpyext/sliceobject.py @@ -36,7 +36,7 @@ py_slice.c_stop = make_ref(space, w_obj.w_stop) py_slice.c_step = make_ref(space, w_obj.w_step) - at cpython_api([PyObject], lltype.Void, external=False) + at cpython_api([PyObject], lltype.Void, header=None) def slice_dealloc(space, py_obj): """Frees allocated PyStringObject resources. """ diff --git a/pypy/module/cpyext/slotdefs.py b/pypy/module/cpyext/slotdefs.py --- a/pypy/module/cpyext/slotdefs.py +++ b/pypy/module/cpyext/slotdefs.py @@ -309,7 +309,7 @@ return space.wrap(generic_cpy_call(space, func_target, w_self, w_other)) - at cpython_api([PyTypeObjectPtr, PyObject, PyObject], PyObject, external=False) + at cpython_api([PyTypeObjectPtr, PyObject, PyObject], PyObject, header=None) def slot_tp_new(space, type, w_args, w_kwds): from pypy.module.cpyext.tupleobject import PyTuple_Check pyo = rffi.cast(PyObject, type) @@ -320,30 +320,30 @@ w_args_new = space.newtuple(args_w) return space.call(w_func, w_args_new, w_kwds) - at cpython_api([PyObject, PyObject, PyObject], rffi.INT_real, error=-1, external=False) + at cpython_api([PyObject, PyObject, PyObject], rffi.INT_real, error=-1, header=None) def slot_tp_init(space, w_self, w_args, w_kwds): w_descr = space.lookup(w_self, '__init__') args = Arguments.frompacked(space, w_args, w_kwds) space.get_and_call_args(w_descr, w_self, args) return 0 - at cpython_api([PyObject, PyObject, PyObject], PyObject, external=False) + at cpython_api([PyObject, PyObject, PyObject], PyObject, header=None) def slot_tp_call(space, w_self, w_args, w_kwds): return space.call(w_self, w_args, w_kwds) - at cpython_api([PyObject], PyObject, external=False) + at cpython_api([PyObject], PyObject, header=None) def slot_tp_str(space, w_self): return space.str(w_self) - at cpython_api([PyObject], 
PyObject, external=False) + at cpython_api([PyObject], PyObject, header=None) def slot_nb_int(space, w_self): return space.int(w_self) - at cpython_api([PyObject], PyObject, external=False) + at cpython_api([PyObject], PyObject, header=None) def slot_tp_iter(space, w_self): return space.iter(w_self) - at cpython_api([PyObject], PyObject, external=False) + at cpython_api([PyObject], PyObject, header=None) def slot_tp_iternext(space, w_self): return space.next(w_self) @@ -371,7 +371,7 @@ return @cpython_api([PyObject, PyObject, PyObject], rffi.INT_real, - error=-1, external=True) # XXX should not be exported + error=-1) # XXX should be header=None @func_renamer("cpyext_tp_setattro_%s" % (typedef.name,)) def slot_tp_setattro(space, w_self, w_name, w_value): if w_value is not None: @@ -385,8 +385,7 @@ if getattr_fn is None: return - @cpython_api([PyObject, PyObject], PyObject, - external=True) + @cpython_api([PyObject, PyObject], PyObject) @func_renamer("cpyext_tp_getattro_%s" % (typedef.name,)) def slot_tp_getattro(space, w_self, w_name): return space.call_function(getattr_fn, w_self, w_name) diff --git a/pypy/module/cpyext/stringobject.py b/pypy/module/cpyext/stringobject.py --- a/pypy/module/cpyext/stringobject.py +++ b/pypy/module/cpyext/stringobject.py @@ -103,7 +103,7 @@ track_reference(space, py_obj, w_obj) return w_obj - at cpython_api([PyObject], lltype.Void, external=False) + at cpython_api([PyObject], lltype.Void, header=None) def string_dealloc(space, py_obj): """Frees allocated PyStringObject resources. 
""" diff --git a/pypy/module/cpyext/structmember.py b/pypy/module/cpyext/structmember.py --- a/pypy/module/cpyext/structmember.py +++ b/pypy/module/cpyext/structmember.py @@ -31,8 +31,10 @@ (T_PYSSIZET, rffi.SSIZE_T, PyLong_AsSsize_t), ]) +_HEADER = 'pypy_structmember_decl.h' - at cpython_api([PyObject, lltype.Ptr(PyMemberDef)], PyObject) + + at cpython_api([PyObject, lltype.Ptr(PyMemberDef)], PyObject, header=_HEADER) def PyMember_GetOne(space, obj, w_member): addr = rffi.cast(ADDR, obj) addr += w_member.c_offset @@ -83,7 +85,8 @@ return w_result - at cpython_api([PyObject, lltype.Ptr(PyMemberDef), PyObject], rffi.INT_real, error=-1) + at cpython_api([PyObject, lltype.Ptr(PyMemberDef), PyObject], rffi.INT_real, + error=-1, header=_HEADER) def PyMember_SetOne(space, obj, w_member, w_value): addr = rffi.cast(ADDR, obj) addr += w_member.c_offset diff --git a/pypy/module/cpyext/test/test_cpyext.py b/pypy/module/cpyext/test/test_cpyext.py --- a/pypy/module/cpyext/test/test_cpyext.py +++ b/pypy/module/cpyext/test/test_cpyext.py @@ -863,3 +863,15 @@ os.unlink('_imported_already') except OSError: pass + + def test_no_structmember(self): + """structmember.h should not be included by default.""" + mod = self.import_extension('foo', [ + ('bar', 'METH_NOARGS', + ''' + /* reuse a name that is #defined in structmember.h */ + int RO; + Py_RETURN_NONE; + ''' + ), + ]) diff --git a/pypy/module/cpyext/test/test_floatobject.py b/pypy/module/cpyext/test/test_floatobject.py --- a/pypy/module/cpyext/test/test_floatobject.py +++ b/pypy/module/cpyext/test/test_floatobject.py @@ -45,3 +45,35 @@ ]) assert module.from_string() == 1234.56 assert type(module.from_string()) is float + +class AppTestFloatMacros(AppTestCpythonExtensionBase): + def test_return_nan(self): + import math + + module = self.import_extension('foo', [ + ("return_nan", "METH_NOARGS", + "Py_RETURN_NAN;"), + ]) + assert math.isnan(module.return_nan()) + + def test_return_inf(self): + import math + + module = 
self.import_extension('foo', [ + ("return_inf", "METH_NOARGS", + "Py_RETURN_INF(10);"), + ]) + inf = module.return_inf() + assert inf > 0 + assert math.isinf(inf) + + def test_return_inf_negative(self): + import math + + module = self.import_extension('foo', [ + ("return_neginf", "METH_NOARGS", + "Py_RETURN_INF(-10);"), + ]) + neginf = module.return_neginf() + assert neginf < 0 + assert math.isinf(neginf) diff --git a/pypy/module/cpyext/test/test_intobject.py b/pypy/module/cpyext/test/test_intobject.py --- a/pypy/module/cpyext/test/test_intobject.py +++ b/pypy/module/cpyext/test/test_intobject.py @@ -99,6 +99,7 @@ """), ], prologue=""" + #include "structmember.h" typedef struct { PyObject_HEAD diff --git a/pypy/module/cpyext/test/test_translate.py b/pypy/module/cpyext/test/test_translate.py --- a/pypy/module/cpyext/test/test_translate.py +++ b/pypy/module/cpyext/test/test_translate.py @@ -19,7 +19,7 @@ @specialize.memo() def get_tp_function(space, typedef): - @cpython_api([], lltype.Signed, error=-1, external=False) + @cpython_api([], lltype.Signed, error=-1, header=None) def slot_tp_function(space): return typedef.value diff --git a/pypy/module/cpyext/typeobject.py b/pypy/module/cpyext/typeobject.py --- a/pypy/module/cpyext/typeobject.py +++ b/pypy/module/cpyext/typeobject.py @@ -183,7 +183,7 @@ if pto.c_tp_new: add_tp_new_wrapper(space, dict_w, pto) - at cpython_api([PyObject, PyObject, PyObject], PyObject, external=False) + at cpython_api([PyObject, PyObject, PyObject], PyObject, header=None) def tp_new_wrapper(space, self, w_args, w_kwds): tp_new = rffi.cast(PyTypeObjectPtr, self).c_tp_new @@ -311,7 +311,7 @@ dealloc=type_dealloc) - at cpython_api([PyObject], lltype.Void, external=False) + at cpython_api([PyObject], lltype.Void, header=None) def subtype_dealloc(space, obj): pto = obj.c_ob_type base = pto @@ -327,7 +327,7 @@ # hopefully this does not clash with the memory model assumed in # extension modules - at cpython_api([PyObject, Py_ssize_tP], 
lltype.Signed, external=False, + at cpython_api([PyObject, Py_ssize_tP], lltype.Signed, header=None, error=CANNOT_FAIL) def str_segcount(space, w_obj, ref): if ref: @@ -335,7 +335,7 @@ return 1 @cpython_api([PyObject, Py_ssize_t, rffi.VOIDPP], lltype.Signed, - external=False, error=-1) + header=None, error=-1) def str_getreadbuffer(space, w_str, segment, ref): from pypy.module.cpyext.stringobject import PyString_AsString if segment != 0: @@ -348,7 +348,7 @@ return space.len_w(w_str) @cpython_api([PyObject, Py_ssize_t, rffi.CCHARPP], lltype.Signed, - external=False, error=-1) + header=None, error=-1) def str_getcharbuffer(space, w_str, segment, ref): from pypy.module.cpyext.stringobject import PyString_AsString if segment != 0: @@ -361,7 +361,7 @@ return space.len_w(w_str) @cpython_api([PyObject, Py_ssize_t, rffi.VOIDPP], lltype.Signed, - external=False, error=-1) + header=None, error=-1) def buf_getreadbuffer(space, pyref, segment, ref): from pypy.module.cpyext.bufferobject import PyBufferObject if segment != 0: @@ -393,7 +393,7 @@ buf_getreadbuffer.api_func.get_wrapper(space)) pto.c_tp_as_buffer = c_buf - at cpython_api([PyObject], lltype.Void, external=False) + at cpython_api([PyObject], lltype.Void, header=None) def type_dealloc(space, obj): from pypy.module.cpyext.object import PyObject_dealloc obj_pto = rffi.cast(PyTypeObjectPtr, obj) diff --git a/pypy/module/cpyext/unicodeobject.py b/pypy/module/cpyext/unicodeobject.py --- a/pypy/module/cpyext/unicodeobject.py +++ b/pypy/module/cpyext/unicodeobject.py @@ -75,7 +75,7 @@ track_reference(space, py_obj, w_obj) return w_obj - at cpython_api([PyObject], lltype.Void, external=False) + at cpython_api([PyObject], lltype.Void, header=None) def unicode_dealloc(space, py_obj): py_unicode = rffi.cast(PyUnicodeObject, py_obj) if py_unicode.c_buffer: diff --git a/pypy/module/sys/interp_encoding.py b/pypy/module/sys/interp_encoding.py --- a/pypy/module/sys/interp_encoding.py +++ b/pypy/module/sys/interp_encoding.py @@ -34,7 
+34,11 @@ elif sys.platform == "darwin": base_encoding = "utf-8" else: - base_encoding = None + # In CPython, the default base encoding is NULL. This is paired with a + # comment that says "If non-NULL, this is different than the default + # encoding for strings". Therefore, the default filesystem encoding is the + # default encoding for strings, which is ASCII. + base_encoding = "ascii" def _getfilesystemencoding(space): encoding = base_encoding diff --git a/pypy/module/test_lib_pypy/cffi_tests/cffi0/backend_tests.py b/pypy/module/test_lib_pypy/cffi_tests/cffi0/backend_tests.py --- a/pypy/module/test_lib_pypy/cffi_tests/cffi0/backend_tests.py +++ b/pypy/module/test_lib_pypy/cffi_tests/cffi0/backend_tests.py @@ -1847,3 +1847,8 @@ thread.start_new_thread(f, ()) time.sleep(1.5) assert seen == ['init!', 'init done'] + 6 * [7] + + def test_sizeof_struct_directly(self): + # only works with the Python FFI instances + ffi = FFI(backend=self.Backend()) + assert ffi.sizeof("struct{int a;}") == ffi.sizeof("int") diff --git a/pypy/module/test_lib_pypy/cffi_tests/cffi0/test_ffi_backend.py b/pypy/module/test_lib_pypy/cffi_tests/cffi0/test_ffi_backend.py --- a/pypy/module/test_lib_pypy/cffi_tests/cffi0/test_ffi_backend.py +++ b/pypy/module/test_lib_pypy/cffi_tests/cffi0/test_ffi_backend.py @@ -420,3 +420,7 @@ ]: x = ffi.sizeof(name) assert 1 <= x <= 16 + + def test_ffi_def_extern(self): + ffi = FFI() + py.test.raises(ValueError, ffi.def_extern) diff --git a/pypy/module/test_lib_pypy/cffi_tests/cffi0/test_verify.py b/pypy/module/test_lib_pypy/cffi_tests/cffi0/test_verify.py --- a/pypy/module/test_lib_pypy/cffi_tests/cffi0/test_verify.py +++ b/pypy/module/test_lib_pypy/cffi_tests/cffi0/test_verify.py @@ -92,8 +92,8 @@ assert lib.sin(1.23) == math.sin(1.23) def _Wconversion(cdef, source, **kargs): - if sys.platform == 'win32': - py.test.skip("needs GCC or Clang") + if sys.platform in ('win32', 'darwin'): + py.test.skip("needs GCC") ffi = FFI() ffi.cdef(cdef) 
py.test.raises(VerificationError, ffi.verify, source, **kargs) diff --git a/pypy/module/test_lib_pypy/cffi_tests/cffi1/test_recompiler.py b/pypy/module/test_lib_pypy/cffi_tests/cffi1/test_recompiler.py --- a/pypy/module/test_lib_pypy/cffi_tests/cffi1/test_recompiler.py +++ b/pypy/module/test_lib_pypy/cffi_tests/cffi1/test_recompiler.py @@ -1714,3 +1714,33 @@ # a case where 'onerror' is not callable py.test.raises(TypeError, ffi.def_extern(name='bar', onerror=42), lambda x: x) + +def test_extern_python_stdcall(): + ffi = FFI() + ffi.cdef(""" + extern "Python" int __stdcall foo(int); + extern "Python" int WINAPI bar(int); + int (__stdcall * mycb1)(int); + int indirect_call(int); + """) + lib = verify(ffi, 'test_extern_python_stdcall', """ + #ifndef _MSC_VER + # define __stdcall + #endif + static int (__stdcall * mycb1)(int); + static int indirect_call(int x) { + return mycb1(x); + } + """) + # + @ffi.def_extern() + def foo(x): + return x + 42 + @ffi.def_extern() + def bar(x): + return x + 43 + assert lib.foo(100) == 142 + assert lib.bar(100) == 143 + lib.mycb1 = lib.foo + assert lib.mycb1(200) == 242 + assert lib.indirect_call(300) == 342 diff --git a/pypy/module/test_lib_pypy/cffi_tests/cffi1/test_verify1.py b/pypy/module/test_lib_pypy/cffi_tests/cffi1/test_verify1.py --- a/pypy/module/test_lib_pypy/cffi_tests/cffi1/test_verify1.py +++ b/pypy/module/test_lib_pypy/cffi_tests/cffi1/test_verify1.py @@ -72,8 +72,8 @@ assert lib.sin(1.23) == math.sin(1.23) def _Wconversion(cdef, source, **kargs): - if sys.platform == 'win32': - py.test.skip("needs GCC or Clang") + if sys.platform in ('win32', 'darwin'): + py.test.skip("needs GCC") ffi = FFI() ffi.cdef(cdef) py.test.raises(VerificationError, ffi.verify, source, **kargs) @@ -2092,20 +2092,20 @@ old = sys.getdlopenflags() try: ffi1 = FFI() - ffi1.cdef("int foo_verify_dlopen_flags;") + ffi1.cdef("int foo_verify_dlopen_flags_1;") sys.setdlopenflags(ffi1.RTLD_GLOBAL | ffi1.RTLD_NOW) - lib1 = ffi1.verify("int 
foo_verify_dlopen_flags;") + lib1 = ffi1.verify("int foo_verify_dlopen_flags_1;") finally: sys.setdlopenflags(old) ffi2 = FFI() ffi2.cdef("int *getptr(void);") lib2 = ffi2.verify(""" - extern int foo_verify_dlopen_flags; - static int *getptr(void) { return &foo_verify_dlopen_flags; } + extern int foo_verify_dlopen_flags_1; + static int *getptr(void) { return &foo_verify_dlopen_flags_1; } """) p = lib2.getptr() - assert ffi1.addressof(lib1, 'foo_verify_dlopen_flags') == p + assert ffi1.addressof(lib1, 'foo_verify_dlopen_flags_1') == p def test_consider_not_implemented_function_type(): ffi = FFI() diff --git a/pypy/module/test_lib_pypy/cffi_tests/cffi1/test_zdist.py b/pypy/module/test_lib_pypy/cffi_tests/cffi1/test_zdist.py --- a/pypy/module/test_lib_pypy/cffi_tests/cffi1/test_zdist.py +++ b/pypy/module/test_lib_pypy/cffi_tests/cffi1/test_zdist.py @@ -49,7 +49,8 @@ import setuptools except ImportError: From pypy.commits at gmail.com Fri Feb 12 12:45:27 2016 From: pypy.commits at gmail.com (arigo) Date: Fri, 12 Feb 2016 09:45:27 -0800 (PST) Subject: [pypy-commit] pypy default: Aaaaaa finally found and fix an issue with stacklets on shadowstack Message-ID: <56be1a37.0ab81c0a.41694.2e12@mx.google.com> Author: Armin Rigo Branch: Changeset: r82188:1929d5b7982f Date: 2016-02-12 18:44 +0100 http://bitbucket.org/pypy/pypy/changeset/1929d5b7982f/ Log: Aaaaaa finally found and fix an issue with stacklets on shadowstack diff --git a/rpython/rlib/_stacklet_shadowstack.py b/rpython/rlib/_stacklet_shadowstack.py --- a/rpython/rlib/_stacklet_shadowstack.py +++ b/rpython/rlib/_stacklet_shadowstack.py @@ -30,6 +30,11 @@ mixlevelannotator.finish() lltype.attachRuntimeTypeInfo(STACKLET, destrptr=destrptr) +# Note: it's important that this is a light finalizer, otherwise +# the GC will call it but still expect the object to stay around for +# a while---and it can't stay around, because s_sscopy points to +# freed nonsense and customtrace() will crash + at rgc.must_be_light_finalizer def 
stacklet_destructor(stacklet): sscopy = stacklet.s_sscopy if sscopy: diff --git a/rpython/translator/backendopt/finalizer.py b/rpython/translator/backendopt/finalizer.py --- a/rpython/translator/backendopt/finalizer.py +++ b/rpython/translator/backendopt/finalizer.py @@ -18,7 +18,7 @@ """ ok_operations = ['ptr_nonzero', 'ptr_eq', 'ptr_ne', 'free', 'same_as', 'direct_ptradd', 'force_cast', 'track_alloc_stop', - 'raw_free'] + 'raw_free', 'adr_eq', 'adr_ne'] def analyze_light_finalizer(self, graph): result = self.analyze_direct_call(graph) From pypy.commits at gmail.com Fri Feb 12 14:38:40 2016 From: pypy.commits at gmail.com (mjacob) Date: Fri, 12 Feb 2016 11:38:40 -0800 (PST) Subject: [pypy-commit] pypy py3.3: Improve (but not fix) app_main.py's Python 3.5 compatibility for testing on top of CPython and forward-compatibility. Message-ID: <56be34c0.a3f6c20a.6ba4d.ffffeb0c@mx.google.com> Author: Manuel Jacob Branch: py3.3 Changeset: r82189:249ebc5d87a0 Date: 2016-02-12 18:06 +0100 http://bitbucket.org/pypy/pypy/changeset/249ebc5d87a0/ Log: Improve (but not fix) app_main.py's Python 3.5 compatibility for testing on top of CPython and forward-compatibility. diff --git a/pypy/interpreter/app_main.py b/pypy/interpreter/app_main.py --- a/pypy/interpreter/app_main.py +++ b/pypy/interpreter/app_main.py @@ -668,11 +668,14 @@ # assume it's a pyc file only if its name says so. # CPython goes to great lengths to detect other cases # of pyc file format, but I think it's ok not to care. 
- import _frozen_importlib + try: + from _frozen_importlib import SourcelessFileLoader + except ImportError: + from _frozen_importlib_external import SourcelessFileLoader if IS_WINDOWS: filename = filename.lower() if filename.endswith('.pyc') or filename.endswith('.pyo'): - loader = _frozen_importlib.SourcelessFileLoader('__main__', filename) + loader = SourcelessFileLoader('__main__', filename) args = (loader.load_module,) else: filename = sys.argv[0] From pypy.commits at gmail.com Fri Feb 12 14:38:44 2016 From: pypy.commits at gmail.com (mjacob) Date: Fri, 12 Feb 2016 11:38:44 -0800 (PST) Subject: [pypy-commit] pypy py3.3: Fix test_pyc_commandline_argument() by creating the .pyc file with the same python executable app_main.py is run with. Also check for correct output. Message-ID: <56be34c4.84c9c20a.db8ee.ffffe597@mx.google.com> Author: Manuel Jacob Branch: py3.3 Changeset: r82191:bba9ea8739cc Date: 2016-02-12 20:36 +0100 http://bitbucket.org/pypy/pypy/changeset/bba9ea8739cc/ Log: Fix test_pyc_commandline_argument() by creating the .pyc file with the same python executable app_main.py is run with. Also check for correct output. 
diff --git a/pypy/interpreter/test/test_app_main.py b/pypy/interpreter/test/test_app_main.py --- a/pypy/interpreter/test/test_app_main.py +++ b/pypy/interpreter/test/test_app_main.py @@ -37,21 +37,11 @@ def getscript_pyc(space, source): p = _get_next_path() p.write(str(py.code.Source(source))) - w_dir = space.wrap(str(p.dirpath())) - w_modname = space.wrap(p.purebasename) - space.appexec([w_dir, w_modname], """(dir, modname): - import sys - d = sys.modules.copy() - sys.path.insert(0, dir) - __import__(modname) - sys.path.pop(0) - for key in list(sys.modules.keys()): - if key not in d: - del sys.modules[key] - """) + subprocess.check_call([python3, "-c", "import " + p.purebasename], + env={'PYTHONPATH': str(p.dirpath())}) # the .pyc file should have been created above pycache = p.dirpath('__pycache__') - pycs = pycache.listdir(p.basename + '*.pyc') + pycs = pycache.listdir(p.purebasename + '*.pyc') assert len(pycs) == 1 return str(pycs[0]) @@ -926,7 +916,7 @@ p = getscript_pyc(self.space, "print(6*7)\n") assert os.path.isfile(p) and p.endswith('.pyc') data = self.run(p) - assert data == 'in _run_compiled_module\n' + assert data == '42\n' def test_main_in_dir_commandline_argument(self): if not hasattr(runpy, '_run_module_as_main'): From pypy.commits at gmail.com Fri Feb 12 14:38:42 2016 From: pypy.commits at gmail.com (mjacob) Date: Fri, 12 Feb 2016 11:38:42 -0800 (PST) Subject: [pypy-commit] pypy py3.3: Explicitly pass name to SourcelessFileLoader.load_module(). This is seemingly sometimes needed when running on top of CPython. Message-ID: <56be34c2.41df1c0a.d1f8b.574c@mx.google.com> Author: Manuel Jacob Branch: py3.3 Changeset: r82190:4939f7202eaf Date: 2016-02-12 18:52 +0100 http://bitbucket.org/pypy/pypy/changeset/4939f7202eaf/ Log: Explicitly pass name to SourcelessFileLoader.load_module(). This is seemingly sometimes needed when running on top of CPython. 
diff --git a/pypy/interpreter/app_main.py b/pypy/interpreter/app_main.py --- a/pypy/interpreter/app_main.py +++ b/pypy/interpreter/app_main.py @@ -676,7 +676,7 @@ filename = filename.lower() if filename.endswith('.pyc') or filename.endswith('.pyo'): loader = SourcelessFileLoader('__main__', filename) - args = (loader.load_module,) + args = (loader.load_module, loader.name) else: filename = sys.argv[0] for hook in sys.path_hooks: From pypy.commits at gmail.com Fri Feb 12 16:15:47 2016 From: pypy.commits at gmail.com (mjacob) Date: Fri, 12 Feb 2016 13:15:47 -0800 (PST) Subject: [pypy-commit] pypy py3.3: Fix. Message-ID: <56be4b83.4d0d1c0a.622cd.763a@mx.google.com> Author: Manuel Jacob Branch: py3.3 Changeset: r82192:7fc36d3705b5 Date: 2016-02-12 22:09 +0100 http://bitbucket.org/pypy/pypy/changeset/7fc36d3705b5/ Log: Fix. diff --git a/pypy/interpreter/test/test_pycode.py b/pypy/interpreter/test/test_pycode.py --- a/pypy/interpreter/test/test_pycode.py +++ b/pypy/interpreter/test/test_pycode.py @@ -15,5 +15,5 @@ sys.stdout = stdout print '>>>\n' + output + '\n<<<' assert ' 1 (7)' in output - assert ' 3 (None)' in output - assert ' 16 RETURN_VALUE ' in output + assert ' 4 (None)' in output + assert ' 19 RETURN_VALUE ' in output From pypy.commits at gmail.com Fri Feb 12 16:35:38 2016 From: pypy.commits at gmail.com (jbs) Date: Fri, 12 Feb 2016 13:35:38 -0800 (PST) Subject: [pypy-commit] pypy reorder-map-attributes: fixed always adding last attribute to map cache Message-ID: <56be502a.046f1c0a.665af.7c79@mx.google.com> Author: Jasper.Schulz Branch: reorder-map-attributes Changeset: r82193:a64e20eaa1db Date: 2016-02-12 21:34 +0000 http://bitbucket.org/pypy/pypy/changeset/a64e20eaa1db/ Log: fixed always adding last attribute to map cache diff --git a/pypy/objspace/std/mapdict.py b/pypy/objspace/std/mapdict.py --- a/pypy/objspace/std/mapdict.py +++ b/pypy/objspace/std/mapdict.py @@ -219,19 +219,19 @@ current = current.back else: attr._switch_map_and_write_storage(obj, 
w_value) - stack_index = localstack_index - + if not localstack_index: + return + if not stack_index: - return - - # add the first attribute of the stack without reordering - # to prevent an endless loop - stack_index += -1 - next_map = stack_maps[stack_index] - w_value = stack_values[stack_index] - obj._get_mapdict_map()._add_attr_without_reordering( - obj, next_map.name, next_map.index, w_value) + # add the first attribute of the stack without reordering + # to prevent an endless loop + localstack_index += -1 + next_map = stack_maps[localstack_index] + w_value = stack_values[localstack_index] + obj._get_mapdict_map()._add_attr_without_reordering( + obj, next_map.name, next_map.index, w_value) + stack_index = localstack_index break if not stack_index: diff --git a/pypy/objspace/std/test/test_mapdict.py b/pypy/objspace/std/test/test_mapdict.py --- a/pypy/objspace/std/test/test_mapdict.py +++ b/pypy/objspace/std/test/test_mapdict.py @@ -188,6 +188,29 @@ assert obj.map is obj2.map +def test_insert_different_orders_5(): + cls = Class() + obj = cls.instantiate() + obj2 = cls.instantiate() + + obj.setdictvalue(space, "a", 10) + obj.setdictvalue(space, "b", 20) + obj.setdictvalue(space, "c", 30) + obj.setdictvalue(space, "d", 40) + + obj2.setdictvalue(space, "d", 50) + obj2.setdictvalue(space, "c", 50) + obj2.setdictvalue(space, "b", 50) + obj2.setdictvalue(space, "a", 50) + + obj3 = cls.instantiate() + obj3.setdictvalue(space, "d", 50) + obj3.setdictvalue(space, "c", 50) + obj3.setdictvalue(space, "b", 50) + obj3.setdictvalue(space, "a", 50) + + assert obj.map is obj3.map + def test_bug_stack_overflow_insert_attributes(): cls = Class() obj = cls.instantiate() From pypy.commits at gmail.com Fri Feb 12 17:01:44 2016 From: pypy.commits at gmail.com (mjacob) Date: Fri, 12 Feb 2016 14:01:44 -0800 (PST) Subject: [pypy-commit] pypy py3k: hg merge default (+ fixes) Message-ID: <56be5648.0ab81c0a.41694.7fac@mx.google.com> Author: Manuel Jacob Branch: py3k Changeset: 
r82195:5d115756a4ce Date: 2016-02-12 23:00 +0100 http://bitbucket.org/pypy/pypy/changeset/5d115756a4ce/ Log: hg merge default (+ fixes) diff too long, truncating to 2000 out of 5677 lines diff --git a/dotviewer/drawgraph.py b/dotviewer/drawgraph.py --- a/dotviewer/drawgraph.py +++ b/dotviewer/drawgraph.py @@ -14,12 +14,661 @@ FONT = os.path.join(this_dir, 'font', 'DroidSans.ttf') FIXEDFONT = os.path.join(this_dir, 'font', 'DroidSansMono.ttf') COLOR = { - 'black': (0,0,0), - 'white': (255,255,255), - 'red': (255,0,0), - 'green': (0,255,0), - 'blue': (0,0,255), - 'yellow': (255,255,0), + 'aliceblue': (240, 248, 255), + 'antiquewhite': (250, 235, 215), + 'antiquewhite1': (255, 239, 219), + 'antiquewhite2': (238, 223, 204), + 'antiquewhite3': (205, 192, 176), + 'antiquewhite4': (139, 131, 120), + 'aquamarine': (127, 255, 212), + 'aquamarine1': (127, 255, 212), + 'aquamarine2': (118, 238, 198), + 'aquamarine3': (102, 205, 170), + 'aquamarine4': (69, 139, 116), + 'azure': (240, 255, 255), + 'azure1': (240, 255, 255), + 'azure2': (224, 238, 238), + 'azure3': (193, 205, 205), + 'azure4': (131, 139, 139), + 'beige': (245, 245, 220), + 'bisque': (255, 228, 196), + 'bisque1': (255, 228, 196), + 'bisque2': (238, 213, 183), + 'bisque3': (205, 183, 158), + 'bisque4': (139, 125, 107), + 'black': (0, 0, 0), + 'blanchedalmond': (255, 235, 205), + 'blue': (0, 0, 255), + 'blue1': (0, 0, 255), + 'blue2': (0, 0, 238), + 'blue3': (0, 0, 205), + 'blue4': (0, 0, 139), + 'blueviolet': (138, 43, 226), + 'brown': (165, 42, 42), + 'brown1': (255, 64, 64), + 'brown2': (238, 59, 59), + 'brown3': (205, 51, 51), + 'brown4': (139, 35, 35), + 'burlywood': (222, 184, 135), + 'burlywood1': (255, 211, 155), + 'burlywood2': (238, 197, 145), + 'burlywood3': (205, 170, 125), + 'burlywood4': (139, 115, 85), + 'cadetblue': (95, 158, 160), + 'cadetblue1': (152, 245, 255), + 'cadetblue2': (142, 229, 238), + 'cadetblue3': (122, 197, 205), + 'cadetblue4': (83, 134, 139), + 'chartreuse': (127, 255, 0), + 
'chartreuse1': (127, 255, 0), + 'chartreuse2': (118, 238, 0), + 'chartreuse3': (102, 205, 0), + 'chartreuse4': (69, 139, 0), + 'chocolate': (210, 105, 30), + 'chocolate1': (255, 127, 36), + 'chocolate2': (238, 118, 33), + 'chocolate3': (205, 102, 29), + 'chocolate4': (139, 69, 19), + 'coral': (255, 127, 80), + 'coral1': (255, 114, 86), + 'coral2': (238, 106, 80), + 'coral3': (205, 91, 69), + 'coral4': (139, 62, 47), + 'cornflowerblue': (100, 149, 237), + 'cornsilk': (255, 248, 220), + 'cornsilk1': (255, 248, 220), + 'cornsilk2': (238, 232, 205), + 'cornsilk3': (205, 200, 177), + 'cornsilk4': (139, 136, 120), + 'crimson': (220, 20, 60), + 'cyan': (0, 255, 255), + 'cyan1': (0, 255, 255), + 'cyan2': (0, 238, 238), + 'cyan3': (0, 205, 205), + 'cyan4': (0, 139, 139), + 'darkgoldenrod': (184, 134, 11), + 'darkgoldenrod1': (255, 185, 15), + 'darkgoldenrod2': (238, 173, 14), + 'darkgoldenrod3': (205, 149, 12), + 'darkgoldenrod4': (139, 101, 8), + 'darkgreen': (0, 100, 0), + 'darkkhaki': (189, 183, 107), + 'darkolivegreen': (85, 107, 47), + 'darkolivegreen1': (202, 255, 112), + 'darkolivegreen2': (188, 238, 104), + 'darkolivegreen3': (162, 205, 90), + 'darkolivegreen4': (110, 139, 61), + 'darkorange': (255, 140, 0), + 'darkorange1': (255, 127, 0), + 'darkorange2': (238, 118, 0), + 'darkorange3': (205, 102, 0), + 'darkorange4': (139, 69, 0), + 'darkorchid': (153, 50, 204), + 'darkorchid1': (191, 62, 255), + 'darkorchid2': (178, 58, 238), + 'darkorchid3': (154, 50, 205), + 'darkorchid4': (104, 34, 139), + 'darksalmon': (233, 150, 122), + 'darkseagreen': (143, 188, 143), + 'darkseagreen1': (193, 255, 193), + 'darkseagreen2': (180, 238, 180), + 'darkseagreen3': (155, 205, 155), + 'darkseagreen4': (105, 139, 105), + 'darkslateblue': (72, 61, 139), + 'darkslategray': (47, 79, 79), + 'darkslategray1': (151, 255, 255), + 'darkslategray2': (141, 238, 238), + 'darkslategray3': (121, 205, 205), + 'darkslategray4': (82, 139, 139), + 'darkslategrey': (47, 79, 79), + 'darkturquoise': (0, 
206, 209), + 'darkviolet': (148, 0, 211), + 'deeppink': (255, 20, 147), + 'deeppink1': (255, 20, 147), + 'deeppink2': (238, 18, 137), + 'deeppink3': (205, 16, 118), + 'deeppink4': (139, 10, 80), + 'deepskyblue': (0, 191, 255), + 'deepskyblue1': (0, 191, 255), + 'deepskyblue2': (0, 178, 238), + 'deepskyblue3': (0, 154, 205), + 'deepskyblue4': (0, 104, 139), + 'dimgray': (105, 105, 105), + 'dimgrey': (105, 105, 105), + 'dodgerblue': (30, 144, 255), + 'dodgerblue1': (30, 144, 255), + 'dodgerblue2': (28, 134, 238), + 'dodgerblue3': (24, 116, 205), + 'dodgerblue4': (16, 78, 139), + 'firebrick': (178, 34, 34), + 'firebrick1': (255, 48, 48), + 'firebrick2': (238, 44, 44), + 'firebrick3': (205, 38, 38), + 'firebrick4': (139, 26, 26), + 'floralwhite': (255, 250, 240), + 'forestgreen': (34, 139, 34), + 'gainsboro': (220, 220, 220), + 'ghostwhite': (248, 248, 255), + 'gold': (255, 215, 0), + 'gold1': (255, 215, 0), + 'gold2': (238, 201, 0), + 'gold3': (205, 173, 0), + 'gold4': (139, 117, 0), + 'goldenrod': (218, 165, 32), + 'goldenrod1': (255, 193, 37), + 'goldenrod2': (238, 180, 34), + 'goldenrod3': (205, 155, 29), + 'goldenrod4': (139, 105, 20), + 'gray': (192, 192, 192), + 'gray0': (0, 0, 0), + 'gray1': (3, 3, 3), + 'gray10': (26, 26, 26), + 'gray100': (255, 255, 255), + 'gray11': (28, 28, 28), + 'gray12': (31, 31, 31), + 'gray13': (33, 33, 33), + 'gray14': (36, 36, 36), + 'gray15': (38, 38, 38), + 'gray16': (41, 41, 41), + 'gray17': (43, 43, 43), + 'gray18': (46, 46, 46), + 'gray19': (48, 48, 48), + 'gray2': (5, 5, 5), + 'gray20': (51, 51, 51), + 'gray21': (54, 54, 54), + 'gray22': (56, 56, 56), + 'gray23': (59, 59, 59), + 'gray24': (61, 61, 61), + 'gray25': (64, 64, 64), + 'gray26': (66, 66, 66), + 'gray27': (69, 69, 69), + 'gray28': (71, 71, 71), + 'gray29': (74, 74, 74), + 'gray3': (8, 8, 8), + 'gray30': (77, 77, 77), + 'gray31': (79, 79, 79), + 'gray32': (82, 82, 82), + 'gray33': (84, 84, 84), + 'gray34': (87, 87, 87), + 'gray35': (89, 89, 89), + 'gray36': (92, 92, 
92), + 'gray37': (94, 94, 94), + 'gray38': (97, 97, 97), + 'gray39': (99, 99, 99), + 'gray4': (10, 10, 10), + 'gray40': (102, 102, 102), + 'gray41': (105, 105, 105), + 'gray42': (107, 107, 107), + 'gray43': (110, 110, 110), + 'gray44': (112, 112, 112), + 'gray45': (115, 115, 115), + 'gray46': (117, 117, 117), + 'gray47': (120, 120, 120), + 'gray48': (122, 122, 122), + 'gray49': (125, 125, 125), + 'gray5': (13, 13, 13), + 'gray50': (127, 127, 127), + 'gray51': (130, 130, 130), + 'gray52': (133, 133, 133), + 'gray53': (135, 135, 135), + 'gray54': (138, 138, 138), + 'gray55': (140, 140, 140), + 'gray56': (143, 143, 143), + 'gray57': (145, 145, 145), + 'gray58': (148, 148, 148), + 'gray59': (150, 150, 150), + 'gray6': (15, 15, 15), + 'gray60': (153, 153, 153), + 'gray61': (156, 156, 156), + 'gray62': (158, 158, 158), + 'gray63': (161, 161, 161), + 'gray64': (163, 163, 163), + 'gray65': (166, 166, 166), + 'gray66': (168, 168, 168), + 'gray67': (171, 171, 171), + 'gray68': (173, 173, 173), + 'gray69': (176, 176, 176), + 'gray7': (18, 18, 18), + 'gray70': (179, 179, 179), + 'gray71': (181, 181, 181), + 'gray72': (184, 184, 184), + 'gray73': (186, 186, 186), + 'gray74': (189, 189, 189), + 'gray75': (191, 191, 191), + 'gray76': (194, 194, 194), + 'gray77': (196, 196, 196), + 'gray78': (199, 199, 199), + 'gray79': (201, 201, 201), + 'gray8': (20, 20, 20), + 'gray80': (204, 204, 204), + 'gray81': (207, 207, 207), + 'gray82': (209, 209, 209), + 'gray83': (212, 212, 212), + 'gray84': (214, 214, 214), + 'gray85': (217, 217, 217), + 'gray86': (219, 219, 219), + 'gray87': (222, 222, 222), + 'gray88': (224, 224, 224), + 'gray89': (227, 227, 227), + 'gray9': (23, 23, 23), + 'gray90': (229, 229, 229), + 'gray91': (232, 232, 232), + 'gray92': (235, 235, 235), + 'gray93': (237, 237, 237), + 'gray94': (240, 240, 240), + 'gray95': (242, 242, 242), + 'gray96': (245, 245, 245), + 'gray97': (247, 247, 247), + 'gray98': (250, 250, 250), + 'gray99': (252, 252, 252), + 'green': (0, 255, 0), + 
'green1': (0, 255, 0), + 'green2': (0, 238, 0), + 'green3': (0, 205, 0), + 'green4': (0, 139, 0), + 'greenyellow': (173, 255, 47), + 'grey': (192, 192, 192), + 'grey0': (0, 0, 0), + 'grey1': (3, 3, 3), + 'grey10': (26, 26, 26), + 'grey100': (255, 255, 255), + 'grey11': (28, 28, 28), + 'grey12': (31, 31, 31), + 'grey13': (33, 33, 33), + 'grey14': (36, 36, 36), + 'grey15': (38, 38, 38), + 'grey16': (41, 41, 41), + 'grey17': (43, 43, 43), + 'grey18': (46, 46, 46), + 'grey19': (48, 48, 48), + 'grey2': (5, 5, 5), + 'grey20': (51, 51, 51), + 'grey21': (54, 54, 54), + 'grey22': (56, 56, 56), + 'grey23': (59, 59, 59), + 'grey24': (61, 61, 61), + 'grey25': (64, 64, 64), + 'grey26': (66, 66, 66), + 'grey27': (69, 69, 69), + 'grey28': (71, 71, 71), + 'grey29': (74, 74, 74), + 'grey3': (8, 8, 8), + 'grey30': (77, 77, 77), + 'grey31': (79, 79, 79), + 'grey32': (82, 82, 82), + 'grey33': (84, 84, 84), + 'grey34': (87, 87, 87), + 'grey35': (89, 89, 89), + 'grey36': (92, 92, 92), + 'grey37': (94, 94, 94), + 'grey38': (97, 97, 97), + 'grey39': (99, 99, 99), + 'grey4': (10, 10, 10), + 'grey40': (102, 102, 102), + 'grey41': (105, 105, 105), + 'grey42': (107, 107, 107), + 'grey43': (110, 110, 110), + 'grey44': (112, 112, 112), + 'grey45': (115, 115, 115), + 'grey46': (117, 117, 117), + 'grey47': (120, 120, 120), + 'grey48': (122, 122, 122), + 'grey49': (125, 125, 125), + 'grey5': (13, 13, 13), + 'grey50': (127, 127, 127), + 'grey51': (130, 130, 130), + 'grey52': (133, 133, 133), + 'grey53': (135, 135, 135), + 'grey54': (138, 138, 138), + 'grey55': (140, 140, 140), + 'grey56': (143, 143, 143), + 'grey57': (145, 145, 145), + 'grey58': (148, 148, 148), + 'grey59': (150, 150, 150), + 'grey6': (15, 15, 15), + 'grey60': (153, 153, 153), + 'grey61': (156, 156, 156), + 'grey62': (158, 158, 158), + 'grey63': (161, 161, 161), + 'grey64': (163, 163, 163), + 'grey65': (166, 166, 166), + 'grey66': (168, 168, 168), + 'grey67': (171, 171, 171), + 'grey68': (173, 173, 173), + 'grey69': (176, 176, 
176), + 'grey7': (18, 18, 18), + 'grey70': (179, 179, 179), + 'grey71': (181, 181, 181), + 'grey72': (184, 184, 184), + 'grey73': (186, 186, 186), + 'grey74': (189, 189, 189), + 'grey75': (191, 191, 191), + 'grey76': (194, 194, 194), + 'grey77': (196, 196, 196), + 'grey78': (199, 199, 199), + 'grey79': (201, 201, 201), + 'grey8': (20, 20, 20), + 'grey80': (204, 204, 204), + 'grey81': (207, 207, 207), + 'grey82': (209, 209, 209), + 'grey83': (212, 212, 212), + 'grey84': (214, 214, 214), + 'grey85': (217, 217, 217), + 'grey86': (219, 219, 219), + 'grey87': (222, 222, 222), + 'grey88': (224, 224, 224), + 'grey89': (227, 227, 227), + 'grey9': (23, 23, 23), + 'grey90': (229, 229, 229), + 'grey91': (232, 232, 232), + 'grey92': (235, 235, 235), + 'grey93': (237, 237, 237), + 'grey94': (240, 240, 240), + 'grey95': (242, 242, 242), + 'grey96': (245, 245, 245), + 'grey97': (247, 247, 247), + 'grey98': (250, 250, 250), + 'grey99': (252, 252, 252), + 'honeydew': (240, 255, 240), + 'honeydew1': (240, 255, 240), + 'honeydew2': (224, 238, 224), + 'honeydew3': (193, 205, 193), + 'honeydew4': (131, 139, 131), + 'hotpink': (255, 105, 180), + 'hotpink1': (255, 110, 180), + 'hotpink2': (238, 106, 167), + 'hotpink3': (205, 96, 144), + 'hotpink4': (139, 58, 98), + 'indianred': (205, 92, 92), + 'indianred1': (255, 106, 106), + 'indianred2': (238, 99, 99), + 'indianred3': (205, 85, 85), + 'indianred4': (139, 58, 58), + 'indigo': (75, 0, 130), + 'invis': (255, 255, 254), + 'ivory': (255, 255, 240), + 'ivory1': (255, 255, 240), + 'ivory2': (238, 238, 224), + 'ivory3': (205, 205, 193), + 'ivory4': (139, 139, 131), + 'khaki': (240, 230, 140), + 'khaki1': (255, 246, 143), + 'khaki2': (238, 230, 133), + 'khaki3': (205, 198, 115), + 'khaki4': (139, 134, 78), + 'lavender': (230, 230, 250), + 'lavenderblush': (255, 240, 245), + 'lavenderblush1': (255, 240, 245), + 'lavenderblush2': (238, 224, 229), + 'lavenderblush3': (205, 193, 197), + 'lavenderblush4': (139, 131, 134), + 'lawngreen': (124, 252, 
0), + 'lemonchiffon': (255, 250, 205), + 'lemonchiffon1': (255, 250, 205), + 'lemonchiffon2': (238, 233, 191), + 'lemonchiffon3': (205, 201, 165), + 'lemonchiffon4': (139, 137, 112), + 'lightblue': (173, 216, 230), + 'lightblue1': (191, 239, 255), + 'lightblue2': (178, 223, 238), + 'lightblue3': (154, 192, 205), + 'lightblue4': (104, 131, 139), + 'lightcoral': (240, 128, 128), + 'lightcyan': (224, 255, 255), + 'lightcyan1': (224, 255, 255), + 'lightcyan2': (209, 238, 238), + 'lightcyan3': (180, 205, 205), + 'lightcyan4': (122, 139, 139), + 'lightgoldenrod': (238, 221, 130), + 'lightgoldenrod1': (255, 236, 139), + 'lightgoldenrod2': (238, 220, 130), + 'lightgoldenrod3': (205, 190, 112), + 'lightgoldenrod4': (139, 129, 76), + 'lightgoldenrodyellow': (250, 250, 210), + 'lightgray': (211, 211, 211), + 'lightgrey': (211, 211, 211), + 'lightpink': (255, 182, 193), + 'lightpink1': (255, 174, 185), + 'lightpink2': (238, 162, 173), + 'lightpink3': (205, 140, 149), + 'lightpink4': (139, 95, 101), + 'lightsalmon': (255, 160, 122), + 'lightsalmon1': (255, 160, 122), + 'lightsalmon2': (238, 149, 114), + 'lightsalmon3': (205, 129, 98), + 'lightsalmon4': (139, 87, 66), + 'lightseagreen': (32, 178, 170), + 'lightskyblue': (135, 206, 250), + 'lightskyblue1': (176, 226, 255), + 'lightskyblue2': (164, 211, 238), + 'lightskyblue3': (141, 182, 205), + 'lightskyblue4': (96, 123, 139), + 'lightslateblue': (132, 112, 255), + 'lightslategray': (119, 136, 153), + 'lightslategrey': (119, 136, 153), + 'lightsteelblue': (176, 196, 222), + 'lightsteelblue1': (202, 225, 255), + 'lightsteelblue2': (188, 210, 238), + 'lightsteelblue3': (162, 181, 205), + 'lightsteelblue4': (110, 123, 139), + 'lightyellow': (255, 255, 224), + 'lightyellow1': (255, 255, 224), + 'lightyellow2': (238, 238, 209), + 'lightyellow3': (205, 205, 180), + 'lightyellow4': (139, 139, 122), + 'limegreen': (50, 205, 50), + 'linen': (250, 240, 230), + 'magenta': (255, 0, 255), + 'magenta1': (255, 0, 255), + 'magenta2': (238, 0, 
238), + 'magenta3': (205, 0, 205), + 'magenta4': (139, 0, 139), + 'maroon': (176, 48, 96), + 'maroon1': (255, 52, 179), + 'maroon2': (238, 48, 167), + 'maroon3': (205, 41, 144), + 'maroon4': (139, 28, 98), + 'mediumaquamarine': (102, 205, 170), + 'mediumblue': (0, 0, 205), + 'mediumorchid': (186, 85, 211), + 'mediumorchid1': (224, 102, 255), + 'mediumorchid2': (209, 95, 238), + 'mediumorchid3': (180, 82, 205), + 'mediumorchid4': (122, 55, 139), + 'mediumpurple': (147, 112, 219), + 'mediumpurple1': (171, 130, 255), + 'mediumpurple2': (159, 121, 238), + 'mediumpurple3': (137, 104, 205), + 'mediumpurple4': (93, 71, 139), + 'mediumseagreen': (60, 179, 113), + 'mediumslateblue': (123, 104, 238), + 'mediumspringgreen': (0, 250, 154), + 'mediumturquoise': (72, 209, 204), + 'mediumvioletred': (199, 21, 133), + 'midnightblue': (25, 25, 112), + 'mintcream': (245, 255, 250), + 'mistyrose': (255, 228, 225), + 'mistyrose1': (255, 228, 225), + 'mistyrose2': (238, 213, 210), + 'mistyrose3': (205, 183, 181), + 'mistyrose4': (139, 125, 123), + 'moccasin': (255, 228, 181), + 'navajowhite': (255, 222, 173), + 'navajowhite1': (255, 222, 173), + 'navajowhite2': (238, 207, 161), + 'navajowhite3': (205, 179, 139), + 'navajowhite4': (139, 121, 94), + 'navy': (0, 0, 128), + 'navyblue': (0, 0, 128), + 'none': (255, 255, 254), + 'oldlace': (253, 245, 230), + 'olivedrab': (107, 142, 35), + 'olivedrab1': (192, 255, 62), + 'olivedrab2': (179, 238, 58), + 'olivedrab3': (154, 205, 50), + 'olivedrab4': (105, 139, 34), + 'orange': (255, 165, 0), + 'orange1': (255, 165, 0), + 'orange2': (238, 154, 0), + 'orange3': (205, 133, 0), + 'orange4': (139, 90, 0), + 'orangered': (255, 69, 0), + 'orangered1': (255, 69, 0), + 'orangered2': (238, 64, 0), + 'orangered3': (205, 55, 0), + 'orangered4': (139, 37, 0), + 'orchid': (218, 112, 214), + 'orchid1': (255, 131, 250), + 'orchid2': (238, 122, 233), + 'orchid3': (205, 105, 201), + 'orchid4': (139, 71, 137), + 'palegoldenrod': (238, 232, 170), + 'palegreen': 
(152, 251, 152), + 'palegreen1': (154, 255, 154), + 'palegreen2': (144, 238, 144), + 'palegreen3': (124, 205, 124), + 'palegreen4': (84, 139, 84), + 'paleturquoise': (175, 238, 238), + 'paleturquoise1': (187, 255, 255), + 'paleturquoise2': (174, 238, 238), + 'paleturquoise3': (150, 205, 205), + 'paleturquoise4': (102, 139, 139), + 'palevioletred': (219, 112, 147), + 'palevioletred1': (255, 130, 171), + 'palevioletred2': (238, 121, 159), + 'palevioletred3': (205, 104, 137), + 'palevioletred4': (139, 71, 93), + 'papayawhip': (255, 239, 213), + 'peachpuff': (255, 218, 185), + 'peachpuff1': (255, 218, 185), + 'peachpuff2': (238, 203, 173), + 'peachpuff3': (205, 175, 149), + 'peachpuff4': (139, 119, 101), + 'peru': (205, 133, 63), + 'pink': (255, 192, 203), + 'pink1': (255, 181, 197), + 'pink2': (238, 169, 184), + 'pink3': (205, 145, 158), + 'pink4': (139, 99, 108), + 'plum': (221, 160, 221), + 'plum1': (255, 187, 255), + 'plum2': (238, 174, 238), + 'plum3': (205, 150, 205), + 'plum4': (139, 102, 139), + 'powderblue': (176, 224, 230), + 'purple': (160, 32, 240), + 'purple1': (155, 48, 255), + 'purple2': (145, 44, 238), + 'purple3': (125, 38, 205), + 'purple4': (85, 26, 139), + 'red': (255, 0, 0), + 'red1': (255, 0, 0), + 'red2': (238, 0, 0), + 'red3': (205, 0, 0), + 'red4': (139, 0, 0), + 'rosybrown': (188, 143, 143), + 'rosybrown1': (255, 193, 193), + 'rosybrown2': (238, 180, 180), + 'rosybrown3': (205, 155, 155), + 'rosybrown4': (139, 105, 105), + 'royalblue': (65, 105, 225), + 'royalblue1': (72, 118, 255), + 'royalblue2': (67, 110, 238), + 'royalblue3': (58, 95, 205), + 'royalblue4': (39, 64, 139), + 'saddlebrown': (139, 69, 19), + 'salmon': (250, 128, 114), + 'salmon1': (255, 140, 105), + 'salmon2': (238, 130, 98), + 'salmon3': (205, 112, 84), + 'salmon4': (139, 76, 57), + 'sandybrown': (244, 164, 96), + 'seagreen': (46, 139, 87), + 'seagreen1': (84, 255, 159), + 'seagreen2': (78, 238, 148), + 'seagreen3': (67, 205, 128), + 'seagreen4': (46, 139, 87), + 'seashell': 
(255, 245, 238), + 'seashell1': (255, 245, 238), + 'seashell2': (238, 229, 222), + 'seashell3': (205, 197, 191), + 'seashell4': (139, 134, 130), + 'sienna': (160, 82, 45), + 'sienna1': (255, 130, 71), + 'sienna2': (238, 121, 66), + 'sienna3': (205, 104, 57), + 'sienna4': (139, 71, 38), + 'skyblue': (135, 206, 235), + 'skyblue1': (135, 206, 255), + 'skyblue2': (126, 192, 238), + 'skyblue3': (108, 166, 205), + 'skyblue4': (74, 112, 139), + 'slateblue': (106, 90, 205), + 'slateblue1': (131, 111, 255), + 'slateblue2': (122, 103, 238), + 'slateblue3': (105, 89, 205), + 'slateblue4': (71, 60, 139), + 'slategray': (112, 128, 144), + 'slategray1': (198, 226, 255), + 'slategray2': (185, 211, 238), + 'slategray3': (159, 182, 205), + 'slategray4': (108, 123, 139), + 'slategrey': (112, 128, 144), + 'snow': (255, 250, 250), + 'snow1': (255, 250, 250), + 'snow2': (238, 233, 233), + 'snow3': (205, 201, 201), + 'snow4': (139, 137, 137), + 'springgreen': (0, 255, 127), + 'springgreen1': (0, 255, 127), + 'springgreen2': (0, 238, 118), + 'springgreen3': (0, 205, 102), + 'springgreen4': (0, 139, 69), + 'steelblue': (70, 130, 180), + 'steelblue1': (99, 184, 255), + 'steelblue2': (92, 172, 238), + 'steelblue3': (79, 148, 205), + 'steelblue4': (54, 100, 139), + 'tan': (210, 180, 140), + 'tan1': (255, 165, 79), + 'tan2': (238, 154, 73), + 'tan3': (205, 133, 63), + 'tan4': (139, 90, 43), + 'thistle': (216, 191, 216), + 'thistle1': (255, 225, 255), + 'thistle2': (238, 210, 238), + 'thistle3': (205, 181, 205), + 'thistle4': (139, 123, 139), + 'tomato': (255, 99, 71), + 'tomato1': (255, 99, 71), + 'tomato2': (238, 92, 66), + 'tomato3': (205, 79, 57), + 'tomato4': (139, 54, 38), + 'transparent': (255, 255, 254), + 'turquoise': (64, 224, 208), + 'turquoise1': (0, 245, 255), + 'turquoise2': (0, 229, 238), + 'turquoise3': (0, 197, 205), + 'turquoise4': (0, 134, 139), + 'violet': (238, 130, 238), + 'violetred': (208, 32, 144), + 'violetred1': (255, 62, 150), + 'violetred2': (238, 58, 140), + 
'violetred3': (205, 50, 120), + 'violetred4': (139, 34, 82), + 'wheat': (245, 222, 179), + 'wheat1': (255, 231, 186), + 'wheat2': (238, 216, 174), + 'wheat3': (205, 186, 150), + 'wheat4': (139, 126, 102), + 'white': (255, 255, 255), + 'whitesmoke': (245, 245, 245), + 'yellow': (255, 255, 0), + 'yellow1': (255, 255, 0), + 'yellow2': (238, 238, 0), + 'yellow3': (205, 205, 0), + 'yellow4': (139, 139, 0), + 'yellowgreen': (154, 205, 50), } re_nonword=re.compile(r'([^0-9a-zA-Z_.]+)') re_linewidth=re.compile(r'setlinewidth\((\d+(\.\d*)?|\.\d+)\)') diff --git a/lib_pypy/cffi.egg-info/PKG-INFO b/lib_pypy/cffi.egg-info/PKG-INFO --- a/lib_pypy/cffi.egg-info/PKG-INFO +++ b/lib_pypy/cffi.egg-info/PKG-INFO @@ -1,6 +1,6 @@ Metadata-Version: 1.1 Name: cffi -Version: 1.5.0 +Version: 1.5.1 Summary: Foreign Function Interface for Python calling C code. Home-page: http://cffi.readthedocs.org Author: Armin Rigo, Maciej Fijalkowski diff --git a/lib_pypy/cffi/__init__.py b/lib_pypy/cffi/__init__.py --- a/lib_pypy/cffi/__init__.py +++ b/lib_pypy/cffi/__init__.py @@ -4,8 +4,8 @@ from .api import FFI, CDefError, FFIError from .ffiplatform import VerificationError, VerificationMissing -__version__ = "1.5.0" -__version_info__ = (1, 5, 0) +__version__ = "1.5.1" +__version_info__ = (1, 5, 1) # The verifier module file names are based on the CRC32 of a string that # contains the following version number. 
It may be older than __version__ diff --git a/lib_pypy/cffi/_cffi_include.h b/lib_pypy/cffi/_cffi_include.h --- a/lib_pypy/cffi/_cffi_include.h +++ b/lib_pypy/cffi/_cffi_include.h @@ -231,6 +231,12 @@ ((got_nonpos) == (expected <= 0) && \ (got) == (unsigned long long)expected) +#ifdef MS_WIN32 +# define _cffi_stdcall __stdcall +#else +# define _cffi_stdcall /* nothing */ +#endif + #ifdef __cplusplus } #endif diff --git a/lib_pypy/cffi/_embedding.h b/lib_pypy/cffi/_embedding.h new file mode 100644 --- /dev/null +++ b/lib_pypy/cffi/_embedding.h @@ -0,0 +1,517 @@ + +/***** Support code for embedding *****/ + +#if defined(_MSC_VER) +# define CFFI_DLLEXPORT __declspec(dllexport) +#elif defined(__GNUC__) +# define CFFI_DLLEXPORT __attribute__((visibility("default"))) +#else +# define CFFI_DLLEXPORT /* nothing */ +#endif + + +/* There are two global variables of type _cffi_call_python_fnptr: + + * _cffi_call_python, which we declare just below, is the one called + by ``extern "Python"`` implementations. + + * _cffi_call_python_org, which on CPython is actually part of the + _cffi_exports[] array, is the function pointer copied from + _cffi_backend. + + After initialization is complete, both are equal. However, the + first one remains equal to &_cffi_start_and_call_python until the + very end of initialization, when we are (or should be) sure that + concurrent threads also see a completely initialized world, and + only then is it changed. 
+*/ +#undef _cffi_call_python +typedef void (*_cffi_call_python_fnptr)(struct _cffi_externpy_s *, char *); +static void _cffi_start_and_call_python(struct _cffi_externpy_s *, char *); +static _cffi_call_python_fnptr _cffi_call_python = &_cffi_start_and_call_python; + + +#ifndef _MSC_VER + /* --- Assuming a GCC not infinitely old --- */ +# define cffi_compare_and_swap(l,o,n) __sync_bool_compare_and_swap(l,o,n) +# define cffi_write_barrier() __sync_synchronize() +# if !defined(__amd64__) && !defined(__x86_64__) && \ + !defined(__i386__) && !defined(__i386) +# define cffi_read_barrier() __sync_synchronize() +# else +# define cffi_read_barrier() (void)0 +# endif +#else + /* --- Windows threads version --- */ +# include +# define cffi_compare_and_swap(l,o,n) \ + (InterlockedCompareExchangePointer(l,n,o) == (o)) +# define cffi_write_barrier() InterlockedCompareExchange(&_cffi_dummy,0,0) +# define cffi_read_barrier() (void)0 +static volatile LONG _cffi_dummy; +#endif + +#ifdef WITH_THREAD +# ifndef _MSC_VER +# include + static pthread_mutex_t _cffi_embed_startup_lock; +# else + static CRITICAL_SECTION _cffi_embed_startup_lock; +# endif + static char _cffi_embed_startup_lock_ready = 0; +#endif + +static void _cffi_acquire_reentrant_mutex(void) +{ + static void *volatile lock = NULL; + + while (!cffi_compare_and_swap(&lock, NULL, (void *)1)) { + /* should ideally do a spin loop instruction here, but + hard to do it portably and doesn't really matter I + think: pthread_mutex_init() should be very fast, and + this is only run at start-up anyway. 
*/ + } + +#ifdef WITH_THREAD + if (!_cffi_embed_startup_lock_ready) { +# ifndef _MSC_VER + pthread_mutexattr_t attr; + pthread_mutexattr_init(&attr); + pthread_mutexattr_settype(&attr, PTHREAD_MUTEX_RECURSIVE); + pthread_mutex_init(&_cffi_embed_startup_lock, &attr); +# else + InitializeCriticalSection(&_cffi_embed_startup_lock); +# endif + _cffi_embed_startup_lock_ready = 1; + } +#endif + + while (!cffi_compare_and_swap(&lock, (void *)1, NULL)) + ; + +#ifndef _MSC_VER + pthread_mutex_lock(&_cffi_embed_startup_lock); +#else + EnterCriticalSection(&_cffi_embed_startup_lock); +#endif +} + +static void _cffi_release_reentrant_mutex(void) +{ +#ifndef _MSC_VER + pthread_mutex_unlock(&_cffi_embed_startup_lock); +#else + LeaveCriticalSection(&_cffi_embed_startup_lock); +#endif +} + + +/********** CPython-specific section **********/ +#ifndef PYPY_VERSION + + +#define _cffi_call_python_org _cffi_exports[_CFFI_CPIDX] + +PyMODINIT_FUNC _CFFI_PYTHON_STARTUP_FUNC(void); /* forward */ + +static void _cffi_py_initialize(void) +{ + /* XXX use initsigs=0, which "skips initialization registration of + signal handlers, which might be useful when Python is + embedded" according to the Python docs. But review and think + if it should be a user-controllable setting. + + XXX we should also give a way to write errors to a buffer + instead of to stderr. + + XXX if importing 'site' fails, CPython (any version) calls + exit(). Should we try to work around this behavior here? + */ + Py_InitializeEx(0); +} + +static int _cffi_initialize_python(void) +{ + /* This initializes Python, imports _cffi_backend, and then the + present .dll/.so is set up as a CPython C extension module. + */ + int result; + PyGILState_STATE state; + PyObject *pycode=NULL, *global_dict=NULL, *x; + +#if PY_MAJOR_VERSION >= 3 + /* see comments in _cffi_carefully_make_gil() about the + Python2/Python3 difference + */ +#else + /* Acquire the GIL. We have no threadstate here. 
If Python is + already initialized, it is possible that there is already one + existing for this thread, but it is not made current now. + */ + PyEval_AcquireLock(); + + _cffi_py_initialize(); + + /* The Py_InitializeEx() sometimes made a threadstate for us, but + not always. Indeed Py_InitializeEx() could be called and do + nothing. So do we have a threadstate, or not? We don't know, + but we can replace it with NULL in all cases. + */ + (void)PyThreadState_Swap(NULL); + + /* Now we can release the GIL and re-acquire immediately using the + logic of PyGILState(), which handles making or installing the + correct threadstate. + */ + PyEval_ReleaseLock(); +#endif + state = PyGILState_Ensure(); + + /* Call the initxxx() function from the present module. It will + create and initialize us as a CPython extension module, instead + of letting the startup Python code do it---it might reimport + the same .dll/.so and get maybe confused on some platforms. + It might also have troubles locating the .dll/.so again for all + I know. + */ + (void)_CFFI_PYTHON_STARTUP_FUNC(); + if (PyErr_Occurred()) + goto error; + + /* Now run the Python code provided to ffi.embedding_init_code(). + */ + pycode = Py_CompileString(_CFFI_PYTHON_STARTUP_CODE, + "", + Py_file_input); + if (pycode == NULL) + goto error; + global_dict = PyDict_New(); + if (global_dict == NULL) + goto error; + if (PyDict_SetItemString(global_dict, "__builtins__", + PyThreadState_GET()->interp->builtins) < 0) + goto error; + x = PyEval_EvalCode( +#if PY_MAJOR_VERSION < 3 + (PyCodeObject *) +#endif + pycode, global_dict, global_dict); + if (x == NULL) + goto error; + Py_DECREF(x); + + /* Done! Now if we've been called from + _cffi_start_and_call_python() in an ``extern "Python"``, we can + only hope that the Python code did correctly set up the + corresponding @ffi.def_extern() function. 
Otherwise, the + general logic of ``extern "Python"`` functions (inside the + _cffi_backend module) will find that the reference is still + missing and print an error. + */ + result = 0; + done: + Py_XDECREF(pycode); + Py_XDECREF(global_dict); + PyGILState_Release(state); + return result; + + error:; + { + /* Print as much information as potentially useful. + Debugging load-time failures with embedding is not fun + */ + PyObject *exception, *v, *tb, *f, *modules, *mod; + PyErr_Fetch(&exception, &v, &tb); + if (exception != NULL) { + PyErr_NormalizeException(&exception, &v, &tb); + PyErr_Display(exception, v, tb); + } + Py_XDECREF(exception); + Py_XDECREF(v); + Py_XDECREF(tb); + + f = PySys_GetObject((char *)"stderr"); + if (f != NULL && f != Py_None) { + PyFile_WriteString("\nFrom: " _CFFI_MODULE_NAME + "\ncompiled with cffi version: 1.5.1" + "\n_cffi_backend module: ", f); + modules = PyImport_GetModuleDict(); + mod = PyDict_GetItemString(modules, "_cffi_backend"); + if (mod == NULL) { + PyFile_WriteString("not loaded", f); + } + else { + v = PyObject_GetAttrString(mod, "__file__"); + PyFile_WriteObject(v, f, 0); + Py_XDECREF(v); + } + PyFile_WriteString("\nsys.path: ", f); + PyFile_WriteObject(PySys_GetObject((char *)"path"), f, 0); + PyFile_WriteString("\n\n", f); + } + } + result = -1; + goto done; +} + +PyAPI_DATA(char *) _PyParser_TokenNames[]; /* from CPython */ + +static int _cffi_carefully_make_gil(void) +{ + /* This does the basic initialization of Python. It can be called + completely concurrently from unrelated threads. It assumes + that we don't hold the GIL before (if it exists), and we don't + hold it afterwards. + + What it really does is completely different in Python 2 and + Python 3. + + Python 2 + ======== + + Initialize the GIL, without initializing the rest of Python, + by calling PyEval_InitThreads(). + + PyEval_InitThreads() must not be called concurrently at all. + So we use a global variable as a simple spin lock. 
This global + variable must be from 'libpythonX.Y.so', not from this + cffi-based extension module, because it must be shared from + different cffi-based extension modules. We choose + _PyParser_TokenNames[0] as a completely arbitrary pointer value + that is never written to. The default is to point to the + string "ENDMARKER". We change it temporarily to point to the + next character in that string. (Yes, I know it's REALLY + obscure.) + + Python 3 + ======== + + In Python 3, PyEval_InitThreads() cannot be called before + Py_InitializeEx() any more. So this function calls + Py_InitializeEx() first. It uses the same obscure logic to + make sure we never call it concurrently. + + Arguably, this is less good on the spinlock, because + Py_InitializeEx() takes much longer to run than + PyEval_InitThreads(). But I didn't find a way around it. + */ + +#ifdef WITH_THREAD + char *volatile *lock = (char *volatile *)_PyParser_TokenNames; + char *old_value; + + while (1) { /* spin loop */ + old_value = *lock; + if (old_value[0] == 'E') { + assert(old_value[1] == 'N'); + if (cffi_compare_and_swap(lock, old_value, old_value + 1)) + break; + } + else { + assert(old_value[0] == 'N'); + /* should ideally do a spin loop instruction here, but + hard to do it portably and doesn't really matter I + think: PyEval_InitThreads() should be very fast, and + this is only run at start-up anyway. 
*/ + } + } +#endif + +#if PY_MAJOR_VERSION >= 3 + /* Python 3: call Py_InitializeEx() */ + { + PyGILState_STATE state = PyGILState_UNLOCKED; + if (!Py_IsInitialized()) + _cffi_py_initialize(); + else + state = PyGILState_Ensure(); + + PyEval_InitThreads(); + PyGILState_Release(state); + } +#else + /* Python 2: call PyEval_InitThreads() */ +# ifdef WITH_THREAD + if (!PyEval_ThreadsInitialized()) { + PyEval_InitThreads(); /* makes the GIL */ + PyEval_ReleaseLock(); /* then release it */ + } + /* else: there is already a GIL, but we still needed to do the + spinlock dance to make sure that we see it as fully ready */ +# endif +#endif + +#ifdef WITH_THREAD + /* release the lock */ + while (!cffi_compare_and_swap(lock, old_value + 1, old_value)) + ; +#endif + + return 0; +} + +/********** end CPython-specific section **********/ + + +#else + + +/********** PyPy-specific section **********/ + +PyMODINIT_FUNC _CFFI_PYTHON_STARTUP_FUNC(const void *[]); /* forward */ + +static struct _cffi_pypy_init_s { + const char *name; + void (*func)(const void *[]); + const char *code; +} _cffi_pypy_init = { + _CFFI_MODULE_NAME, + _CFFI_PYTHON_STARTUP_FUNC, + _CFFI_PYTHON_STARTUP_CODE, +}; + +extern int pypy_carefully_make_gil(const char *); +extern int pypy_init_embedded_cffi_module(int, struct _cffi_pypy_init_s *); + +static int _cffi_carefully_make_gil(void) +{ + return pypy_carefully_make_gil(_CFFI_MODULE_NAME); +} + +static int _cffi_initialize_python(void) +{ + return pypy_init_embedded_cffi_module(0xB011, &_cffi_pypy_init); +} + +/********** end PyPy-specific section **********/ + + +#endif + + +#ifdef __GNUC__ +__attribute__((noinline)) +#endif +static _cffi_call_python_fnptr _cffi_start_python(void) +{ + /* Delicate logic to initialize Python. This function can be + called multiple times concurrently, e.g. when the process calls + its first ``extern "Python"`` functions in multiple threads at + once. It can also be called recursively, in which case we must + ignore it. 
We also have to consider what occurs if several + different cffi-based extensions reach this code in parallel + threads---it is a different copy of the code, then, and we + can't have any shared global variable unless it comes from + 'libpythonX.Y.so'. + + Idea: + + * _cffi_carefully_make_gil(): "carefully" call + PyEval_InitThreads() (possibly with Py_InitializeEx() first). + + * then we use a (local) custom lock to make sure that a call to this + cffi-based extension will wait if another call to the *same* + extension is running the initialization in another thread. + It is reentrant, so that a recursive call will not block, but + only one from a different thread. + + * then we grab the GIL and (Python 2) we call Py_InitializeEx(). + At this point, concurrent calls to Py_InitializeEx() are not + possible: we have the GIL. + + * do the rest of the specific initialization, which may + temporarily release the GIL but not the custom lock. + Only release the custom lock when we are done. + */ + static char called = 0; + + if (_cffi_carefully_make_gil() != 0) + return NULL; + + _cffi_acquire_reentrant_mutex(); + + /* Here the GIL exists, but we don't have it. We're only protected + from concurrency by the reentrant mutex. */ + + /* This file only initializes the embedded module once, the first + time this is called, even if there are subinterpreters. */ + if (!called) { + called = 1; /* invoke _cffi_initialize_python() only once, + but don't set '_cffi_call_python' right now, + otherwise concurrent threads won't call + this function at all (we need them to wait) */ + if (_cffi_initialize_python() == 0) { + /* now initialization is finished. Switch to the fast-path. */ + + /* We would like nobody to see the new value of + '_cffi_call_python' without also seeing the rest of the + data initialized. However, this is not possible. But + the new value of '_cffi_call_python' is the function + 'cffi_call_python()' from _cffi_backend. 
So: */ + cffi_write_barrier(); + /* ^^^ we put a write barrier here, and a corresponding + read barrier at the start of cffi_call_python(). This + ensures that after that read barrier, we see everything + done here before the write barrier. + */ + + assert(_cffi_call_python_org != NULL); + _cffi_call_python = (_cffi_call_python_fnptr)_cffi_call_python_org; + } + else { + /* initialization failed. Reset this to NULL, even if it was + already set to some other value. Future calls to + _cffi_start_python() are still forced to occur, and will + always return NULL from now on. */ + _cffi_call_python_org = NULL; + } + } + + _cffi_release_reentrant_mutex(); + + return (_cffi_call_python_fnptr)_cffi_call_python_org; +} + +static +void _cffi_start_and_call_python(struct _cffi_externpy_s *externpy, char *args) +{ + _cffi_call_python_fnptr fnptr; + int current_err = errno; +#ifdef _MSC_VER + int current_lasterr = GetLastError(); +#endif + fnptr = _cffi_start_python(); + if (fnptr == NULL) { + fprintf(stderr, "function %s() called, but initialization code " + "failed. Returning 0.\n", externpy->name); + memset(args, 0, externpy->size_of_result); + } +#ifdef _MSC_VER + SetLastError(current_lasterr); +#endif + errno = current_err; + + if (fnptr != NULL) + fnptr(externpy, args); +} + + +/* The cffi_start_python() function makes sure Python is initialized + and our cffi module is set up. It can be called manually from the + user C code. The same effect is obtained automatically from any + dll-exported ``extern "Python"`` function. This function returns + -1 if initialization failed, 0 if all is OK. 
*/ +_CFFI_UNUSED_FN +static int cffi_start_python(void) +{ + if (_cffi_call_python == &_cffi_start_and_call_python) { + if (_cffi_start_python() == NULL) + return -1; + } + cffi_read_barrier(); + return 0; +} + +#undef cffi_compare_and_swap +#undef cffi_write_barrier +#undef cffi_read_barrier diff --git a/lib_pypy/cffi/api.py b/lib_pypy/cffi/api.py --- a/lib_pypy/cffi/api.py +++ b/lib_pypy/cffi/api.py @@ -1,4 +1,4 @@ -import sys, types +import sys, sysconfig, types from .lock import allocate_lock try: @@ -544,28 +544,32 @@ def _apply_embedding_fix(self, kwds): # must include an argument like "-lpython2.7" for the compiler + def ensure(key, value): + lst = kwds.setdefault(key, []) + if value not in lst: + lst.append(value) + # if '__pypy__' in sys.builtin_module_names: if hasattr(sys, 'prefix'): import os - libdir = os.path.join(sys.prefix, 'bin') - dirs = kwds.setdefault('library_dirs', []) - if libdir not in dirs: - dirs.append(libdir) + ensure('library_dirs', os.path.join(sys.prefix, 'bin')) pythonlib = "pypy-c" else: if sys.platform == "win32": template = "python%d%d" - if sys.flags.debug: - template = template + '_d' + if hasattr(sys, 'gettotalrefcount'): + template += '_d' else: template = "python%d.%d" + if sysconfig.get_config_var('DEBUG_EXT'): + template += sysconfig.get_config_var('DEBUG_EXT') pythonlib = (template % (sys.hexversion >> 24, (sys.hexversion >> 16) & 0xff)) if hasattr(sys, 'abiflags'): pythonlib += sys.abiflags - libraries = kwds.setdefault('libraries', []) - if pythonlib not in libraries: - libraries.append(pythonlib) + ensure('libraries', pythonlib) + if sys.platform == "win32": + ensure('extra_link_args', '/MANIFEST') def set_source(self, module_name, source, source_extension='.c', **kwds): if hasattr(self, '_assigned_source'): @@ -631,7 +635,7 @@ compiled DLL. Use '*' to force distutils' choice, suitable for regular CPython C API modules. 
Use a file name ending in '.*' to ask for the system's default extension for dynamic libraries - (.so/.dll). + (.so/.dll/.dylib). The default is '*' when building a non-embedded C API extension, and (module_name + '.*') when building an embedded library. @@ -695,6 +699,10 @@ # self._embedding = pysource + def def_extern(self, *args, **kwds): + raise ValueError("ffi.def_extern() is only available on API-mode FFI " + "objects") + def _load_backend_lib(backend, name, flags): if name is None: diff --git a/lib_pypy/cffi/cparser.py b/lib_pypy/cffi/cparser.py --- a/lib_pypy/cffi/cparser.py +++ b/lib_pypy/cffi/cparser.py @@ -220,7 +220,7 @@ self._included_declarations = set() self._anonymous_counter = 0 self._structnode2type = weakref.WeakKeyDictionary() - self._options = None + self._options = {} self._int_constants = {} self._recomplete = [] self._uses_new_feature = None @@ -374,7 +374,7 @@ def _declare_function(self, tp, quals, decl): tp = self._get_type_pointer(tp, quals) - if self._options['dllexport']: + if self._options.get('dllexport'): tag = 'dllexport_python ' elif self._inside_extern_python: tag = 'extern_python ' @@ -450,7 +450,7 @@ prevobj, prevquals = self._declarations[name] if prevobj is obj and prevquals == quals: return - if not self._options['override']: + if not self._options.get('override'): raise api.FFIError( "multiple declarations of %s (for interactive usage, " "try cdef(xx, override=True))" % (name,)) @@ -729,7 +729,7 @@ if isinstance(tp, model.StructType) and tp.partial: raise NotImplementedError("%s: using both bitfields and '...;'" % (tp,)) - tp.packed = self._options['packed'] + tp.packed = self._options.get('packed') if tp.completed: # must be re-completed: it is not opaque any more tp.completed = 0 self._recomplete.append(tp) diff --git a/lib_pypy/cffi/ffiplatform.py b/lib_pypy/cffi/ffiplatform.py --- a/lib_pypy/cffi/ffiplatform.py +++ b/lib_pypy/cffi/ffiplatform.py @@ -21,14 +21,12 @@ allsources.append(os.path.normpath(src)) return 
Extension(name=modname, sources=allsources, **kwds) -def compile(tmpdir, ext, compiler_verbose=0, target_extension=None, - embedding=False): +def compile(tmpdir, ext, compiler_verbose=0): """Compile a C extension module using distutils.""" saved_environ = os.environ.copy() try: - outputfilename = _build(tmpdir, ext, compiler_verbose, - target_extension, embedding) + outputfilename = _build(tmpdir, ext, compiler_verbose) outputfilename = os.path.abspath(outputfilename) finally: # workaround for a distutils bugs where some env vars can @@ -38,32 +36,7 @@ os.environ[key] = value return outputfilename -def _save_val(name): - import distutils.sysconfig - config_vars = distutils.sysconfig.get_config_vars() - return config_vars.get(name, Ellipsis) - -def _restore_val(name, value): - import distutils.sysconfig - config_vars = distutils.sysconfig.get_config_vars() - config_vars[name] = value - if value is Ellipsis: - del config_vars[name] - -def _win32_hack_for_embedding(): - from distutils.msvc9compiler import MSVCCompiler - if not hasattr(MSVCCompiler, '_remove_visual_c_ref_CFFI_BAK'): - MSVCCompiler._remove_visual_c_ref_CFFI_BAK = \ - MSVCCompiler._remove_visual_c_ref - MSVCCompiler._remove_visual_c_ref = lambda self,manifest_file: manifest_file - -def _win32_unhack_for_embedding(): - from distutils.msvc9compiler import MSVCCompiler - MSVCCompiler._remove_visual_c_ref = \ - MSVCCompiler._remove_visual_c_ref_CFFI_BAK - -def _build(tmpdir, ext, compiler_verbose=0, target_extension=None, - embedding=False): +def _build(tmpdir, ext, compiler_verbose=0): # XXX compact but horrible :-( from distutils.core import Distribution import distutils.errors, distutils.log @@ -76,25 +49,14 @@ options['build_temp'] = ('ffiplatform', tmpdir) # try: - if sys.platform == 'win32' and embedding: - _win32_hack_for_embedding() old_level = distutils.log.set_threshold(0) or 0 - old_SO = _save_val('SO') - old_EXT_SUFFIX = _save_val('EXT_SUFFIX') try: - if target_extension is not None: - 
_restore_val('SO', target_extension) - _restore_val('EXT_SUFFIX', target_extension) distutils.log.set_verbosity(compiler_verbose) dist.run_command('build_ext') cmd_obj = dist.get_command_obj('build_ext') [soname] = cmd_obj.get_outputs() finally: distutils.log.set_threshold(old_level) - _restore_val('SO', old_SO) - _restore_val('EXT_SUFFIX', old_EXT_SUFFIX) - if sys.platform == 'win32' and embedding: - _win32_unhack_for_embedding() except (distutils.errors.CompileError, distutils.errors.LinkError) as e: raise VerificationError('%s: %s' % (e.__class__.__name__, e)) diff --git a/lib_pypy/cffi/recompiler.py b/lib_pypy/cffi/recompiler.py --- a/lib_pypy/cffi/recompiler.py +++ b/lib_pypy/cffi/recompiler.py @@ -1170,6 +1170,8 @@ repr_arguments = ', '.join(arguments) repr_arguments = repr_arguments or 'void' name_and_arguments = '%s(%s)' % (name, repr_arguments) + if tp.abi == "__stdcall": + name_and_arguments = '_cffi_stdcall ' + name_and_arguments # def may_need_128_bits(tp): return (isinstance(tp, model.PrimitiveType) and @@ -1357,6 +1359,58 @@ parts[-1] += extension return os.path.join(outputdir, *parts), parts + +# Aaargh. Distutils is not tested at all for the purpose of compiling +# DLLs that are not extension modules. Here are some hacks to work +# around that, in the _patch_for_*() functions... + +def _patch_meth(patchlist, cls, name, new_meth): + old = getattr(cls, name) + patchlist.append((cls, name, old)) + setattr(cls, name, new_meth) + return old + +def _unpatch_meths(patchlist): + for cls, name, old_meth in reversed(patchlist): + setattr(cls, name, old_meth) + +def _patch_for_embedding(patchlist): + if sys.platform == 'win32': + # we must not remove the manifest when building for embedding! 
+ from distutils.msvc9compiler import MSVCCompiler + _patch_meth(patchlist, MSVCCompiler, '_remove_visual_c_ref', + lambda self, manifest_file: manifest_file) + + if sys.platform == 'darwin': + # we must not make a '-bundle', but a '-dynamiclib' instead + from distutils.ccompiler import CCompiler + def my_link_shared_object(self, *args, **kwds): + if '-bundle' in self.linker_so: + self.linker_so = list(self.linker_so) + i = self.linker_so.index('-bundle') + self.linker_so[i] = '-dynamiclib' + return old_link_shared_object(self, *args, **kwds) + old_link_shared_object = _patch_meth(patchlist, CCompiler, + 'link_shared_object', + my_link_shared_object) + +def _patch_for_target(patchlist, target): + from distutils.command.build_ext import build_ext + # if 'target' is different from '*', we need to patch some internal + # method to just return this 'target' value, instead of having it + # built from module_name + if target.endswith('.*'): + target = target[:-2] + if sys.platform == 'win32': + target += '.dll' + elif sys.platform == 'darwin': + target += '.dylib' + else: + target += '.so' + _patch_meth(patchlist, build_ext, 'get_ext_filename', + lambda self, ext_name: target) + + def recompile(ffi, module_name, preamble, tmpdir='.', call_c_compiler=True, c_file=None, source_extension='.c', extradir=None, compiler_verbose=1, target=None, **kwds): @@ -1382,36 +1436,22 @@ target = '%s.*' % module_name else: target = '*' - if target == '*': - target_module_name = module_name - target_extension = None # use default - else: - if target.endswith('.*'): - target = target[:-2] - if sys.platform == 'win32': - target += '.dll' - else: - target += '.so' - # split along the first '.' 
(not the last one, otherwise the - # preceeding dots are interpreted as splitting package names) - index = target.find('.') - if index < 0: - raise ValueError("target argument %r should be a file name " - "containing a '.'" % (target,)) - target_module_name = target[:index] - target_extension = target[index:] # - ext = ffiplatform.get_extension(ext_c_file, target_module_name, **kwds) + ext = ffiplatform.get_extension(ext_c_file, module_name, **kwds) updated = make_c_source(ffi, module_name, preamble, c_file) if call_c_compiler: + patchlist = [] cwd = os.getcwd() try: + if embedding: + _patch_for_embedding(patchlist) + if target != '*': + _patch_for_target(patchlist, target) os.chdir(tmpdir) - outputfilename = ffiplatform.compile('.', ext, compiler_verbose, - target_extension, - embedding=embedding) + outputfilename = ffiplatform.compile('.', ext, compiler_verbose) finally: os.chdir(cwd) + _unpatch_meths(patchlist) return outputfilename else: return ext, updated diff --git a/pypy/doc/whatsnew-head.rst b/pypy/doc/whatsnew-head.rst --- a/pypy/doc/whatsnew-head.rst +++ b/pypy/doc/whatsnew-head.rst @@ -139,3 +139,12 @@ Refactor sandboxing to operate at a higher level. .. branch: cpyext-bootstrap + +.. branch: vmprof-newstack + +Refactor vmprof to work cross-operating-system. + +.. 
branch: seperate-strucmember_h + +Seperate structmember.h from Python.h Also enhance creating api functions +to specify which header file they appear in (previously only pypy_decl.h) diff --git a/pypy/module/_cffi_backend/__init__.py b/pypy/module/_cffi_backend/__init__.py --- a/pypy/module/_cffi_backend/__init__.py +++ b/pypy/module/_cffi_backend/__init__.py @@ -3,7 +3,7 @@ from rpython.rlib import rdynload, clibffi, entrypoint from rpython.rtyper.lltypesystem import rffi -VERSION = "1.5.0" +VERSION = "1.5.1" FFI_DEFAULT_ABI = clibffi.FFI_DEFAULT_ABI try: @@ -69,6 +69,7 @@ def startup(self, space): from pypy.module._cffi_backend import embedding embedding.glob.space = space + embedding.glob.patched_sys = False def get_dict_rtld_constants(): diff --git a/pypy/module/_cffi_backend/embedding.py b/pypy/module/_cffi_backend/embedding.py --- a/pypy/module/_cffi_backend/embedding.py +++ b/pypy/module/_cffi_backend/embedding.py @@ -45,6 +45,26 @@ pass glob = Global() +def patch_sys(space): + # Annoying: CPython would just use the C-level std{in,out,err} as + # configured by the main application, for example in binary mode + # on Windows or with buffering turned off. We can't easily do the + # same. Instead, go for the safest bet (but possibly bad for + # performance) and open sys.std{in,out,err} unbuffered. On + # Windows I guess binary mode is a better default choice. + # + # XXX if needed, we could add support for a flag passed to + # pypy_init_embedded_cffi_module(). + if not glob.patched_sys: + space.appexec([], """(): + import os + sys.stdin = sys.__stdin__ = os.fdopen(0, 'rb', 0) + sys.stdout = sys.__stdout__ = os.fdopen(1, 'wb', 0) + sys.stderr = sys.__stderr__ = os.fdopen(2, 'wb', 0) + """) + glob.patched_sys = True + + def pypy_init_embedded_cffi_module(version, init_struct): # called from __init__.py name = "?" 
@@ -56,6 +76,7 @@ must_leave = False try: must_leave = space.threadlocals.try_enter_thread(space) + patch_sys(space) load_embedded_cffi_module(space, version, init_struct) res = 0 except OperationError, operr: @@ -85,14 +106,86 @@ # ____________________________________________________________ +if os.name == 'nt': -eci = ExternalCompilationInfo(separate_module_sources=[ -r""" -/* XXX Windows missing */ -#include + do_includes = r""" +#define _WIN32_WINNT 0x0501 +#include + +#define CFFI_INIT_HOME_PATH_MAX _MAX_PATH +static void _cffi_init(void); +static void _cffi_init_error(const char *msg, const char *extra); + +static int _cffi_init_home(char *output_home_path) +{ + HMODULE hModule = 0; + DWORD res; + + GetModuleHandleEx(GET_MODULE_HANDLE_EX_FLAG_FROM_ADDRESS | + GET_MODULE_HANDLE_EX_FLAG_UNCHANGED_REFCOUNT, + (LPCTSTR)&_cffi_init, &hModule); + + if (hModule == 0 ) { + _cffi_init_error("GetModuleHandleEx() failed", ""); + return -1; + } + res = GetModuleFileName(hModule, output_home_path, CFFI_INIT_HOME_PATH_MAX); + if (res >= CFFI_INIT_HOME_PATH_MAX) { + return -1; + } + return 0; +} + +static void _cffi_init_once(void) +{ + static LONG volatile lock = 0; + static int _init_called = 0; + + while (InterlockedCompareExchange(&lock, 1, 0) != 0) { + SwitchToThread(); /* spin loop */ + } + if (!_init_called) { + _cffi_init(); + _init_called = 1; + } + InterlockedCompareExchange(&lock, 0, 1); +} +""" + +else: + + do_includes = r""" #include #include +#define CFFI_INIT_HOME_PATH_MAX PATH_MAX +static void _cffi_init(void); +static void _cffi_init_error(const char *msg, const char *extra); + +static int _cffi_init_home(char *output_home_path) +{ + Dl_info info; + dlerror(); /* reset */ + if (dladdr(&_cffi_init, &info) == 0) { + _cffi_init_error("dladdr() failed: ", dlerror()); + return -1; + } + if (realpath(info.dli_fname, output_home_path) == NULL) { + perror("realpath() failed"); + _cffi_init_error("realpath() failed", ""); + return -1; + } + return 0; +} + +static 
void _cffi_init_once(void) +{ + static pthread_once_t once_control = PTHREAD_ONCE_INIT; + pthread_once(&once_control, _cffi_init); +} +""" + +do_startup = do_includes + r""" RPY_EXPORTED void rpython_startup_code(void); RPY_EXPORTED int pypy_setup_home(char *, int); @@ -108,17 +201,13 @@ static void _cffi_init(void) { - Dl_info info; - char *home; + char home[CFFI_INIT_HOME_PATH_MAX + 1]; rpython_startup_code(); RPyGilAllocate(); - if (dladdr(&_cffi_init, &info) == 0) { - _cffi_init_error("dladdr() failed: ", dlerror()); + if (_cffi_init_home(home) != 0) return; - } - home = realpath(info.dli_fname, NULL); if (pypy_setup_home(home, 1) != 0) { _cffi_init_error("pypy_setup_home() failed", ""); return; @@ -134,13 +223,12 @@ It assumes that we don't hold the GIL before (if it exists), and we don't hold it afterwards. */ - static pthread_once_t once_control = PTHREAD_ONCE_INIT; - _cffi_module_name = name; /* not really thread-safe, but better than nothing */ - pthread_once(&once_control, _cffi_init); + _cffi_init_once(); return (int)_cffi_ready - 1; } -"""]) +""" +eci = ExternalCompilationInfo(separate_module_sources=[do_startup]) declare_c_function = rffi.llexternal_use_eci(eci) diff --git a/pypy/module/_cffi_backend/test/_backend_test_c.py b/pypy/module/_cffi_backend/test/_backend_test_c.py --- a/pypy/module/_cffi_backend/test/_backend_test_c.py +++ b/pypy/module/_cffi_backend/test/_backend_test_c.py @@ -1,7 +1,7 @@ # ____________________________________________________________ import sys -assert __version__ == "1.5.0", ("This test_c.py file is for testing a version" +assert __version__ == "1.5.1", ("This test_c.py file is for testing a version" " of cffi that differs from the one that we" " get from 'import _cffi_backend'") if sys.version_info < (3,): diff --git a/pypy/module/_cffi_backend/test/test_ztranslation.py b/pypy/module/_cffi_backend/test/test_ztranslation.py --- a/pypy/module/_cffi_backend/test/test_ztranslation.py +++ 
b/pypy/module/_cffi_backend/test/test_ztranslation.py @@ -4,14 +4,17 @@ # side-effect: FORMAT_LONGDOUBLE must be built before test_checkmodule() from pypy.module._cffi_backend import misc -from pypy.module._cffi_backend import cffi1_module +from pypy.module._cffi_backend import embedding def test_checkmodule(): # prepare_file_argument() is not working without translating the _file # module too def dummy_prepare_file_argument(space, fileobj): - # call load_cffi1_module() too, from a random place like here - cffi1_module.load_cffi1_module(space, "foo", "foo", 42) + # call pypy_init_embedded_cffi_module() from a random place like here + # --- this calls load_cffi1_module(), too + embedding.pypy_init_embedded_cffi_module( + rffi.cast(rffi.INT, embedding.EMBED_VERSION_MIN), + 42) return lltype.nullptr(rffi.CCHARP.TO) old = ctypeptr.prepare_file_argument try: diff --git a/pypy/module/_socket/test/test_sock_app.py b/pypy/module/_socket/test/test_sock_app.py --- a/pypy/module/_socket/test/test_sock_app.py +++ b/pypy/module/_socket/test/test_sock_app.py @@ -457,7 +457,7 @@ sizes = {socket.htonl: 32, socket.ntohl: 32, socket.htons: 16, socket.ntohs: 16} for func, size in sizes.items(): - mask = (1<\n" + "#include \n" "#include \n") code = (prologue + struct_declaration_code + @@ -958,7 +962,8 @@ "NOT_RPYTHON" # implement function callbacks and generate function decls functions = [] - pypy_decls = [] + decls = {} + pypy_decls = decls['pypy_decl.h'] = [] pypy_decls.append("#ifndef _PYPY_PYPY_DECL_H\n") pypy_decls.append("#define _PYPY_PYPY_DECL_H\n") pypy_decls.append("#ifndef PYPY_STANDALONE\n") @@ -971,17 +976,23 @@ for decl in FORWARD_DECLS: pypy_decls.append("%s;" % (decl,)) - for name, func in sorted(FUNCTIONS.iteritems()): - restype, args = c_function_signature(db, func) From pypy.commits at gmail.com Fri Feb 12 17:01:42 2016 From: pypy.commits at gmail.com (mjacob) Date: Fri, 12 Feb 2016 14:01:42 -0800 (PST) Subject: [pypy-commit] pypy llvm-translation-backend: hg merge 
default (+ fixes) Message-ID: <56be5646.44e21c0a.4d285.fffffbae@mx.google.com> Author: Manuel Jacob Branch: llvm-translation-backend Changeset: r82194:eaaed1d67e19 Date: 2016-02-12 22:30 +0100 http://bitbucket.org/pypy/pypy/changeset/eaaed1d67e19/ Log: hg merge default (+ fixes) diff too long, truncating to 2000 out of 12525 lines diff --git a/.hgignore b/.hgignore --- a/.hgignore +++ b/.hgignore @@ -75,6 +75,7 @@ ^lib_pypy/__pycache__$ ^lib_pypy/ctypes_config_cache/_.+_cache\.py$ ^lib_pypy/ctypes_config_cache/_.+_.+_\.py$ +^lib_pypy/_libmpdec/.+.o$ ^rpython/translator/cli/query-descriptions$ ^pypy/doc/discussion/.+\.html$ ^include/.+\.h$ diff --git a/dotviewer/drawgraph.py b/dotviewer/drawgraph.py --- a/dotviewer/drawgraph.py +++ b/dotviewer/drawgraph.py @@ -14,12 +14,661 @@ FONT = os.path.join(this_dir, 'font', 'DroidSans.ttf') FIXEDFONT = os.path.join(this_dir, 'font', 'DroidSansMono.ttf') COLOR = { - 'black': (0,0,0), - 'white': (255,255,255), - 'red': (255,0,0), - 'green': (0,255,0), - 'blue': (0,0,255), - 'yellow': (255,255,0), + 'aliceblue': (240, 248, 255), + 'antiquewhite': (250, 235, 215), + 'antiquewhite1': (255, 239, 219), + 'antiquewhite2': (238, 223, 204), + 'antiquewhite3': (205, 192, 176), + 'antiquewhite4': (139, 131, 120), + 'aquamarine': (127, 255, 212), + 'aquamarine1': (127, 255, 212), + 'aquamarine2': (118, 238, 198), + 'aquamarine3': (102, 205, 170), + 'aquamarine4': (69, 139, 116), + 'azure': (240, 255, 255), + 'azure1': (240, 255, 255), + 'azure2': (224, 238, 238), + 'azure3': (193, 205, 205), + 'azure4': (131, 139, 139), + 'beige': (245, 245, 220), + 'bisque': (255, 228, 196), + 'bisque1': (255, 228, 196), + 'bisque2': (238, 213, 183), + 'bisque3': (205, 183, 158), + 'bisque4': (139, 125, 107), + 'black': (0, 0, 0), + 'blanchedalmond': (255, 235, 205), + 'blue': (0, 0, 255), + 'blue1': (0, 0, 255), + 'blue2': (0, 0, 238), + 'blue3': (0, 0, 205), + 'blue4': (0, 0, 139), + 'blueviolet': (138, 43, 226), + 'brown': (165, 42, 42), + 'brown1': 
(255, 64, 64), + 'brown2': (238, 59, 59), + 'brown3': (205, 51, 51), + 'brown4': (139, 35, 35), + 'burlywood': (222, 184, 135), + 'burlywood1': (255, 211, 155), + 'burlywood2': (238, 197, 145), + 'burlywood3': (205, 170, 125), + 'burlywood4': (139, 115, 85), + 'cadetblue': (95, 158, 160), + 'cadetblue1': (152, 245, 255), + 'cadetblue2': (142, 229, 238), + 'cadetblue3': (122, 197, 205), + 'cadetblue4': (83, 134, 139), + 'chartreuse': (127, 255, 0), + 'chartreuse1': (127, 255, 0), + 'chartreuse2': (118, 238, 0), + 'chartreuse3': (102, 205, 0), + 'chartreuse4': (69, 139, 0), + 'chocolate': (210, 105, 30), + 'chocolate1': (255, 127, 36), + 'chocolate2': (238, 118, 33), + 'chocolate3': (205, 102, 29), + 'chocolate4': (139, 69, 19), + 'coral': (255, 127, 80), + 'coral1': (255, 114, 86), + 'coral2': (238, 106, 80), + 'coral3': (205, 91, 69), + 'coral4': (139, 62, 47), + 'cornflowerblue': (100, 149, 237), + 'cornsilk': (255, 248, 220), + 'cornsilk1': (255, 248, 220), + 'cornsilk2': (238, 232, 205), + 'cornsilk3': (205, 200, 177), + 'cornsilk4': (139, 136, 120), + 'crimson': (220, 20, 60), + 'cyan': (0, 255, 255), + 'cyan1': (0, 255, 255), + 'cyan2': (0, 238, 238), + 'cyan3': (0, 205, 205), + 'cyan4': (0, 139, 139), + 'darkgoldenrod': (184, 134, 11), + 'darkgoldenrod1': (255, 185, 15), + 'darkgoldenrod2': (238, 173, 14), + 'darkgoldenrod3': (205, 149, 12), + 'darkgoldenrod4': (139, 101, 8), + 'darkgreen': (0, 100, 0), + 'darkkhaki': (189, 183, 107), + 'darkolivegreen': (85, 107, 47), + 'darkolivegreen1': (202, 255, 112), + 'darkolivegreen2': (188, 238, 104), + 'darkolivegreen3': (162, 205, 90), + 'darkolivegreen4': (110, 139, 61), + 'darkorange': (255, 140, 0), + 'darkorange1': (255, 127, 0), + 'darkorange2': (238, 118, 0), + 'darkorange3': (205, 102, 0), + 'darkorange4': (139, 69, 0), + 'darkorchid': (153, 50, 204), + 'darkorchid1': (191, 62, 255), + 'darkorchid2': (178, 58, 238), + 'darkorchid3': (154, 50, 205), + 'darkorchid4': (104, 34, 139), + 'darksalmon': (233, 150, 
122), + 'darkseagreen': (143, 188, 143), + 'darkseagreen1': (193, 255, 193), + 'darkseagreen2': (180, 238, 180), + 'darkseagreen3': (155, 205, 155), + 'darkseagreen4': (105, 139, 105), + 'darkslateblue': (72, 61, 139), + 'darkslategray': (47, 79, 79), + 'darkslategray1': (151, 255, 255), + 'darkslategray2': (141, 238, 238), + 'darkslategray3': (121, 205, 205), + 'darkslategray4': (82, 139, 139), + 'darkslategrey': (47, 79, 79), + 'darkturquoise': (0, 206, 209), + 'darkviolet': (148, 0, 211), + 'deeppink': (255, 20, 147), + 'deeppink1': (255, 20, 147), + 'deeppink2': (238, 18, 137), + 'deeppink3': (205, 16, 118), + 'deeppink4': (139, 10, 80), + 'deepskyblue': (0, 191, 255), + 'deepskyblue1': (0, 191, 255), + 'deepskyblue2': (0, 178, 238), + 'deepskyblue3': (0, 154, 205), + 'deepskyblue4': (0, 104, 139), + 'dimgray': (105, 105, 105), + 'dimgrey': (105, 105, 105), + 'dodgerblue': (30, 144, 255), + 'dodgerblue1': (30, 144, 255), + 'dodgerblue2': (28, 134, 238), + 'dodgerblue3': (24, 116, 205), + 'dodgerblue4': (16, 78, 139), + 'firebrick': (178, 34, 34), + 'firebrick1': (255, 48, 48), + 'firebrick2': (238, 44, 44), + 'firebrick3': (205, 38, 38), + 'firebrick4': (139, 26, 26), + 'floralwhite': (255, 250, 240), + 'forestgreen': (34, 139, 34), + 'gainsboro': (220, 220, 220), + 'ghostwhite': (248, 248, 255), + 'gold': (255, 215, 0), + 'gold1': (255, 215, 0), + 'gold2': (238, 201, 0), + 'gold3': (205, 173, 0), + 'gold4': (139, 117, 0), + 'goldenrod': (218, 165, 32), + 'goldenrod1': (255, 193, 37), + 'goldenrod2': (238, 180, 34), + 'goldenrod3': (205, 155, 29), + 'goldenrod4': (139, 105, 20), + 'gray': (192, 192, 192), + 'gray0': (0, 0, 0), + 'gray1': (3, 3, 3), + 'gray10': (26, 26, 26), + 'gray100': (255, 255, 255), + 'gray11': (28, 28, 28), + 'gray12': (31, 31, 31), + 'gray13': (33, 33, 33), + 'gray14': (36, 36, 36), + 'gray15': (38, 38, 38), + 'gray16': (41, 41, 41), + 'gray17': (43, 43, 43), + 'gray18': (46, 46, 46), + 'gray19': (48, 48, 48), + 'gray2': (5, 5, 5), + 
'gray20': (51, 51, 51), + 'gray21': (54, 54, 54), + 'gray22': (56, 56, 56), + 'gray23': (59, 59, 59), + 'gray24': (61, 61, 61), + 'gray25': (64, 64, 64), + 'gray26': (66, 66, 66), + 'gray27': (69, 69, 69), + 'gray28': (71, 71, 71), + 'gray29': (74, 74, 74), + 'gray3': (8, 8, 8), + 'gray30': (77, 77, 77), + 'gray31': (79, 79, 79), + 'gray32': (82, 82, 82), + 'gray33': (84, 84, 84), + 'gray34': (87, 87, 87), + 'gray35': (89, 89, 89), + 'gray36': (92, 92, 92), + 'gray37': (94, 94, 94), + 'gray38': (97, 97, 97), + 'gray39': (99, 99, 99), + 'gray4': (10, 10, 10), + 'gray40': (102, 102, 102), + 'gray41': (105, 105, 105), + 'gray42': (107, 107, 107), + 'gray43': (110, 110, 110), + 'gray44': (112, 112, 112), + 'gray45': (115, 115, 115), + 'gray46': (117, 117, 117), + 'gray47': (120, 120, 120), + 'gray48': (122, 122, 122), + 'gray49': (125, 125, 125), + 'gray5': (13, 13, 13), + 'gray50': (127, 127, 127), + 'gray51': (130, 130, 130), + 'gray52': (133, 133, 133), + 'gray53': (135, 135, 135), + 'gray54': (138, 138, 138), + 'gray55': (140, 140, 140), + 'gray56': (143, 143, 143), + 'gray57': (145, 145, 145), + 'gray58': (148, 148, 148), + 'gray59': (150, 150, 150), + 'gray6': (15, 15, 15), + 'gray60': (153, 153, 153), + 'gray61': (156, 156, 156), + 'gray62': (158, 158, 158), + 'gray63': (161, 161, 161), + 'gray64': (163, 163, 163), + 'gray65': (166, 166, 166), + 'gray66': (168, 168, 168), + 'gray67': (171, 171, 171), + 'gray68': (173, 173, 173), + 'gray69': (176, 176, 176), + 'gray7': (18, 18, 18), + 'gray70': (179, 179, 179), + 'gray71': (181, 181, 181), + 'gray72': (184, 184, 184), + 'gray73': (186, 186, 186), + 'gray74': (189, 189, 189), + 'gray75': (191, 191, 191), + 'gray76': (194, 194, 194), + 'gray77': (196, 196, 196), + 'gray78': (199, 199, 199), + 'gray79': (201, 201, 201), + 'gray8': (20, 20, 20), + 'gray80': (204, 204, 204), + 'gray81': (207, 207, 207), + 'gray82': (209, 209, 209), + 'gray83': (212, 212, 212), + 'gray84': (214, 214, 214), + 'gray85': (217, 217, 217), 
+ 'gray86': (219, 219, 219), + 'gray87': (222, 222, 222), + 'gray88': (224, 224, 224), + 'gray89': (227, 227, 227), + 'gray9': (23, 23, 23), + 'gray90': (229, 229, 229), + 'gray91': (232, 232, 232), + 'gray92': (235, 235, 235), + 'gray93': (237, 237, 237), + 'gray94': (240, 240, 240), + 'gray95': (242, 242, 242), + 'gray96': (245, 245, 245), + 'gray97': (247, 247, 247), + 'gray98': (250, 250, 250), + 'gray99': (252, 252, 252), + 'green': (0, 255, 0), + 'green1': (0, 255, 0), + 'green2': (0, 238, 0), + 'green3': (0, 205, 0), + 'green4': (0, 139, 0), + 'greenyellow': (173, 255, 47), + 'grey': (192, 192, 192), + 'grey0': (0, 0, 0), + 'grey1': (3, 3, 3), + 'grey10': (26, 26, 26), + 'grey100': (255, 255, 255), + 'grey11': (28, 28, 28), + 'grey12': (31, 31, 31), + 'grey13': (33, 33, 33), + 'grey14': (36, 36, 36), + 'grey15': (38, 38, 38), + 'grey16': (41, 41, 41), + 'grey17': (43, 43, 43), + 'grey18': (46, 46, 46), + 'grey19': (48, 48, 48), + 'grey2': (5, 5, 5), + 'grey20': (51, 51, 51), + 'grey21': (54, 54, 54), + 'grey22': (56, 56, 56), + 'grey23': (59, 59, 59), + 'grey24': (61, 61, 61), + 'grey25': (64, 64, 64), + 'grey26': (66, 66, 66), + 'grey27': (69, 69, 69), + 'grey28': (71, 71, 71), + 'grey29': (74, 74, 74), + 'grey3': (8, 8, 8), + 'grey30': (77, 77, 77), + 'grey31': (79, 79, 79), + 'grey32': (82, 82, 82), + 'grey33': (84, 84, 84), + 'grey34': (87, 87, 87), + 'grey35': (89, 89, 89), + 'grey36': (92, 92, 92), + 'grey37': (94, 94, 94), + 'grey38': (97, 97, 97), + 'grey39': (99, 99, 99), + 'grey4': (10, 10, 10), + 'grey40': (102, 102, 102), + 'grey41': (105, 105, 105), + 'grey42': (107, 107, 107), + 'grey43': (110, 110, 110), + 'grey44': (112, 112, 112), + 'grey45': (115, 115, 115), + 'grey46': (117, 117, 117), + 'grey47': (120, 120, 120), + 'grey48': (122, 122, 122), + 'grey49': (125, 125, 125), + 'grey5': (13, 13, 13), + 'grey50': (127, 127, 127), + 'grey51': (130, 130, 130), + 'grey52': (133, 133, 133), + 'grey53': (135, 135, 135), + 'grey54': (138, 138, 138), + 
'grey55': (140, 140, 140), + 'grey56': (143, 143, 143), + 'grey57': (145, 145, 145), + 'grey58': (148, 148, 148), + 'grey59': (150, 150, 150), + 'grey6': (15, 15, 15), + 'grey60': (153, 153, 153), + 'grey61': (156, 156, 156), + 'grey62': (158, 158, 158), + 'grey63': (161, 161, 161), + 'grey64': (163, 163, 163), + 'grey65': (166, 166, 166), + 'grey66': (168, 168, 168), + 'grey67': (171, 171, 171), + 'grey68': (173, 173, 173), + 'grey69': (176, 176, 176), + 'grey7': (18, 18, 18), + 'grey70': (179, 179, 179), + 'grey71': (181, 181, 181), + 'grey72': (184, 184, 184), + 'grey73': (186, 186, 186), + 'grey74': (189, 189, 189), + 'grey75': (191, 191, 191), + 'grey76': (194, 194, 194), + 'grey77': (196, 196, 196), + 'grey78': (199, 199, 199), + 'grey79': (201, 201, 201), + 'grey8': (20, 20, 20), + 'grey80': (204, 204, 204), + 'grey81': (207, 207, 207), + 'grey82': (209, 209, 209), + 'grey83': (212, 212, 212), + 'grey84': (214, 214, 214), + 'grey85': (217, 217, 217), + 'grey86': (219, 219, 219), + 'grey87': (222, 222, 222), + 'grey88': (224, 224, 224), + 'grey89': (227, 227, 227), + 'grey9': (23, 23, 23), + 'grey90': (229, 229, 229), + 'grey91': (232, 232, 232), + 'grey92': (235, 235, 235), + 'grey93': (237, 237, 237), + 'grey94': (240, 240, 240), + 'grey95': (242, 242, 242), + 'grey96': (245, 245, 245), + 'grey97': (247, 247, 247), + 'grey98': (250, 250, 250), + 'grey99': (252, 252, 252), + 'honeydew': (240, 255, 240), + 'honeydew1': (240, 255, 240), + 'honeydew2': (224, 238, 224), + 'honeydew3': (193, 205, 193), + 'honeydew4': (131, 139, 131), + 'hotpink': (255, 105, 180), + 'hotpink1': (255, 110, 180), + 'hotpink2': (238, 106, 167), + 'hotpink3': (205, 96, 144), + 'hotpink4': (139, 58, 98), + 'indianred': (205, 92, 92), + 'indianred1': (255, 106, 106), + 'indianred2': (238, 99, 99), + 'indianred3': (205, 85, 85), + 'indianred4': (139, 58, 58), + 'indigo': (75, 0, 130), + 'invis': (255, 255, 254), + 'ivory': (255, 255, 240), + 'ivory1': (255, 255, 240), + 'ivory2': (238, 
238, 224), + 'ivory3': (205, 205, 193), + 'ivory4': (139, 139, 131), + 'khaki': (240, 230, 140), + 'khaki1': (255, 246, 143), + 'khaki2': (238, 230, 133), + 'khaki3': (205, 198, 115), + 'khaki4': (139, 134, 78), + 'lavender': (230, 230, 250), + 'lavenderblush': (255, 240, 245), + 'lavenderblush1': (255, 240, 245), + 'lavenderblush2': (238, 224, 229), + 'lavenderblush3': (205, 193, 197), + 'lavenderblush4': (139, 131, 134), + 'lawngreen': (124, 252, 0), + 'lemonchiffon': (255, 250, 205), + 'lemonchiffon1': (255, 250, 205), + 'lemonchiffon2': (238, 233, 191), + 'lemonchiffon3': (205, 201, 165), + 'lemonchiffon4': (139, 137, 112), + 'lightblue': (173, 216, 230), + 'lightblue1': (191, 239, 255), + 'lightblue2': (178, 223, 238), + 'lightblue3': (154, 192, 205), + 'lightblue4': (104, 131, 139), + 'lightcoral': (240, 128, 128), + 'lightcyan': (224, 255, 255), + 'lightcyan1': (224, 255, 255), + 'lightcyan2': (209, 238, 238), + 'lightcyan3': (180, 205, 205), + 'lightcyan4': (122, 139, 139), + 'lightgoldenrod': (238, 221, 130), + 'lightgoldenrod1': (255, 236, 139), + 'lightgoldenrod2': (238, 220, 130), + 'lightgoldenrod3': (205, 190, 112), + 'lightgoldenrod4': (139, 129, 76), + 'lightgoldenrodyellow': (250, 250, 210), + 'lightgray': (211, 211, 211), + 'lightgrey': (211, 211, 211), + 'lightpink': (255, 182, 193), + 'lightpink1': (255, 174, 185), + 'lightpink2': (238, 162, 173), + 'lightpink3': (205, 140, 149), + 'lightpink4': (139, 95, 101), + 'lightsalmon': (255, 160, 122), + 'lightsalmon1': (255, 160, 122), + 'lightsalmon2': (238, 149, 114), + 'lightsalmon3': (205, 129, 98), + 'lightsalmon4': (139, 87, 66), + 'lightseagreen': (32, 178, 170), + 'lightskyblue': (135, 206, 250), + 'lightskyblue1': (176, 226, 255), + 'lightskyblue2': (164, 211, 238), + 'lightskyblue3': (141, 182, 205), + 'lightskyblue4': (96, 123, 139), + 'lightslateblue': (132, 112, 255), + 'lightslategray': (119, 136, 153), + 'lightslategrey': (119, 136, 153), + 'lightsteelblue': (176, 196, 222), + 
'lightsteelblue1': (202, 225, 255), + 'lightsteelblue2': (188, 210, 238), + 'lightsteelblue3': (162, 181, 205), + 'lightsteelblue4': (110, 123, 139), + 'lightyellow': (255, 255, 224), + 'lightyellow1': (255, 255, 224), + 'lightyellow2': (238, 238, 209), + 'lightyellow3': (205, 205, 180), + 'lightyellow4': (139, 139, 122), + 'limegreen': (50, 205, 50), + 'linen': (250, 240, 230), + 'magenta': (255, 0, 255), + 'magenta1': (255, 0, 255), + 'magenta2': (238, 0, 238), + 'magenta3': (205, 0, 205), + 'magenta4': (139, 0, 139), + 'maroon': (176, 48, 96), + 'maroon1': (255, 52, 179), + 'maroon2': (238, 48, 167), + 'maroon3': (205, 41, 144), + 'maroon4': (139, 28, 98), + 'mediumaquamarine': (102, 205, 170), + 'mediumblue': (0, 0, 205), + 'mediumorchid': (186, 85, 211), + 'mediumorchid1': (224, 102, 255), + 'mediumorchid2': (209, 95, 238), + 'mediumorchid3': (180, 82, 205), + 'mediumorchid4': (122, 55, 139), + 'mediumpurple': (147, 112, 219), + 'mediumpurple1': (171, 130, 255), + 'mediumpurple2': (159, 121, 238), + 'mediumpurple3': (137, 104, 205), + 'mediumpurple4': (93, 71, 139), + 'mediumseagreen': (60, 179, 113), + 'mediumslateblue': (123, 104, 238), + 'mediumspringgreen': (0, 250, 154), + 'mediumturquoise': (72, 209, 204), + 'mediumvioletred': (199, 21, 133), + 'midnightblue': (25, 25, 112), + 'mintcream': (245, 255, 250), + 'mistyrose': (255, 228, 225), + 'mistyrose1': (255, 228, 225), + 'mistyrose2': (238, 213, 210), + 'mistyrose3': (205, 183, 181), + 'mistyrose4': (139, 125, 123), + 'moccasin': (255, 228, 181), + 'navajowhite': (255, 222, 173), + 'navajowhite1': (255, 222, 173), + 'navajowhite2': (238, 207, 161), + 'navajowhite3': (205, 179, 139), + 'navajowhite4': (139, 121, 94), + 'navy': (0, 0, 128), + 'navyblue': (0, 0, 128), + 'none': (255, 255, 254), + 'oldlace': (253, 245, 230), + 'olivedrab': (107, 142, 35), + 'olivedrab1': (192, 255, 62), + 'olivedrab2': (179, 238, 58), + 'olivedrab3': (154, 205, 50), + 'olivedrab4': (105, 139, 34), + 'orange': (255, 165, 0), 
+ 'orange1': (255, 165, 0), + 'orange2': (238, 154, 0), + 'orange3': (205, 133, 0), + 'orange4': (139, 90, 0), + 'orangered': (255, 69, 0), + 'orangered1': (255, 69, 0), + 'orangered2': (238, 64, 0), + 'orangered3': (205, 55, 0), + 'orangered4': (139, 37, 0), + 'orchid': (218, 112, 214), + 'orchid1': (255, 131, 250), + 'orchid2': (238, 122, 233), + 'orchid3': (205, 105, 201), + 'orchid4': (139, 71, 137), + 'palegoldenrod': (238, 232, 170), + 'palegreen': (152, 251, 152), + 'palegreen1': (154, 255, 154), + 'palegreen2': (144, 238, 144), + 'palegreen3': (124, 205, 124), + 'palegreen4': (84, 139, 84), + 'paleturquoise': (175, 238, 238), + 'paleturquoise1': (187, 255, 255), + 'paleturquoise2': (174, 238, 238), + 'paleturquoise3': (150, 205, 205), + 'paleturquoise4': (102, 139, 139), + 'palevioletred': (219, 112, 147), + 'palevioletred1': (255, 130, 171), + 'palevioletred2': (238, 121, 159), + 'palevioletred3': (205, 104, 137), + 'palevioletred4': (139, 71, 93), + 'papayawhip': (255, 239, 213), + 'peachpuff': (255, 218, 185), + 'peachpuff1': (255, 218, 185), + 'peachpuff2': (238, 203, 173), + 'peachpuff3': (205, 175, 149), + 'peachpuff4': (139, 119, 101), + 'peru': (205, 133, 63), + 'pink': (255, 192, 203), + 'pink1': (255, 181, 197), + 'pink2': (238, 169, 184), + 'pink3': (205, 145, 158), + 'pink4': (139, 99, 108), + 'plum': (221, 160, 221), + 'plum1': (255, 187, 255), + 'plum2': (238, 174, 238), + 'plum3': (205, 150, 205), + 'plum4': (139, 102, 139), + 'powderblue': (176, 224, 230), + 'purple': (160, 32, 240), + 'purple1': (155, 48, 255), + 'purple2': (145, 44, 238), + 'purple3': (125, 38, 205), + 'purple4': (85, 26, 139), + 'red': (255, 0, 0), + 'red1': (255, 0, 0), + 'red2': (238, 0, 0), + 'red3': (205, 0, 0), + 'red4': (139, 0, 0), + 'rosybrown': (188, 143, 143), + 'rosybrown1': (255, 193, 193), + 'rosybrown2': (238, 180, 180), + 'rosybrown3': (205, 155, 155), + 'rosybrown4': (139, 105, 105), + 'royalblue': (65, 105, 225), + 'royalblue1': (72, 118, 255), + 
'royalblue2': (67, 110, 238), + 'royalblue3': (58, 95, 205), + 'royalblue4': (39, 64, 139), + 'saddlebrown': (139, 69, 19), + 'salmon': (250, 128, 114), + 'salmon1': (255, 140, 105), + 'salmon2': (238, 130, 98), + 'salmon3': (205, 112, 84), + 'salmon4': (139, 76, 57), + 'sandybrown': (244, 164, 96), + 'seagreen': (46, 139, 87), + 'seagreen1': (84, 255, 159), + 'seagreen2': (78, 238, 148), + 'seagreen3': (67, 205, 128), + 'seagreen4': (46, 139, 87), + 'seashell': (255, 245, 238), + 'seashell1': (255, 245, 238), + 'seashell2': (238, 229, 222), + 'seashell3': (205, 197, 191), + 'seashell4': (139, 134, 130), + 'sienna': (160, 82, 45), + 'sienna1': (255, 130, 71), + 'sienna2': (238, 121, 66), + 'sienna3': (205, 104, 57), + 'sienna4': (139, 71, 38), + 'skyblue': (135, 206, 235), + 'skyblue1': (135, 206, 255), + 'skyblue2': (126, 192, 238), + 'skyblue3': (108, 166, 205), + 'skyblue4': (74, 112, 139), + 'slateblue': (106, 90, 205), + 'slateblue1': (131, 111, 255), + 'slateblue2': (122, 103, 238), + 'slateblue3': (105, 89, 205), + 'slateblue4': (71, 60, 139), + 'slategray': (112, 128, 144), + 'slategray1': (198, 226, 255), + 'slategray2': (185, 211, 238), + 'slategray3': (159, 182, 205), + 'slategray4': (108, 123, 139), + 'slategrey': (112, 128, 144), + 'snow': (255, 250, 250), + 'snow1': (255, 250, 250), + 'snow2': (238, 233, 233), + 'snow3': (205, 201, 201), + 'snow4': (139, 137, 137), + 'springgreen': (0, 255, 127), + 'springgreen1': (0, 255, 127), + 'springgreen2': (0, 238, 118), + 'springgreen3': (0, 205, 102), + 'springgreen4': (0, 139, 69), + 'steelblue': (70, 130, 180), + 'steelblue1': (99, 184, 255), + 'steelblue2': (92, 172, 238), + 'steelblue3': (79, 148, 205), + 'steelblue4': (54, 100, 139), + 'tan': (210, 180, 140), + 'tan1': (255, 165, 79), + 'tan2': (238, 154, 73), + 'tan3': (205, 133, 63), + 'tan4': (139, 90, 43), + 'thistle': (216, 191, 216), + 'thistle1': (255, 225, 255), + 'thistle2': (238, 210, 238), + 'thistle3': (205, 181, 205), + 'thistle4': (139, 
123, 139), + 'tomato': (255, 99, 71), + 'tomato1': (255, 99, 71), + 'tomato2': (238, 92, 66), + 'tomato3': (205, 79, 57), + 'tomato4': (139, 54, 38), + 'transparent': (255, 255, 254), + 'turquoise': (64, 224, 208), + 'turquoise1': (0, 245, 255), + 'turquoise2': (0, 229, 238), + 'turquoise3': (0, 197, 205), + 'turquoise4': (0, 134, 139), + 'violet': (238, 130, 238), + 'violetred': (208, 32, 144), + 'violetred1': (255, 62, 150), + 'violetred2': (238, 58, 140), + 'violetred3': (205, 50, 120), + 'violetred4': (139, 34, 82), + 'wheat': (245, 222, 179), + 'wheat1': (255, 231, 186), + 'wheat2': (238, 216, 174), + 'wheat3': (205, 186, 150), + 'wheat4': (139, 126, 102), + 'white': (255, 255, 255), + 'whitesmoke': (245, 245, 245), + 'yellow': (255, 255, 0), + 'yellow1': (255, 255, 0), + 'yellow2': (238, 238, 0), + 'yellow3': (205, 205, 0), + 'yellow4': (139, 139, 0), + 'yellowgreen': (154, 205, 50), } re_nonword=re.compile(r'([^0-9a-zA-Z_.]+)') re_linewidth=re.compile(r'setlinewidth\((\d+(\.\d*)?|\.\d+)\)') diff --git a/lib-python/2.7/distutils/command/build_ext.py b/lib-python/2.7/distutils/command/build_ext.py --- a/lib-python/2.7/distutils/command/build_ext.py +++ b/lib-python/2.7/distutils/command/build_ext.py @@ -685,13 +685,17 @@ # the previous version of this code did. This should work for # CPython too. The point is that on PyPy with cpyext, the # config var 'SO' is just ".so" but we want to return - # ".pypy-VERSION.so" instead. - so_ext = _get_c_extension_suffix() + # ".pypy-VERSION.so" instead. Note a further tweak for cffi's + # embedding mode: if EXT_SUFFIX is also defined, use that + # directly. 
+ so_ext = get_config_var('EXT_SUFFIX') if so_ext is None: - so_ext = get_config_var('SO') # fall-back - # extensions in debug_mode are named 'module_d.pyd' under windows - if os.name == 'nt' and self.debug: - so_ext = '_d.pyd' + so_ext = _get_c_extension_suffix() + if so_ext is None: + so_ext = get_config_var('SO') # fall-back + # extensions in debug_mode are named 'module_d.pyd' under windows + if os.name == 'nt' and self.debug: + so_ext = '_d.pyd' return os.path.join(*ext_path) + so_ext def get_export_symbols (self, ext): diff --git a/lib-python/2.7/test/capath/0e4015b9.0 b/lib-python/2.7/test/capath/0e4015b9.0 new file mode 100644 --- /dev/null +++ b/lib-python/2.7/test/capath/0e4015b9.0 @@ -0,0 +1,16 @@ +-----BEGIN CERTIFICATE----- +MIIClTCCAf6gAwIBAgIJAKGU95wKR8pTMA0GCSqGSIb3DQEBBQUAMHAxCzAJBgNV +BAYTAlhZMRcwFQYDVQQHDA5DYXN0bGUgQW50aHJheDEjMCEGA1UECgwaUHl0aG9u +IFNvZnR3YXJlIEZvdW5kYXRpb24xIzAhBgNVBAMMGnNlbGYtc2lnbmVkLnB5dGhv +bnRlc3QubmV0MB4XDTE0MTEwMjE4MDkyOVoXDTI0MTAzMDE4MDkyOVowcDELMAkG +A1UEBhMCWFkxFzAVBgNVBAcMDkNhc3RsZSBBbnRocmF4MSMwIQYDVQQKDBpQeXRo +b24gU29mdHdhcmUgRm91bmRhdGlvbjEjMCEGA1UEAwwac2VsZi1zaWduZWQucHl0 +aG9udGVzdC5uZXQwgZ8wDQYJKoZIhvcNAQEBBQADgY0AMIGJAoGBANDXQXW9tjyZ +Xt0Iv2tLL1+jinr4wGg36ioLDLFkMf+2Y1GL0v0BnKYG4N1OKlAU15LXGeGer8vm +Sv/yIvmdrELvhAbbo3w4a9TMYQA4XkIVLdvu3mvNOAet+8PMJxn26dbDhG809ALv +EHY57lQsBS3G59RZyBPVqAqmImWNJnVzAgMBAAGjNzA1MCUGA1UdEQQeMByCGnNl +bGYtc2lnbmVkLnB5dGhvbnRlc3QubmV0MAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcN +AQEFBQADgYEAIuzAhgMouJpNdf3URCHIineyoSt6WK/9+eyUcjlKOrDoXNZaD72h +TXMeKYoWvJyVcSLKL8ckPtDobgP2OTt0UkyAaj0n+ZHaqq1lH2yVfGUA1ILJv515 +C8BqbvVZuqm3i7ygmw3bqE/lYMgOrYtXXnqOrz6nvsE6Yc9V9rFflOM= +-----END CERTIFICATE----- diff --git a/lib-python/2.7/test/capath/ce7b8643.0 b/lib-python/2.7/test/capath/ce7b8643.0 new file mode 100644 --- /dev/null +++ b/lib-python/2.7/test/capath/ce7b8643.0 @@ -0,0 +1,16 @@ +-----BEGIN CERTIFICATE----- +MIIClTCCAf6gAwIBAgIJAKGU95wKR8pTMA0GCSqGSIb3DQEBBQUAMHAxCzAJBgNV 
+BAYTAlhZMRcwFQYDVQQHDA5DYXN0bGUgQW50aHJheDEjMCEGA1UECgwaUHl0aG9u +IFNvZnR3YXJlIEZvdW5kYXRpb24xIzAhBgNVBAMMGnNlbGYtc2lnbmVkLnB5dGhv +bnRlc3QubmV0MB4XDTE0MTEwMjE4MDkyOVoXDTI0MTAzMDE4MDkyOVowcDELMAkG +A1UEBhMCWFkxFzAVBgNVBAcMDkNhc3RsZSBBbnRocmF4MSMwIQYDVQQKDBpQeXRo +b24gU29mdHdhcmUgRm91bmRhdGlvbjEjMCEGA1UEAwwac2VsZi1zaWduZWQucHl0 +aG9udGVzdC5uZXQwgZ8wDQYJKoZIhvcNAQEBBQADgY0AMIGJAoGBANDXQXW9tjyZ +Xt0Iv2tLL1+jinr4wGg36ioLDLFkMf+2Y1GL0v0BnKYG4N1OKlAU15LXGeGer8vm +Sv/yIvmdrELvhAbbo3w4a9TMYQA4XkIVLdvu3mvNOAet+8PMJxn26dbDhG809ALv +EHY57lQsBS3G59RZyBPVqAqmImWNJnVzAgMBAAGjNzA1MCUGA1UdEQQeMByCGnNl +bGYtc2lnbmVkLnB5dGhvbnRlc3QubmV0MAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcN +AQEFBQADgYEAIuzAhgMouJpNdf3URCHIineyoSt6WK/9+eyUcjlKOrDoXNZaD72h +TXMeKYoWvJyVcSLKL8ckPtDobgP2OTt0UkyAaj0n+ZHaqq1lH2yVfGUA1ILJv515 +C8BqbvVZuqm3i7ygmw3bqE/lYMgOrYtXXnqOrz6nvsE6Yc9V9rFflOM= +-----END CERTIFICATE----- diff --git a/lib-python/2.7/test/https_svn_python_org_root.pem b/lib-python/2.7/test/https_svn_python_org_root.pem deleted file mode 100644 --- a/lib-python/2.7/test/https_svn_python_org_root.pem +++ /dev/null @@ -1,41 +0,0 @@ ------BEGIN CERTIFICATE----- -MIIHPTCCBSWgAwIBAgIBADANBgkqhkiG9w0BAQQFADB5MRAwDgYDVQQKEwdSb290 -IENBMR4wHAYDVQQLExVodHRwOi8vd3d3LmNhY2VydC5vcmcxIjAgBgNVBAMTGUNB -IENlcnQgU2lnbmluZyBBdXRob3JpdHkxITAfBgkqhkiG9w0BCQEWEnN1cHBvcnRA -Y2FjZXJ0Lm9yZzAeFw0wMzAzMzAxMjI5NDlaFw0zMzAzMjkxMjI5NDlaMHkxEDAO -BgNVBAoTB1Jvb3QgQ0ExHjAcBgNVBAsTFWh0dHA6Ly93d3cuY2FjZXJ0Lm9yZzEi -MCAGA1UEAxMZQ0EgQ2VydCBTaWduaW5nIEF1dGhvcml0eTEhMB8GCSqGSIb3DQEJ -ARYSc3VwcG9ydEBjYWNlcnQub3JnMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIIC -CgKCAgEAziLA4kZ97DYoB1CW8qAzQIxL8TtmPzHlawI229Z89vGIj053NgVBlfkJ -8BLPRoZzYLdufujAWGSuzbCtRRcMY/pnCujW0r8+55jE8Ez64AO7NV1sId6eINm6 -zWYyN3L69wj1x81YyY7nDl7qPv4coRQKFWyGhFtkZip6qUtTefWIonvuLwphK42y -fk1WpRPs6tqSnqxEQR5YYGUFZvjARL3LlPdCfgv3ZWiYUQXw8wWRBB0bF4LsyFe7 -w2t6iPGwcswlWyCR7BYCEo8y6RcYSNDHBS4CMEK4JZwFaz+qOqfrU0j36NK2B5jc -G8Y0f3/JHIJ6BVgrCFvzOKKrF11myZjXnhCLotLddJr3cQxyYN/Nb5gznZY0dj4k 
-epKwDpUeb+agRThHqtdB7Uq3EvbXG4OKDy7YCbZZ16oE/9KTfWgu3YtLq1i6L43q -laegw1SJpfvbi1EinbLDvhG+LJGGi5Z4rSDTii8aP8bQUWWHIbEZAWV/RRyH9XzQ -QUxPKZgh/TMfdQwEUfoZd9vUFBzugcMd9Zi3aQaRIt0AUMyBMawSB3s42mhb5ivU -fslfrejrckzzAeVLIL+aplfKkQABi6F1ITe1Yw1nPkZPcCBnzsXWWdsC4PDSy826 -YreQQejdIOQpvGQpQsgi3Hia/0PsmBsJUUtaWsJx8cTLc6nloQsCAwEAAaOCAc4w -ggHKMB0GA1UdDgQWBBQWtTIb1Mfz4OaO873SsDrusjkY0TCBowYDVR0jBIGbMIGY -gBQWtTIb1Mfz4OaO873SsDrusjkY0aF9pHsweTEQMA4GA1UEChMHUm9vdCBDQTEe -MBwGA1UECxMVaHR0cDovL3d3dy5jYWNlcnQub3JnMSIwIAYDVQQDExlDQSBDZXJ0 -IFNpZ25pbmcgQXV0aG9yaXR5MSEwHwYJKoZIhvcNAQkBFhJzdXBwb3J0QGNhY2Vy -dC5vcmeCAQAwDwYDVR0TAQH/BAUwAwEB/zAyBgNVHR8EKzApMCegJaAjhiFodHRw -czovL3d3dy5jYWNlcnQub3JnL3Jldm9rZS5jcmwwMAYJYIZIAYb4QgEEBCMWIWh0 -dHBzOi8vd3d3LmNhY2VydC5vcmcvcmV2b2tlLmNybDA0BglghkgBhvhCAQgEJxYl -aHR0cDovL3d3dy5jYWNlcnQub3JnL2luZGV4LnBocD9pZD0xMDBWBglghkgBhvhC -AQ0ESRZHVG8gZ2V0IHlvdXIgb3duIGNlcnRpZmljYXRlIGZvciBGUkVFIGhlYWQg -b3ZlciB0byBodHRwOi8vd3d3LmNhY2VydC5vcmcwDQYJKoZIhvcNAQEEBQADggIB -ACjH7pyCArpcgBLKNQodgW+JapnM8mgPf6fhjViVPr3yBsOQWqy1YPaZQwGjiHCc -nWKdpIevZ1gNMDY75q1I08t0AoZxPuIrA2jxNGJARjtT6ij0rPtmlVOKTV39O9lg -18p5aTuxZZKmxoGCXJzN600BiqXfEVWqFcofN8CCmHBh22p8lqOOLlQ+TyGpkO/c -gr/c6EWtTZBzCDyUZbAEmXZ/4rzCahWqlwQ3JNgelE5tDlG+1sSPypZt90Pf6DBl -Jzt7u0NDY8RD97LsaMzhGY4i+5jhe1o+ATc7iwiwovOVThrLm82asduycPAtStvY -sONvRUgzEv/+PDIqVPfE94rwiCPCR/5kenHA0R6mY7AHfqQv0wGP3J8rtsYIqQ+T -SCX8Ev2fQtzzxD72V7DX3WnRBnc0CkvSyqD/HMaMyRa+xMwyN2hzXwj7UfdJUzYF -CpUCTPJ5GhD22Dp1nPMd8aINcGeGG7MW9S/lpOt5hvk9C8JzC6WZrG/8Z7jlLwum -GCSNe9FINSkYQKyTYOGWhlC0elnYjyELn8+CkcY7v2vcB5G5l1YjqrZslMZIBjzk -zk6q5PYvCdxTby78dOs6Y5nCpqyJvKeyRKANihDjbPIky/qbn3BHLt4Ui9SyIAmW -omTxJBzcoTWcFbLUvFUufQb1nA5V9FrWk9p2rSVzTMVD ------END CERTIFICATE----- diff --git a/lib-python/2.7/test/selfsigned_pythontestdotnet.pem b/lib-python/2.7/test/selfsigned_pythontestdotnet.pem --- a/lib-python/2.7/test/selfsigned_pythontestdotnet.pem +++ b/lib-python/2.7/test/selfsigned_pythontestdotnet.pem @@ -1,5 +1,5 @@ -----BEGIN 
CERTIFICATE----- -MIIChzCCAfCgAwIBAgIJAKGU95wKR8pSMA0GCSqGSIb3DQEBBQUAMHAxCzAJBgNV +MIIClTCCAf6gAwIBAgIJAKGU95wKR8pTMA0GCSqGSIb3DQEBBQUAMHAxCzAJBgNV BAYTAlhZMRcwFQYDVQQHDA5DYXN0bGUgQW50aHJheDEjMCEGA1UECgwaUHl0aG9u IFNvZnR3YXJlIEZvdW5kYXRpb24xIzAhBgNVBAMMGnNlbGYtc2lnbmVkLnB5dGhv bnRlc3QubmV0MB4XDTE0MTEwMjE4MDkyOVoXDTI0MTAzMDE4MDkyOVowcDELMAkG @@ -8,9 +8,9 @@ aG9udGVzdC5uZXQwgZ8wDQYJKoZIhvcNAQEBBQADgY0AMIGJAoGBANDXQXW9tjyZ Xt0Iv2tLL1+jinr4wGg36ioLDLFkMf+2Y1GL0v0BnKYG4N1OKlAU15LXGeGer8vm Sv/yIvmdrELvhAbbo3w4a9TMYQA4XkIVLdvu3mvNOAet+8PMJxn26dbDhG809ALv -EHY57lQsBS3G59RZyBPVqAqmImWNJnVzAgMBAAGjKTAnMCUGA1UdEQQeMByCGnNl -bGYtc2lnbmVkLnB5dGhvbnRlc3QubmV0MA0GCSqGSIb3DQEBBQUAA4GBAIOXmdtM -eG9qzP9TiXW/Gc/zI4cBfdCpC+Y4gOfC9bQUC7hefix4iO3+iZjgy3X/FaRxUUoV -HKiXcXIaWqTSUWp45cSh0MbwZXudp6JIAptzdAhvvCrPKeC9i9GvxsPD4LtDAL97 -vSaxQBezA7hdxZd90/EeyMgVZgAnTCnvAWX9 +EHY57lQsBS3G59RZyBPVqAqmImWNJnVzAgMBAAGjNzA1MCUGA1UdEQQeMByCGnNl +bGYtc2lnbmVkLnB5dGhvbnRlc3QubmV0MAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcN +AQEFBQADgYEAIuzAhgMouJpNdf3URCHIineyoSt6WK/9+eyUcjlKOrDoXNZaD72h +TXMeKYoWvJyVcSLKL8ckPtDobgP2OTt0UkyAaj0n+ZHaqq1lH2yVfGUA1ILJv515 +C8BqbvVZuqm3i7ygmw3bqE/lYMgOrYtXXnqOrz6nvsE6Yc9V9rFflOM= -----END CERTIFICATE----- diff --git a/lib-python/2.7/test/test_ssl.py b/lib-python/2.7/test/test_ssl.py --- a/lib-python/2.7/test/test_ssl.py +++ b/lib-python/2.7/test/test_ssl.py @@ -57,7 +57,8 @@ SIGNED_CERTFILE2 = data_file("keycert4.pem") SIGNING_CA = data_file("pycacert.pem") -SVN_PYTHON_ORG_ROOT_CERT = data_file("https_svn_python_org_root.pem") +REMOTE_HOST = "self-signed.pythontest.net" +REMOTE_ROOT_CERT = data_file("selfsigned_pythontestdotnet.pem") EMPTYCERT = data_file("nullcert.pem") BADCERT = data_file("badcert.pem") @@ -244,7 +245,7 @@ self.assertEqual(p['subjectAltName'], san) def test_DER_to_PEM(self): - with open(SVN_PYTHON_ORG_ROOT_CERT, 'r') as f: + with open(CAFILE_CACERT, 'r') as f: pem = f.read() d1 = ssl.PEM_cert_to_DER_cert(pem) p2 = ssl.DER_cert_to_PEM_cert(d1) @@ -792,7 +793,7 @@ # 
Mismatching key and cert ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1) with self.assertRaisesRegexp(ssl.SSLError, "key values mismatch"): - ctx.load_cert_chain(SVN_PYTHON_ORG_ROOT_CERT, ONLYKEY) + ctx.load_cert_chain(CAFILE_CACERT, ONLYKEY) # Password protected key and cert ctx.load_cert_chain(CERTFILE_PROTECTED, password=KEY_PASSWORD) ctx.load_cert_chain(CERTFILE_PROTECTED, password=KEY_PASSWORD.encode()) @@ -1013,7 +1014,7 @@ ctx.load_verify_locations(CERTFILE) self.assertEqual(ctx.cert_store_stats(), {'x509_ca': 0, 'crl': 0, 'x509': 1}) - ctx.load_verify_locations(SVN_PYTHON_ORG_ROOT_CERT) + ctx.load_verify_locations(CAFILE_CACERT) self.assertEqual(ctx.cert_store_stats(), {'x509_ca': 1, 'crl': 0, 'x509': 2}) @@ -1023,8 +1024,8 @@ # CERTFILE is not flagged as X509v3 Basic Constraints: CA:TRUE ctx.load_verify_locations(CERTFILE) self.assertEqual(ctx.get_ca_certs(), []) - # but SVN_PYTHON_ORG_ROOT_CERT is a CA cert - ctx.load_verify_locations(SVN_PYTHON_ORG_ROOT_CERT) + # but CAFILE_CACERT is a CA cert + ctx.load_verify_locations(CAFILE_CACERT) self.assertEqual(ctx.get_ca_certs(), [{'issuer': ((('organizationName', 'Root CA'),), (('organizationalUnitName', 'http://www.cacert.org'),), @@ -1040,7 +1041,7 @@ (('emailAddress', 'support at cacert.org'),)), 'version': 3}]) - with open(SVN_PYTHON_ORG_ROOT_CERT) as f: + with open(CAFILE_CACERT) as f: pem = f.read() der = ssl.PEM_cert_to_DER_cert(pem) self.assertEqual(ctx.get_ca_certs(True), [der]) @@ -1215,11 +1216,11 @@ class NetworkedTests(unittest.TestCase): def test_connect(self): - with support.transient_internet("svn.python.org"): + with support.transient_internet(REMOTE_HOST): s = ssl.wrap_socket(socket.socket(socket.AF_INET), cert_reqs=ssl.CERT_NONE) try: - s.connect(("svn.python.org", 443)) + s.connect((REMOTE_HOST, 443)) self.assertEqual({}, s.getpeercert()) finally: s.close() @@ -1228,27 +1229,27 @@ s = ssl.wrap_socket(socket.socket(socket.AF_INET), cert_reqs=ssl.CERT_REQUIRED) self.assertRaisesRegexp(ssl.SSLError, 
"certificate verify failed", - s.connect, ("svn.python.org", 443)) + s.connect, (REMOTE_HOST, 443)) s.close() # this should succeed because we specify the root cert s = ssl.wrap_socket(socket.socket(socket.AF_INET), cert_reqs=ssl.CERT_REQUIRED, - ca_certs=SVN_PYTHON_ORG_ROOT_CERT) + ca_certs=REMOTE_ROOT_CERT) try: - s.connect(("svn.python.org", 443)) + s.connect((REMOTE_HOST, 443)) self.assertTrue(s.getpeercert()) finally: s.close() def test_connect_ex(self): # Issue #11326: check connect_ex() implementation - with support.transient_internet("svn.python.org"): + with support.transient_internet(REMOTE_HOST): s = ssl.wrap_socket(socket.socket(socket.AF_INET), cert_reqs=ssl.CERT_REQUIRED, - ca_certs=SVN_PYTHON_ORG_ROOT_CERT) + ca_certs=REMOTE_ROOT_CERT) try: - self.assertEqual(0, s.connect_ex(("svn.python.org", 443))) + self.assertEqual(0, s.connect_ex((REMOTE_HOST, 443))) self.assertTrue(s.getpeercert()) finally: s.close() @@ -1256,14 +1257,14 @@ def test_non_blocking_connect_ex(self): # Issue #11326: non-blocking connect_ex() should allow handshake # to proceed after the socket gets ready. - with support.transient_internet("svn.python.org"): + with support.transient_internet(REMOTE_HOST): s = ssl.wrap_socket(socket.socket(socket.AF_INET), cert_reqs=ssl.CERT_REQUIRED, - ca_certs=SVN_PYTHON_ORG_ROOT_CERT, + ca_certs=REMOTE_ROOT_CERT, do_handshake_on_connect=False) try: s.setblocking(False) - rc = s.connect_ex(('svn.python.org', 443)) + rc = s.connect_ex((REMOTE_HOST, 443)) # EWOULDBLOCK under Windows, EINPROGRESS elsewhere self.assertIn(rc, (0, errno.EINPROGRESS, errno.EWOULDBLOCK)) # Wait for connect to finish @@ -1285,58 +1286,62 @@ def test_timeout_connect_ex(self): # Issue #12065: on a timeout, connect_ex() should return the original # errno (mimicking the behaviour of non-SSL sockets). 
- with support.transient_internet("svn.python.org"): + with support.transient_internet(REMOTE_HOST): s = ssl.wrap_socket(socket.socket(socket.AF_INET), cert_reqs=ssl.CERT_REQUIRED, - ca_certs=SVN_PYTHON_ORG_ROOT_CERT, + ca_certs=REMOTE_ROOT_CERT, do_handshake_on_connect=False) try: s.settimeout(0.0000001) - rc = s.connect_ex(('svn.python.org', 443)) + rc = s.connect_ex((REMOTE_HOST, 443)) if rc == 0: - self.skipTest("svn.python.org responded too quickly") + self.skipTest("REMOTE_HOST responded too quickly") self.assertIn(rc, (errno.EAGAIN, errno.EWOULDBLOCK)) finally: s.close() def test_connect_ex_error(self): - with support.transient_internet("svn.python.org"): + with support.transient_internet(REMOTE_HOST): s = ssl.wrap_socket(socket.socket(socket.AF_INET), cert_reqs=ssl.CERT_REQUIRED, - ca_certs=SVN_PYTHON_ORG_ROOT_CERT) + ca_certs=REMOTE_ROOT_CERT) try: - rc = s.connect_ex(("svn.python.org", 444)) + rc = s.connect_ex((REMOTE_HOST, 444)) # Issue #19919: Windows machines or VMs hosted on Windows # machines sometimes return EWOULDBLOCK. 
- self.assertIn(rc, (errno.ECONNREFUSED, errno.EWOULDBLOCK)) + errors = ( + errno.ECONNREFUSED, errno.EHOSTUNREACH, errno.ETIMEDOUT, + errno.EWOULDBLOCK, + ) + self.assertIn(rc, errors) finally: s.close() def test_connect_with_context(self): - with support.transient_internet("svn.python.org"): + with support.transient_internet(REMOTE_HOST): # Same as test_connect, but with a separately created context ctx = ssl.SSLContext(ssl.PROTOCOL_SSLv23) s = ctx.wrap_socket(socket.socket(socket.AF_INET)) - s.connect(("svn.python.org", 443)) + s.connect((REMOTE_HOST, 443)) try: self.assertEqual({}, s.getpeercert()) finally: s.close() # Same with a server hostname s = ctx.wrap_socket(socket.socket(socket.AF_INET), - server_hostname="svn.python.org") - s.connect(("svn.python.org", 443)) + server_hostname=REMOTE_HOST) + s.connect((REMOTE_HOST, 443)) s.close() # This should fail because we have no verification certs ctx.verify_mode = ssl.CERT_REQUIRED s = ctx.wrap_socket(socket.socket(socket.AF_INET)) self.assertRaisesRegexp(ssl.SSLError, "certificate verify failed", - s.connect, ("svn.python.org", 443)) + s.connect, (REMOTE_HOST, 443)) s.close() # This should succeed because we specify the root cert - ctx.load_verify_locations(SVN_PYTHON_ORG_ROOT_CERT) + ctx.load_verify_locations(REMOTE_ROOT_CERT) s = ctx.wrap_socket(socket.socket(socket.AF_INET)) - s.connect(("svn.python.org", 443)) + s.connect((REMOTE_HOST, 443)) try: cert = s.getpeercert() self.assertTrue(cert) @@ -1349,12 +1354,12 @@ # OpenSSL 0.9.8n and 1.0.0, as a result the capath directory must # contain both versions of each certificate (same content, different # filename) for this test to be portable across OpenSSL releases. 
- with support.transient_internet("svn.python.org"): + with support.transient_internet(REMOTE_HOST): ctx = ssl.SSLContext(ssl.PROTOCOL_SSLv23) ctx.verify_mode = ssl.CERT_REQUIRED ctx.load_verify_locations(capath=CAPATH) s = ctx.wrap_socket(socket.socket(socket.AF_INET)) - s.connect(("svn.python.org", 443)) + s.connect((REMOTE_HOST, 443)) try: cert = s.getpeercert() self.assertTrue(cert) @@ -1365,7 +1370,7 @@ ctx.verify_mode = ssl.CERT_REQUIRED ctx.load_verify_locations(capath=BYTES_CAPATH) s = ctx.wrap_socket(socket.socket(socket.AF_INET)) - s.connect(("svn.python.org", 443)) + s.connect((REMOTE_HOST, 443)) try: cert = s.getpeercert() self.assertTrue(cert) @@ -1373,15 +1378,15 @@ s.close() def test_connect_cadata(self): - with open(CAFILE_CACERT) as f: + with open(REMOTE_ROOT_CERT) as f: pem = f.read().decode('ascii') der = ssl.PEM_cert_to_DER_cert(pem) - with support.transient_internet("svn.python.org"): + with support.transient_internet(REMOTE_HOST): ctx = ssl.SSLContext(ssl.PROTOCOL_SSLv23) ctx.verify_mode = ssl.CERT_REQUIRED ctx.load_verify_locations(cadata=pem) with closing(ctx.wrap_socket(socket.socket(socket.AF_INET))) as s: - s.connect(("svn.python.org", 443)) + s.connect((REMOTE_HOST, 443)) cert = s.getpeercert() self.assertTrue(cert) @@ -1390,7 +1395,7 @@ ctx.verify_mode = ssl.CERT_REQUIRED ctx.load_verify_locations(cadata=der) with closing(ctx.wrap_socket(socket.socket(socket.AF_INET))) as s: - s.connect(("svn.python.org", 443)) + s.connect((REMOTE_HOST, 443)) cert = s.getpeercert() self.assertTrue(cert) @@ -1399,9 +1404,9 @@ # Issue #5238: creating a file-like object with makefile() shouldn't # delay closing the underlying "real socket" (here tested with its # file descriptor, hence skipping the test under Windows). 
- with support.transient_internet("svn.python.org"): + with support.transient_internet(REMOTE_HOST): ss = ssl.wrap_socket(socket.socket(socket.AF_INET)) - ss.connect(("svn.python.org", 443)) + ss.connect((REMOTE_HOST, 443)) fd = ss.fileno() f = ss.makefile() f.close() @@ -1415,9 +1420,9 @@ self.assertEqual(e.exception.errno, errno.EBADF) def test_non_blocking_handshake(self): - with support.transient_internet("svn.python.org"): + with support.transient_internet(REMOTE_HOST): s = socket.socket(socket.AF_INET) - s.connect(("svn.python.org", 443)) + s.connect((REMOTE_HOST, 443)) s.setblocking(False) s = ssl.wrap_socket(s, cert_reqs=ssl.CERT_NONE, @@ -1460,12 +1465,12 @@ if support.verbose: sys.stdout.write("\nVerified certificate for %s:%s is\n%s\n" % (host, port ,pem)) - _test_get_server_certificate('svn.python.org', 443, SVN_PYTHON_ORG_ROOT_CERT) + _test_get_server_certificate(REMOTE_HOST, 443, REMOTE_ROOT_CERT) if support.IPV6_ENABLED: _test_get_server_certificate('ipv6.google.com', 443) def test_ciphers(self): - remote = ("svn.python.org", 443) + remote = (REMOTE_HOST, 443) with support.transient_internet(remote[0]): with closing(ssl.wrap_socket(socket.socket(socket.AF_INET), cert_reqs=ssl.CERT_NONE, ciphers="ALL")) as s: @@ -1510,13 +1515,13 @@ def test_get_ca_certs_capath(self): # capath certs are loaded on request - with support.transient_internet("svn.python.org"): + with support.transient_internet(REMOTE_HOST): ctx = ssl.SSLContext(ssl.PROTOCOL_SSLv23) ctx.verify_mode = ssl.CERT_REQUIRED ctx.load_verify_locations(capath=CAPATH) self.assertEqual(ctx.get_ca_certs(), []) s = ctx.wrap_socket(socket.socket(socket.AF_INET)) - s.connect(("svn.python.org", 443)) + s.connect((REMOTE_HOST, 443)) try: cert = s.getpeercert() self.assertTrue(cert) @@ -1527,12 +1532,12 @@ @needs_sni def test_context_setget(self): # Check that the context of a connected socket can be replaced. 
- with support.transient_internet("svn.python.org"): + with support.transient_internet(REMOTE_HOST): ctx1 = ssl.SSLContext(ssl.PROTOCOL_TLSv1) ctx2 = ssl.SSLContext(ssl.PROTOCOL_SSLv23) s = socket.socket(socket.AF_INET) with closing(ctx1.wrap_socket(s)) as ss: - ss.connect(("svn.python.org", 443)) + ss.connect((REMOTE_HOST, 443)) self.assertIs(ss.context, ctx1) self.assertIs(ss._sslobj.context, ctx1) ss.context = ctx2 @@ -3026,7 +3031,7 @@ pass for filename in [ - CERTFILE, SVN_PYTHON_ORG_ROOT_CERT, BYTES_CERTFILE, + CERTFILE, REMOTE_ROOT_CERT, BYTES_CERTFILE, ONLYCERT, ONLYKEY, BYTES_ONLYCERT, BYTES_ONLYKEY, SIGNED_CERTFILE, SIGNED_CERTFILE2, SIGNING_CA, BADCERT, BADKEY, EMPTYCERT]: diff --git a/lib_pypy/cffi.egg-info/PKG-INFO b/lib_pypy/cffi.egg-info/PKG-INFO --- a/lib_pypy/cffi.egg-info/PKG-INFO +++ b/lib_pypy/cffi.egg-info/PKG-INFO @@ -1,6 +1,6 @@ Metadata-Version: 1.1 Name: cffi -Version: 1.4.2 +Version: 1.5.1 Summary: Foreign Function Interface for Python calling C code. Home-page: http://cffi.readthedocs.org Author: Armin Rigo, Maciej Fijalkowski diff --git a/lib_pypy/cffi/__init__.py b/lib_pypy/cffi/__init__.py --- a/lib_pypy/cffi/__init__.py +++ b/lib_pypy/cffi/__init__.py @@ -4,8 +4,8 @@ from .api import FFI, CDefError, FFIError from .ffiplatform import VerificationError, VerificationMissing -__version__ = "1.4.2" -__version_info__ = (1, 4, 2) +__version__ = "1.5.1" +__version_info__ = (1, 5, 1) # The verifier module file names are based on the CRC32 of a string that # contains the following version number. 
It may be older than __version__ diff --git a/lib_pypy/cffi/_cffi_include.h b/lib_pypy/cffi/_cffi_include.h --- a/lib_pypy/cffi/_cffi_include.h +++ b/lib_pypy/cffi/_cffi_include.h @@ -146,8 +146,9 @@ ((Py_ssize_t(*)(CTypeDescrObject *, PyObject *, char **))_cffi_exports[23]) #define _cffi_convert_array_from_object \ ((int(*)(char *, CTypeDescrObject *, PyObject *))_cffi_exports[24]) +#define _CFFI_CPIDX 25 #define _cffi_call_python \ - ((void(*)(struct _cffi_externpy_s *, char *))_cffi_exports[25]) + ((void(*)(struct _cffi_externpy_s *, char *))_cffi_exports[_CFFI_CPIDX]) #define _CFFI_NUM_EXPORTS 26 typedef struct _ctypedescr CTypeDescrObject; @@ -206,7 +207,8 @@ /********** end CPython-specific section **********/ #else _CFFI_UNUSED_FN -static void (*_cffi_call_python)(struct _cffi_externpy_s *, char *); +static void (*_cffi_call_python_org)(struct _cffi_externpy_s *, char *); +# define _cffi_call_python _cffi_call_python_org #endif @@ -229,6 +231,12 @@ ((got_nonpos) == (expected <= 0) && \ (got) == (unsigned long long)expected) +#ifdef MS_WIN32 +# define _cffi_stdcall __stdcall +#else +# define _cffi_stdcall /* nothing */ +#endif + #ifdef __cplusplus } #endif diff --git a/lib_pypy/cffi/_embedding.h b/lib_pypy/cffi/_embedding.h new file mode 100644 --- /dev/null +++ b/lib_pypy/cffi/_embedding.h @@ -0,0 +1,517 @@ + +/***** Support code for embedding *****/ + +#if defined(_MSC_VER) +# define CFFI_DLLEXPORT __declspec(dllexport) +#elif defined(__GNUC__) +# define CFFI_DLLEXPORT __attribute__((visibility("default"))) +#else +# define CFFI_DLLEXPORT /* nothing */ +#endif + + +/* There are two global variables of type _cffi_call_python_fnptr: + + * _cffi_call_python, which we declare just below, is the one called + by ``extern "Python"`` implementations. + + * _cffi_call_python_org, which on CPython is actually part of the + _cffi_exports[] array, is the function pointer copied from + _cffi_backend. + + After initialization is complete, both are equal. 
However, the + first one remains equal to &_cffi_start_and_call_python until the + very end of initialization, when we are (or should be) sure that + concurrent threads also see a completely initialized world, and + only then is it changed. +*/ +#undef _cffi_call_python +typedef void (*_cffi_call_python_fnptr)(struct _cffi_externpy_s *, char *); +static void _cffi_start_and_call_python(struct _cffi_externpy_s *, char *); +static _cffi_call_python_fnptr _cffi_call_python = &_cffi_start_and_call_python; + + +#ifndef _MSC_VER + /* --- Assuming a GCC not infinitely old --- */ +# define cffi_compare_and_swap(l,o,n) __sync_bool_compare_and_swap(l,o,n) +# define cffi_write_barrier() __sync_synchronize() +# if !defined(__amd64__) && !defined(__x86_64__) && \ + !defined(__i386__) && !defined(__i386) +# define cffi_read_barrier() __sync_synchronize() +# else +# define cffi_read_barrier() (void)0 +# endif +#else + /* --- Windows threads version --- */ +# include +# define cffi_compare_and_swap(l,o,n) \ + (InterlockedCompareExchangePointer(l,n,o) == (o)) +# define cffi_write_barrier() InterlockedCompareExchange(&_cffi_dummy,0,0) +# define cffi_read_barrier() (void)0 +static volatile LONG _cffi_dummy; +#endif + +#ifdef WITH_THREAD +# ifndef _MSC_VER +# include + static pthread_mutex_t _cffi_embed_startup_lock; +# else + static CRITICAL_SECTION _cffi_embed_startup_lock; +# endif + static char _cffi_embed_startup_lock_ready = 0; +#endif + +static void _cffi_acquire_reentrant_mutex(void) +{ + static void *volatile lock = NULL; + + while (!cffi_compare_and_swap(&lock, NULL, (void *)1)) { + /* should ideally do a spin loop instruction here, but + hard to do it portably and doesn't really matter I + think: pthread_mutex_init() should be very fast, and + this is only run at start-up anyway. 
*/ + } + +#ifdef WITH_THREAD + if (!_cffi_embed_startup_lock_ready) { +# ifndef _MSC_VER + pthread_mutexattr_t attr; + pthread_mutexattr_init(&attr); + pthread_mutexattr_settype(&attr, PTHREAD_MUTEX_RECURSIVE); + pthread_mutex_init(&_cffi_embed_startup_lock, &attr); +# else + InitializeCriticalSection(&_cffi_embed_startup_lock); +# endif + _cffi_embed_startup_lock_ready = 1; + } +#endif + + while (!cffi_compare_and_swap(&lock, (void *)1, NULL)) + ; + +#ifndef _MSC_VER + pthread_mutex_lock(&_cffi_embed_startup_lock); +#else + EnterCriticalSection(&_cffi_embed_startup_lock); +#endif +} + +static void _cffi_release_reentrant_mutex(void) +{ +#ifndef _MSC_VER + pthread_mutex_unlock(&_cffi_embed_startup_lock); +#else + LeaveCriticalSection(&_cffi_embed_startup_lock); +#endif +} + + +/********** CPython-specific section **********/ +#ifndef PYPY_VERSION + + +#define _cffi_call_python_org _cffi_exports[_CFFI_CPIDX] + +PyMODINIT_FUNC _CFFI_PYTHON_STARTUP_FUNC(void); /* forward */ + +static void _cffi_py_initialize(void) +{ + /* XXX use initsigs=0, which "skips initialization registration of + signal handlers, which might be useful when Python is + embedded" according to the Python docs. But review and think + if it should be a user-controllable setting. + + XXX we should also give a way to write errors to a buffer + instead of to stderr. + + XXX if importing 'site' fails, CPython (any version) calls + exit(). Should we try to work around this behavior here? + */ + Py_InitializeEx(0); +} + +static int _cffi_initialize_python(void) +{ + /* This initializes Python, imports _cffi_backend, and then the + present .dll/.so is set up as a CPython C extension module. + */ + int result; + PyGILState_STATE state; + PyObject *pycode=NULL, *global_dict=NULL, *x; + +#if PY_MAJOR_VERSION >= 3 + /* see comments in _cffi_carefully_make_gil() about the + Python2/Python3 difference + */ +#else + /* Acquire the GIL. We have no threadstate here. 
If Python is + already initialized, it is possible that there is already one + existing for this thread, but it is not made current now. + */ + PyEval_AcquireLock(); + + _cffi_py_initialize(); + + /* The Py_InitializeEx() sometimes made a threadstate for us, but + not always. Indeed Py_InitializeEx() could be called and do + nothing. So do we have a threadstate, or not? We don't know, + but we can replace it with NULL in all cases. + */ + (void)PyThreadState_Swap(NULL); + + /* Now we can release the GIL and re-acquire immediately using the + logic of PyGILState(), which handles making or installing the + correct threadstate. + */ + PyEval_ReleaseLock(); +#endif + state = PyGILState_Ensure(); + + /* Call the initxxx() function from the present module. It will + create and initialize us as a CPython extension module, instead + of letting the startup Python code do it---it might reimport + the same .dll/.so and get maybe confused on some platforms. + It might also have troubles locating the .dll/.so again for all + I know. + */ + (void)_CFFI_PYTHON_STARTUP_FUNC(); + if (PyErr_Occurred()) + goto error; + + /* Now run the Python code provided to ffi.embedding_init_code(). + */ + pycode = Py_CompileString(_CFFI_PYTHON_STARTUP_CODE, + "", + Py_file_input); + if (pycode == NULL) + goto error; + global_dict = PyDict_New(); + if (global_dict == NULL) + goto error; + if (PyDict_SetItemString(global_dict, "__builtins__", + PyThreadState_GET()->interp->builtins) < 0) + goto error; + x = PyEval_EvalCode( +#if PY_MAJOR_VERSION < 3 + (PyCodeObject *) +#endif + pycode, global_dict, global_dict); + if (x == NULL) + goto error; + Py_DECREF(x); + + /* Done! Now if we've been called from + _cffi_start_and_call_python() in an ``extern "Python"``, we can + only hope that the Python code did correctly set up the + corresponding @ffi.def_extern() function. 
Otherwise, the + general logic of ``extern "Python"`` functions (inside the + _cffi_backend module) will find that the reference is still + missing and print an error. + */ + result = 0; + done: + Py_XDECREF(pycode); + Py_XDECREF(global_dict); + PyGILState_Release(state); + return result; + + error:; + { + /* Print as much information as potentially useful. + Debugging load-time failures with embedding is not fun + */ + PyObject *exception, *v, *tb, *f, *modules, *mod; + PyErr_Fetch(&exception, &v, &tb); + if (exception != NULL) { + PyErr_NormalizeException(&exception, &v, &tb); + PyErr_Display(exception, v, tb); + } + Py_XDECREF(exception); + Py_XDECREF(v); + Py_XDECREF(tb); + + f = PySys_GetObject((char *)"stderr"); + if (f != NULL && f != Py_None) { + PyFile_WriteString("\nFrom: " _CFFI_MODULE_NAME + "\ncompiled with cffi version: 1.5.1" + "\n_cffi_backend module: ", f); + modules = PyImport_GetModuleDict(); + mod = PyDict_GetItemString(modules, "_cffi_backend"); + if (mod == NULL) { + PyFile_WriteString("not loaded", f); + } + else { + v = PyObject_GetAttrString(mod, "__file__"); + PyFile_WriteObject(v, f, 0); + Py_XDECREF(v); + } + PyFile_WriteString("\nsys.path: ", f); + PyFile_WriteObject(PySys_GetObject((char *)"path"), f, 0); + PyFile_WriteString("\n\n", f); + } + } + result = -1; + goto done; +} + +PyAPI_DATA(char *) _PyParser_TokenNames[]; /* from CPython */ + +static int _cffi_carefully_make_gil(void) +{ + /* This does the basic initialization of Python. It can be called + completely concurrently from unrelated threads. It assumes + that we don't hold the GIL before (if it exists), and we don't + hold it afterwards. + + What it really does is completely different in Python 2 and + Python 3. + + Python 2 + ======== + + Initialize the GIL, without initializing the rest of Python, + by calling PyEval_InitThreads(). + + PyEval_InitThreads() must not be called concurrently at all. + So we use a global variable as a simple spin lock. 
This global + variable must be from 'libpythonX.Y.so', not from this + cffi-based extension module, because it must be shared from + different cffi-based extension modules. We choose + _PyParser_TokenNames[0] as a completely arbitrary pointer value + that is never written to. The default is to point to the + string "ENDMARKER". We change it temporarily to point to the + next character in that string. (Yes, I know it's REALLY + obscure.) + + Python 3 + ======== + + In Python 3, PyEval_InitThreads() cannot be called before + Py_InitializeEx() any more. So this function calls + Py_InitializeEx() first. It uses the same obscure logic to + make sure we never call it concurrently. + + Arguably, this is less good on the spinlock, because + Py_InitializeEx() takes much longer to run than + PyEval_InitThreads(). But I didn't find a way around it. + */ + +#ifdef WITH_THREAD + char *volatile *lock = (char *volatile *)_PyParser_TokenNames; + char *old_value; + + while (1) { /* spin loop */ + old_value = *lock; + if (old_value[0] == 'E') { + assert(old_value[1] == 'N'); + if (cffi_compare_and_swap(lock, old_value, old_value + 1)) + break; + } + else { + assert(old_value[0] == 'N'); + /* should ideally do a spin loop instruction here, but + hard to do it portably and doesn't really matter I + think: PyEval_InitThreads() should be very fast, and + this is only run at start-up anyway. 
*/ + } + } +#endif + +#if PY_MAJOR_VERSION >= 3 + /* Python 3: call Py_InitializeEx() */ + { + PyGILState_STATE state = PyGILState_UNLOCKED; + if (!Py_IsInitialized()) + _cffi_py_initialize(); + else + state = PyGILState_Ensure(); + + PyEval_InitThreads(); + PyGILState_Release(state); + } +#else + /* Python 2: call PyEval_InitThreads() */ +# ifdef WITH_THREAD + if (!PyEval_ThreadsInitialized()) { + PyEval_InitThreads(); /* makes the GIL */ + PyEval_ReleaseLock(); /* then release it */ + } + /* else: there is already a GIL, but we still needed to do the + spinlock dance to make sure that we see it as fully ready */ +# endif +#endif + +#ifdef WITH_THREAD + /* release the lock */ + while (!cffi_compare_and_swap(lock, old_value + 1, old_value)) + ; +#endif + + return 0; +} + +/********** end CPython-specific section **********/ + + +#else + + +/********** PyPy-specific section **********/ + +PyMODINIT_FUNC _CFFI_PYTHON_STARTUP_FUNC(const void *[]); /* forward */ + +static struct _cffi_pypy_init_s { + const char *name; + void (*func)(const void *[]); + const char *code; +} _cffi_pypy_init = { + _CFFI_MODULE_NAME, + _CFFI_PYTHON_STARTUP_FUNC, + _CFFI_PYTHON_STARTUP_CODE, +}; + +extern int pypy_carefully_make_gil(const char *); +extern int pypy_init_embedded_cffi_module(int, struct _cffi_pypy_init_s *); + +static int _cffi_carefully_make_gil(void) +{ + return pypy_carefully_make_gil(_CFFI_MODULE_NAME); +} + +static int _cffi_initialize_python(void) +{ + return pypy_init_embedded_cffi_module(0xB011, &_cffi_pypy_init); +} + +/********** end PyPy-specific section **********/ + + +#endif + + +#ifdef __GNUC__ +__attribute__((noinline)) +#endif +static _cffi_call_python_fnptr _cffi_start_python(void) +{ + /* Delicate logic to initialize Python. This function can be + called multiple times concurrently, e.g. when the process calls + its first ``extern "Python"`` functions in multiple threads at + once. It can also be called recursively, in which case we must + ignore it. 
We also have to consider what occurs if several + different cffi-based extensions reach this code in parallel + threads---it is a different copy of the code, then, and we + can't have any shared global variable unless it comes from + 'libpythonX.Y.so'. + + Idea: + + * _cffi_carefully_make_gil(): "carefully" call + PyEval_InitThreads() (possibly with Py_InitializeEx() first). + + * then we use a (local) custom lock to make sure that a call to this + cffi-based extension will wait if another call to the *same* + extension is running the initialization in another thread. + It is reentrant, so that a recursive call will not block, but + only one from a different thread. + + * then we grab the GIL and (Python 2) we call Py_InitializeEx(). + At this point, concurrent calls to Py_InitializeEx() are not + possible: we have the GIL. + + * do the rest of the specific initialization, which may + temporarily release the GIL but not the custom lock. + Only release the custom lock when we are done. + */ + static char called = 0; + + if (_cffi_carefully_make_gil() != 0) + return NULL; + + _cffi_acquire_reentrant_mutex(); + + /* Here the GIL exists, but we don't have it. We're only protected + from concurrency by the reentrant mutex. */ + + /* This file only initializes the embedded module once, the first + time this is called, even if there are subinterpreters. */ + if (!called) { + called = 1; /* invoke _cffi_initialize_python() only once, + but don't set '_cffi_call_python' right now, + otherwise concurrent threads won't call + this function at all (we need them to wait) */ + if (_cffi_initialize_python() == 0) { + /* now initialization is finished. Switch to the fast-path. */ + + /* We would like nobody to see the new value of + '_cffi_call_python' without also seeing the rest of the + data initialized. However, this is not possible. But + the new value of '_cffi_call_python' is the function + 'cffi_call_python()' from _cffi_backend. 
So: */ + cffi_write_barrier(); + /* ^^^ we put a write barrier here, and a corresponding + read barrier at the start of cffi_call_python(). This + ensures that after that read barrier, we see everything + done here before the write barrier. + */ + + assert(_cffi_call_python_org != NULL); + _cffi_call_python = (_cffi_call_python_fnptr)_cffi_call_python_org; + } + else { + /* initialization failed. Reset this to NULL, even if it was + already set to some other value. Future calls to + _cffi_start_python() are still forced to occur, and will + always return NULL from now on. */ + _cffi_call_python_org = NULL; + } + } + + _cffi_release_reentrant_mutex(); + + return (_cffi_call_python_fnptr)_cffi_call_python_org; +} + +static +void _cffi_start_and_call_python(struct _cffi_externpy_s *externpy, char *args) +{ + _cffi_call_python_fnptr fnptr; + int current_err = errno; +#ifdef _MSC_VER + int current_lasterr = GetLastError(); +#endif + fnptr = _cffi_start_python(); + if (fnptr == NULL) { + fprintf(stderr, "function %s() called, but initialization code " + "failed. Returning 0.\n", externpy->name); + memset(args, 0, externpy->size_of_result); + } +#ifdef _MSC_VER + SetLastError(current_lasterr); +#endif + errno = current_err; + + if (fnptr != NULL) + fnptr(externpy, args); +} + + +/* The cffi_start_python() function makes sure Python is initialized + and our cffi module is set up. It can be called manually from the + user C code. The same effect is obtained automatically from any + dll-exported ``extern "Python"`` function. This function returns + -1 if initialization failed, 0 if all is OK. 
*/ +_CFFI_UNUSED_FN +static int cffi_start_python(void) +{ + if (_cffi_call_python == &_cffi_start_and_call_python) { + if (_cffi_start_python() == NULL) + return -1; + } + cffi_read_barrier(); + return 0; +} + +#undef cffi_compare_and_swap +#undef cffi_write_barrier +#undef cffi_read_barrier diff --git a/lib_pypy/cffi/api.py b/lib_pypy/cffi/api.py --- a/lib_pypy/cffi/api.py +++ b/lib_pypy/cffi/api.py @@ -1,4 +1,4 @@ -import sys, types +import sys, sysconfig, types from .lock import allocate_lock try: @@ -74,6 +74,7 @@ self._windows_unicode = None self._init_once_cache = {} self._cdef_version = None + self._embedding = None if hasattr(backend, 'set_ffi'): backend.set_ffi(self) for name in backend.__dict__: @@ -101,13 +102,21 @@ If 'packed' is specified as True, all structs declared inside this cdef are packed, i.e. laid out without any field alignment at all. """ + self._cdef(csource, override=override, packed=packed) + + def embedding_api(self, csource, packed=False): + self._cdef(csource, packed=packed, dllexport=True) + if self._embedding is None: + self._embedding = '' + + def _cdef(self, csource, override=False, **options): if not isinstance(csource, str): # unicode, on Python 2 if not isinstance(csource, basestring): raise TypeError("cdef() argument must be a string") csource = csource.encode('ascii') with self._lock: self._cdef_version = object() - self._parser.parse(csource, override=override, packed=packed) + self._parser.parse(csource, override=override, **options) self._cdefsources.append(csource) if override: for cache in self._function_caches: @@ -533,6 +542,35 @@ ('_UNICODE', '1')] kwds['define_macros'] = defmacros + def _apply_embedding_fix(self, kwds): + # must include an argument like "-lpython2.7" for the compiler + def ensure(key, value): + lst = kwds.setdefault(key, []) + if value not in lst: + lst.append(value) + # + if '__pypy__' in sys.builtin_module_names: + if hasattr(sys, 'prefix'): + import os + ensure('library_dirs', 
os.path.join(sys.prefix, 'bin')) + pythonlib = "pypy-c" + else: + if sys.platform == "win32": + template = "python%d%d" + if hasattr(sys, 'gettotalrefcount'): + template += '_d' + else: + template = "python%d.%d" + if sysconfig.get_config_var('DEBUG_EXT'): + template += sysconfig.get_config_var('DEBUG_EXT') + pythonlib = (template % + (sys.hexversion >> 24, (sys.hexversion >> 16) & 0xff)) + if hasattr(sys, 'abiflags'): + pythonlib += sys.abiflags + ensure('libraries', pythonlib) + if sys.platform == "win32": + ensure('extra_link_args', '/MANIFEST') + def set_source(self, module_name, source, source_extension='.c', **kwds): if hasattr(self, '_assigned_source'): raise ValueError("set_source() cannot be called several times " @@ -592,14 +630,23 @@ recompile(self, module_name, source, c_file=filename, call_c_compiler=False, **kwds) - def compile(self, tmpdir='.', verbose=0): + def compile(self, tmpdir='.', verbose=0, target=None): + """The 'target' argument gives the final file name of the + compiled DLL. Use '*' to force distutils' choice, suitable for + regular CPython C API modules. Use a file name ending in '.*' + to ask for the system's default extension for dynamic libraries + (.so/.dll/.dylib). + + The default is '*' when building a non-embedded C API extension, + and (module_name + '.*') when building an embedded library. 
+ """ from .recompiler import recompile # if not hasattr(self, '_assigned_source'): raise ValueError("set_source() must be called before compile()") module_name, source, source_extension, kwds = self._assigned_source return recompile(self, module_name, source, tmpdir=tmpdir, - source_extension=source_extension, + target=target, source_extension=source_extension, compiler_verbose=verbose, **kwds) def init_once(self, func, tag): @@ -626,6 +673,36 @@ self._init_once_cache[tag] = (True, result) return result + def embedding_init_code(self, pysource): + if self._embedding: + raise ValueError("embedding_init_code() can only be called once") + # fix 'pysource' before it gets dumped into the C file: + # - remove empty lines at the beginning, so it starts at "line 1" + # - dedent, if all non-empty lines are indented + # - check for SyntaxErrors + import re + match = re.match(r'\s*\n', pysource) + if match: + pysource = pysource[match.end():] + lines = pysource.splitlines() or [''] + prefix = re.match(r'\s*', lines[0]).group() + for i in range(1, len(lines)): + line = lines[i] + if line.rstrip(): + while not line.startswith(prefix): + prefix = prefix[:-1] + i = len(prefix) + lines = [line[i:]+'\n' for line in lines] + pysource = ''.join(lines) + # + compile(pysource, "cffi_init", "exec") + # + self._embedding = pysource + + def def_extern(self, *args, **kwds): + raise ValueError("ffi.def_extern() is only available on API-mode FFI " + "objects") + def _load_backend_lib(backend, name, flags): if name is None: diff --git a/lib_pypy/cffi/cparser.py b/lib_pypy/cffi/cparser.py --- a/lib_pypy/cffi/cparser.py +++ b/lib_pypy/cffi/cparser.py @@ -220,8 +220,7 @@ self._included_declarations = set() self._anonymous_counter = 0 self._structnode2type = weakref.WeakKeyDictionary() - self._override = False - self._packed = False + self._options = {} self._int_constants = {} self._recomplete = [] self._uses_new_feature = None @@ -281,16 +280,15 @@ msg = 'parse error\n%s' % (msg,) raise 
api.CDefError(msg) - def parse(self, csource, override=False, packed=False): - prev_override = self._override - prev_packed = self._packed + def parse(self, csource, override=False, packed=False, dllexport=False): + prev_options = self._options try: - self._override = override - self._packed = packed + self._options = {'override': override, + 'packed': packed, + 'dllexport': dllexport} self._internal_parse(csource) finally: - self._override = prev_override - self._packed = prev_packed + self._options = prev_options def _internal_parse(self, csource): ast, macros, csource = self._parse(csource) @@ -376,10 +374,13 @@ def _declare_function(self, tp, quals, decl): tp = self._get_type_pointer(tp, quals) - if self._inside_extern_python: - self._declare('extern_python ' + decl.name, tp) + if self._options.get('dllexport'): + tag = 'dllexport_python ' + elif self._inside_extern_python: + tag = 'extern_python ' else: - self._declare('function ' + decl.name, tp) + tag = 'function ' + self._declare(tag + decl.name, tp) def _parse_decl(self, decl): node = decl.type @@ -449,7 +450,7 @@ prevobj, prevquals = self._declarations[name] if prevobj is obj and prevquals == quals: return - if not self._override: + if not self._options.get('override'): raise api.FFIError( "multiple declarations of %s (for interactive usage, " "try cdef(xx, override=True))" % (name,)) @@ -728,7 +729,7 @@ if isinstance(tp, model.StructType) and tp.partial: raise NotImplementedError("%s: using both bitfields and '...;'" % (tp,)) - tp.packed = self._packed + tp.packed = self._options.get('packed') if tp.completed: # must be re-completed: it is not opaque any more tp.completed = 0 self._recomplete.append(tp) diff --git a/lib_pypy/cffi/ffiplatform.py b/lib_pypy/cffi/ffiplatform.py --- a/lib_pypy/cffi/ffiplatform.py +++ b/lib_pypy/cffi/ffiplatform.py @@ -53,14 +53,14 @@ try: distutils.log.set_verbosity(compiler_verbose) dist.run_command('build_ext') + cmd_obj = dist.get_command_obj('build_ext') + [soname] = 
cmd_obj.get_outputs() finally: distutils.log.set_threshold(old_level) except (distutils.errors.CompileError, distutils.errors.LinkError) as e: raise VerificationError('%s: %s' % (e.__class__.__name__, e)) # - cmd_obj = dist.get_command_obj('build_ext') - [soname] = cmd_obj.get_outputs() return soname try: diff --git a/lib_pypy/cffi/recompiler.py b/lib_pypy/cffi/recompiler.py --- a/lib_pypy/cffi/recompiler.py +++ b/lib_pypy/cffi/recompiler.py @@ -3,6 +3,7 @@ from .cffi_opcode import * VERSION = "0x2601" +VERSION_EMBEDDED = "0x2701" class GlobalExpr: @@ -281,6 +282,29 @@ lines[i:i+1] = self._rel_readlines('parse_c_type.h') prnt(''.join(lines)) # + # if we have ffi._embedding != None, we give it here as a macro + # and include an extra file + base_module_name = self.module_name.split('.')[-1] + if self.ffi._embedding is not None: + prnt('#define _CFFI_MODULE_NAME "%s"' % (self.module_name,)) + prnt('#define _CFFI_PYTHON_STARTUP_CODE %s' % + (self._string_literal(self.ffi._embedding),)) + prnt('#ifdef PYPY_VERSION') + prnt('# define _CFFI_PYTHON_STARTUP_FUNC _cffi_pypyinit_%s' % ( + base_module_name,)) + prnt('#elif PY_MAJOR_VERSION >= 3') + prnt('# define _CFFI_PYTHON_STARTUP_FUNC PyInit_%s' % ( + base_module_name,)) + prnt('#else') From pypy.commits at gmail.com Fri Feb 12 21:30:58 2016 From: pypy.commits at gmail.com (mjacob) Date: Fri, 12 Feb 2016 18:30:58 -0800 (PST) Subject: [pypy-commit] pypy py3.3: Merged in marky1991/pypy_new/py3.3 (pull request #396) Message-ID: <56be9562.6217c20a.bbcb1.460e@mx.google.com> Author: Manuel Jacob Branch: py3.3 Changeset: r82200:32dbf468ae33 Date: 2016-02-13 03:30 +0100 http://bitbucket.org/pypy/pypy/changeset/32dbf468ae33/ Log: Merged in marky1991/pypy_new/py3.3 (pull request #396) Fix test_reimport_builtin in py3.3 diff --git a/pypy/interpreter/mixedmodule.py b/pypy/interpreter/mixedmodule.py --- a/pypy/interpreter/mixedmodule.py +++ b/pypy/interpreter/mixedmodule.py @@ -58,8 +58,23 @@ 
self.save_module_content_for_future_reload() def save_module_content_for_future_reload(self): - self.w_initialdict = self.space.call_method(self.w_dict, 'copy') - + # Because setdictvalue is unable to immediately load all attributes + # (due to an importlib bootstrapping problem), this method needs to be + # able to support saving the content of a module's dict without + # requiring that the entire dict already be loaded. To support that + # properly, when updating the dict, we must be careful to never + # overwrite the value of a key already in w_initialdict. (So as to avoid + # overriding the builtin value with a user-provided value) + if not self.space.is_none(self.w_initialdict): + new_items = self.w_dict.iteritems() + while True: + w_key, w_value = new_items.next_item() + if w_key is None: + break + if not self.space.is_true(self.space.contains(self.w_initialdict, w_key)): + self.space.setitem(self.w_initialdict, w_key, w_value) + else: + self.w_initialdict = self.space.call_method(self.w_dict, 'copy') def get_applevel_name(cls): """ NOT_RPYTHON """ @@ -90,6 +105,7 @@ def setdictvalue(self, space, attr, w_value): if self.lazy: self._load_lazily(space, attr) + self.save_module_content_for_future_reload() space.setitem_str(self.w_dict, attr, w_value) return True diff --git a/pypy/module/imp/interp_imp.py b/pypy/module/imp/interp_imp.py --- a/pypy/module/imp/interp_imp.py +++ b/pypy/module/imp/interp_imp.py @@ -86,7 +86,7 @@ return # force_init is needed to make reload actually reload instead of just # using the already-present module in sys.modules. - return space.getbuiltinmodule(name, force_init=True) + return space.getbuiltinmodule(name, force_init=True, reuse=False) def init_frozen(space, w_name): return None From pypy.commits at gmail.com Fri Feb 12 21:31:04 2016 From: pypy.commits at gmail.com (marky1991) Date: Fri, 12 Feb 2016 18:31:04 -0800 (PST) Subject: [pypy-commit] pypy py3.3: Fix test_reimport_builtin again. 
Message-ID: <56be9568.84c9c20a.db8ee.42d7@mx.google.com> Author: Mark Young Branch: py3.3 Changeset: r82196:38a9de4803aa Date: 2016-02-11 23:48 -0500 http://bitbucket.org/pypy/pypy/changeset/38a9de4803aa/ Log: Fix test_reimport_builtin again. diff --git a/pypy/interpreter/mixedmodule.py b/pypy/interpreter/mixedmodule.py --- a/pypy/interpreter/mixedmodule.py +++ b/pypy/interpreter/mixedmodule.py @@ -58,8 +58,17 @@ self.save_module_content_for_future_reload() def save_module_content_for_future_reload(self): - self.w_initialdict = self.space.call_method(self.w_dict, 'copy') - + if not self.space.is_none(self.w_initialdict): + present_keys = self.w_initialdict.w_keys() + new_keys = self.w_dict.w_keys() + key_count = new_keys.length() + for i in range(key_count): + key = new_keys.getitem(i) + val_to_set = self.space.getitem(self.w_dict, key) + if not self.space.is_true(self.space.contains(present_keys, key)): + self.space.setitem(self.w_initialdict, key, val_to_set) + else: + self.w_initialdict = self.space.call_method(self.w_dict, 'copy') def get_applevel_name(cls): """ NOT_RPYTHON """ @@ -90,6 +99,7 @@ def setdictvalue(self, space, attr, w_value): if self.lazy: self._load_lazily(space, attr) + self.save_module_content_for_future_reload() space.setitem_str(self.w_dict, attr, w_value) return True diff --git a/pypy/module/imp/interp_imp.py b/pypy/module/imp/interp_imp.py --- a/pypy/module/imp/interp_imp.py +++ b/pypy/module/imp/interp_imp.py @@ -86,7 +86,7 @@ return # force_init is needed to make reload actually reload instead of just # using the already-present module in sys.modules. 
- return space.getbuiltinmodule(name, force_init=True) + return space.getbuiltinmodule(name, force_init=True, reuse=False) def init_frozen(space, w_name): return None From pypy.commits at gmail.com Fri Feb 12 21:31:05 2016 From: pypy.commits at gmail.com (marky1991) Date: Fri, 12 Feb 2016 18:31:05 -0800 (PST) Subject: [pypy-commit] pypy py3.3: Make my changes respect proper rpython convention. Also, change to a items() based approach for save_module_content_for_reload. Message-ID: <56be9569.418f1c0a.ff4be.ffffb111@mx.google.com> Author: Mark Young Branch: py3.3 Changeset: r82197:0832f3abe320 Date: 2016-02-12 20:51 -0500 http://bitbucket.org/pypy/pypy/changeset/0832f3abe320/ Log: Make my changes respect proper rpython convention. Also, change to a items() based approach for save_module_content_for_reload. diff --git a/pypy/interpreter/mixedmodule.py b/pypy/interpreter/mixedmodule.py --- a/pypy/interpreter/mixedmodule.py +++ b/pypy/interpreter/mixedmodule.py @@ -58,15 +58,20 @@ self.save_module_content_for_future_reload() def save_module_content_for_future_reload(self): + # Because setdictvalue is unable to immediately load all attributes + # (due to an importlib bootstrapping problem), this method needs to be + # able to able to save single names at a time. The idea now is to save + # in pieces, being careful to never overwrite the value of a key + # already in w_initialdict. 
if not self.space.is_none(self.w_initialdict): - present_keys = self.w_initialdict.w_keys() - new_keys = self.w_dict.w_keys() - key_count = new_keys.length() - for i in range(key_count): - key = new_keys.getitem(i) - val_to_set = self.space.getitem(self.w_dict, key) - if not self.space.is_true(self.space.contains(present_keys, key)): - self.space.setitem(self.w_initialdict, key, val_to_set) + w_present_keys = self.w_initialdict.w_keys() + w_new_items = self.w_dict.iteritems() + while True: + w_key, w_value = w_new_items.next_item() + if w_key is None: + break + if not self.space.is_true(self.space.contains(w_present_keys, w_key)): + self.space.setitem(self.w_initialdict, w_key, w_value) else: self.w_initialdict = self.space.call_method(self.w_dict, 'copy') From pypy.commits at gmail.com Fri Feb 12 21:31:07 2016 From: pypy.commits at gmail.com (marky1991) Date: Fri, 12 Feb 2016 18:31:07 -0800 (PST) Subject: [pypy-commit] pypy py3.3: Incorrectly marked an interpreter-level object as wrapped. Also, attempt to improve the comment for save_module_content_for_reload. Message-ID: <56be956b.520e1c0a.f7659.ffffb4e7@mx.google.com> Author: Mark Young Branch: py3.3 Changeset: r82198:cb8d0d322095 Date: 2016-02-12 21:05 -0500 http://bitbucket.org/pypy/pypy/changeset/cb8d0d322095/ Log: Incorrectly marked an interpreter-level object as wrapped. Also, attempt to improve the comment for save_module_content_for_reload. diff --git a/pypy/interpreter/mixedmodule.py b/pypy/interpreter/mixedmodule.py --- a/pypy/interpreter/mixedmodule.py +++ b/pypy/interpreter/mixedmodule.py @@ -60,14 +60,16 @@ def save_module_content_for_future_reload(self): # Because setdictvalue is unable to immediately load all attributes # (due to an importlib bootstrapping problem), this method needs to be - # able to able to save single names at a time. The idea now is to save - # in pieces, being careful to never overwrite the value of a key - # already in w_initialdict. 
+ # able to support saving the content of a module's dict without + # requiring that the entire dict already be loaded. To support that + # properly, when updating the dict, we must be careful to never + # overwrite the value of a key already in w_initialdict. (So as to avoid + # overriding the builtin value with a user-provided value) if not self.space.is_none(self.w_initialdict): w_present_keys = self.w_initialdict.w_keys() - w_new_items = self.w_dict.iteritems() + new_items = self.w_dict.iteritems() while True: - w_key, w_value = w_new_items.next_item() + w_key, w_value = new_items.next_item() if w_key is None: break if not self.space.is_true(self.space.contains(w_present_keys, w_key)): From pypy.commits at gmail.com Fri Feb 12 21:31:09 2016 From: pypy.commits at gmail.com (marky1991) Date: Fri, 12 Feb 2016 18:31:09 -0800 (PST) Subject: [pypy-commit] pypy py3.3: Don't do an O(n) membership check on the dict's keys. Just do the O(1) membership check in the dict itself. Message-ID: <56be956d.0c2d1c0a.3495f.ffffb49e@mx.google.com> Author: Mark Young Branch: py3.3 Changeset: r82199:41b2dabb978a Date: 2016-02-12 21:16 -0500 http://bitbucket.org/pypy/pypy/changeset/41b2dabb978a/ Log: Don't do an O(n) membership check on the dict's keys. Just do the O(1) membership check in the dict itself. diff --git a/pypy/interpreter/mixedmodule.py b/pypy/interpreter/mixedmodule.py --- a/pypy/interpreter/mixedmodule.py +++ b/pypy/interpreter/mixedmodule.py @@ -66,13 +66,12 @@ # overwrite the value of a key already in w_initialdict. 
(So as to avoid # overriding the builtin value with a user-provided value) if not self.space.is_none(self.w_initialdict): - w_present_keys = self.w_initialdict.w_keys() new_items = self.w_dict.iteritems() while True: w_key, w_value = new_items.next_item() if w_key is None: break - if not self.space.is_true(self.space.contains(w_present_keys, w_key)): + if not self.space.is_true(self.space.contains(self.w_initialdict, w_key)): self.space.setitem(self.w_initialdict, w_key, w_value) else: self.w_initialdict = self.space.call_method(self.w_dict, 'copy') From pypy.commits at gmail.com Fri Feb 12 22:47:45 2016 From: pypy.commits at gmail.com (mjacob) Date: Fri, 12 Feb 2016 19:47:45 -0800 (PST) Subject: [pypy-commit] pypy py3.3: Tentatively fix translation. Message-ID: <56bea761.c3e01c0a.6c77a.ffffbe10@mx.google.com> Author: Manuel Jacob Branch: py3.3 Changeset: r82201:fce240f22548 Date: 2016-02-13 04:37 +0100 http://bitbucket.org/pypy/pypy/changeset/fce240f22548/ Log: Tentatively fix translation. This is not a proper fix because a MixedModule's dictionary does not have to be a W_DictMultiObject. However, this is good enough for running the build bots. I'll commit a proper fix tomorrow. diff --git a/pypy/interpreter/mixedmodule.py b/pypy/interpreter/mixedmodule.py --- a/pypy/interpreter/mixedmodule.py +++ b/pypy/interpreter/mixedmodule.py @@ -66,7 +66,10 @@ # overwrite the value of a key already in w_initialdict. 
(So as to avoid # overriding the builtin value with a user-provided value) if not self.space.is_none(self.w_initialdict): - new_items = self.w_dict.iteritems() + w_dict = self.w_dict + from pypy.objspace.std.dictmultiobject import W_DictMultiObject + assert isinstance(w_dict, W_DictMultiObject) + new_items = w_dict.iteritems() while True: w_key, w_value = new_items.next_item() if w_key is None: From pypy.commits at gmail.com Fri Feb 12 22:47:47 2016 From: pypy.commits at gmail.com (mjacob) Date: Fri, 12 Feb 2016 19:47:47 -0800 (PST) Subject: [pypy-commit] pypy py3.3: hg merge py3k Message-ID: <56bea763.4e8e1c0a.65f35.ffffc3f3@mx.google.com> Author: Manuel Jacob Branch: py3.3 Changeset: r82202:b7e79d170a32 Date: 2016-02-13 04:38 +0100 http://bitbucket.org/pypy/pypy/changeset/b7e79d170a32/ Log: hg merge py3k diff too long, truncating to 2000 out of 5677 lines diff --git a/dotviewer/drawgraph.py b/dotviewer/drawgraph.py --- a/dotviewer/drawgraph.py +++ b/dotviewer/drawgraph.py @@ -14,12 +14,661 @@ FONT = os.path.join(this_dir, 'font', 'DroidSans.ttf') FIXEDFONT = os.path.join(this_dir, 'font', 'DroidSansMono.ttf') COLOR = { - 'black': (0,0,0), - 'white': (255,255,255), - 'red': (255,0,0), - 'green': (0,255,0), - 'blue': (0,0,255), - 'yellow': (255,255,0), + 'aliceblue': (240, 248, 255), + 'antiquewhite': (250, 235, 215), + 'antiquewhite1': (255, 239, 219), + 'antiquewhite2': (238, 223, 204), + 'antiquewhite3': (205, 192, 176), + 'antiquewhite4': (139, 131, 120), + 'aquamarine': (127, 255, 212), + 'aquamarine1': (127, 255, 212), + 'aquamarine2': (118, 238, 198), + 'aquamarine3': (102, 205, 170), + 'aquamarine4': (69, 139, 116), + 'azure': (240, 255, 255), + 'azure1': (240, 255, 255), + 'azure2': (224, 238, 238), + 'azure3': (193, 205, 205), + 'azure4': (131, 139, 139), + 'beige': (245, 245, 220), + 'bisque': (255, 228, 196), + 'bisque1': (255, 228, 196), + 'bisque2': (238, 213, 183), + 'bisque3': (205, 183, 158), + 'bisque4': (139, 125, 107), + 'black': (0, 0, 0), + 
'blanchedalmond': (255, 235, 205), + 'blue': (0, 0, 255), + 'blue1': (0, 0, 255), + 'blue2': (0, 0, 238), + 'blue3': (0, 0, 205), + 'blue4': (0, 0, 139), + 'blueviolet': (138, 43, 226), + 'brown': (165, 42, 42), + 'brown1': (255, 64, 64), + 'brown2': (238, 59, 59), + 'brown3': (205, 51, 51), + 'brown4': (139, 35, 35), + 'burlywood': (222, 184, 135), + 'burlywood1': (255, 211, 155), + 'burlywood2': (238, 197, 145), + 'burlywood3': (205, 170, 125), + 'burlywood4': (139, 115, 85), + 'cadetblue': (95, 158, 160), + 'cadetblue1': (152, 245, 255), + 'cadetblue2': (142, 229, 238), + 'cadetblue3': (122, 197, 205), + 'cadetblue4': (83, 134, 139), + 'chartreuse': (127, 255, 0), + 'chartreuse1': (127, 255, 0), + 'chartreuse2': (118, 238, 0), + 'chartreuse3': (102, 205, 0), + 'chartreuse4': (69, 139, 0), + 'chocolate': (210, 105, 30), + 'chocolate1': (255, 127, 36), + 'chocolate2': (238, 118, 33), + 'chocolate3': (205, 102, 29), + 'chocolate4': (139, 69, 19), + 'coral': (255, 127, 80), + 'coral1': (255, 114, 86), + 'coral2': (238, 106, 80), + 'coral3': (205, 91, 69), + 'coral4': (139, 62, 47), + 'cornflowerblue': (100, 149, 237), + 'cornsilk': (255, 248, 220), + 'cornsilk1': (255, 248, 220), + 'cornsilk2': (238, 232, 205), + 'cornsilk3': (205, 200, 177), + 'cornsilk4': (139, 136, 120), + 'crimson': (220, 20, 60), + 'cyan': (0, 255, 255), + 'cyan1': (0, 255, 255), + 'cyan2': (0, 238, 238), + 'cyan3': (0, 205, 205), + 'cyan4': (0, 139, 139), + 'darkgoldenrod': (184, 134, 11), + 'darkgoldenrod1': (255, 185, 15), + 'darkgoldenrod2': (238, 173, 14), + 'darkgoldenrod3': (205, 149, 12), + 'darkgoldenrod4': (139, 101, 8), + 'darkgreen': (0, 100, 0), + 'darkkhaki': (189, 183, 107), + 'darkolivegreen': (85, 107, 47), + 'darkolivegreen1': (202, 255, 112), + 'darkolivegreen2': (188, 238, 104), + 'darkolivegreen3': (162, 205, 90), + 'darkolivegreen4': (110, 139, 61), + 'darkorange': (255, 140, 0), + 'darkorange1': (255, 127, 0), + 'darkorange2': (238, 118, 0), + 'darkorange3': (205, 102, 
0), + 'darkorange4': (139, 69, 0), + 'darkorchid': (153, 50, 204), + 'darkorchid1': (191, 62, 255), + 'darkorchid2': (178, 58, 238), + 'darkorchid3': (154, 50, 205), + 'darkorchid4': (104, 34, 139), + 'darksalmon': (233, 150, 122), + 'darkseagreen': (143, 188, 143), + 'darkseagreen1': (193, 255, 193), + 'darkseagreen2': (180, 238, 180), + 'darkseagreen3': (155, 205, 155), + 'darkseagreen4': (105, 139, 105), + 'darkslateblue': (72, 61, 139), + 'darkslategray': (47, 79, 79), + 'darkslategray1': (151, 255, 255), + 'darkslategray2': (141, 238, 238), + 'darkslategray3': (121, 205, 205), + 'darkslategray4': (82, 139, 139), + 'darkslategrey': (47, 79, 79), + 'darkturquoise': (0, 206, 209), + 'darkviolet': (148, 0, 211), + 'deeppink': (255, 20, 147), + 'deeppink1': (255, 20, 147), + 'deeppink2': (238, 18, 137), + 'deeppink3': (205, 16, 118), + 'deeppink4': (139, 10, 80), + 'deepskyblue': (0, 191, 255), + 'deepskyblue1': (0, 191, 255), + 'deepskyblue2': (0, 178, 238), + 'deepskyblue3': (0, 154, 205), + 'deepskyblue4': (0, 104, 139), + 'dimgray': (105, 105, 105), + 'dimgrey': (105, 105, 105), + 'dodgerblue': (30, 144, 255), + 'dodgerblue1': (30, 144, 255), + 'dodgerblue2': (28, 134, 238), + 'dodgerblue3': (24, 116, 205), + 'dodgerblue4': (16, 78, 139), + 'firebrick': (178, 34, 34), + 'firebrick1': (255, 48, 48), + 'firebrick2': (238, 44, 44), + 'firebrick3': (205, 38, 38), + 'firebrick4': (139, 26, 26), + 'floralwhite': (255, 250, 240), + 'forestgreen': (34, 139, 34), + 'gainsboro': (220, 220, 220), + 'ghostwhite': (248, 248, 255), + 'gold': (255, 215, 0), + 'gold1': (255, 215, 0), + 'gold2': (238, 201, 0), + 'gold3': (205, 173, 0), + 'gold4': (139, 117, 0), + 'goldenrod': (218, 165, 32), + 'goldenrod1': (255, 193, 37), + 'goldenrod2': (238, 180, 34), + 'goldenrod3': (205, 155, 29), + 'goldenrod4': (139, 105, 20), + 'gray': (192, 192, 192), + 'gray0': (0, 0, 0), + 'gray1': (3, 3, 3), + 'gray10': (26, 26, 26), + 'gray100': (255, 255, 255), + 'gray11': (28, 28, 28), + 
'gray12': (31, 31, 31), + 'gray13': (33, 33, 33), + 'gray14': (36, 36, 36), + 'gray15': (38, 38, 38), + 'gray16': (41, 41, 41), + 'gray17': (43, 43, 43), + 'gray18': (46, 46, 46), + 'gray19': (48, 48, 48), + 'gray2': (5, 5, 5), + 'gray20': (51, 51, 51), + 'gray21': (54, 54, 54), + 'gray22': (56, 56, 56), + 'gray23': (59, 59, 59), + 'gray24': (61, 61, 61), + 'gray25': (64, 64, 64), + 'gray26': (66, 66, 66), + 'gray27': (69, 69, 69), + 'gray28': (71, 71, 71), + 'gray29': (74, 74, 74), + 'gray3': (8, 8, 8), + 'gray30': (77, 77, 77), + 'gray31': (79, 79, 79), + 'gray32': (82, 82, 82), + 'gray33': (84, 84, 84), + 'gray34': (87, 87, 87), + 'gray35': (89, 89, 89), + 'gray36': (92, 92, 92), + 'gray37': (94, 94, 94), + 'gray38': (97, 97, 97), + 'gray39': (99, 99, 99), + 'gray4': (10, 10, 10), + 'gray40': (102, 102, 102), + 'gray41': (105, 105, 105), + 'gray42': (107, 107, 107), + 'gray43': (110, 110, 110), + 'gray44': (112, 112, 112), + 'gray45': (115, 115, 115), + 'gray46': (117, 117, 117), + 'gray47': (120, 120, 120), + 'gray48': (122, 122, 122), + 'gray49': (125, 125, 125), + 'gray5': (13, 13, 13), + 'gray50': (127, 127, 127), + 'gray51': (130, 130, 130), + 'gray52': (133, 133, 133), + 'gray53': (135, 135, 135), + 'gray54': (138, 138, 138), + 'gray55': (140, 140, 140), + 'gray56': (143, 143, 143), + 'gray57': (145, 145, 145), + 'gray58': (148, 148, 148), + 'gray59': (150, 150, 150), + 'gray6': (15, 15, 15), + 'gray60': (153, 153, 153), + 'gray61': (156, 156, 156), + 'gray62': (158, 158, 158), + 'gray63': (161, 161, 161), + 'gray64': (163, 163, 163), + 'gray65': (166, 166, 166), + 'gray66': (168, 168, 168), + 'gray67': (171, 171, 171), + 'gray68': (173, 173, 173), + 'gray69': (176, 176, 176), + 'gray7': (18, 18, 18), + 'gray70': (179, 179, 179), + 'gray71': (181, 181, 181), + 'gray72': (184, 184, 184), + 'gray73': (186, 186, 186), + 'gray74': (189, 189, 189), + 'gray75': (191, 191, 191), + 'gray76': (194, 194, 194), + 'gray77': (196, 196, 196), + 'gray78': (199, 199, 
199), + 'gray79': (201, 201, 201), + 'gray8': (20, 20, 20), + 'gray80': (204, 204, 204), + 'gray81': (207, 207, 207), + 'gray82': (209, 209, 209), + 'gray83': (212, 212, 212), + 'gray84': (214, 214, 214), + 'gray85': (217, 217, 217), + 'gray86': (219, 219, 219), + 'gray87': (222, 222, 222), + 'gray88': (224, 224, 224), + 'gray89': (227, 227, 227), + 'gray9': (23, 23, 23), + 'gray90': (229, 229, 229), + 'gray91': (232, 232, 232), + 'gray92': (235, 235, 235), + 'gray93': (237, 237, 237), + 'gray94': (240, 240, 240), + 'gray95': (242, 242, 242), + 'gray96': (245, 245, 245), + 'gray97': (247, 247, 247), + 'gray98': (250, 250, 250), + 'gray99': (252, 252, 252), + 'green': (0, 255, 0), + 'green1': (0, 255, 0), + 'green2': (0, 238, 0), + 'green3': (0, 205, 0), + 'green4': (0, 139, 0), + 'greenyellow': (173, 255, 47), + 'grey': (192, 192, 192), + 'grey0': (0, 0, 0), + 'grey1': (3, 3, 3), + 'grey10': (26, 26, 26), + 'grey100': (255, 255, 255), + 'grey11': (28, 28, 28), + 'grey12': (31, 31, 31), + 'grey13': (33, 33, 33), + 'grey14': (36, 36, 36), + 'grey15': (38, 38, 38), + 'grey16': (41, 41, 41), + 'grey17': (43, 43, 43), + 'grey18': (46, 46, 46), + 'grey19': (48, 48, 48), + 'grey2': (5, 5, 5), + 'grey20': (51, 51, 51), + 'grey21': (54, 54, 54), + 'grey22': (56, 56, 56), + 'grey23': (59, 59, 59), + 'grey24': (61, 61, 61), + 'grey25': (64, 64, 64), + 'grey26': (66, 66, 66), + 'grey27': (69, 69, 69), + 'grey28': (71, 71, 71), + 'grey29': (74, 74, 74), + 'grey3': (8, 8, 8), + 'grey30': (77, 77, 77), + 'grey31': (79, 79, 79), + 'grey32': (82, 82, 82), + 'grey33': (84, 84, 84), + 'grey34': (87, 87, 87), + 'grey35': (89, 89, 89), + 'grey36': (92, 92, 92), + 'grey37': (94, 94, 94), + 'grey38': (97, 97, 97), + 'grey39': (99, 99, 99), + 'grey4': (10, 10, 10), + 'grey40': (102, 102, 102), + 'grey41': (105, 105, 105), + 'grey42': (107, 107, 107), + 'grey43': (110, 110, 110), + 'grey44': (112, 112, 112), + 'grey45': (115, 115, 115), + 'grey46': (117, 117, 117), + 'grey47': (120, 120, 
120), + 'grey48': (122, 122, 122), + 'grey49': (125, 125, 125), + 'grey5': (13, 13, 13), + 'grey50': (127, 127, 127), + 'grey51': (130, 130, 130), + 'grey52': (133, 133, 133), + 'grey53': (135, 135, 135), + 'grey54': (138, 138, 138), + 'grey55': (140, 140, 140), + 'grey56': (143, 143, 143), + 'grey57': (145, 145, 145), + 'grey58': (148, 148, 148), + 'grey59': (150, 150, 150), + 'grey6': (15, 15, 15), + 'grey60': (153, 153, 153), + 'grey61': (156, 156, 156), + 'grey62': (158, 158, 158), + 'grey63': (161, 161, 161), + 'grey64': (163, 163, 163), + 'grey65': (166, 166, 166), + 'grey66': (168, 168, 168), + 'grey67': (171, 171, 171), + 'grey68': (173, 173, 173), + 'grey69': (176, 176, 176), + 'grey7': (18, 18, 18), + 'grey70': (179, 179, 179), + 'grey71': (181, 181, 181), + 'grey72': (184, 184, 184), + 'grey73': (186, 186, 186), + 'grey74': (189, 189, 189), + 'grey75': (191, 191, 191), + 'grey76': (194, 194, 194), + 'grey77': (196, 196, 196), + 'grey78': (199, 199, 199), + 'grey79': (201, 201, 201), + 'grey8': (20, 20, 20), + 'grey80': (204, 204, 204), + 'grey81': (207, 207, 207), + 'grey82': (209, 209, 209), + 'grey83': (212, 212, 212), + 'grey84': (214, 214, 214), + 'grey85': (217, 217, 217), + 'grey86': (219, 219, 219), + 'grey87': (222, 222, 222), + 'grey88': (224, 224, 224), + 'grey89': (227, 227, 227), + 'grey9': (23, 23, 23), + 'grey90': (229, 229, 229), + 'grey91': (232, 232, 232), + 'grey92': (235, 235, 235), + 'grey93': (237, 237, 237), + 'grey94': (240, 240, 240), + 'grey95': (242, 242, 242), + 'grey96': (245, 245, 245), + 'grey97': (247, 247, 247), + 'grey98': (250, 250, 250), + 'grey99': (252, 252, 252), + 'honeydew': (240, 255, 240), + 'honeydew1': (240, 255, 240), + 'honeydew2': (224, 238, 224), + 'honeydew3': (193, 205, 193), + 'honeydew4': (131, 139, 131), + 'hotpink': (255, 105, 180), + 'hotpink1': (255, 110, 180), + 'hotpink2': (238, 106, 167), + 'hotpink3': (205, 96, 144), + 'hotpink4': (139, 58, 98), + 'indianred': (205, 92, 92), + 'indianred1': 
(255, 106, 106), + 'indianred2': (238, 99, 99), + 'indianred3': (205, 85, 85), + 'indianred4': (139, 58, 58), + 'indigo': (75, 0, 130), + 'invis': (255, 255, 254), + 'ivory': (255, 255, 240), + 'ivory1': (255, 255, 240), + 'ivory2': (238, 238, 224), + 'ivory3': (205, 205, 193), + 'ivory4': (139, 139, 131), + 'khaki': (240, 230, 140), + 'khaki1': (255, 246, 143), + 'khaki2': (238, 230, 133), + 'khaki3': (205, 198, 115), + 'khaki4': (139, 134, 78), + 'lavender': (230, 230, 250), + 'lavenderblush': (255, 240, 245), + 'lavenderblush1': (255, 240, 245), + 'lavenderblush2': (238, 224, 229), + 'lavenderblush3': (205, 193, 197), + 'lavenderblush4': (139, 131, 134), + 'lawngreen': (124, 252, 0), + 'lemonchiffon': (255, 250, 205), + 'lemonchiffon1': (255, 250, 205), + 'lemonchiffon2': (238, 233, 191), + 'lemonchiffon3': (205, 201, 165), + 'lemonchiffon4': (139, 137, 112), + 'lightblue': (173, 216, 230), + 'lightblue1': (191, 239, 255), + 'lightblue2': (178, 223, 238), + 'lightblue3': (154, 192, 205), + 'lightblue4': (104, 131, 139), + 'lightcoral': (240, 128, 128), + 'lightcyan': (224, 255, 255), + 'lightcyan1': (224, 255, 255), + 'lightcyan2': (209, 238, 238), + 'lightcyan3': (180, 205, 205), + 'lightcyan4': (122, 139, 139), + 'lightgoldenrod': (238, 221, 130), + 'lightgoldenrod1': (255, 236, 139), + 'lightgoldenrod2': (238, 220, 130), + 'lightgoldenrod3': (205, 190, 112), + 'lightgoldenrod4': (139, 129, 76), + 'lightgoldenrodyellow': (250, 250, 210), + 'lightgray': (211, 211, 211), + 'lightgrey': (211, 211, 211), + 'lightpink': (255, 182, 193), + 'lightpink1': (255, 174, 185), + 'lightpink2': (238, 162, 173), + 'lightpink3': (205, 140, 149), + 'lightpink4': (139, 95, 101), + 'lightsalmon': (255, 160, 122), + 'lightsalmon1': (255, 160, 122), + 'lightsalmon2': (238, 149, 114), + 'lightsalmon3': (205, 129, 98), + 'lightsalmon4': (139, 87, 66), + 'lightseagreen': (32, 178, 170), + 'lightskyblue': (135, 206, 250), + 'lightskyblue1': (176, 226, 255), + 'lightskyblue2': (164, 
211, 238), + 'lightskyblue3': (141, 182, 205), + 'lightskyblue4': (96, 123, 139), + 'lightslateblue': (132, 112, 255), + 'lightslategray': (119, 136, 153), + 'lightslategrey': (119, 136, 153), + 'lightsteelblue': (176, 196, 222), + 'lightsteelblue1': (202, 225, 255), + 'lightsteelblue2': (188, 210, 238), + 'lightsteelblue3': (162, 181, 205), + 'lightsteelblue4': (110, 123, 139), + 'lightyellow': (255, 255, 224), + 'lightyellow1': (255, 255, 224), + 'lightyellow2': (238, 238, 209), + 'lightyellow3': (205, 205, 180), + 'lightyellow4': (139, 139, 122), + 'limegreen': (50, 205, 50), + 'linen': (250, 240, 230), + 'magenta': (255, 0, 255), + 'magenta1': (255, 0, 255), + 'magenta2': (238, 0, 238), + 'magenta3': (205, 0, 205), + 'magenta4': (139, 0, 139), + 'maroon': (176, 48, 96), + 'maroon1': (255, 52, 179), + 'maroon2': (238, 48, 167), + 'maroon3': (205, 41, 144), + 'maroon4': (139, 28, 98), + 'mediumaquamarine': (102, 205, 170), + 'mediumblue': (0, 0, 205), + 'mediumorchid': (186, 85, 211), + 'mediumorchid1': (224, 102, 255), + 'mediumorchid2': (209, 95, 238), + 'mediumorchid3': (180, 82, 205), + 'mediumorchid4': (122, 55, 139), + 'mediumpurple': (147, 112, 219), + 'mediumpurple1': (171, 130, 255), + 'mediumpurple2': (159, 121, 238), + 'mediumpurple3': (137, 104, 205), + 'mediumpurple4': (93, 71, 139), + 'mediumseagreen': (60, 179, 113), + 'mediumslateblue': (123, 104, 238), + 'mediumspringgreen': (0, 250, 154), + 'mediumturquoise': (72, 209, 204), + 'mediumvioletred': (199, 21, 133), + 'midnightblue': (25, 25, 112), + 'mintcream': (245, 255, 250), + 'mistyrose': (255, 228, 225), + 'mistyrose1': (255, 228, 225), + 'mistyrose2': (238, 213, 210), + 'mistyrose3': (205, 183, 181), + 'mistyrose4': (139, 125, 123), + 'moccasin': (255, 228, 181), + 'navajowhite': (255, 222, 173), + 'navajowhite1': (255, 222, 173), + 'navajowhite2': (238, 207, 161), + 'navajowhite3': (205, 179, 139), + 'navajowhite4': (139, 121, 94), + 'navy': (0, 0, 128), + 'navyblue': (0, 0, 128), + 'none': 
(255, 255, 254), + 'oldlace': (253, 245, 230), + 'olivedrab': (107, 142, 35), + 'olivedrab1': (192, 255, 62), + 'olivedrab2': (179, 238, 58), + 'olivedrab3': (154, 205, 50), + 'olivedrab4': (105, 139, 34), + 'orange': (255, 165, 0), + 'orange1': (255, 165, 0), + 'orange2': (238, 154, 0), + 'orange3': (205, 133, 0), + 'orange4': (139, 90, 0), + 'orangered': (255, 69, 0), + 'orangered1': (255, 69, 0), + 'orangered2': (238, 64, 0), + 'orangered3': (205, 55, 0), + 'orangered4': (139, 37, 0), + 'orchid': (218, 112, 214), + 'orchid1': (255, 131, 250), + 'orchid2': (238, 122, 233), + 'orchid3': (205, 105, 201), + 'orchid4': (139, 71, 137), + 'palegoldenrod': (238, 232, 170), + 'palegreen': (152, 251, 152), + 'palegreen1': (154, 255, 154), + 'palegreen2': (144, 238, 144), + 'palegreen3': (124, 205, 124), + 'palegreen4': (84, 139, 84), + 'paleturquoise': (175, 238, 238), + 'paleturquoise1': (187, 255, 255), + 'paleturquoise2': (174, 238, 238), + 'paleturquoise3': (150, 205, 205), + 'paleturquoise4': (102, 139, 139), + 'palevioletred': (219, 112, 147), + 'palevioletred1': (255, 130, 171), + 'palevioletred2': (238, 121, 159), + 'palevioletred3': (205, 104, 137), + 'palevioletred4': (139, 71, 93), + 'papayawhip': (255, 239, 213), + 'peachpuff': (255, 218, 185), + 'peachpuff1': (255, 218, 185), + 'peachpuff2': (238, 203, 173), + 'peachpuff3': (205, 175, 149), + 'peachpuff4': (139, 119, 101), + 'peru': (205, 133, 63), + 'pink': (255, 192, 203), + 'pink1': (255, 181, 197), + 'pink2': (238, 169, 184), + 'pink3': (205, 145, 158), + 'pink4': (139, 99, 108), + 'plum': (221, 160, 221), + 'plum1': (255, 187, 255), + 'plum2': (238, 174, 238), + 'plum3': (205, 150, 205), + 'plum4': (139, 102, 139), + 'powderblue': (176, 224, 230), + 'purple': (160, 32, 240), + 'purple1': (155, 48, 255), + 'purple2': (145, 44, 238), + 'purple3': (125, 38, 205), + 'purple4': (85, 26, 139), + 'red': (255, 0, 0), + 'red1': (255, 0, 0), + 'red2': (238, 0, 0), + 'red3': (205, 0, 0), + 'red4': (139, 0, 0), + 
'rosybrown': (188, 143, 143), + 'rosybrown1': (255, 193, 193), + 'rosybrown2': (238, 180, 180), + 'rosybrown3': (205, 155, 155), + 'rosybrown4': (139, 105, 105), + 'royalblue': (65, 105, 225), + 'royalblue1': (72, 118, 255), + 'royalblue2': (67, 110, 238), + 'royalblue3': (58, 95, 205), + 'royalblue4': (39, 64, 139), + 'saddlebrown': (139, 69, 19), + 'salmon': (250, 128, 114), + 'salmon1': (255, 140, 105), + 'salmon2': (238, 130, 98), + 'salmon3': (205, 112, 84), + 'salmon4': (139, 76, 57), + 'sandybrown': (244, 164, 96), + 'seagreen': (46, 139, 87), + 'seagreen1': (84, 255, 159), + 'seagreen2': (78, 238, 148), + 'seagreen3': (67, 205, 128), + 'seagreen4': (46, 139, 87), + 'seashell': (255, 245, 238), + 'seashell1': (255, 245, 238), + 'seashell2': (238, 229, 222), + 'seashell3': (205, 197, 191), + 'seashell4': (139, 134, 130), + 'sienna': (160, 82, 45), + 'sienna1': (255, 130, 71), + 'sienna2': (238, 121, 66), + 'sienna3': (205, 104, 57), + 'sienna4': (139, 71, 38), + 'skyblue': (135, 206, 235), + 'skyblue1': (135, 206, 255), + 'skyblue2': (126, 192, 238), + 'skyblue3': (108, 166, 205), + 'skyblue4': (74, 112, 139), + 'slateblue': (106, 90, 205), + 'slateblue1': (131, 111, 255), + 'slateblue2': (122, 103, 238), + 'slateblue3': (105, 89, 205), + 'slateblue4': (71, 60, 139), + 'slategray': (112, 128, 144), + 'slategray1': (198, 226, 255), + 'slategray2': (185, 211, 238), + 'slategray3': (159, 182, 205), + 'slategray4': (108, 123, 139), + 'slategrey': (112, 128, 144), + 'snow': (255, 250, 250), + 'snow1': (255, 250, 250), + 'snow2': (238, 233, 233), + 'snow3': (205, 201, 201), + 'snow4': (139, 137, 137), + 'springgreen': (0, 255, 127), + 'springgreen1': (0, 255, 127), + 'springgreen2': (0, 238, 118), + 'springgreen3': (0, 205, 102), + 'springgreen4': (0, 139, 69), + 'steelblue': (70, 130, 180), + 'steelblue1': (99, 184, 255), + 'steelblue2': (92, 172, 238), + 'steelblue3': (79, 148, 205), + 'steelblue4': (54, 100, 139), + 'tan': (210, 180, 140), + 'tan1': (255, 165, 
79), + 'tan2': (238, 154, 73), + 'tan3': (205, 133, 63), + 'tan4': (139, 90, 43), + 'thistle': (216, 191, 216), + 'thistle1': (255, 225, 255), + 'thistle2': (238, 210, 238), + 'thistle3': (205, 181, 205), + 'thistle4': (139, 123, 139), + 'tomato': (255, 99, 71), + 'tomato1': (255, 99, 71), + 'tomato2': (238, 92, 66), + 'tomato3': (205, 79, 57), + 'tomato4': (139, 54, 38), + 'transparent': (255, 255, 254), + 'turquoise': (64, 224, 208), + 'turquoise1': (0, 245, 255), + 'turquoise2': (0, 229, 238), + 'turquoise3': (0, 197, 205), + 'turquoise4': (0, 134, 139), + 'violet': (238, 130, 238), + 'violetred': (208, 32, 144), + 'violetred1': (255, 62, 150), + 'violetred2': (238, 58, 140), + 'violetred3': (205, 50, 120), + 'violetred4': (139, 34, 82), + 'wheat': (245, 222, 179), + 'wheat1': (255, 231, 186), + 'wheat2': (238, 216, 174), + 'wheat3': (205, 186, 150), + 'wheat4': (139, 126, 102), + 'white': (255, 255, 255), + 'whitesmoke': (245, 245, 245), + 'yellow': (255, 255, 0), + 'yellow1': (255, 255, 0), + 'yellow2': (238, 238, 0), + 'yellow3': (205, 205, 0), + 'yellow4': (139, 139, 0), + 'yellowgreen': (154, 205, 50), } re_nonword=re.compile(r'([^0-9a-zA-Z_.]+)') re_linewidth=re.compile(r'setlinewidth\((\d+(\.\d*)?|\.\d+)\)') diff --git a/lib_pypy/cffi.egg-info/PKG-INFO b/lib_pypy/cffi.egg-info/PKG-INFO --- a/lib_pypy/cffi.egg-info/PKG-INFO +++ b/lib_pypy/cffi.egg-info/PKG-INFO @@ -1,6 +1,6 @@ Metadata-Version: 1.1 Name: cffi -Version: 1.5.0 +Version: 1.5.1 Summary: Foreign Function Interface for Python calling C code. 
Home-page: http://cffi.readthedocs.org Author: Armin Rigo, Maciej Fijalkowski diff --git a/lib_pypy/cffi/__init__.py b/lib_pypy/cffi/__init__.py --- a/lib_pypy/cffi/__init__.py +++ b/lib_pypy/cffi/__init__.py @@ -4,8 +4,8 @@ from .api import FFI, CDefError, FFIError from .ffiplatform import VerificationError, VerificationMissing -__version__ = "1.5.0" -__version_info__ = (1, 5, 0) +__version__ = "1.5.1" +__version_info__ = (1, 5, 1) # The verifier module file names are based on the CRC32 of a string that # contains the following version number. It may be older than __version__ diff --git a/lib_pypy/cffi/_cffi_include.h b/lib_pypy/cffi/_cffi_include.h --- a/lib_pypy/cffi/_cffi_include.h +++ b/lib_pypy/cffi/_cffi_include.h @@ -231,6 +231,12 @@ ((got_nonpos) == (expected <= 0) && \ (got) == (unsigned long long)expected) +#ifdef MS_WIN32 +# define _cffi_stdcall __stdcall +#else +# define _cffi_stdcall /* nothing */ +#endif + #ifdef __cplusplus } #endif diff --git a/lib_pypy/cffi/_embedding.h b/lib_pypy/cffi/_embedding.h new file mode 100644 --- /dev/null +++ b/lib_pypy/cffi/_embedding.h @@ -0,0 +1,517 @@ + +/***** Support code for embedding *****/ + +#if defined(_MSC_VER) +# define CFFI_DLLEXPORT __declspec(dllexport) +#elif defined(__GNUC__) +# define CFFI_DLLEXPORT __attribute__((visibility("default"))) +#else +# define CFFI_DLLEXPORT /* nothing */ +#endif + + +/* There are two global variables of type _cffi_call_python_fnptr: + + * _cffi_call_python, which we declare just below, is the one called + by ``extern "Python"`` implementations. + + * _cffi_call_python_org, which on CPython is actually part of the + _cffi_exports[] array, is the function pointer copied from + _cffi_backend. + + After initialization is complete, both are equal. 
However, the + first one remains equal to &_cffi_start_and_call_python until the + very end of initialization, when we are (or should be) sure that + concurrent threads also see a completely initialized world, and + only then is it changed. +*/ +#undef _cffi_call_python +typedef void (*_cffi_call_python_fnptr)(struct _cffi_externpy_s *, char *); +static void _cffi_start_and_call_python(struct _cffi_externpy_s *, char *); +static _cffi_call_python_fnptr _cffi_call_python = &_cffi_start_and_call_python; + + +#ifndef _MSC_VER + /* --- Assuming a GCC not infinitely old --- */ +# define cffi_compare_and_swap(l,o,n) __sync_bool_compare_and_swap(l,o,n) +# define cffi_write_barrier() __sync_synchronize() +# if !defined(__amd64__) && !defined(__x86_64__) && \ + !defined(__i386__) && !defined(__i386) +# define cffi_read_barrier() __sync_synchronize() +# else +# define cffi_read_barrier() (void)0 +# endif +#else + /* --- Windows threads version --- */ +# include +# define cffi_compare_and_swap(l,o,n) \ + (InterlockedCompareExchangePointer(l,n,o) == (o)) +# define cffi_write_barrier() InterlockedCompareExchange(&_cffi_dummy,0,0) +# define cffi_read_barrier() (void)0 +static volatile LONG _cffi_dummy; +#endif + +#ifdef WITH_THREAD +# ifndef _MSC_VER +# include + static pthread_mutex_t _cffi_embed_startup_lock; +# else + static CRITICAL_SECTION _cffi_embed_startup_lock; +# endif + static char _cffi_embed_startup_lock_ready = 0; +#endif + +static void _cffi_acquire_reentrant_mutex(void) +{ + static void *volatile lock = NULL; + + while (!cffi_compare_and_swap(&lock, NULL, (void *)1)) { + /* should ideally do a spin loop instruction here, but + hard to do it portably and doesn't really matter I + think: pthread_mutex_init() should be very fast, and + this is only run at start-up anyway. 
*/ + } + +#ifdef WITH_THREAD + if (!_cffi_embed_startup_lock_ready) { +# ifndef _MSC_VER + pthread_mutexattr_t attr; + pthread_mutexattr_init(&attr); + pthread_mutexattr_settype(&attr, PTHREAD_MUTEX_RECURSIVE); + pthread_mutex_init(&_cffi_embed_startup_lock, &attr); +# else + InitializeCriticalSection(&_cffi_embed_startup_lock); +# endif + _cffi_embed_startup_lock_ready = 1; + } +#endif + + while (!cffi_compare_and_swap(&lock, (void *)1, NULL)) + ; + +#ifndef _MSC_VER + pthread_mutex_lock(&_cffi_embed_startup_lock); +#else + EnterCriticalSection(&_cffi_embed_startup_lock); +#endif +} + +static void _cffi_release_reentrant_mutex(void) +{ +#ifndef _MSC_VER + pthread_mutex_unlock(&_cffi_embed_startup_lock); +#else + LeaveCriticalSection(&_cffi_embed_startup_lock); +#endif +} + + +/********** CPython-specific section **********/ +#ifndef PYPY_VERSION + + +#define _cffi_call_python_org _cffi_exports[_CFFI_CPIDX] + +PyMODINIT_FUNC _CFFI_PYTHON_STARTUP_FUNC(void); /* forward */ + +static void _cffi_py_initialize(void) +{ + /* XXX use initsigs=0, which "skips initialization registration of + signal handlers, which might be useful when Python is + embedded" according to the Python docs. But review and think + if it should be a user-controllable setting. + + XXX we should also give a way to write errors to a buffer + instead of to stderr. + + XXX if importing 'site' fails, CPython (any version) calls + exit(). Should we try to work around this behavior here? + */ + Py_InitializeEx(0); +} + +static int _cffi_initialize_python(void) +{ + /* This initializes Python, imports _cffi_backend, and then the + present .dll/.so is set up as a CPython C extension module. + */ + int result; + PyGILState_STATE state; + PyObject *pycode=NULL, *global_dict=NULL, *x; + +#if PY_MAJOR_VERSION >= 3 + /* see comments in _cffi_carefully_make_gil() about the + Python2/Python3 difference + */ +#else + /* Acquire the GIL. We have no threadstate here. 
If Python is + already initialized, it is possible that there is already one + existing for this thread, but it is not made current now. + */ + PyEval_AcquireLock(); + + _cffi_py_initialize(); + + /* The Py_InitializeEx() sometimes made a threadstate for us, but + not always. Indeed Py_InitializeEx() could be called and do + nothing. So do we have a threadstate, or not? We don't know, + but we can replace it with NULL in all cases. + */ + (void)PyThreadState_Swap(NULL); + + /* Now we can release the GIL and re-acquire immediately using the + logic of PyGILState(), which handles making or installing the + correct threadstate. + */ + PyEval_ReleaseLock(); +#endif + state = PyGILState_Ensure(); + + /* Call the initxxx() function from the present module. It will + create and initialize us as a CPython extension module, instead + of letting the startup Python code do it---it might reimport + the same .dll/.so and get maybe confused on some platforms. + It might also have troubles locating the .dll/.so again for all + I know. + */ + (void)_CFFI_PYTHON_STARTUP_FUNC(); + if (PyErr_Occurred()) + goto error; + + /* Now run the Python code provided to ffi.embedding_init_code(). + */ + pycode = Py_CompileString(_CFFI_PYTHON_STARTUP_CODE, + "", + Py_file_input); + if (pycode == NULL) + goto error; + global_dict = PyDict_New(); + if (global_dict == NULL) + goto error; + if (PyDict_SetItemString(global_dict, "__builtins__", + PyThreadState_GET()->interp->builtins) < 0) + goto error; + x = PyEval_EvalCode( +#if PY_MAJOR_VERSION < 3 + (PyCodeObject *) +#endif + pycode, global_dict, global_dict); + if (x == NULL) + goto error; + Py_DECREF(x); + + /* Done! Now if we've been called from + _cffi_start_and_call_python() in an ``extern "Python"``, we can + only hope that the Python code did correctly set up the + corresponding @ffi.def_extern() function. 
Otherwise, the + general logic of ``extern "Python"`` functions (inside the + _cffi_backend module) will find that the reference is still + missing and print an error. + */ + result = 0; + done: + Py_XDECREF(pycode); + Py_XDECREF(global_dict); + PyGILState_Release(state); + return result; + + error:; + { + /* Print as much information as potentially useful. + Debugging load-time failures with embedding is not fun + */ + PyObject *exception, *v, *tb, *f, *modules, *mod; + PyErr_Fetch(&exception, &v, &tb); + if (exception != NULL) { + PyErr_NormalizeException(&exception, &v, &tb); + PyErr_Display(exception, v, tb); + } + Py_XDECREF(exception); + Py_XDECREF(v); + Py_XDECREF(tb); + + f = PySys_GetObject((char *)"stderr"); + if (f != NULL && f != Py_None) { + PyFile_WriteString("\nFrom: " _CFFI_MODULE_NAME + "\ncompiled with cffi version: 1.5.1" + "\n_cffi_backend module: ", f); + modules = PyImport_GetModuleDict(); + mod = PyDict_GetItemString(modules, "_cffi_backend"); + if (mod == NULL) { + PyFile_WriteString("not loaded", f); + } + else { + v = PyObject_GetAttrString(mod, "__file__"); + PyFile_WriteObject(v, f, 0); + Py_XDECREF(v); + } + PyFile_WriteString("\nsys.path: ", f); + PyFile_WriteObject(PySys_GetObject((char *)"path"), f, 0); + PyFile_WriteString("\n\n", f); + } + } + result = -1; + goto done; +} + +PyAPI_DATA(char *) _PyParser_TokenNames[]; /* from CPython */ + +static int _cffi_carefully_make_gil(void) +{ + /* This does the basic initialization of Python. It can be called + completely concurrently from unrelated threads. It assumes + that we don't hold the GIL before (if it exists), and we don't + hold it afterwards. + + What it really does is completely different in Python 2 and + Python 3. + + Python 2 + ======== + + Initialize the GIL, without initializing the rest of Python, + by calling PyEval_InitThreads(). + + PyEval_InitThreads() must not be called concurrently at all. + So we use a global variable as a simple spin lock. 
This global + variable must be from 'libpythonX.Y.so', not from this + cffi-based extension module, because it must be shared from + different cffi-based extension modules. We choose + _PyParser_TokenNames[0] as a completely arbitrary pointer value + that is never written to. The default is to point to the + string "ENDMARKER". We change it temporarily to point to the + next character in that string. (Yes, I know it's REALLY + obscure.) + + Python 3 + ======== + + In Python 3, PyEval_InitThreads() cannot be called before + Py_InitializeEx() any more. So this function calls + Py_InitializeEx() first. It uses the same obscure logic to + make sure we never call it concurrently. + + Arguably, this is less good on the spinlock, because + Py_InitializeEx() takes much longer to run than + PyEval_InitThreads(). But I didn't find a way around it. + */ + +#ifdef WITH_THREAD + char *volatile *lock = (char *volatile *)_PyParser_TokenNames; + char *old_value; + + while (1) { /* spin loop */ + old_value = *lock; + if (old_value[0] == 'E') { + assert(old_value[1] == 'N'); + if (cffi_compare_and_swap(lock, old_value, old_value + 1)) + break; + } + else { + assert(old_value[0] == 'N'); + /* should ideally do a spin loop instruction here, but + hard to do it portably and doesn't really matter I + think: PyEval_InitThreads() should be very fast, and + this is only run at start-up anyway. 
*/ + } + } +#endif + +#if PY_MAJOR_VERSION >= 3 + /* Python 3: call Py_InitializeEx() */ + { + PyGILState_STATE state = PyGILState_UNLOCKED; + if (!Py_IsInitialized()) + _cffi_py_initialize(); + else + state = PyGILState_Ensure(); + + PyEval_InitThreads(); + PyGILState_Release(state); + } +#else + /* Python 2: call PyEval_InitThreads() */ +# ifdef WITH_THREAD + if (!PyEval_ThreadsInitialized()) { + PyEval_InitThreads(); /* makes the GIL */ + PyEval_ReleaseLock(); /* then release it */ + } + /* else: there is already a GIL, but we still needed to do the + spinlock dance to make sure that we see it as fully ready */ +# endif +#endif + +#ifdef WITH_THREAD + /* release the lock */ + while (!cffi_compare_and_swap(lock, old_value + 1, old_value)) + ; +#endif + + return 0; +} + +/********** end CPython-specific section **********/ + + +#else + + +/********** PyPy-specific section **********/ + +PyMODINIT_FUNC _CFFI_PYTHON_STARTUP_FUNC(const void *[]); /* forward */ + +static struct _cffi_pypy_init_s { + const char *name; + void (*func)(const void *[]); + const char *code; +} _cffi_pypy_init = { + _CFFI_MODULE_NAME, + _CFFI_PYTHON_STARTUP_FUNC, + _CFFI_PYTHON_STARTUP_CODE, +}; + +extern int pypy_carefully_make_gil(const char *); +extern int pypy_init_embedded_cffi_module(int, struct _cffi_pypy_init_s *); + +static int _cffi_carefully_make_gil(void) +{ + return pypy_carefully_make_gil(_CFFI_MODULE_NAME); +} + +static int _cffi_initialize_python(void) +{ + return pypy_init_embedded_cffi_module(0xB011, &_cffi_pypy_init); +} + +/********** end PyPy-specific section **********/ + + +#endif + + +#ifdef __GNUC__ +__attribute__((noinline)) +#endif +static _cffi_call_python_fnptr _cffi_start_python(void) +{ + /* Delicate logic to initialize Python. This function can be + called multiple times concurrently, e.g. when the process calls + its first ``extern "Python"`` functions in multiple threads at + once. It can also be called recursively, in which case we must + ignore it. 
We also have to consider what occurs if several + different cffi-based extensions reach this code in parallel + threads---it is a different copy of the code, then, and we + can't have any shared global variable unless it comes from + 'libpythonX.Y.so'. + + Idea: + + * _cffi_carefully_make_gil(): "carefully" call + PyEval_InitThreads() (possibly with Py_InitializeEx() first). + + * then we use a (local) custom lock to make sure that a call to this + cffi-based extension will wait if another call to the *same* + extension is running the initialization in another thread. + It is reentrant, so that a recursive call will not block, but + only one from a different thread. + + * then we grab the GIL and (Python 2) we call Py_InitializeEx(). + At this point, concurrent calls to Py_InitializeEx() are not + possible: we have the GIL. + + * do the rest of the specific initialization, which may + temporarily release the GIL but not the custom lock. + Only release the custom lock when we are done. + */ + static char called = 0; + + if (_cffi_carefully_make_gil() != 0) + return NULL; + + _cffi_acquire_reentrant_mutex(); + + /* Here the GIL exists, but we don't have it. We're only protected + from concurrency by the reentrant mutex. */ + + /* This file only initializes the embedded module once, the first + time this is called, even if there are subinterpreters. */ + if (!called) { + called = 1; /* invoke _cffi_initialize_python() only once, + but don't set '_cffi_call_python' right now, + otherwise concurrent threads won't call + this function at all (we need them to wait) */ + if (_cffi_initialize_python() == 0) { + /* now initialization is finished. Switch to the fast-path. */ + + /* We would like nobody to see the new value of + '_cffi_call_python' without also seeing the rest of the + data initialized. However, this is not possible. But + the new value of '_cffi_call_python' is the function + 'cffi_call_python()' from _cffi_backend. 
So: */ + cffi_write_barrier(); + /* ^^^ we put a write barrier here, and a corresponding + read barrier at the start of cffi_call_python(). This + ensures that after that read barrier, we see everything + done here before the write barrier. + */ + + assert(_cffi_call_python_org != NULL); + _cffi_call_python = (_cffi_call_python_fnptr)_cffi_call_python_org; + } + else { + /* initialization failed. Reset this to NULL, even if it was + already set to some other value. Future calls to + _cffi_start_python() are still forced to occur, and will + always return NULL from now on. */ + _cffi_call_python_org = NULL; + } + } + + _cffi_release_reentrant_mutex(); + + return (_cffi_call_python_fnptr)_cffi_call_python_org; +} + +static +void _cffi_start_and_call_python(struct _cffi_externpy_s *externpy, char *args) +{ + _cffi_call_python_fnptr fnptr; + int current_err = errno; +#ifdef _MSC_VER + int current_lasterr = GetLastError(); +#endif + fnptr = _cffi_start_python(); + if (fnptr == NULL) { + fprintf(stderr, "function %s() called, but initialization code " + "failed. Returning 0.\n", externpy->name); + memset(args, 0, externpy->size_of_result); + } +#ifdef _MSC_VER + SetLastError(current_lasterr); +#endif + errno = current_err; + + if (fnptr != NULL) + fnptr(externpy, args); +} + + +/* The cffi_start_python() function makes sure Python is initialized + and our cffi module is set up. It can be called manually from the + user C code. The same effect is obtained automatically from any + dll-exported ``extern "Python"`` function. This function returns + -1 if initialization failed, 0 if all is OK. 
*/ +_CFFI_UNUSED_FN +static int cffi_start_python(void) +{ + if (_cffi_call_python == &_cffi_start_and_call_python) { + if (_cffi_start_python() == NULL) + return -1; + } + cffi_read_barrier(); + return 0; +} + +#undef cffi_compare_and_swap +#undef cffi_write_barrier +#undef cffi_read_barrier diff --git a/lib_pypy/cffi/api.py b/lib_pypy/cffi/api.py --- a/lib_pypy/cffi/api.py +++ b/lib_pypy/cffi/api.py @@ -1,4 +1,4 @@ -import sys, types +import sys, sysconfig, types from .lock import allocate_lock try: @@ -544,28 +544,32 @@ def _apply_embedding_fix(self, kwds): # must include an argument like "-lpython2.7" for the compiler + def ensure(key, value): + lst = kwds.setdefault(key, []) + if value not in lst: + lst.append(value) + # if '__pypy__' in sys.builtin_module_names: if hasattr(sys, 'prefix'): import os - libdir = os.path.join(sys.prefix, 'bin') - dirs = kwds.setdefault('library_dirs', []) - if libdir not in dirs: - dirs.append(libdir) + ensure('library_dirs', os.path.join(sys.prefix, 'bin')) pythonlib = "pypy-c" else: if sys.platform == "win32": template = "python%d%d" - if sys.flags.debug: - template = template + '_d' + if hasattr(sys, 'gettotalrefcount'): + template += '_d' else: template = "python%d.%d" + if sysconfig.get_config_var('DEBUG_EXT'): + template += sysconfig.get_config_var('DEBUG_EXT') pythonlib = (template % (sys.hexversion >> 24, (sys.hexversion >> 16) & 0xff)) if hasattr(sys, 'abiflags'): pythonlib += sys.abiflags - libraries = kwds.setdefault('libraries', []) - if pythonlib not in libraries: - libraries.append(pythonlib) + ensure('libraries', pythonlib) + if sys.platform == "win32": + ensure('extra_link_args', '/MANIFEST') def set_source(self, module_name, source, source_extension='.c', **kwds): if hasattr(self, '_assigned_source'): @@ -631,7 +635,7 @@ compiled DLL. Use '*' to force distutils' choice, suitable for regular CPython C API modules. 
Use a file name ending in '.*' to ask for the system's default extension for dynamic libraries - (.so/.dll). + (.so/.dll/.dylib). The default is '*' when building a non-embedded C API extension, and (module_name + '.*') when building an embedded library. @@ -695,6 +699,10 @@ # self._embedding = pysource + def def_extern(self, *args, **kwds): + raise ValueError("ffi.def_extern() is only available on API-mode FFI " + "objects") + def _load_backend_lib(backend, name, flags): if name is None: diff --git a/lib_pypy/cffi/cparser.py b/lib_pypy/cffi/cparser.py --- a/lib_pypy/cffi/cparser.py +++ b/lib_pypy/cffi/cparser.py @@ -220,7 +220,7 @@ self._included_declarations = set() self._anonymous_counter = 0 self._structnode2type = weakref.WeakKeyDictionary() - self._options = None + self._options = {} self._int_constants = {} self._recomplete = [] self._uses_new_feature = None @@ -374,7 +374,7 @@ def _declare_function(self, tp, quals, decl): tp = self._get_type_pointer(tp, quals) - if self._options['dllexport']: + if self._options.get('dllexport'): tag = 'dllexport_python ' elif self._inside_extern_python: tag = 'extern_python ' @@ -450,7 +450,7 @@ prevobj, prevquals = self._declarations[name] if prevobj is obj and prevquals == quals: return - if not self._options['override']: + if not self._options.get('override'): raise api.FFIError( "multiple declarations of %s (for interactive usage, " "try cdef(xx, override=True))" % (name,)) @@ -729,7 +729,7 @@ if isinstance(tp, model.StructType) and tp.partial: raise NotImplementedError("%s: using both bitfields and '...;'" % (tp,)) - tp.packed = self._options['packed'] + tp.packed = self._options.get('packed') if tp.completed: # must be re-completed: it is not opaque any more tp.completed = 0 self._recomplete.append(tp) diff --git a/lib_pypy/cffi/ffiplatform.py b/lib_pypy/cffi/ffiplatform.py --- a/lib_pypy/cffi/ffiplatform.py +++ b/lib_pypy/cffi/ffiplatform.py @@ -21,14 +21,12 @@ allsources.append(os.path.normpath(src)) return 
Extension(name=modname, sources=allsources, **kwds) -def compile(tmpdir, ext, compiler_verbose=0, target_extension=None, - embedding=False): +def compile(tmpdir, ext, compiler_verbose=0): """Compile a C extension module using distutils.""" saved_environ = os.environ.copy() try: - outputfilename = _build(tmpdir, ext, compiler_verbose, - target_extension, embedding) + outputfilename = _build(tmpdir, ext, compiler_verbose) outputfilename = os.path.abspath(outputfilename) finally: # workaround for a distutils bugs where some env vars can @@ -38,32 +36,7 @@ os.environ[key] = value return outputfilename -def _save_val(name): - import distutils.sysconfig - config_vars = distutils.sysconfig.get_config_vars() - return config_vars.get(name, Ellipsis) - -def _restore_val(name, value): - import distutils.sysconfig - config_vars = distutils.sysconfig.get_config_vars() - config_vars[name] = value - if value is Ellipsis: - del config_vars[name] - -def _win32_hack_for_embedding(): - from distutils.msvc9compiler import MSVCCompiler - if not hasattr(MSVCCompiler, '_remove_visual_c_ref_CFFI_BAK'): - MSVCCompiler._remove_visual_c_ref_CFFI_BAK = \ - MSVCCompiler._remove_visual_c_ref - MSVCCompiler._remove_visual_c_ref = lambda self,manifest_file: manifest_file - -def _win32_unhack_for_embedding(): - from distutils.msvc9compiler import MSVCCompiler - MSVCCompiler._remove_visual_c_ref = \ - MSVCCompiler._remove_visual_c_ref_CFFI_BAK - -def _build(tmpdir, ext, compiler_verbose=0, target_extension=None, - embedding=False): +def _build(tmpdir, ext, compiler_verbose=0): # XXX compact but horrible :-( from distutils.core import Distribution import distutils.errors, distutils.log @@ -76,25 +49,14 @@ options['build_temp'] = ('ffiplatform', tmpdir) # try: - if sys.platform == 'win32' and embedding: - _win32_hack_for_embedding() old_level = distutils.log.set_threshold(0) or 0 - old_SO = _save_val('SO') - old_EXT_SUFFIX = _save_val('EXT_SUFFIX') try: - if target_extension is not None: - 
_restore_val('SO', target_extension) - _restore_val('EXT_SUFFIX', target_extension) distutils.log.set_verbosity(compiler_verbose) dist.run_command('build_ext') cmd_obj = dist.get_command_obj('build_ext') [soname] = cmd_obj.get_outputs() finally: distutils.log.set_threshold(old_level) - _restore_val('SO', old_SO) - _restore_val('EXT_SUFFIX', old_EXT_SUFFIX) - if sys.platform == 'win32' and embedding: - _win32_unhack_for_embedding() except (distutils.errors.CompileError, distutils.errors.LinkError) as e: raise VerificationError('%s: %s' % (e.__class__.__name__, e)) diff --git a/lib_pypy/cffi/recompiler.py b/lib_pypy/cffi/recompiler.py --- a/lib_pypy/cffi/recompiler.py +++ b/lib_pypy/cffi/recompiler.py @@ -1170,6 +1170,8 @@ repr_arguments = ', '.join(arguments) repr_arguments = repr_arguments or 'void' name_and_arguments = '%s(%s)' % (name, repr_arguments) + if tp.abi == "__stdcall": + name_and_arguments = '_cffi_stdcall ' + name_and_arguments # def may_need_128_bits(tp): return (isinstance(tp, model.PrimitiveType) and @@ -1357,6 +1359,58 @@ parts[-1] += extension return os.path.join(outputdir, *parts), parts + +# Aaargh. Distutils is not tested at all for the purpose of compiling +# DLLs that are not extension modules. Here are some hacks to work +# around that, in the _patch_for_*() functions... + +def _patch_meth(patchlist, cls, name, new_meth): + old = getattr(cls, name) + patchlist.append((cls, name, old)) + setattr(cls, name, new_meth) + return old + +def _unpatch_meths(patchlist): + for cls, name, old_meth in reversed(patchlist): + setattr(cls, name, old_meth) + +def _patch_for_embedding(patchlist): + if sys.platform == 'win32': + # we must not remove the manifest when building for embedding! 
+ from distutils.msvc9compiler import MSVCCompiler + _patch_meth(patchlist, MSVCCompiler, '_remove_visual_c_ref', + lambda self, manifest_file: manifest_file) + + if sys.platform == 'darwin': + # we must not make a '-bundle', but a '-dynamiclib' instead + from distutils.ccompiler import CCompiler + def my_link_shared_object(self, *args, **kwds): + if '-bundle' in self.linker_so: + self.linker_so = list(self.linker_so) + i = self.linker_so.index('-bundle') + self.linker_so[i] = '-dynamiclib' + return old_link_shared_object(self, *args, **kwds) + old_link_shared_object = _patch_meth(patchlist, CCompiler, + 'link_shared_object', + my_link_shared_object) + +def _patch_for_target(patchlist, target): + from distutils.command.build_ext import build_ext + # if 'target' is different from '*', we need to patch some internal + # method to just return this 'target' value, instead of having it + # built from module_name + if target.endswith('.*'): + target = target[:-2] + if sys.platform == 'win32': + target += '.dll' + elif sys.platform == 'darwin': + target += '.dylib' + else: + target += '.so' + _patch_meth(patchlist, build_ext, 'get_ext_filename', + lambda self, ext_name: target) + + def recompile(ffi, module_name, preamble, tmpdir='.', call_c_compiler=True, c_file=None, source_extension='.c', extradir=None, compiler_verbose=1, target=None, **kwds): @@ -1382,36 +1436,22 @@ target = '%s.*' % module_name else: target = '*' - if target == '*': - target_module_name = module_name - target_extension = None # use default - else: - if target.endswith('.*'): - target = target[:-2] - if sys.platform == 'win32': - target += '.dll' - else: - target += '.so' - # split along the first '.' 
(not the last one, otherwise the - # preceeding dots are interpreted as splitting package names) - index = target.find('.') - if index < 0: - raise ValueError("target argument %r should be a file name " - "containing a '.'" % (target,)) - target_module_name = target[:index] - target_extension = target[index:] # - ext = ffiplatform.get_extension(ext_c_file, target_module_name, **kwds) + ext = ffiplatform.get_extension(ext_c_file, module_name, **kwds) updated = make_c_source(ffi, module_name, preamble, c_file) if call_c_compiler: + patchlist = [] cwd = os.getcwd() try: + if embedding: + _patch_for_embedding(patchlist) + if target != '*': + _patch_for_target(patchlist, target) os.chdir(tmpdir) - outputfilename = ffiplatform.compile('.', ext, compiler_verbose, - target_extension, - embedding=embedding) + outputfilename = ffiplatform.compile('.', ext, compiler_verbose) finally: os.chdir(cwd) + _unpatch_meths(patchlist) return outputfilename else: return ext, updated diff --git a/pypy/doc/whatsnew-head.rst b/pypy/doc/whatsnew-head.rst --- a/pypy/doc/whatsnew-head.rst +++ b/pypy/doc/whatsnew-head.rst @@ -139,3 +139,12 @@ Refactor sandboxing to operate at a higher level. .. branch: cpyext-bootstrap + +.. branch: vmprof-newstack + +Refactor vmprof to work cross-operating-system. + +.. 
branch: seperate-strucmember_h + +Seperate structmember.h from Python.h Also enhance creating api functions +to specify which header file they appear in (previously only pypy_decl.h) diff --git a/pypy/module/_cffi_backend/__init__.py b/pypy/module/_cffi_backend/__init__.py --- a/pypy/module/_cffi_backend/__init__.py +++ b/pypy/module/_cffi_backend/__init__.py @@ -3,7 +3,7 @@ from rpython.rlib import rdynload, clibffi, entrypoint from rpython.rtyper.lltypesystem import rffi -VERSION = "1.5.0" +VERSION = "1.5.1" FFI_DEFAULT_ABI = clibffi.FFI_DEFAULT_ABI try: @@ -69,6 +69,7 @@ def startup(self, space): from pypy.module._cffi_backend import embedding embedding.glob.space = space + embedding.glob.patched_sys = False def get_dict_rtld_constants(): diff --git a/pypy/module/_cffi_backend/embedding.py b/pypy/module/_cffi_backend/embedding.py --- a/pypy/module/_cffi_backend/embedding.py +++ b/pypy/module/_cffi_backend/embedding.py @@ -45,6 +45,26 @@ pass glob = Global() +def patch_sys(space): + # Annoying: CPython would just use the C-level std{in,out,err} as + # configured by the main application, for example in binary mode + # on Windows or with buffering turned off. We can't easily do the + # same. Instead, go for the safest bet (but possibly bad for + # performance) and open sys.std{in,out,err} unbuffered. On + # Windows I guess binary mode is a better default choice. + # + # XXX if needed, we could add support for a flag passed to + # pypy_init_embedded_cffi_module(). + if not glob.patched_sys: + space.appexec([], """(): + import os + sys.stdin = sys.__stdin__ = os.fdopen(0, 'rb', 0) + sys.stdout = sys.__stdout__ = os.fdopen(1, 'wb', 0) + sys.stderr = sys.__stderr__ = os.fdopen(2, 'wb', 0) + """) + glob.patched_sys = True + + def pypy_init_embedded_cffi_module(version, init_struct): # called from __init__.py name = "?" 
@@ -56,6 +76,7 @@ must_leave = False try: must_leave = space.threadlocals.try_enter_thread(space) + patch_sys(space) load_embedded_cffi_module(space, version, init_struct) res = 0 except OperationError, operr: @@ -85,14 +106,86 @@ # ____________________________________________________________ +if os.name == 'nt': -eci = ExternalCompilationInfo(separate_module_sources=[ -r""" -/* XXX Windows missing */ -#include + do_includes = r""" +#define _WIN32_WINNT 0x0501 +#include + +#define CFFI_INIT_HOME_PATH_MAX _MAX_PATH +static void _cffi_init(void); +static void _cffi_init_error(const char *msg, const char *extra); + +static int _cffi_init_home(char *output_home_path) +{ + HMODULE hModule = 0; + DWORD res; + + GetModuleHandleEx(GET_MODULE_HANDLE_EX_FLAG_FROM_ADDRESS | + GET_MODULE_HANDLE_EX_FLAG_UNCHANGED_REFCOUNT, + (LPCTSTR)&_cffi_init, &hModule); + + if (hModule == 0 ) { + _cffi_init_error("GetModuleHandleEx() failed", ""); + return -1; + } + res = GetModuleFileName(hModule, output_home_path, CFFI_INIT_HOME_PATH_MAX); + if (res >= CFFI_INIT_HOME_PATH_MAX) { + return -1; + } + return 0; +} + +static void _cffi_init_once(void) +{ + static LONG volatile lock = 0; + static int _init_called = 0; + + while (InterlockedCompareExchange(&lock, 1, 0) != 0) { + SwitchToThread(); /* spin loop */ + } + if (!_init_called) { + _cffi_init(); + _init_called = 1; + } + InterlockedCompareExchange(&lock, 0, 1); +} +""" + +else: + + do_includes = r""" #include #include +#define CFFI_INIT_HOME_PATH_MAX PATH_MAX +static void _cffi_init(void); +static void _cffi_init_error(const char *msg, const char *extra); + +static int _cffi_init_home(char *output_home_path) +{ + Dl_info info; + dlerror(); /* reset */ + if (dladdr(&_cffi_init, &info) == 0) { + _cffi_init_error("dladdr() failed: ", dlerror()); + return -1; + } + if (realpath(info.dli_fname, output_home_path) == NULL) { + perror("realpath() failed"); + _cffi_init_error("realpath() failed", ""); + return -1; + } + return 0; +} + +static 
void _cffi_init_once(void) +{ + static pthread_once_t once_control = PTHREAD_ONCE_INIT; + pthread_once(&once_control, _cffi_init); +} +""" + +do_startup = do_includes + r""" RPY_EXPORTED void rpython_startup_code(void); RPY_EXPORTED int pypy_setup_home(char *, int); @@ -108,17 +201,13 @@ static void _cffi_init(void) { - Dl_info info; - char *home; + char home[CFFI_INIT_HOME_PATH_MAX + 1]; rpython_startup_code(); RPyGilAllocate(); - if (dladdr(&_cffi_init, &info) == 0) { - _cffi_init_error("dladdr() failed: ", dlerror()); + if (_cffi_init_home(home) != 0) return; - } - home = realpath(info.dli_fname, NULL); if (pypy_setup_home(home, 1) != 0) { _cffi_init_error("pypy_setup_home() failed", ""); return; @@ -134,13 +223,12 @@ It assumes that we don't hold the GIL before (if it exists), and we don't hold it afterwards. */ - static pthread_once_t once_control = PTHREAD_ONCE_INIT; - _cffi_module_name = name; /* not really thread-safe, but better than nothing */ - pthread_once(&once_control, _cffi_init); + _cffi_init_once(); return (int)_cffi_ready - 1; } -"""]) +""" +eci = ExternalCompilationInfo(separate_module_sources=[do_startup]) declare_c_function = rffi.llexternal_use_eci(eci) diff --git a/pypy/module/_cffi_backend/test/_backend_test_c.py b/pypy/module/_cffi_backend/test/_backend_test_c.py --- a/pypy/module/_cffi_backend/test/_backend_test_c.py +++ b/pypy/module/_cffi_backend/test/_backend_test_c.py @@ -1,7 +1,7 @@ # ____________________________________________________________ import sys -assert __version__ == "1.5.0", ("This test_c.py file is for testing a version" +assert __version__ == "1.5.1", ("This test_c.py file is for testing a version" " of cffi that differs from the one that we" " get from 'import _cffi_backend'") if sys.version_info < (3,): diff --git a/pypy/module/_cffi_backend/test/test_ztranslation.py b/pypy/module/_cffi_backend/test/test_ztranslation.py --- a/pypy/module/_cffi_backend/test/test_ztranslation.py +++ 
b/pypy/module/_cffi_backend/test/test_ztranslation.py @@ -4,14 +4,17 @@ # side-effect: FORMAT_LONGDOUBLE must be built before test_checkmodule() from pypy.module._cffi_backend import misc -from pypy.module._cffi_backend import cffi1_module +from pypy.module._cffi_backend import embedding def test_checkmodule(): # prepare_file_argument() is not working without translating the _file # module too def dummy_prepare_file_argument(space, fileobj): - # call load_cffi1_module() too, from a random place like here - cffi1_module.load_cffi1_module(space, "foo", "foo", 42) + # call pypy_init_embedded_cffi_module() from a random place like here + # --- this calls load_cffi1_module(), too + embedding.pypy_init_embedded_cffi_module( + rffi.cast(rffi.INT, embedding.EMBED_VERSION_MIN), + 42) return lltype.nullptr(rffi.CCHARP.TO) old = ctypeptr.prepare_file_argument try: diff --git a/pypy/module/_socket/test/test_sock_app.py b/pypy/module/_socket/test/test_sock_app.py --- a/pypy/module/_socket/test/test_sock_app.py +++ b/pypy/module/_socket/test/test_sock_app.py @@ -457,7 +457,7 @@ sizes = {socket.htonl: 32, socket.ntohl: 32, socket.htons: 16, socket.ntohs: 16} for func, size in sizes.items(): - mask = (1<\n" + "#include \n" "#include \n") code = (prologue + struct_declaration_code + @@ -962,7 +966,8 @@ "NOT_RPYTHON" # implement function callbacks and generate function decls functions = [] - pypy_decls = [] + decls = {} + pypy_decls = decls['pypy_decl.h'] = [] pypy_decls.append("#ifndef _PYPY_PYPY_DECL_H\n") pypy_decls.append("#define _PYPY_PYPY_DECL_H\n") pypy_decls.append("#ifndef PYPY_STANDALONE\n") @@ -975,17 +980,23 @@ for decl in FORWARD_DECLS: pypy_decls.append("%s;" % (decl,)) - for name, func in sorted(FUNCTIONS.iteritems()): - restype, args = c_function_signature(db, func) From pypy.commits at gmail.com Sat Feb 13 02:48:20 2016 From: pypy.commits at gmail.com (fijal) Date: Fri, 12 Feb 2016 23:48:20 -0800 (PST) Subject: [pypy-commit] pypy default: Enable windows support for 
vmprof Message-ID: <56bedfc4.41df1c0a.d1f8b.ffffec29@mx.google.com> Author: fijal Branch: Changeset: r82204:9dbad5cf71a6 Date: 2016-02-13 08:47 +0100 http://bitbucket.org/pypy/pypy/changeset/9dbad5cf71a6/ Log: Enable windows support for vmprof diff --git a/pypy/config/pypyoption.py b/pypy/config/pypyoption.py --- a/pypy/config/pypyoption.py +++ b/pypy/config/pypyoption.py @@ -36,13 +36,13 @@ "cStringIO", "thread", "itertools", "pyexpat", "_ssl", "cpyext", "array", "binascii", "_multiprocessing", '_warnings', "_collections", "_multibytecodec", "micronumpy", "_continuation", "_cffi_backend", - "_csv", "cppyy", "_pypyjson" + "_csv", "cppyy", "_pypyjson", "_vmprof", ]) -if ((sys.platform.startswith('linux') or sys.platform == 'darwin') - and os.uname()[4] == 'x86_64' and sys.maxint > 2**32): +#if ((sys.platform.startswith('linux') or sys.platform == 'darwin') +# and os.uname()[4] == 'x86_64' and sys.maxint > 2**32): # it's not enough that we get x86_64 - working_modules.add('_vmprof') +# working_modules.add('_vmprof') translation_modules = default_modules.copy() translation_modules.update([ diff --git a/rpython/jit/backend/llsupport/test/zrpy_vmprof_test.py b/rpython/jit/backend/llsupport/test/zrpy_vmprof_test.py --- a/rpython/jit/backend/llsupport/test/zrpy_vmprof_test.py +++ b/rpython/jit/backend/llsupport/test/zrpy_vmprof_test.py @@ -3,16 +3,13 @@ from rpython.jit.backend.test.support import CCompiledMixin from rpython.rlib.jit import JitDriver from rpython.tool.udir import udir +from rpython.rlib import rthread from rpython.translator.translator import TranslationContext from rpython.jit.backend.detect_cpu import getcpuclass class CompiledVmprofTest(CCompiledMixin): CPUClass = getcpuclass() - def setup(self): - if self.CPUClass.backend_name != 'x86_64': - py.test.skip("vmprof only supports x86-64 CPUs at the moment") - def _get_TranslationContext(self): t = TranslationContext() t.config.translation.gc = 'incminimark' @@ -62,6 +59,7 @@ tmpfilename = 
str(udir.join('test_rvmprof')) def f(num): + rthread.get_ident() # register TLOFS_thread_ident code = MyCode("py:x:foo:3") rvmprof.register_code(code, get_name) fd = os.open(tmpfilename, os.O_WRONLY | os.O_CREAT, 0666) diff --git a/rpython/rlib/rvmprof/cintf.py b/rpython/rlib/rvmprof/cintf.py --- a/rpython/rlib/rvmprof/cintf.py +++ b/rpython/rlib/rvmprof/cintf.py @@ -30,11 +30,11 @@ def setup(): + compile_extra = ['-DRPYTHON_LL2CTYPES'] platform.verify_eci(ExternalCompilationInfo( - compile_extra=['-DRPYTHON_LL2CTYPES'], + compile_extra=compile_extra, **eci_kwds)) - eci = global_eci vmprof_init = rffi.llexternal("vmprof_init", [rffi.INT, rffi.DOUBLE, rffi.CCHARP], diff --git a/rpython/rlib/rvmprof/src/rvmprof.c b/rpython/rlib/rvmprof/src/rvmprof.c --- a/rpython/rlib/rvmprof/src/rvmprof.c +++ b/rpython/rlib/rvmprof/src/rvmprof.c @@ -1,23 +1,21 @@ #define _GNU_SOURCE 1 - #ifdef RPYTHON_LL2CTYPES /* only for testing: ll2ctypes sets RPY_EXTERN from the command-line */ -# ifndef RPY_EXTERN -# define RPY_EXTERN RPY_EXPORTED -# endif -# define RPY_EXPORTED extern __attribute__((visibility("default"))) -# define VMPROF_ADDR_OF_TRAMPOLINE(addr) 0 +#ifndef RPY_EXTERN +#define RPY_EXTERN RPY_EXPORTED +#endif +#ifdef _WIN32 +#define RPY_EXPORTED __declspec(dllexport) +#else +#define RPY_EXPORTED extern __attribute__((visibility("default"))) +#endif #else - # include "common_header.h" # include "structdef.h" # include "src/threadlocal.h" # include "rvmprof.h" -/*# ifndef VMPROF_ADDR_OF_TRAMPOLINE -# error "RPython program using rvmprof, but not calling vmprof_execute_code()" -# endif*/ #endif diff --git a/rpython/rlib/rvmprof/src/vmprof_common.h b/rpython/rlib/rvmprof/src/vmprof_common.h --- a/rpython/rlib/rvmprof/src/vmprof_common.h +++ b/rpython/rlib/rvmprof/src/vmprof_common.h @@ -7,9 +7,6 @@ static long profile_interval_usec = 0; static int opened_profile(char *interp_name); -#define MAX_STACK_DEPTH \ - ((SINGLE_BUF_SIZE - sizeof(struct prof_stacktrace_s)) / sizeof(void *)) 
- #define MARKER_STACKTRACE '\x01' #define MARKER_VIRTUAL_IP '\x02' #define MARKER_TRAILER '\x03' @@ -20,6 +17,9 @@ #define VERSION_THREAD_ID '\x01' #define VERSION_TAG '\x02' +#define MAX_STACK_DEPTH \ + ((SINGLE_BUF_SIZE - sizeof(struct prof_stacktrace_s)) / sizeof(void *)) + typedef struct prof_stacktrace_s { char padding[sizeof(long) - 1]; char marker; @@ -71,6 +71,43 @@ return _write_all((char*)&header, 5 * sizeof(long) + 4 + namelen); } +/* ************************************************************* + * functions to dump the stack trace + * ************************************************************* + */ + + +static int get_stack_trace(vmprof_stack_t* stack, intptr_t *result, int max_depth, intptr_t pc) +{ + int n = 0; + intptr_t addr = 0; + int bottom_jitted = 0; + // check if the pc is in JIT +#ifdef PYPY_JIT_CODEMAP + if (pypy_find_codemap_at_addr((intptr_t)pc, &addr)) { + // the bottom part is jitted, means we can fill up the first part + // from the JIT + n = vmprof_write_header_for_jit_addr(result, n, pc, max_depth); + stack = stack->next; // skip the first item as it contains garbage + } +#endif + while (n < max_depth - 1 && stack) { + if (stack->kind == VMPROF_CODE_TAG) { + result[n] = stack->kind; + result[n + 1] = stack->value; + n += 2; + } +#ifdef PYPY_JIT_CODEMAP + else if (stack->kind == VMPROF_JITTED_TAG) { + pc = ((intptr_t*)(stack->value - sizeof(intptr_t)))[0]; + n = vmprof_write_header_for_jit_addr(result, n, pc, max_depth); + } +#endif + stack = stack->next; + } + return n; +} + #ifndef RPYTHON_LL2CTYPES static vmprof_stack_t *get_vmprof_stack(void) { diff --git a/rpython/rlib/rvmprof/src/vmprof_main.h b/rpython/rlib/rvmprof/src/vmprof_main.h --- a/rpython/rlib/rvmprof/src/vmprof_main.h +++ b/rpython/rlib/rvmprof/src/vmprof_main.h @@ -35,6 +35,7 @@ #include "vmprof_stack.h" #include "vmprof_getpc.h" #include "vmprof_mt.h" +#include "vmprof_get_custom_offset.h" #include "vmprof_common.h" 
/************************************************************/ @@ -78,46 +79,6 @@ static char atfork_hook_installed = 0; -#include "vmprof_get_custom_offset.h" - -/* ************************************************************* - * functions to dump the stack trace - * ************************************************************* - */ - - -static int get_stack_trace(intptr_t *result, int max_depth, intptr_t pc, ucontext_t *ucontext) -{ - vmprof_stack_t* stack = get_vmprof_stack(); - int n = 0; - intptr_t addr = 0; - int bottom_jitted = 0; - // check if the pc is in JIT -#ifdef PYPY_JIT_CODEMAP - if (pypy_find_codemap_at_addr((intptr_t)pc, &addr)) { - // the bottom part is jitted, means we can fill up the first part - // from the JIT - n = vmprof_write_header_for_jit_addr(result, n, pc, max_depth); - stack = stack->next; // skip the first item as it contains garbage - } -#endif - while (n < max_depth - 1 && stack) { - if (stack->kind == VMPROF_CODE_TAG) { - result[n] = stack->kind; - result[n + 1] = stack->value; - n += 2; - } -#ifdef PYPY_JIT_CODEMAP - else if (stack->kind == VMPROF_JITTED_TAG) { - pc = ((intptr_t*)(stack->value - sizeof(intptr_t)))[0]; - n = vmprof_write_header_for_jit_addr(result, n, pc, max_depth); - } -#endif - stack = stack->next; - } - return n; -} - static intptr_t get_current_thread_id(void) { /* xxx This function is a hack on two fronts: @@ -194,8 +155,8 @@ struct prof_stacktrace_s *st = (struct prof_stacktrace_s *)p->data; st->marker = MARKER_STACKTRACE; st->count = 1; - depth = get_stack_trace(st->stack, - MAX_STACK_DEPTH-2, GetPC((ucontext_t*)ucontext), ucontext); + depth = get_stack_trace(get_vmprof_stack(), st->stack, + MAX_STACK_DEPTH-2, GetPC((ucontext_t*)ucontext)); st->depth = depth; st->stack[depth++] = get_current_thread_id(); p->data_offset = offsetof(struct prof_stacktrace_s, marker); diff --git a/rpython/rlib/rvmprof/src/vmprof_main_win32.h b/rpython/rlib/rvmprof/src/vmprof_main_win32.h --- 
a/rpython/rlib/rvmprof/src/vmprof_main_win32.h +++ b/rpython/rlib/rvmprof/src/vmprof_main_win32.h @@ -10,13 +10,30 @@ return 0; } +#if defined(_MSC_VER) +#include +typedef SSIZE_T ssize_t; +#endif + +#include +#include +#include +#include +#include +#include +#include +#include +#include #include "vmprof_stack.h" +#include "vmprof_get_custom_offset.h" #include "vmprof_common.h" #include // This file has been inspired (but not copied from since the LICENSE // would not allow it) from verysleepy profiler +#define SINGLE_BUF_SIZE 8192 + volatile int thread_started = 0; volatile int enabled = 0; @@ -55,52 +72,75 @@ return 0; } -int vmprof_snapshot_thread(DWORD thread_id, PyThreadState *tstate, prof_stacktrace_s *stack) +int vmprof_snapshot_thread(struct pypy_threadlocal_s *p, prof_stacktrace_s *stack) { - HRESULT result; - HANDLE hThread = OpenThread(THREAD_ALL_ACCESS, FALSE, thread_id); - int depth; + void *addr; + vmprof_stack_t *cur; + long tid; + HANDLE hThread; + long depth; + DWORD result; + CONTEXT ctx; + +#ifdef RPYTHON_LL2CTYPES + return 0; // not much we can do +#else +#ifndef RPY_TLOFS_thread_ident + return 0; // we can't freeze threads, unsafe +#else + hThread = OpenThread(THREAD_ALL_ACCESS, FALSE, p->thread_ident); if (!hThread) { return -1; } result = SuspendThread(hThread); if(result == 0xffffffff) return -1; // possible, e.g. 
attached debugger or thread alread suspended - // find the correct thread - depth = read_trace_from_cpy_frame(tstate->frame, stack->stack, - MAX_STACK_DEPTH); + ctx.ContextFlags = CONTEXT_FULL; + if (!GetThreadContext(hThread, &ctx)) + return -1; + depth = get_stack_trace(p->vmprof_tl_stack, + stack->stack, MAX_STACK_DEPTH-2, ctx.Eip); stack->depth = depth; - stack->stack[depth++] = (void*)thread_id; + stack->stack[depth++] = (void*)p->thread_ident; stack->count = 1; stack->marker = MARKER_STACKTRACE; ResumeThread(hThread); return depth; +#endif +#endif } long __stdcall vmprof_mainloop(void *arg) { +#ifndef RPYTHON_LL2CTYPES + struct pypy_threadlocal_s *p; prof_stacktrace_s *stack = (prof_stacktrace_s*)malloc(SINGLE_BUF_SIZE); - HANDLE hThreadSnap = INVALID_HANDLE_VALUE; int depth; - PyThreadState *tstate; while (1) { - Sleep(profile_interval_usec * 1000); + //Sleep(profile_interval_usec * 1000); + Sleep(10); if (!enabled) { continue; } - tstate = PyInterpreterState_Head()->tstate_head; - while (tstate) { - depth = vmprof_snapshot_thread(tstate->thread_id, tstate, stack); - if (depth > 0) { - _write_all((char*)stack + offsetof(prof_stacktrace_s, marker), - depth * sizeof(void *) + - sizeof(struct prof_stacktrace_s) - - offsetof(struct prof_stacktrace_s, marker)); + _RPython_ThreadLocals_Acquire(); + p = _RPython_ThreadLocals_Head(); // the first one is one behind head + p = _RPython_ThreadLocals_Enum(p); + while (p) { + if (p->ready == 42) { + depth = vmprof_snapshot_thread(p, stack); + if (depth > 0) { + _write_all((char*)stack + offsetof(prof_stacktrace_s, marker), + depth * sizeof(void *) + + sizeof(struct prof_stacktrace_s) - + offsetof(struct prof_stacktrace_s, marker)); + } } - tstate = tstate->next; + p = _RPython_ThreadLocals_Enum(p); } + _RPython_ThreadLocals_Release(); } +#endif } RPY_EXTERN diff --git a/rpython/rlib/rvmprof/src/vmprof_stack.h b/rpython/rlib/rvmprof/src/vmprof_stack.h --- a/rpython/rlib/rvmprof/src/vmprof_stack.h +++ 
b/rpython/rlib/rvmprof/src/vmprof_stack.h @@ -1,7 +1,11 @@ #ifndef _VMPROF_STACK_H_ #define _VMPROF_STACK_H_ +#ifdef _WIN32 +#define intptr_t long // XXX windows VC++ 2008 lacks stdint.h +#else #include +#endif #define VMPROF_CODE_TAG 1 /* <- also in cintf.py */ #define VMPROF_BLACKHOLE_TAG 2 diff --git a/rpython/rlib/rvmprof/test/test_ztranslation.py b/rpython/rlib/rvmprof/test/test_ztranslation.py --- a/rpython/rlib/rvmprof/test/test_ztranslation.py +++ b/rpython/rlib/rvmprof/test/test_ztranslation.py @@ -3,11 +3,10 @@ sys.path += ['../../../..'] # for subprocess in test_interpreted import py from rpython.tool.udir import udir -from rpython.rlib import rvmprof +from rpython.rlib import rvmprof, rthread from rpython.translator.c.test.test_genc import compile from rpython.rlib.nonconst import NonConstant - class MyCode: def __init__(self, count): self.count = count @@ -39,6 +38,7 @@ PROF_FILE = str(udir.join('test_ztranslation.prof')) def main(argv=[]): + rthread.get_ident() # force TLOFS_thread_ident if NonConstant(False): # Hack to give os.open() the correct annotation os.open('foo', 1, 1) diff --git a/rpython/translator/c/src/threadlocal.c b/rpython/translator/c/src/threadlocal.c --- a/rpython/translator/c/src/threadlocal.c +++ b/rpython/translator/c/src/threadlocal.c @@ -85,6 +85,11 @@ return prev->next; } +struct pypy_threadlocal_s *_RPython_ThreadLocals_Head(void) +{ + return &linkedlist_head; +} + static void _RPy_ThreadLocals_Init(void *p) { struct pypy_threadlocal_s *tls = (struct pypy_threadlocal_s *)p; diff --git a/rpython/translator/c/src/threadlocal.h b/rpython/translator/c/src/threadlocal.h --- a/rpython/translator/c/src/threadlocal.h +++ b/rpython/translator/c/src/threadlocal.h @@ -27,6 +27,9 @@ RPY_EXTERN struct pypy_threadlocal_s * _RPython_ThreadLocals_Enum(struct pypy_threadlocal_s *prev); +/* will return the head of the list */ +RPY_EXTERN struct pypy_threadlocal_s *_RPython_ThreadLocals_Head(); + #define OP_THREADLOCALREF_ACQUIRE(r) 
_RPython_ThreadLocals_Acquire() #define OP_THREADLOCALREF_RELEASE(r) _RPython_ThreadLocals_Release() #define OP_THREADLOCALREF_ENUM(p, r) r = _RPython_ThreadLocals_Enum(p) From pypy.commits at gmail.com Sat Feb 13 02:48:18 2016 From: pypy.commits at gmail.com (fijal) Date: Fri, 12 Feb 2016 23:48:18 -0800 (PST) Subject: [pypy-commit] pypy windows-vmprof-support: close to be merged branch Message-ID: <56bedfc2.e853c20a.82641.1bf8@mx.google.com> Author: fijal Branch: windows-vmprof-support Changeset: r82203:ca16af5d2708 Date: 2016-02-13 08:47 +0100 http://bitbucket.org/pypy/pypy/changeset/ca16af5d2708/ Log: close to be merged branch From pypy.commits at gmail.com Sat Feb 13 03:55:14 2016 From: pypy.commits at gmail.com (arigo) Date: Sat, 13 Feb 2016 00:55:14 -0800 (PST) Subject: [pypy-commit] cffi release-1.5: hg merge default Message-ID: <56beef72.cb371c0a.4fc4.01ba@mx.google.com> Author: Armin Rigo Branch: release-1.5 Changeset: r2627:1a2d841f7896 Date: 2016-02-13 09:54 +0100 http://bitbucket.org/cffi/cffi/changeset/1a2d841f7896/ Log: hg merge default diff --git a/MANIFEST.in b/MANIFEST.in --- a/MANIFEST.in +++ b/MANIFEST.in @@ -1,6 +1,6 @@ recursive-include cffi *.py *.h recursive-include c *.c *.h *.asm *.py win64.obj -recursive-include testing *.py +recursive-include testing *.py *.c *.h recursive-include doc *.py *.rst Makefile *.bat -recursive-include demo py.cleanup *.py manual.c +recursive-include demo py.cleanup *.py embedding_test.c manual.c include AUTHORS LICENSE setup.py setup_base.py diff --git a/c/_cffi_backend.c b/c/_cffi_backend.c --- a/c/_cffi_backend.c +++ b/c/_cffi_backend.c @@ -2,7 +2,7 @@ #include #include "structmember.h" -#define CFFI_VERSION "1.5.0" +#define CFFI_VERSION "1.5.1" #ifdef MS_WIN32 #include diff --git a/c/call_python.c b/c/call_python.c --- a/c/call_python.c +++ b/c/call_python.c @@ -115,6 +115,7 @@ static int _update_cache_to_call_python(struct _cffi_externpy_s *externpy) { PyObject *interpstate_dict, *interpstate_key, 
*infotuple, *old1, *new1; + PyObject *old2; interpstate_dict = _get_interpstate_dict(); if (interpstate_dict == NULL) @@ -127,14 +128,17 @@ infotuple = PyDict_GetItem(interpstate_dict, interpstate_key); Py_DECREF(interpstate_key); if (infotuple == NULL) - return 1; /* no ffi.def_extern() from this subinterpreter */ + return 3; /* no ffi.def_extern() from this subinterpreter */ new1 = PyThreadState_GET()->interp->modules; Py_INCREF(new1); + Py_INCREF(infotuple); old1 = (PyObject *)externpy->reserved1; + old2 = (PyObject *)externpy->reserved2; externpy->reserved1 = new1; /* holds a reference */ - externpy->reserved2 = infotuple; /* doesn't hold a reference */ + externpy->reserved2 = infotuple; /* holds a reference (issue #246) */ Py_XDECREF(old1); + Py_XDECREF(old2); return 0; /* no error */ @@ -213,9 +217,11 @@ gil_release(state); } if (err) { - static const char *msg[2] = { + static const char *msg[] = { "no code was attached to it yet with @ffi.def_extern()", - "got internal exception (out of memory?)" }; + "got internal exception (out of memory / shutdown issue)", + "@ffi.def_extern() was not called in the current subinterpreter", + }; fprintf(stderr, "extern \"Python\": function %s() called, " "but %s. 
Returning 0.\n", externpy->name, msg[err-1]); memset(args, 0, externpy->size_of_result); diff --git a/c/test_c.py b/c/test_c.py --- a/c/test_c.py +++ b/c/test_c.py @@ -12,7 +12,7 @@ # ____________________________________________________________ import sys -assert __version__ == "1.5.0", ("This test_c.py file is for testing a version" +assert __version__ == "1.5.1", ("This test_c.py file is for testing a version" " of cffi that differs from the one that we" " get from 'import _cffi_backend'") if sys.version_info < (3,): diff --git a/cffi/__init__.py b/cffi/__init__.py --- a/cffi/__init__.py +++ b/cffi/__init__.py @@ -4,8 +4,8 @@ from .api import FFI, CDefError, FFIError from .ffiplatform import VerificationError, VerificationMissing -__version__ = "1.5.0" -__version_info__ = (1, 5, 0) +__version__ = "1.5.1" +__version_info__ = (1, 5, 1) # The verifier module file names are based on the CRC32 of a string that # contains the following version number. It may be older than __version__ diff --git a/cffi/_cffi_include.h b/cffi/_cffi_include.h --- a/cffi/_cffi_include.h +++ b/cffi/_cffi_include.h @@ -231,6 +231,12 @@ ((got_nonpos) == (expected <= 0) && \ (got) == (unsigned long long)expected) +#ifdef MS_WIN32 +# define _cffi_stdcall __stdcall +#else +# define _cffi_stdcall /* nothing */ +#endif + #ifdef __cplusplus } #endif diff --git a/cffi/_embedding.h b/cffi/_embedding.h --- a/cffi/_embedding.h +++ b/cffi/_embedding.h @@ -233,7 +233,7 @@ f = PySys_GetObject((char *)"stderr"); if (f != NULL && f != Py_None) { PyFile_WriteString("\nFrom: " _CFFI_MODULE_NAME - "\ncompiled with cffi version: 1.5.0" + "\ncompiled with cffi version: 1.5.1" "\n_cffi_backend module: ", f); modules = PyImport_GetModuleDict(); mod = PyDict_GetItemString(modules, "_cffi_backend"); diff --git a/cffi/api.py b/cffi/api.py --- a/cffi/api.py +++ b/cffi/api.py @@ -1,4 +1,4 @@ -import sys, types +import sys, sysconfig, types from .lock import allocate_lock try: @@ -550,17 +550,28 @@ lst.append(value) # 
if '__pypy__' in sys.builtin_module_names: - if hasattr(sys, 'prefix'): - import os - ensure('library_dirs', os.path.join(sys.prefix, 'bin')) - pythonlib = "pypy-c" + if sys.platform == "win32": + # we need 'libpypy-c.lib' (included with recent pypy distrib) + # in addition to the runtime 'libpypy-c.dll' + pythonlib = "libpypy-c" + if hasattr(sys, 'prefix'): + ensure('library_dirs', sys.prefix) + else: + # we need 'libpypy-c.{so,dylib}', which should be by + # default located in 'sys.prefix/bin' + pythonlib = "pypy-c" + if hasattr(sys, 'prefix'): + import os + ensure('library_dirs', os.path.join(sys.prefix, 'bin')) else: if sys.platform == "win32": template = "python%d%d" - if sys.flags.debug: - template = template + '_d' + if hasattr(sys, 'gettotalrefcount'): + template += '_d' else: template = "python%d.%d" + if sysconfig.get_config_var('DEBUG_EXT'): + template += sysconfig.get_config_var('DEBUG_EXT') pythonlib = (template % (sys.hexversion >> 24, (sys.hexversion >> 16) & 0xff)) if hasattr(sys, 'abiflags'): @@ -697,6 +708,10 @@ # self._embedding = pysource + def def_extern(self, *args, **kwds): + raise ValueError("ffi.def_extern() is only available on API-mode FFI " + "objects") + def _load_backend_lib(backend, name, flags): if name is None: diff --git a/cffi/cparser.py b/cffi/cparser.py --- a/cffi/cparser.py +++ b/cffi/cparser.py @@ -220,7 +220,7 @@ self._included_declarations = set() self._anonymous_counter = 0 self._structnode2type = weakref.WeakKeyDictionary() - self._options = None + self._options = {} self._int_constants = {} self._recomplete = [] self._uses_new_feature = None @@ -374,7 +374,7 @@ def _declare_function(self, tp, quals, decl): tp = self._get_type_pointer(tp, quals) - if self._options['dllexport']: + if self._options.get('dllexport'): tag = 'dllexport_python ' elif self._inside_extern_python: tag = 'extern_python ' @@ -450,7 +450,7 @@ prevobj, prevquals = self._declarations[name] if prevobj is obj and prevquals == quals: return - if not 
self._options['override']: + if not self._options.get('override'): raise api.FFIError( "multiple declarations of %s (for interactive usage, " "try cdef(xx, override=True))" % (name,)) @@ -729,7 +729,7 @@ if isinstance(tp, model.StructType) and tp.partial: raise NotImplementedError("%s: using both bitfields and '...;'" % (tp,)) - tp.packed = self._options['packed'] + tp.packed = self._options.get('packed') if tp.completed: # must be re-completed: it is not opaque any more tp.completed = 0 self._recomplete.append(tp) diff --git a/cffi/recompiler.py b/cffi/recompiler.py --- a/cffi/recompiler.py +++ b/cffi/recompiler.py @@ -1170,6 +1170,8 @@ repr_arguments = ', '.join(arguments) repr_arguments = repr_arguments or 'void' name_and_arguments = '%s(%s)' % (name, repr_arguments) + if tp.abi == "__stdcall": + name_and_arguments = '_cffi_stdcall ' + name_and_arguments # def may_need_128_bits(tp): return (isinstance(tp, model.PrimitiveType) and diff --git a/cffi/vengine_cpy.py b/cffi/vengine_cpy.py --- a/cffi/vengine_cpy.py +++ b/cffi/vengine_cpy.py @@ -1,3 +1,6 @@ +# +# DEPRECATED: implementation for ffi.verify() +# import sys, imp from . import model, ffiplatform diff --git a/cffi/vengine_gen.py b/cffi/vengine_gen.py --- a/cffi/vengine_gen.py +++ b/cffi/vengine_gen.py @@ -1,3 +1,6 @@ +# +# DEPRECATED: implementation for ffi.verify() +# import sys, os import types diff --git a/cffi/verifier.py b/cffi/verifier.py --- a/cffi/verifier.py +++ b/cffi/verifier.py @@ -1,3 +1,6 @@ +# +# DEPRECATED: implementation for ffi.verify() +# import sys, os, binascii, shutil, io from . import __version_verifier_modules__ from . import ffiplatform diff --git a/doc/source/conf.py b/doc/source/conf.py --- a/doc/source/conf.py +++ b/doc/source/conf.py @@ -47,7 +47,7 @@ # The short X.Y version. version = '1.5' # The full version, including alpha/beta/rc tags. -release = '1.5.0' +release = '1.5.1' # The language for content autogenerated by Sphinx. 
Refer to documentation # for a list of supported languages. diff --git a/doc/source/installation.rst b/doc/source/installation.rst --- a/doc/source/installation.rst +++ b/doc/source/installation.rst @@ -51,11 +51,11 @@ Download and Installation: -* http://pypi.python.org/packages/source/c/cffi/cffi-1.5.0.tar.gz +* http://pypi.python.org/packages/source/c/cffi/cffi-1.5.1.tar.gz - - MD5: dec8441e67880494ee881305059af656 + - MD5: ... - - SHA: fd21011ba2a3cab627001b52c69fd7274517e549 + - SHA: ... * Or grab the most current version from the `Bitbucket page`_: ``hg clone https://bitbucket.org/cffi/cffi`` diff --git a/doc/source/using.rst b/doc/source/using.rst --- a/doc/source/using.rst +++ b/doc/source/using.rst @@ -476,16 +476,20 @@ Python function object that is, at runtime, attached with ``@ffi.def_extern()``. -The ``@ffi.def_extern()`` decorator should be applied to a global -function, but *only once.* This is because each function from the cdef with -``extern "Python"`` turns into only one C function. To support some -corner cases, it is possible to redefine the attached Python function -by calling ``@ffi.def_extern()`` again---but this is not recommended! -Better write the single global Python function more flexibly in the -first place. Calling ``@ffi.def_extern()`` again changes the C logic -to call the new Python function; the old Python function is not -callable any more and the C function pointer you get from -``lib.my_function`` is always the same. +The ``@ffi.def_extern()`` decorator should be applied to **global +functions,** one for each ``extern "Python"`` function of the same +name. + +To support some corner cases, it is possible to redefine the attached +Python function by calling ``@ffi.def_extern()`` again for the same +name---but this is not recommended! Better attach a single global +Python function for this name, and write it more flexibly in the first +place. This is because each ``extern "Python"`` function turns into +only one C function. 
Calling ``@ffi.def_extern()`` again changes this +function's C logic to call the new Python function; the old Python +function is not callable any more. The C function pointer you get +from ``lib.my_function`` is always this C function's address, i.e. it +remains the same. Extern "Python" and ``void *`` arguments ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ diff --git a/doc/source/whatsnew.rst b/doc/source/whatsnew.rst --- a/doc/source/whatsnew.rst +++ b/doc/source/whatsnew.rst @@ -3,6 +3,18 @@ ====================== +v1.5.1 +====== + +* A few installation-time tweaks (thanks Stefano!) + +* Issue #245: Win32: ``__stdcall`` was never generated for + ``extern "Python"`` functions + +* Issue #246: trying to be more robust against CPython's fragile + interpreter shutdown logic + + v1.5.0 ====== diff --git a/setup.py b/setup.py --- a/setup.py +++ b/setup.py @@ -144,7 +144,7 @@ `Mailing list `_ """, - version='1.5.0', + version='1.5.1', packages=['cffi'] if cpython else [], package_data={'cffi': ['_cffi_include.h', 'parse_c_type.h', '_embedding.h']} diff --git a/testing/cffi0/backend_tests.py b/testing/cffi0/backend_tests.py --- a/testing/cffi0/backend_tests.py +++ b/testing/cffi0/backend_tests.py @@ -1846,3 +1846,8 @@ thread.start_new_thread(f, ()) time.sleep(1.5) assert seen == ['init!', 'init done'] + 6 * [7] + + def test_sizeof_struct_directly(self): + # only works with the Python FFI instances + ffi = FFI(backend=self.Backend()) + assert ffi.sizeof("struct{int a;}") == ffi.sizeof("int") diff --git a/testing/cffi0/test_ffi_backend.py b/testing/cffi0/test_ffi_backend.py --- a/testing/cffi0/test_ffi_backend.py +++ b/testing/cffi0/test_ffi_backend.py @@ -419,3 +419,7 @@ ]: x = ffi.sizeof(name) assert 1 <= x <= 16 + + def test_ffi_def_extern(self): + ffi = FFI() + py.test.raises(ValueError, ffi.def_extern) diff --git a/testing/cffi1/test_recompiler.py b/testing/cffi1/test_recompiler.py --- a/testing/cffi1/test_recompiler.py +++ b/testing/cffi1/test_recompiler.py @@ -1713,3 
+1713,33 @@ # a case where 'onerror' is not callable py.test.raises(TypeError, ffi.def_extern(name='bar', onerror=42), lambda x: x) + +def test_extern_python_stdcall(): + ffi = FFI() + ffi.cdef(""" + extern "Python" int __stdcall foo(int); + extern "Python" int WINAPI bar(int); + int (__stdcall * mycb1)(int); + int indirect_call(int); + """) + lib = verify(ffi, 'test_extern_python_stdcall', """ + #ifndef _MSC_VER + # define __stdcall + #endif + static int (__stdcall * mycb1)(int); + static int indirect_call(int x) { + return mycb1(x); + } + """) + # + @ffi.def_extern() + def foo(x): + return x + 42 + @ffi.def_extern() + def bar(x): + return x + 43 + assert lib.foo(100) == 142 + assert lib.bar(100) == 143 + lib.mycb1 = lib.foo + assert lib.mycb1(200) == 242 + assert lib.indirect_call(300) == 342 diff --git a/testing/cffi1/test_zdist.py b/testing/cffi1/test_zdist.py --- a/testing/cffi1/test_zdist.py +++ b/testing/cffi1/test_zdist.py @@ -48,7 +48,8 @@ import setuptools except ImportError: py.test.skip("setuptools not found") - self.run(['setup.py', 'egg_info'], cwd=self.rootdir) + if os.path.exists(os.path.join(self.rootdir, 'setup.py')): + self.run(['setup.py', 'egg_info'], cwd=self.rootdir) TestDist._setuptools_ready = True def check_produced_files(self, content, curdir=None): diff --git a/testing/embedding/test_basic.py b/testing/embedding/test_basic.py --- a/testing/embedding/test_basic.py +++ b/testing/embedding/test_basic.py @@ -4,10 +4,6 @@ from testing.udir import udir import cffi -if hasattr(sys, 'gettotalrefcount'): - py.test.skip("tried hard and failed to have these tests run " - "in a debug-mode python") - local_dir = os.path.dirname(os.path.abspath(__file__)) _link_error = '?' 
@@ -29,6 +25,14 @@ py.test.skip(str(_link_error)) +def prefix_pythonpath(): + cffi_base = os.path.dirname(os.path.dirname(local_dir)) + pythonpath = os.environ.get('PYTHONPATH', '').split(os.pathsep) + if cffi_base not in pythonpath: + pythonpath.insert(0, cffi_base) + return os.pathsep.join(pythonpath) + + class EmbeddingTests: _compiled_modules = {} @@ -69,8 +73,7 @@ # find a solution to that: we could hack sys.path inside the # script run here, but we can't hack it in the same way in # execute(). - env_extra = {'PYTHONPATH': - os.path.dirname(os.path.dirname(local_dir))} + env_extra = {'PYTHONPATH': prefix_pythonpath()} output = self._run([sys.executable, os.path.join(local_dir, filename)], env_extra=env_extra) match = re.compile(r"\bFILENAME: (.+)").search(output) @@ -114,14 +117,19 @@ def execute(self, name): path = self.get_path() - env_extra = {} - env_extra['PYTHONPATH'] = os.path.dirname(os.path.dirname(local_dir)) - libpath = os.environ.get('LD_LIBRARY_PATH') - if libpath: - libpath = path + ':' + libpath + env_extra = {'PYTHONPATH': prefix_pythonpath()} + if sys.platform == 'win32': + _path = os.environ.get('PATH') + # for libpypy-c.dll or Python27.dll + _path = os.path.split(sys.executable)[0] + ';' + _path + env_extra['PATH'] = _path else: - libpath = path - env_extra['LD_LIBRARY_PATH'] = libpath + libpath = os.environ.get('LD_LIBRARY_PATH') + if libpath: + libpath = path + ':' + libpath + else: + libpath = path + env_extra['LD_LIBRARY_PATH'] = libpath print('running %r in %r' % (name, path)) executable_name = name if sys.platform == 'win32': From pypy.commits at gmail.com Sat Feb 13 04:21:16 2016 From: pypy.commits at gmail.com (arigo) Date: Sat, 13 Feb 2016 01:21:16 -0800 (PST) Subject: [pypy-commit] cffi default: Pfff, debug=True goes in the way on Windows if you don't explicitly Message-ID: <56bef58c.2457c20a.460fe.ffff9904@mx.google.com> Author: Armin Rigo Branch: Changeset: r2628:3c3b6f19b5f6 Date: 2016-02-13 10:20 +0100 
http://bitbucket.org/cffi/cffi/changeset/3c3b6f19b5f6/ Log: Pfff, debug=True goes in the way on Windows if you don't explicitly have the debug version of the MS CRT installed first diff --git a/testing/embedding/test_basic.py b/testing/embedding/test_basic.py --- a/testing/embedding/test_basic.py +++ b/testing/embedding/test_basic.py @@ -100,6 +100,7 @@ c = distutils.ccompiler.new_compiler() print('compiling %s with %r' % (name, modules)) extra_preargs = [] + debug = True if sys.platform == 'win32': libfiles = [] for m in modules: @@ -108,9 +109,12 @@ libfiles.append('Release\\%s.lib' % m[:-4]) modules = libfiles extra_preargs.append('/MANIFEST') + debug = False # you need to install extra stuff + # for this to work elif threads: extra_preargs.append('-pthread') - objects = c.compile([filename], macros=sorted(defines.items()), debug=True) + objects = c.compile([filename], macros=sorted(defines.items()), + debug=debug) c.link_executable(objects + modules, name, extra_preargs=extra_preargs) finally: os.chdir(curdir) From pypy.commits at gmail.com Sat Feb 13 04:29:38 2016 From: pypy.commits at gmail.com (arigo) Date: Sat, 13 Feb 2016 01:29:38 -0800 (PST) Subject: [pypy-commit] cffi default: Mention it's work in progress Message-ID: <56bef782.a3abc20a.90efe.ffff94cf@mx.google.com> Author: Armin Rigo Branch: Changeset: r2629:9fc77fff3e63 Date: 2016-02-13 10:29 +0100 http://bitbucket.org/cffi/cffi/changeset/9fc77fff3e63/ Log: Mention it's work in progress diff --git a/cffi/api.py b/cffi/api.py --- a/cffi/api.py +++ b/cffi/api.py @@ -551,8 +551,10 @@ # if '__pypy__' in sys.builtin_module_names: if sys.platform == "win32": - # we need 'libpypy-c.lib' (included with recent pypy distrib) - # in addition to the runtime 'libpypy-c.dll' + # we need 'libpypy-c.lib'. Right now, distributions of + # pypy contain it as 'include/python27.lib'. You need + # to manually copy it back to 'libpypy-c.lib'. XXX Will + # be fixed in the next pypy release. 
pythonlib = "libpypy-c" if hasattr(sys, 'prefix'): ensure('library_dirs', sys.prefix) From pypy.commits at gmail.com Sat Feb 13 04:29:39 2016 From: pypy.commits at gmail.com (arigo) Date: Sat, 13 Feb 2016 01:29:39 -0800 (PST) Subject: [pypy-commit] cffi release-1.5: hg merge default Message-ID: <56bef783.8e811c0a.4f6a9.0834@mx.google.com> Author: Armin Rigo Branch: release-1.5 Changeset: r2630:e877261953bc Date: 2016-02-13 10:29 +0100 http://bitbucket.org/cffi/cffi/changeset/e877261953bc/ Log: hg merge default diff --git a/cffi/api.py b/cffi/api.py --- a/cffi/api.py +++ b/cffi/api.py @@ -551,8 +551,10 @@ # if '__pypy__' in sys.builtin_module_names: if sys.platform == "win32": - # we need 'libpypy-c.lib' (included with recent pypy distrib) - # in addition to the runtime 'libpypy-c.dll' + # we need 'libpypy-c.lib'. Right now, distributions of + # pypy contain it as 'include/python27.lib'. You need + # to manually copy it back to 'libpypy-c.lib'. XXX Will + # be fixed in the next pypy release. 
pythonlib = "libpypy-c" if hasattr(sys, 'prefix'): ensure('library_dirs', sys.prefix) diff --git a/testing/embedding/test_basic.py b/testing/embedding/test_basic.py --- a/testing/embedding/test_basic.py +++ b/testing/embedding/test_basic.py @@ -100,6 +100,7 @@ c = distutils.ccompiler.new_compiler() print('compiling %s with %r' % (name, modules)) extra_preargs = [] + debug = True if sys.platform == 'win32': libfiles = [] for m in modules: @@ -108,9 +109,12 @@ libfiles.append('Release\\%s.lib' % m[:-4]) modules = libfiles extra_preargs.append('/MANIFEST') + debug = False # you need to install extra stuff + # for this to work elif threads: extra_preargs.append('-pthread') - objects = c.compile([filename], macros=sorted(defines.items()), debug=True) + objects = c.compile([filename], macros=sorted(defines.items()), + debug=debug) c.link_executable(objects + modules, name, extra_preargs=extra_preargs) finally: os.chdir(curdir) From pypy.commits at gmail.com Sat Feb 13 04:32:31 2016 From: pypy.commits at gmail.com (arigo) Date: Sat, 13 Feb 2016 01:32:31 -0800 (PST) Subject: [pypy-commit] cffi release-1.5: md5/sha1 Message-ID: <56bef82f.42711c0a.9b8b2.0d55@mx.google.com> Author: Armin Rigo Branch: release-1.5 Changeset: r2631:2d6f6fc88726 Date: 2016-02-13 10:31 +0100 http://bitbucket.org/cffi/cffi/changeset/2d6f6fc88726/ Log: md5/sha1 diff --git a/doc/source/installation.rst b/doc/source/installation.rst --- a/doc/source/installation.rst +++ b/doc/source/installation.rst @@ -53,9 +53,9 @@ * http://pypi.python.org/packages/source/c/cffi/cffi-1.5.1.tar.gz - - MD5: ... + - MD5: ac9a3c7724bd7a4f3c0d4f6ffef2c6c7 - - SHA: ... 
+ - SHA: 933f7ea6f48ebbaf4794a1a807ae6f5ec232c83b * Or grab the most current version from the `Bitbucket page`_: ``hg clone https://bitbucket.org/cffi/cffi`` From pypy.commits at gmail.com Sat Feb 13 04:32:33 2016 From: pypy.commits at gmail.com (arigo) Date: Sat, 13 Feb 2016 01:32:33 -0800 (PST) Subject: [pypy-commit] cffi default: hg merge release-1.5 Message-ID: <56bef831.c13fc20a.8e7a6.ffff9692@mx.google.com> Author: Armin Rigo Branch: Changeset: r2632:3ea5159b897c Date: 2016-02-13 10:31 +0100 http://bitbucket.org/cffi/cffi/changeset/3ea5159b897c/ Log: hg merge release-1.5 diff --git a/doc/source/installation.rst b/doc/source/installation.rst --- a/doc/source/installation.rst +++ b/doc/source/installation.rst @@ -53,9 +53,9 @@ * http://pypi.python.org/packages/source/c/cffi/cffi-1.5.1.tar.gz - - MD5: ... + - MD5: ac9a3c7724bd7a4f3c0d4f6ffef2c6c7 - - SHA: ... + - SHA: 933f7ea6f48ebbaf4794a1a807ae6f5ec232c83b * Or grab the most current version from the `Bitbucket page`_: ``hg clone https://bitbucket.org/cffi/cffi`` From pypy.commits at gmail.com Sat Feb 13 07:31:35 2016 From: pypy.commits at gmail.com (mjacob) Date: Sat, 13 Feb 2016 04:31:35 -0800 (PST) Subject: [pypy-commit] pypy py3.3: hg backout fce240f22548 Message-ID: <56bf2227.c3e01c0a.6c77a.4218@mx.google.com> Author: Manuel Jacob Branch: py3.3 Changeset: r82205:fc48d0bd0c1d Date: 2016-02-13 13:30 +0100 http://bitbucket.org/pypy/pypy/changeset/fc48d0bd0c1d/ Log: hg backout fce240f22548 diff --git a/pypy/interpreter/mixedmodule.py b/pypy/interpreter/mixedmodule.py --- a/pypy/interpreter/mixedmodule.py +++ b/pypy/interpreter/mixedmodule.py @@ -66,10 +66,7 @@ # overwrite the value of a key already in w_initialdict. 
(So as to avoid # overriding the builtin value with a user-provided value) if not self.space.is_none(self.w_initialdict): - w_dict = self.w_dict - from pypy.objspace.std.dictmultiobject import W_DictMultiObject - assert isinstance(w_dict, W_DictMultiObject) - new_items = w_dict.iteritems() + new_items = self.w_dict.iteritems() while True: w_key, w_value = new_items.next_item() if w_key is None: From pypy.commits at gmail.com Sat Feb 13 07:33:25 2016 From: pypy.commits at gmail.com (mjacob) Date: Sat, 13 Feb 2016 04:33:25 -0800 (PST) Subject: [pypy-commit] pypy py3.3: Merged in marky1991/pypy_new/py3.3 (pull request #398) Message-ID: <56bf2295.8abb1c0a.6e7d9.4141@mx.google.com> Author: Manuel Jacob Branch: py3.3 Changeset: r82207:d31526af1a0e Date: 2016-02-13 13:32 +0100 http://bitbucket.org/pypy/pypy/changeset/d31526af1a0e/ Log: Merged in marky1991/pypy_new/py3.3 (pull request #398) Fix translation for 3.3. diff --git a/pypy/interpreter/mixedmodule.py b/pypy/interpreter/mixedmodule.py --- a/pypy/interpreter/mixedmodule.py +++ b/pypy/interpreter/mixedmodule.py @@ -66,11 +66,9 @@ # overwrite the value of a key already in w_initialdict. (So as to avoid # overriding the builtin value with a user-provided value) if not self.space.is_none(self.w_initialdict): - new_items = self.w_dict.iteritems() - while True: - w_key, w_value = new_items.next_item() - if w_key is None: - break + w_items = self.space.iteriterable(self.space.call_method(self.w_dict,'items')) + for w_item in w_items: + w_key, w_value = self.space.fixedview(w_item, expected_length=2) if not self.space.is_true(self.space.contains(self.w_initialdict, w_key)): self.space.setitem(self.w_initialdict, w_key, w_value) else: From pypy.commits at gmail.com Sat Feb 13 07:33:38 2016 From: pypy.commits at gmail.com (marky1991) Date: Sat, 13 Feb 2016 04:33:38 -0800 (PST) Subject: [pypy-commit] pypy py3.3: Fix translation. 
Message-ID: <56bf22a2.c96cc20a.7f53b.ffffcfd9@mx.google.com> Author: Mark Young Branch: py3.3 Changeset: r82206:00c150ed026d Date: 2016-02-13 00:20 -0500 http://bitbucket.org/pypy/pypy/changeset/00c150ed026d/ Log: Fix translation. diff --git a/pypy/interpreter/mixedmodule.py b/pypy/interpreter/mixedmodule.py --- a/pypy/interpreter/mixedmodule.py +++ b/pypy/interpreter/mixedmodule.py @@ -66,11 +66,9 @@ # overwrite the value of a key already in w_initialdict. (So as to avoid # overriding the builtin value with a user-provided value) if not self.space.is_none(self.w_initialdict): - new_items = self.w_dict.iteritems() - while True: - w_key, w_value = new_items.next_item() - if w_key is None: - break + w_items = self.space.iteriterable(self.space.call_method(self.w_dict,'items')) + for w_item in w_items: + w_key, w_value = self.space.fixedview(w_item, expected_length=2) if not self.space.is_true(self.space.contains(self.w_initialdict, w_key)): self.space.setitem(self.w_initialdict, w_key, w_value) else: From pypy.commits at gmail.com Sat Feb 13 09:14:06 2016 From: pypy.commits at gmail.com (mjacob) Date: Sat, 13 Feb 2016 06:14:06 -0800 (PST) Subject: [pypy-commit] pypy default: Make W_InterpIterable a interp-level-only object instead of letting it implement W_Root. Message-ID: <56bf3a2e.c3e01c0a.6c77a.619a@mx.google.com> Author: Manuel Jacob Branch: Changeset: r82208:436b9109cbd7 Date: 2016-02-13 14:30 +0100 http://bitbucket.org/pypy/pypy/changeset/436b9109cbd7/ Log: Make W_InterpIterable a interp-level-only object instead of letting it implement W_Root. 
diff --git a/pypy/interpreter/baseobjspace.py b/pypy/interpreter/baseobjspace.py --- a/pypy/interpreter/baseobjspace.py +++ b/pypy/interpreter/baseobjspace.py @@ -306,7 +306,7 @@ return None -class W_InterpIterable(W_Root): +class InterpIterable(object): def __init__(self, space, w_iterable): self.w_iter = space.iter(w_iterable) self.space = space @@ -846,7 +846,7 @@ return lst_w[:] # make the resulting list resizable def iteriterable(self, w_iterable): - return W_InterpIterable(self, w_iterable) + return InterpIterable(self, w_iterable) def _unpackiterable_unknown_length(self, w_iterator, w_iterable): """Unpack an iterable of unknown length into an interp-level From pypy.commits at gmail.com Sat Feb 13 09:14:10 2016 From: pypy.commits at gmail.com (mjacob) Date: Sat, 13 Feb 2016 06:14:10 -0800 (PST) Subject: [pypy-commit] pypy default: Pass iterable instead of iterator to space.iteriterable() (both works). Message-ID: <56bf3a32.077bc20a.f4074.ffffecfe@mx.google.com> Author: Manuel Jacob Branch: Changeset: r82210:5c06f73f7e1f Date: 2016-02-13 14:43 +0100 http://bitbucket.org/pypy/pypy/changeset/5c06f73f7e1f/ Log: Pass iterable instead of iterator to space.iteriterable() (both works). 
diff --git a/pypy/objspace/std/dictmultiobject.py b/pypy/objspace/std/dictmultiobject.py --- a/pypy/objspace/std/dictmultiobject.py +++ b/pypy/objspace/std/dictmultiobject.py @@ -1419,8 +1419,7 @@ return space.len(self.w_dict) def _all_contained_in(space, w_dictview, w_other): - w_iter = space.iter(w_dictview) - for w_item in space.iteriterable(w_iter): + for w_item in space.iteriterable(w_dictview): if not space.contains_w(w_other, w_item): return space.w_False return space.w_True From pypy.commits at gmail.com Sat Feb 13 09:14:11 2016 From: pypy.commits at gmail.com (mjacob) Date: Sat, 13 Feb 2016 06:14:11 -0800 (PST) Subject: [pypy-commit] pypy py3k: hg merge default Message-ID: <56bf3a33.ca56c20a.75c8d.ffffeb77@mx.google.com> Author: Manuel Jacob Branch: py3k Changeset: r82211:837af91c362b Date: 2016-02-13 15:01 +0100 http://bitbucket.org/pypy/pypy/changeset/837af91c362b/ Log: hg merge default diff --git a/pypy/config/pypyoption.py b/pypy/config/pypyoption.py --- a/pypy/config/pypyoption.py +++ b/pypy/config/pypyoption.py @@ -37,13 +37,13 @@ "thread", "itertools", "pyexpat", "_ssl", "cpyext", "array", "binascii", "_multiprocessing", '_warnings', "_collections", "_multibytecodec", "_continuation", "_cffi_backend", - "_csv", "_pypyjson", "_posixsubprocess", # "cppyy", "micronumpy" + "_csv", "_pypyjson", "_vmprof", "_posixsubprocess", # "cppyy", "micronumpy" ]) -if ((sys.platform.startswith('linux') or sys.platform == 'darwin') - and os.uname()[4] == 'x86_64' and sys.maxint > 2**32): +#if ((sys.platform.startswith('linux') or sys.platform == 'darwin') +# and os.uname()[4] == 'x86_64' and sys.maxint > 2**32): # it's not enough that we get x86_64 - working_modules.add('_vmprof') +# working_modules.add('_vmprof') translation_modules = default_modules.copy() translation_modules.update([ diff --git a/pypy/interpreter/baseobjspace.py b/pypy/interpreter/baseobjspace.py --- a/pypy/interpreter/baseobjspace.py +++ b/pypy/interpreter/baseobjspace.py @@ -313,7 +313,7 @@ 
return None -class W_InterpIterable(W_Root): +class InterpIterable(object): def __init__(self, space, w_iterable): self.w_iter = space.iter(w_iterable) self.space = space @@ -763,9 +763,13 @@ return self.int_w(self.hash(w_obj)) def len_w(self, w_obj): - """shotcut for space.int_w(space.len(w_obj))""" + """shortcut for space.int_w(space.len(w_obj))""" return self.int_w(self.len(w_obj)) + def contains_w(self, w_container, w_item): + """shortcut for space.is_true(space.contains(w_container, w_item))""" + return self.is_true(self.contains(w_container, w_item)) + def setitem_str(self, w_obj, key, w_value): return self.setitem(w_obj, self.wrap(key), w_value) @@ -868,7 +872,7 @@ return lst_w[:] # make the resulting list resizable def iteriterable(self, w_iterable): - return W_InterpIterable(self, w_iterable) + return InterpIterable(self, w_iterable) def _unpackiterable_unknown_length(self, w_iterator, w_iterable): """Unpack an iterable of unknown length into an interp-level @@ -1229,7 +1233,7 @@ if not isinstance(statement, PyCode): raise TypeError('space.exec_(): expected a string, code or PyCode object') w_key = self.wrap('__builtins__') - if not self.is_true(self.contains(w_globals, w_key)): + if not self.contains_w(w_globals, w_key): self.setitem(w_globals, w_key, self.wrap(self.builtin)) return statement.exec_code(self, w_globals, w_locals) diff --git a/pypy/module/_demo/test/test_import.py b/pypy/module/_demo/test/test_import.py --- a/pypy/module/_demo/test/test_import.py +++ b/pypy/module/_demo/test/test_import.py @@ -12,8 +12,7 @@ w_modules = space.sys.get('modules') assert _demo.Module.demo_events == ['setup'] - assert not space.is_true(space.contains(w_modules, - space.wrap('_demo'))) + assert not space.contains_w(w_modules, space.wrap('_demo')) # first import w_import = space.builtin.get('__import__') diff --git a/pypy/module/_vmprof/interp_vmprof.py b/pypy/module/_vmprof/interp_vmprof.py --- a/pypy/module/_vmprof/interp_vmprof.py +++ 
b/pypy/module/_vmprof/interp_vmprof.py @@ -60,7 +60,7 @@ Must be smaller than 1.0 """ w_modules = space.sys.get('modules') - if space.is_true(space.contains(w_modules, space.wrap('_continuation'))): + if space.contains_w(w_modules, space.wrap('_continuation')): space.warn(space.wrap("Using _continuation/greenlet/stacklet together " "with vmprof will crash"), space.w_RuntimeWarning) diff --git a/pypy/module/cpyext/test/test_dictobject.py b/pypy/module/cpyext/test/test_dictobject.py --- a/pypy/module/cpyext/test/test_dictobject.py +++ b/pypy/module/cpyext/test/test_dictobject.py @@ -150,7 +150,7 @@ def test_dictproxy(self, space, api): w_dict = space.sys.get('modules') w_proxy = api.PyDictProxy_New(w_dict) - assert space.is_true(space.contains(w_proxy, space.wrap('sys'))) + assert space.contains_w(w_proxy, space.wrap('sys')) raises(OperationError, space.setitem, w_proxy, space.wrap('sys'), space.w_None) raises(OperationError, space.delitem, diff --git a/pypy/module/cpyext/test/test_import.py b/pypy/module/cpyext/test/test_import.py --- a/pypy/module/cpyext/test/test_import.py +++ b/pypy/module/cpyext/test/test_import.py @@ -21,7 +21,7 @@ def test_getmoduledict(self, space, api): testmod = "contextlib" w_pre_dict = api.PyImport_GetModuleDict() - assert not space.is_true(space.contains(w_pre_dict, space.wrap(testmod))) + assert not space.contains_w(w_pre_dict, space.wrap(testmod)) with rffi.scoped_str2charp(testmod) as modname: w_module = api.PyImport_ImportModule(modname) @@ -29,7 +29,7 @@ assert w_module w_dict = api.PyImport_GetModuleDict() - assert space.is_true(space.contains(w_dict, space.wrap(testmod))) + assert space.contains_w(w_dict, space.wrap(testmod)) def test_reload(self, space, api): stat = api.PyImport_Import(space.wrap("stat")) diff --git a/pypy/module/cpyext/test/test_object.py b/pypy/module/cpyext/test/test_object.py --- a/pypy/module/cpyext/test/test_object.py +++ b/pypy/module/cpyext/test/test_object.py @@ -179,7 +179,7 @@ def test_dir(self, space, 
api): w_dir = api.PyObject_Dir(space.sys) assert space.isinstance_w(w_dir, space.w_list) - assert space.is_true(space.contains(w_dir, space.wrap('modules'))) + assert space.contains_w(w_dir, space.wrap('modules')) class AppTestObject(AppTestCpythonExtensionBase): def setup_class(cls): diff --git a/pypy/objspace/std/dictmultiobject.py b/pypy/objspace/std/dictmultiobject.py --- a/pypy/objspace/std/dictmultiobject.py +++ b/pypy/objspace/std/dictmultiobject.py @@ -1339,9 +1339,8 @@ return space.len(self.w_dict) def _all_contained_in(space, w_dictview, w_other): - w_iter = space.iter(w_dictview) - for w_item in space.iteriterable(w_iter): - if not space.is_true(space.contains(w_other, w_item)): + for w_item in space.iteriterable(w_dictview): + if not space.contains_w(w_other, w_item): return space.w_False return space.w_True diff --git a/rpython/jit/backend/llsupport/test/zrpy_vmprof_test.py b/rpython/jit/backend/llsupport/test/zrpy_vmprof_test.py --- a/rpython/jit/backend/llsupport/test/zrpy_vmprof_test.py +++ b/rpython/jit/backend/llsupport/test/zrpy_vmprof_test.py @@ -3,16 +3,13 @@ from rpython.jit.backend.test.support import CCompiledMixin from rpython.rlib.jit import JitDriver from rpython.tool.udir import udir +from rpython.rlib import rthread from rpython.translator.translator import TranslationContext from rpython.jit.backend.detect_cpu import getcpuclass class CompiledVmprofTest(CCompiledMixin): CPUClass = getcpuclass() - def setup(self): - if self.CPUClass.backend_name != 'x86_64': - py.test.skip("vmprof only supports x86-64 CPUs at the moment") - def _get_TranslationContext(self): t = TranslationContext() t.config.translation.gc = 'incminimark' @@ -62,6 +59,7 @@ tmpfilename = str(udir.join('test_rvmprof')) def f(num): + rthread.get_ident() # register TLOFS_thread_ident code = MyCode("py:x:foo:3") rvmprof.register_code(code, get_name) fd = os.open(tmpfilename, os.O_WRONLY | os.O_CREAT, 0666) diff --git a/rpython/rlib/rvmprof/cintf.py 
b/rpython/rlib/rvmprof/cintf.py --- a/rpython/rlib/rvmprof/cintf.py +++ b/rpython/rlib/rvmprof/cintf.py @@ -30,11 +30,11 @@ def setup(): + compile_extra = ['-DRPYTHON_LL2CTYPES'] platform.verify_eci(ExternalCompilationInfo( - compile_extra=['-DRPYTHON_LL2CTYPES'], + compile_extra=compile_extra, **eci_kwds)) - eci = global_eci vmprof_init = rffi.llexternal("vmprof_init", [rffi.INT, rffi.DOUBLE, rffi.CCHARP], diff --git a/rpython/rlib/rvmprof/src/rvmprof.c b/rpython/rlib/rvmprof/src/rvmprof.c --- a/rpython/rlib/rvmprof/src/rvmprof.c +++ b/rpython/rlib/rvmprof/src/rvmprof.c @@ -1,23 +1,21 @@ #define _GNU_SOURCE 1 - #ifdef RPYTHON_LL2CTYPES /* only for testing: ll2ctypes sets RPY_EXTERN from the command-line */ -# ifndef RPY_EXTERN -# define RPY_EXTERN RPY_EXPORTED -# endif -# define RPY_EXPORTED extern __attribute__((visibility("default"))) -# define VMPROF_ADDR_OF_TRAMPOLINE(addr) 0 +#ifndef RPY_EXTERN +#define RPY_EXTERN RPY_EXPORTED +#endif +#ifdef _WIN32 +#define RPY_EXPORTED __declspec(dllexport) +#else +#define RPY_EXPORTED extern __attribute__((visibility("default"))) +#endif #else - # include "common_header.h" # include "structdef.h" # include "src/threadlocal.h" # include "rvmprof.h" -/*# ifndef VMPROF_ADDR_OF_TRAMPOLINE -# error "RPython program using rvmprof, but not calling vmprof_execute_code()" -# endif*/ #endif diff --git a/rpython/rlib/rvmprof/src/vmprof_common.h b/rpython/rlib/rvmprof/src/vmprof_common.h --- a/rpython/rlib/rvmprof/src/vmprof_common.h +++ b/rpython/rlib/rvmprof/src/vmprof_common.h @@ -7,9 +7,6 @@ static long profile_interval_usec = 0; static int opened_profile(char *interp_name); -#define MAX_STACK_DEPTH \ - ((SINGLE_BUF_SIZE - sizeof(struct prof_stacktrace_s)) / sizeof(void *)) - #define MARKER_STACKTRACE '\x01' #define MARKER_VIRTUAL_IP '\x02' #define MARKER_TRAILER '\x03' @@ -20,6 +17,9 @@ #define VERSION_THREAD_ID '\x01' #define VERSION_TAG '\x02' +#define MAX_STACK_DEPTH \ + ((SINGLE_BUF_SIZE - sizeof(struct prof_stacktrace_s)) / 
sizeof(void *)) + typedef struct prof_stacktrace_s { char padding[sizeof(long) - 1]; char marker; @@ -71,6 +71,43 @@ return _write_all((char*)&header, 5 * sizeof(long) + 4 + namelen); } +/* ************************************************************* + * functions to dump the stack trace + * ************************************************************* + */ + + +static int get_stack_trace(vmprof_stack_t* stack, intptr_t *result, int max_depth, intptr_t pc) +{ + int n = 0; + intptr_t addr = 0; + int bottom_jitted = 0; + // check if the pc is in JIT +#ifdef PYPY_JIT_CODEMAP + if (pypy_find_codemap_at_addr((intptr_t)pc, &addr)) { + // the bottom part is jitted, means we can fill up the first part + // from the JIT + n = vmprof_write_header_for_jit_addr(result, n, pc, max_depth); + stack = stack->next; // skip the first item as it contains garbage + } +#endif + while (n < max_depth - 1 && stack) { + if (stack->kind == VMPROF_CODE_TAG) { + result[n] = stack->kind; + result[n + 1] = stack->value; + n += 2; + } +#ifdef PYPY_JIT_CODEMAP + else if (stack->kind == VMPROF_JITTED_TAG) { + pc = ((intptr_t*)(stack->value - sizeof(intptr_t)))[0]; + n = vmprof_write_header_for_jit_addr(result, n, pc, max_depth); + } +#endif + stack = stack->next; + } + return n; +} + #ifndef RPYTHON_LL2CTYPES static vmprof_stack_t *get_vmprof_stack(void) { diff --git a/rpython/rlib/rvmprof/src/vmprof_main.h b/rpython/rlib/rvmprof/src/vmprof_main.h --- a/rpython/rlib/rvmprof/src/vmprof_main.h +++ b/rpython/rlib/rvmprof/src/vmprof_main.h @@ -35,6 +35,7 @@ #include "vmprof_stack.h" #include "vmprof_getpc.h" #include "vmprof_mt.h" +#include "vmprof_get_custom_offset.h" #include "vmprof_common.h" /************************************************************/ @@ -78,46 +79,6 @@ static char atfork_hook_installed = 0; -#include "vmprof_get_custom_offset.h" - -/* ************************************************************* - * functions to dump the stack trace - * 
************************************************************* - */ - - -static int get_stack_trace(intptr_t *result, int max_depth, intptr_t pc, ucontext_t *ucontext) -{ - vmprof_stack_t* stack = get_vmprof_stack(); - int n = 0; - intptr_t addr = 0; - int bottom_jitted = 0; - // check if the pc is in JIT -#ifdef PYPY_JIT_CODEMAP - if (pypy_find_codemap_at_addr((intptr_t)pc, &addr)) { - // the bottom part is jitted, means we can fill up the first part - // from the JIT - n = vmprof_write_header_for_jit_addr(result, n, pc, max_depth); - stack = stack->next; // skip the first item as it contains garbage - } -#endif - while (n < max_depth - 1 && stack) { - if (stack->kind == VMPROF_CODE_TAG) { - result[n] = stack->kind; - result[n + 1] = stack->value; - n += 2; - } -#ifdef PYPY_JIT_CODEMAP - else if (stack->kind == VMPROF_JITTED_TAG) { - pc = ((intptr_t*)(stack->value - sizeof(intptr_t)))[0]; - n = vmprof_write_header_for_jit_addr(result, n, pc, max_depth); - } -#endif - stack = stack->next; - } - return n; -} - static intptr_t get_current_thread_id(void) { /* xxx This function is a hack on two fronts: @@ -194,8 +155,8 @@ struct prof_stacktrace_s *st = (struct prof_stacktrace_s *)p->data; st->marker = MARKER_STACKTRACE; st->count = 1; - depth = get_stack_trace(st->stack, - MAX_STACK_DEPTH-2, GetPC((ucontext_t*)ucontext), ucontext); + depth = get_stack_trace(get_vmprof_stack(), st->stack, + MAX_STACK_DEPTH-2, GetPC((ucontext_t*)ucontext)); st->depth = depth; st->stack[depth++] = get_current_thread_id(); p->data_offset = offsetof(struct prof_stacktrace_s, marker); diff --git a/rpython/rlib/rvmprof/src/vmprof_main_win32.h b/rpython/rlib/rvmprof/src/vmprof_main_win32.h --- a/rpython/rlib/rvmprof/src/vmprof_main_win32.h +++ b/rpython/rlib/rvmprof/src/vmprof_main_win32.h @@ -10,13 +10,30 @@ return 0; } +#if defined(_MSC_VER) +#include +typedef SSIZE_T ssize_t; +#endif + +#include +#include +#include +#include +#include +#include +#include +#include +#include #include 
"vmprof_stack.h" +#include "vmprof_get_custom_offset.h" #include "vmprof_common.h" #include // This file has been inspired (but not copied from since the LICENSE // would not allow it) from verysleepy profiler +#define SINGLE_BUF_SIZE 8192 + volatile int thread_started = 0; volatile int enabled = 0; @@ -55,52 +72,75 @@ return 0; } -int vmprof_snapshot_thread(DWORD thread_id, PyThreadState *tstate, prof_stacktrace_s *stack) +int vmprof_snapshot_thread(struct pypy_threadlocal_s *p, prof_stacktrace_s *stack) { - HRESULT result; - HANDLE hThread = OpenThread(THREAD_ALL_ACCESS, FALSE, thread_id); - int depth; + void *addr; + vmprof_stack_t *cur; + long tid; + HANDLE hThread; + long depth; + DWORD result; + CONTEXT ctx; + +#ifdef RPYTHON_LL2CTYPES + return 0; // not much we can do +#else +#ifndef RPY_TLOFS_thread_ident + return 0; // we can't freeze threads, unsafe +#else + hThread = OpenThread(THREAD_ALL_ACCESS, FALSE, p->thread_ident); if (!hThread) { return -1; } result = SuspendThread(hThread); if(result == 0xffffffff) return -1; // possible, e.g. 
attached debugger or thread alread suspended - // find the correct thread - depth = read_trace_from_cpy_frame(tstate->frame, stack->stack, - MAX_STACK_DEPTH); + ctx.ContextFlags = CONTEXT_FULL; + if (!GetThreadContext(hThread, &ctx)) + return -1; + depth = get_stack_trace(p->vmprof_tl_stack, + stack->stack, MAX_STACK_DEPTH-2, ctx.Eip); stack->depth = depth; - stack->stack[depth++] = (void*)thread_id; + stack->stack[depth++] = (void*)p->thread_ident; stack->count = 1; stack->marker = MARKER_STACKTRACE; ResumeThread(hThread); return depth; +#endif +#endif } long __stdcall vmprof_mainloop(void *arg) { +#ifndef RPYTHON_LL2CTYPES + struct pypy_threadlocal_s *p; prof_stacktrace_s *stack = (prof_stacktrace_s*)malloc(SINGLE_BUF_SIZE); - HANDLE hThreadSnap = INVALID_HANDLE_VALUE; int depth; - PyThreadState *tstate; while (1) { - Sleep(profile_interval_usec * 1000); + //Sleep(profile_interval_usec * 1000); + Sleep(10); if (!enabled) { continue; } - tstate = PyInterpreterState_Head()->tstate_head; - while (tstate) { - depth = vmprof_snapshot_thread(tstate->thread_id, tstate, stack); - if (depth > 0) { - _write_all((char*)stack + offsetof(prof_stacktrace_s, marker), - depth * sizeof(void *) + - sizeof(struct prof_stacktrace_s) - - offsetof(struct prof_stacktrace_s, marker)); + _RPython_ThreadLocals_Acquire(); + p = _RPython_ThreadLocals_Head(); // the first one is one behind head + p = _RPython_ThreadLocals_Enum(p); + while (p) { + if (p->ready == 42) { + depth = vmprof_snapshot_thread(p, stack); + if (depth > 0) { + _write_all((char*)stack + offsetof(prof_stacktrace_s, marker), + depth * sizeof(void *) + + sizeof(struct prof_stacktrace_s) - + offsetof(struct prof_stacktrace_s, marker)); + } } - tstate = tstate->next; + p = _RPython_ThreadLocals_Enum(p); } + _RPython_ThreadLocals_Release(); } +#endif } RPY_EXTERN diff --git a/rpython/rlib/rvmprof/src/vmprof_stack.h b/rpython/rlib/rvmprof/src/vmprof_stack.h --- a/rpython/rlib/rvmprof/src/vmprof_stack.h +++ 
b/rpython/rlib/rvmprof/src/vmprof_stack.h @@ -1,7 +1,11 @@ #ifndef _VMPROF_STACK_H_ #define _VMPROF_STACK_H_ +#ifdef _WIN32 +#define intptr_t long // XXX windows VC++ 2008 lacks stdint.h +#else #include +#endif #define VMPROF_CODE_TAG 1 /* <- also in cintf.py */ #define VMPROF_BLACKHOLE_TAG 2 diff --git a/rpython/rlib/rvmprof/test/test_ztranslation.py b/rpython/rlib/rvmprof/test/test_ztranslation.py --- a/rpython/rlib/rvmprof/test/test_ztranslation.py +++ b/rpython/rlib/rvmprof/test/test_ztranslation.py @@ -3,11 +3,10 @@ sys.path += ['../../../..'] # for subprocess in test_interpreted import py from rpython.tool.udir import udir -from rpython.rlib import rvmprof +from rpython.rlib import rvmprof, rthread from rpython.translator.c.test.test_genc import compile from rpython.rlib.nonconst import NonConstant - class MyCode: def __init__(self, count): self.count = count @@ -39,6 +38,7 @@ PROF_FILE = str(udir.join('test_ztranslation.prof')) def main(argv=[]): + rthread.get_ident() # force TLOFS_thread_ident if NonConstant(False): # Hack to give os.open() the correct annotation os.open('foo', 1, 1) diff --git a/rpython/translator/c/src/threadlocal.c b/rpython/translator/c/src/threadlocal.c --- a/rpython/translator/c/src/threadlocal.c +++ b/rpython/translator/c/src/threadlocal.c @@ -85,6 +85,11 @@ return prev->next; } +struct pypy_threadlocal_s *_RPython_ThreadLocals_Head(void) +{ + return &linkedlist_head; +} + static void _RPy_ThreadLocals_Init(void *p) { struct pypy_threadlocal_s *tls = (struct pypy_threadlocal_s *)p; diff --git a/rpython/translator/c/src/threadlocal.h b/rpython/translator/c/src/threadlocal.h --- a/rpython/translator/c/src/threadlocal.h +++ b/rpython/translator/c/src/threadlocal.h @@ -27,6 +27,9 @@ RPY_EXTERN struct pypy_threadlocal_s * _RPython_ThreadLocals_Enum(struct pypy_threadlocal_s *prev); +/* will return the head of the list */ +RPY_EXTERN struct pypy_threadlocal_s *_RPython_ThreadLocals_Head(); + #define OP_THREADLOCALREF_ACQUIRE(r) 
_RPython_ThreadLocals_Acquire() #define OP_THREADLOCALREF_RELEASE(r) _RPython_ThreadLocals_Release() #define OP_THREADLOCALREF_ENUM(p, r) r = _RPython_ThreadLocals_Enum(p) From pypy.commits at gmail.com Sat Feb 13 09:14:08 2016 From: pypy.commits at gmail.com (mjacob) Date: Sat, 13 Feb 2016 06:14:08 -0800 (PST) Subject: [pypy-commit] pypy default: Introduce space.contains_w(w_container, w_item) as a shortcut for space.is_true(space.contains(w_container, w_item)). Message-ID: <56bf3a30.44e01c0a.f7760.5d09@mx.google.com> Author: Manuel Jacob Branch: Changeset: r82209:bc162d5aa423 Date: 2016-02-13 14:40 +0100 http://bitbucket.org/pypy/pypy/changeset/bc162d5aa423/ Log: Introduce space.contains_w(w_container, w_item) as a shortcut for space.is_true(space.contains(w_container, w_item)). diff --git a/pypy/interpreter/baseobjspace.py b/pypy/interpreter/baseobjspace.py --- a/pypy/interpreter/baseobjspace.py +++ b/pypy/interpreter/baseobjspace.py @@ -745,9 +745,13 @@ return self.int_w(self.hash(w_obj)) def len_w(self, w_obj): - """shotcut for space.int_w(space.len(w_obj))""" + """shortcut for space.int_w(space.len(w_obj))""" return self.int_w(self.len(w_obj)) + def contains_w(self, w_container, w_item): + """shortcut for space.is_true(space.contains(w_container, w_item))""" + return self.is_true(self.contains(w_container, w_item)) + def setitem_str(self, w_obj, key, w_value): return self.setitem(w_obj, self.wrap(key), w_value) @@ -1237,7 +1241,7 @@ if not isinstance(statement, PyCode): raise TypeError('space.exec_(): expected a string, code or PyCode object') w_key = self.wrap('__builtins__') - if not self.is_true(self.contains(w_globals, w_key)): + if not self.contains_w(w_globals, w_key): self.setitem(w_globals, w_key, self.wrap(self.builtin)) return statement.exec_code(self, w_globals, w_locals) diff --git a/pypy/module/__builtin__/interp_classobj.py b/pypy/module/__builtin__/interp_classobj.py --- a/pypy/module/__builtin__/interp_classobj.py +++ 
b/pypy/module/__builtin__/interp_classobj.py @@ -20,7 +20,7 @@ if not space.isinstance_w(w_dict, space.w_dict): raise_type_err(space, 'bases', 'tuple', w_bases) - if not space.is_true(space.contains(w_dict, space.wrap("__doc__"))): + if not space.contains_w(w_dict, space.wrap("__doc__")): space.setitem(w_dict, space.wrap("__doc__"), space.w_None) # XXX missing: lengthy and obscure logic about "__module__" diff --git a/pypy/module/_demo/test/test_import.py b/pypy/module/_demo/test/test_import.py --- a/pypy/module/_demo/test/test_import.py +++ b/pypy/module/_demo/test/test_import.py @@ -12,8 +12,7 @@ w_modules = space.sys.get('modules') assert _demo.Module.demo_events == ['setup'] - assert not space.is_true(space.contains(w_modules, - space.wrap('_demo'))) + assert not space.contains_w(w_modules, space.wrap('_demo')) # first import w_import = space.builtin.get('__import__') diff --git a/pypy/module/_vmprof/interp_vmprof.py b/pypy/module/_vmprof/interp_vmprof.py --- a/pypy/module/_vmprof/interp_vmprof.py +++ b/pypy/module/_vmprof/interp_vmprof.py @@ -60,7 +60,7 @@ Must be smaller than 1.0 """ w_modules = space.sys.get('modules') - if space.is_true(space.contains(w_modules, space.wrap('_continuation'))): + if space.contains_w(w_modules, space.wrap('_continuation')): space.warn(space.wrap("Using _continuation/greenlet/stacklet together " "with vmprof will crash"), space.w_RuntimeWarning) diff --git a/pypy/module/cpyext/test/test_dictobject.py b/pypy/module/cpyext/test/test_dictobject.py --- a/pypy/module/cpyext/test/test_dictobject.py +++ b/pypy/module/cpyext/test/test_dictobject.py @@ -146,7 +146,7 @@ def test_dictproxy(self, space, api): w_dict = space.sys.get('modules') w_proxy = api.PyDictProxy_New(w_dict) - assert space.is_true(space.contains(w_proxy, space.wrap('sys'))) + assert space.contains_w(w_proxy, space.wrap('sys')) raises(OperationError, space.setitem, w_proxy, space.wrap('sys'), space.w_None) raises(OperationError, space.delitem, diff --git 
a/pypy/module/cpyext/test/test_import.py b/pypy/module/cpyext/test/test_import.py --- a/pypy/module/cpyext/test/test_import.py +++ b/pypy/module/cpyext/test/test_import.py @@ -21,7 +21,7 @@ def test_getmoduledict(self, space, api): testmod = "_functools" w_pre_dict = api.PyImport_GetModuleDict() - assert not space.is_true(space.contains(w_pre_dict, space.wrap(testmod))) + assert not space.contains_w(w_pre_dict, space.wrap(testmod)) with rffi.scoped_str2charp(testmod) as modname: w_module = api.PyImport_ImportModule(modname) @@ -29,7 +29,7 @@ assert w_module w_dict = api.PyImport_GetModuleDict() - assert space.is_true(space.contains(w_dict, space.wrap(testmod))) + assert space.contains_w(w_dict, space.wrap(testmod)) def test_reload(self, space, api): stat = api.PyImport_Import(space.wrap("stat")) diff --git a/pypy/module/cpyext/test/test_object.py b/pypy/module/cpyext/test/test_object.py --- a/pypy/module/cpyext/test/test_object.py +++ b/pypy/module/cpyext/test/test_object.py @@ -202,7 +202,7 @@ def test_dir(self, space, api): w_dir = api.PyObject_Dir(space.sys) assert space.isinstance_w(w_dir, space.w_list) - assert space.is_true(space.contains(w_dir, space.wrap('modules'))) + assert space.contains_w(w_dir, space.wrap('modules')) class AppTestObject(AppTestCpythonExtensionBase): def setup_class(cls): diff --git a/pypy/objspace/std/dictmultiobject.py b/pypy/objspace/std/dictmultiobject.py --- a/pypy/objspace/std/dictmultiobject.py +++ b/pypy/objspace/std/dictmultiobject.py @@ -1421,7 +1421,7 @@ def _all_contained_in(space, w_dictview, w_other): w_iter = space.iter(w_dictview) for w_item in space.iteriterable(w_iter): - if not space.is_true(space.contains(w_other, w_item)): + if not space.contains_w(w_other, w_item): return space.w_False return space.w_True From pypy.commits at gmail.com Sat Feb 13 09:14:13 2016 From: pypy.commits at gmail.com (mjacob) Date: Sat, 13 Feb 2016 06:14:13 -0800 (PST) Subject: [pypy-commit] pypy py3k: Simplify code. 
Message-ID: <56bf3a35.45631c0a.872c6.63e7@mx.google.com> Author: Manuel Jacob Branch: py3k Changeset: r82212:8e9369bbc962 Date: 2016-02-13 15:04 +0100 http://bitbucket.org/pypy/pypy/changeset/8e9369bbc962/ Log: Simplify code. diff --git a/pypy/objspace/std/dictmultiobject.py b/pypy/objspace/std/dictmultiobject.py --- a/pypy/objspace/std/dictmultiobject.py +++ b/pypy/objspace/std/dictmultiobject.py @@ -1405,9 +1405,8 @@ if len_other > len_self: self, w_other = w_other, self - w_iter = space.iter(w_other) - for w_item in space.iteriterable(w_iter): - if space.is_true(space.contains(self, w_item)): + for w_item in space.iteriterable(w_other): + if space.contains_w(self, w_item): return space.w_False return space.w_True From pypy.commits at gmail.com Sat Feb 13 09:14:15 2016 From: pypy.commits at gmail.com (mjacob) Date: Sat, 13 Feb 2016 06:14:15 -0800 (PST) Subject: [pypy-commit] pypy py3.3: hg merge py3k Message-ID: <56bf3a37.c96cc20a.7f53b.ffffeddf@mx.google.com> Author: Manuel Jacob Branch: py3.3 Changeset: r82213:ee3f3a22ee31 Date: 2016-02-13 15:06 +0100 http://bitbucket.org/pypy/pypy/changeset/ee3f3a22ee31/ Log: hg merge py3k diff --git a/pypy/config/pypyoption.py b/pypy/config/pypyoption.py --- a/pypy/config/pypyoption.py +++ b/pypy/config/pypyoption.py @@ -37,14 +37,14 @@ "thread", "itertools", "pyexpat", "_ssl", "cpyext", "array", "binascii", "_multiprocessing", '_warnings', "_collections", "_multibytecodec", "_continuation", "_cffi_backend", - "_csv", "_pypyjson", "_posixsubprocess", # "cppyy", "micronumpy" + "_csv", "_pypyjson", "_vmprof", "_posixsubprocess", # "cppyy", "micronumpy" "faulthandler", ]) -if ((sys.platform.startswith('linux') or sys.platform == 'darwin') - and os.uname()[4] == 'x86_64' and sys.maxint > 2**32): +#if ((sys.platform.startswith('linux') or sys.platform == 'darwin') +# and os.uname()[4] == 'x86_64' and sys.maxint > 2**32): # it's not enough that we get x86_64 - working_modules.add('_vmprof') +# working_modules.add('_vmprof') 
translation_modules = default_modules.copy() translation_modules.update([ diff --git a/pypy/interpreter/baseobjspace.py b/pypy/interpreter/baseobjspace.py --- a/pypy/interpreter/baseobjspace.py +++ b/pypy/interpreter/baseobjspace.py @@ -313,7 +313,7 @@ return None -class W_InterpIterable(W_Root): +class InterpIterable(object): def __init__(self, space, w_iterable): self.w_iter = space.iter(w_iterable) self.space = space @@ -765,9 +765,13 @@ return self.int_w(self.hash(w_obj)) def len_w(self, w_obj): - """shotcut for space.int_w(space.len(w_obj))""" + """shortcut for space.int_w(space.len(w_obj))""" return self.int_w(self.len(w_obj)) + def contains_w(self, w_container, w_item): + """shortcut for space.is_true(space.contains(w_container, w_item))""" + return self.is_true(self.contains(w_container, w_item)) + def setitem_str(self, w_obj, key, w_value): return self.setitem(w_obj, self.wrap(key), w_value) @@ -870,7 +874,7 @@ return lst_w[:] # make the resulting list resizable def iteriterable(self, w_iterable): - return W_InterpIterable(self, w_iterable) + return InterpIterable(self, w_iterable) def _unpackiterable_unknown_length(self, w_iterator, w_iterable): """Unpack an iterable of unknown length into an interp-level @@ -1240,7 +1244,7 @@ if not isinstance(statement, PyCode): raise TypeError('space.exec_(): expected a string, code or PyCode object') w_key = self.wrap('__builtins__') - if not self.is_true(self.contains(w_globals, w_key)): + if not self.contains_w(w_globals, w_key): self.setitem(w_globals, w_key, self.wrap(self.builtin)) return statement.exec_code(self, w_globals, w_locals) diff --git a/pypy/module/_demo/test/test_import.py b/pypy/module/_demo/test/test_import.py --- a/pypy/module/_demo/test/test_import.py +++ b/pypy/module/_demo/test/test_import.py @@ -12,8 +12,7 @@ w_modules = space.sys.get('modules') assert _demo.Module.demo_events == ['setup'] - assert not space.is_true(space.contains(w_modules, - space.wrap('_demo'))) + assert not 
space.contains_w(w_modules, space.wrap('_demo')) # first import w_import = space.builtin.get('__import__') diff --git a/pypy/module/_vmprof/interp_vmprof.py b/pypy/module/_vmprof/interp_vmprof.py --- a/pypy/module/_vmprof/interp_vmprof.py +++ b/pypy/module/_vmprof/interp_vmprof.py @@ -60,7 +60,7 @@ Must be smaller than 1.0 """ w_modules = space.sys.get('modules') - if space.is_true(space.contains(w_modules, space.wrap('_continuation'))): + if space.contains_w(w_modules, space.wrap('_continuation')): space.warn(space.wrap("Using _continuation/greenlet/stacklet together " "with vmprof will crash"), space.w_RuntimeWarning) diff --git a/pypy/module/cpyext/test/test_dictobject.py b/pypy/module/cpyext/test/test_dictobject.py --- a/pypy/module/cpyext/test/test_dictobject.py +++ b/pypy/module/cpyext/test/test_dictobject.py @@ -150,7 +150,7 @@ def test_dictproxy(self, space, api): w_dict = space.sys.get('modules') w_proxy = api.PyDictProxy_New(w_dict) - assert space.is_true(space.contains(w_proxy, space.wrap('sys'))) + assert space.contains_w(w_proxy, space.wrap('sys')) raises(OperationError, space.setitem, w_proxy, space.wrap('sys'), space.w_None) raises(OperationError, space.delitem, diff --git a/pypy/module/cpyext/test/test_import.py b/pypy/module/cpyext/test/test_import.py --- a/pypy/module/cpyext/test/test_import.py +++ b/pypy/module/cpyext/test/test_import.py @@ -21,7 +21,7 @@ def test_getmoduledict(self, space, api): testmod = "contextlib" w_pre_dict = api.PyImport_GetModuleDict() - assert not space.is_true(space.contains(w_pre_dict, space.wrap(testmod))) + assert not space.contains_w(w_pre_dict, space.wrap(testmod)) with rffi.scoped_str2charp(testmod) as modname: w_module = api.PyImport_ImportModule(modname) @@ -29,7 +29,7 @@ assert w_module w_dict = api.PyImport_GetModuleDict() - assert space.is_true(space.contains(w_dict, space.wrap(testmod))) + assert space.contains_w(w_dict, space.wrap(testmod)) def test_reload(self, space, api): stat = 
api.PyImport_Import(space.wrap("stat")) diff --git a/pypy/module/cpyext/test/test_object.py b/pypy/module/cpyext/test/test_object.py --- a/pypy/module/cpyext/test/test_object.py +++ b/pypy/module/cpyext/test/test_object.py @@ -179,7 +179,7 @@ def test_dir(self, space, api): w_dir = api.PyObject_Dir(space.sys) assert space.isinstance_w(w_dir, space.w_list) - assert space.is_true(space.contains(w_dir, space.wrap('modules'))) + assert space.contains_w(w_dir, space.wrap('modules')) class AppTestObject(AppTestCpythonExtensionBase): def setup_class(cls): diff --git a/pypy/objspace/std/dictmultiobject.py b/pypy/objspace/std/dictmultiobject.py --- a/pypy/objspace/std/dictmultiobject.py +++ b/pypy/objspace/std/dictmultiobject.py @@ -1353,9 +1353,8 @@ return space.len(self.w_dict) def _all_contained_in(space, w_dictview, w_other): - w_iter = space.iter(w_dictview) - for w_item in space.iteriterable(w_iter): - if not space.is_true(space.contains(w_other, w_item)): + for w_item in space.iteriterable(w_dictview): + if not space.contains_w(w_other, w_item): return space.w_False return space.w_True @@ -1420,9 +1419,8 @@ if len_other > len_self: self, w_other = w_other, self - w_iter = space.iter(w_other) - for w_item in space.iteriterable(w_iter): - if space.is_true(space.contains(self, w_item)): + for w_item in space.iteriterable(w_other): + if space.contains_w(self, w_item): return space.w_False return space.w_True diff --git a/rpython/jit/backend/llsupport/test/zrpy_vmprof_test.py b/rpython/jit/backend/llsupport/test/zrpy_vmprof_test.py --- a/rpython/jit/backend/llsupport/test/zrpy_vmprof_test.py +++ b/rpython/jit/backend/llsupport/test/zrpy_vmprof_test.py @@ -3,16 +3,13 @@ from rpython.jit.backend.test.support import CCompiledMixin from rpython.rlib.jit import JitDriver from rpython.tool.udir import udir +from rpython.rlib import rthread from rpython.translator.translator import TranslationContext from rpython.jit.backend.detect_cpu import getcpuclass class 
CompiledVmprofTest(CCompiledMixin): CPUClass = getcpuclass() - def setup(self): - if self.CPUClass.backend_name != 'x86_64': - py.test.skip("vmprof only supports x86-64 CPUs at the moment") - def _get_TranslationContext(self): t = TranslationContext() t.config.translation.gc = 'incminimark' @@ -62,6 +59,7 @@ tmpfilename = str(udir.join('test_rvmprof')) def f(num): + rthread.get_ident() # register TLOFS_thread_ident code = MyCode("py:x:foo:3") rvmprof.register_code(code, get_name) fd = os.open(tmpfilename, os.O_WRONLY | os.O_CREAT, 0666) diff --git a/rpython/rlib/rvmprof/cintf.py b/rpython/rlib/rvmprof/cintf.py --- a/rpython/rlib/rvmprof/cintf.py +++ b/rpython/rlib/rvmprof/cintf.py @@ -30,11 +30,11 @@ def setup(): + compile_extra = ['-DRPYTHON_LL2CTYPES'] platform.verify_eci(ExternalCompilationInfo( - compile_extra=['-DRPYTHON_LL2CTYPES'], + compile_extra=compile_extra, **eci_kwds)) - eci = global_eci vmprof_init = rffi.llexternal("vmprof_init", [rffi.INT, rffi.DOUBLE, rffi.CCHARP], diff --git a/rpython/rlib/rvmprof/src/rvmprof.c b/rpython/rlib/rvmprof/src/rvmprof.c --- a/rpython/rlib/rvmprof/src/rvmprof.c +++ b/rpython/rlib/rvmprof/src/rvmprof.c @@ -1,23 +1,21 @@ #define _GNU_SOURCE 1 - #ifdef RPYTHON_LL2CTYPES /* only for testing: ll2ctypes sets RPY_EXTERN from the command-line */ -# ifndef RPY_EXTERN -# define RPY_EXTERN RPY_EXPORTED -# endif -# define RPY_EXPORTED extern __attribute__((visibility("default"))) -# define VMPROF_ADDR_OF_TRAMPOLINE(addr) 0 +#ifndef RPY_EXTERN +#define RPY_EXTERN RPY_EXPORTED +#endif +#ifdef _WIN32 +#define RPY_EXPORTED __declspec(dllexport) +#else +#define RPY_EXPORTED extern __attribute__((visibility("default"))) +#endif #else - # include "common_header.h" # include "structdef.h" # include "src/threadlocal.h" # include "rvmprof.h" -/*# ifndef VMPROF_ADDR_OF_TRAMPOLINE -# error "RPython program using rvmprof, but not calling vmprof_execute_code()" -# endif*/ #endif diff --git a/rpython/rlib/rvmprof/src/vmprof_common.h 
b/rpython/rlib/rvmprof/src/vmprof_common.h --- a/rpython/rlib/rvmprof/src/vmprof_common.h +++ b/rpython/rlib/rvmprof/src/vmprof_common.h @@ -7,9 +7,6 @@ static long profile_interval_usec = 0; static int opened_profile(char *interp_name); -#define MAX_STACK_DEPTH \ - ((SINGLE_BUF_SIZE - sizeof(struct prof_stacktrace_s)) / sizeof(void *)) - #define MARKER_STACKTRACE '\x01' #define MARKER_VIRTUAL_IP '\x02' #define MARKER_TRAILER '\x03' @@ -20,6 +17,9 @@ #define VERSION_THREAD_ID '\x01' #define VERSION_TAG '\x02' +#define MAX_STACK_DEPTH \ + ((SINGLE_BUF_SIZE - sizeof(struct prof_stacktrace_s)) / sizeof(void *)) + typedef struct prof_stacktrace_s { char padding[sizeof(long) - 1]; char marker; @@ -71,6 +71,43 @@ return _write_all((char*)&header, 5 * sizeof(long) + 4 + namelen); } +/* ************************************************************* + * functions to dump the stack trace + * ************************************************************* + */ + + +static int get_stack_trace(vmprof_stack_t* stack, intptr_t *result, int max_depth, intptr_t pc) +{ + int n = 0; + intptr_t addr = 0; + int bottom_jitted = 0; + // check if the pc is in JIT +#ifdef PYPY_JIT_CODEMAP + if (pypy_find_codemap_at_addr((intptr_t)pc, &addr)) { + // the bottom part is jitted, means we can fill up the first part + // from the JIT + n = vmprof_write_header_for_jit_addr(result, n, pc, max_depth); + stack = stack->next; // skip the first item as it contains garbage + } +#endif + while (n < max_depth - 1 && stack) { + if (stack->kind == VMPROF_CODE_TAG) { + result[n] = stack->kind; + result[n + 1] = stack->value; + n += 2; + } +#ifdef PYPY_JIT_CODEMAP + else if (stack->kind == VMPROF_JITTED_TAG) { + pc = ((intptr_t*)(stack->value - sizeof(intptr_t)))[0]; + n = vmprof_write_header_for_jit_addr(result, n, pc, max_depth); + } +#endif + stack = stack->next; + } + return n; +} + #ifndef RPYTHON_LL2CTYPES static vmprof_stack_t *get_vmprof_stack(void) { diff --git a/rpython/rlib/rvmprof/src/vmprof_main.h 
b/rpython/rlib/rvmprof/src/vmprof_main.h --- a/rpython/rlib/rvmprof/src/vmprof_main.h +++ b/rpython/rlib/rvmprof/src/vmprof_main.h @@ -35,6 +35,7 @@ #include "vmprof_stack.h" #include "vmprof_getpc.h" #include "vmprof_mt.h" +#include "vmprof_get_custom_offset.h" #include "vmprof_common.h" /************************************************************/ @@ -78,46 +79,6 @@ static char atfork_hook_installed = 0; -#include "vmprof_get_custom_offset.h" - -/* ************************************************************* - * functions to dump the stack trace - * ************************************************************* - */ - - -static int get_stack_trace(intptr_t *result, int max_depth, intptr_t pc, ucontext_t *ucontext) -{ - vmprof_stack_t* stack = get_vmprof_stack(); - int n = 0; - intptr_t addr = 0; - int bottom_jitted = 0; - // check if the pc is in JIT -#ifdef PYPY_JIT_CODEMAP - if (pypy_find_codemap_at_addr((intptr_t)pc, &addr)) { - // the bottom part is jitted, means we can fill up the first part - // from the JIT - n = vmprof_write_header_for_jit_addr(result, n, pc, max_depth); - stack = stack->next; // skip the first item as it contains garbage - } -#endif - while (n < max_depth - 1 && stack) { - if (stack->kind == VMPROF_CODE_TAG) { - result[n] = stack->kind; - result[n + 1] = stack->value; - n += 2; - } -#ifdef PYPY_JIT_CODEMAP - else if (stack->kind == VMPROF_JITTED_TAG) { - pc = ((intptr_t*)(stack->value - sizeof(intptr_t)))[0]; - n = vmprof_write_header_for_jit_addr(result, n, pc, max_depth); - } -#endif - stack = stack->next; - } - return n; -} - static intptr_t get_current_thread_id(void) { /* xxx This function is a hack on two fronts: @@ -194,8 +155,8 @@ struct prof_stacktrace_s *st = (struct prof_stacktrace_s *)p->data; st->marker = MARKER_STACKTRACE; st->count = 1; - depth = get_stack_trace(st->stack, - MAX_STACK_DEPTH-2, GetPC((ucontext_t*)ucontext), ucontext); + depth = get_stack_trace(get_vmprof_stack(), st->stack, + MAX_STACK_DEPTH-2, 
GetPC((ucontext_t*)ucontext)); st->depth = depth; st->stack[depth++] = get_current_thread_id(); p->data_offset = offsetof(struct prof_stacktrace_s, marker); diff --git a/rpython/rlib/rvmprof/src/vmprof_main_win32.h b/rpython/rlib/rvmprof/src/vmprof_main_win32.h --- a/rpython/rlib/rvmprof/src/vmprof_main_win32.h +++ b/rpython/rlib/rvmprof/src/vmprof_main_win32.h @@ -10,13 +10,30 @@ return 0; } +#if defined(_MSC_VER) +#include +typedef SSIZE_T ssize_t; +#endif + +#include +#include +#include +#include +#include +#include +#include +#include +#include #include "vmprof_stack.h" +#include "vmprof_get_custom_offset.h" #include "vmprof_common.h" #include // This file has been inspired (but not copied from since the LICENSE // would not allow it) from verysleepy profiler +#define SINGLE_BUF_SIZE 8192 + volatile int thread_started = 0; volatile int enabled = 0; @@ -55,52 +72,75 @@ return 0; } -int vmprof_snapshot_thread(DWORD thread_id, PyThreadState *tstate, prof_stacktrace_s *stack) +int vmprof_snapshot_thread(struct pypy_threadlocal_s *p, prof_stacktrace_s *stack) { - HRESULT result; - HANDLE hThread = OpenThread(THREAD_ALL_ACCESS, FALSE, thread_id); - int depth; + void *addr; + vmprof_stack_t *cur; + long tid; + HANDLE hThread; + long depth; + DWORD result; + CONTEXT ctx; + +#ifdef RPYTHON_LL2CTYPES + return 0; // not much we can do +#else +#ifndef RPY_TLOFS_thread_ident + return 0; // we can't freeze threads, unsafe +#else + hThread = OpenThread(THREAD_ALL_ACCESS, FALSE, p->thread_ident); if (!hThread) { return -1; } result = SuspendThread(hThread); if(result == 0xffffffff) return -1; // possible, e.g. 
attached debugger or thread alread suspended - // find the correct thread - depth = read_trace_from_cpy_frame(tstate->frame, stack->stack, - MAX_STACK_DEPTH); + ctx.ContextFlags = CONTEXT_FULL; + if (!GetThreadContext(hThread, &ctx)) + return -1; + depth = get_stack_trace(p->vmprof_tl_stack, + stack->stack, MAX_STACK_DEPTH-2, ctx.Eip); stack->depth = depth; - stack->stack[depth++] = (void*)thread_id; + stack->stack[depth++] = (void*)p->thread_ident; stack->count = 1; stack->marker = MARKER_STACKTRACE; ResumeThread(hThread); return depth; +#endif +#endif } long __stdcall vmprof_mainloop(void *arg) { +#ifndef RPYTHON_LL2CTYPES + struct pypy_threadlocal_s *p; prof_stacktrace_s *stack = (prof_stacktrace_s*)malloc(SINGLE_BUF_SIZE); - HANDLE hThreadSnap = INVALID_HANDLE_VALUE; int depth; - PyThreadState *tstate; while (1) { - Sleep(profile_interval_usec * 1000); + //Sleep(profile_interval_usec * 1000); + Sleep(10); if (!enabled) { continue; } - tstate = PyInterpreterState_Head()->tstate_head; - while (tstate) { - depth = vmprof_snapshot_thread(tstate->thread_id, tstate, stack); - if (depth > 0) { - _write_all((char*)stack + offsetof(prof_stacktrace_s, marker), - depth * sizeof(void *) + - sizeof(struct prof_stacktrace_s) - - offsetof(struct prof_stacktrace_s, marker)); + _RPython_ThreadLocals_Acquire(); + p = _RPython_ThreadLocals_Head(); // the first one is one behind head + p = _RPython_ThreadLocals_Enum(p); + while (p) { + if (p->ready == 42) { + depth = vmprof_snapshot_thread(p, stack); + if (depth > 0) { + _write_all((char*)stack + offsetof(prof_stacktrace_s, marker), + depth * sizeof(void *) + + sizeof(struct prof_stacktrace_s) - + offsetof(struct prof_stacktrace_s, marker)); + } } - tstate = tstate->next; + p = _RPython_ThreadLocals_Enum(p); } + _RPython_ThreadLocals_Release(); } +#endif } RPY_EXTERN diff --git a/rpython/rlib/rvmprof/src/vmprof_stack.h b/rpython/rlib/rvmprof/src/vmprof_stack.h --- a/rpython/rlib/rvmprof/src/vmprof_stack.h +++ 
b/rpython/rlib/rvmprof/src/vmprof_stack.h @@ -1,7 +1,11 @@ #ifndef _VMPROF_STACK_H_ #define _VMPROF_STACK_H_ +#ifdef _WIN32 +#define intptr_t long // XXX windows VC++ 2008 lacks stdint.h +#else #include +#endif #define VMPROF_CODE_TAG 1 /* <- also in cintf.py */ #define VMPROF_BLACKHOLE_TAG 2 diff --git a/rpython/rlib/rvmprof/test/test_ztranslation.py b/rpython/rlib/rvmprof/test/test_ztranslation.py --- a/rpython/rlib/rvmprof/test/test_ztranslation.py +++ b/rpython/rlib/rvmprof/test/test_ztranslation.py @@ -3,11 +3,10 @@ sys.path += ['../../../..'] # for subprocess in test_interpreted import py from rpython.tool.udir import udir -from rpython.rlib import rvmprof +from rpython.rlib import rvmprof, rthread from rpython.translator.c.test.test_genc import compile from rpython.rlib.nonconst import NonConstant - class MyCode: def __init__(self, count): self.count = count @@ -39,6 +38,7 @@ PROF_FILE = str(udir.join('test_ztranslation.prof')) def main(argv=[]): + rthread.get_ident() # force TLOFS_thread_ident if NonConstant(False): # Hack to give os.open() the correct annotation os.open('foo', 1, 1) diff --git a/rpython/translator/c/src/threadlocal.c b/rpython/translator/c/src/threadlocal.c --- a/rpython/translator/c/src/threadlocal.c +++ b/rpython/translator/c/src/threadlocal.c @@ -85,6 +85,11 @@ return prev->next; } +struct pypy_threadlocal_s *_RPython_ThreadLocals_Head(void) +{ + return &linkedlist_head; +} + static void _RPy_ThreadLocals_Init(void *p) { struct pypy_threadlocal_s *tls = (struct pypy_threadlocal_s *)p; diff --git a/rpython/translator/c/src/threadlocal.h b/rpython/translator/c/src/threadlocal.h --- a/rpython/translator/c/src/threadlocal.h +++ b/rpython/translator/c/src/threadlocal.h @@ -27,6 +27,9 @@ RPY_EXTERN struct pypy_threadlocal_s * _RPython_ThreadLocals_Enum(struct pypy_threadlocal_s *prev); +/* will return the head of the list */ +RPY_EXTERN struct pypy_threadlocal_s *_RPython_ThreadLocals_Head(); + #define OP_THREADLOCALREF_ACQUIRE(r) 
_RPython_ThreadLocals_Acquire() #define OP_THREADLOCALREF_RELEASE(r) _RPython_ThreadLocals_Release() #define OP_THREADLOCALREF_ENUM(p, r) r = _RPython_ThreadLocals_Enum(p) From pypy.commits at gmail.com Sat Feb 13 09:14:17 2016 From: pypy.commits at gmail.com (mjacob) Date: Sat, 13 Feb 2016 06:14:17 -0800 (PST) Subject: [pypy-commit] pypy py3.3: Simplify code and make it correctly respect the w_ convention. Message-ID: <56bf3a39.0772c20a.b088e.ffffed83@mx.google.com> Author: Manuel Jacob Branch: py3.3 Changeset: r82214:3f20320bc330 Date: 2016-02-13 15:12 +0100 http://bitbucket.org/pypy/pypy/changeset/3f20320bc330/ Log: Simplify code and make it correctly respect the w_ convention. diff --git a/pypy/interpreter/mixedmodule.py b/pypy/interpreter/mixedmodule.py --- a/pypy/interpreter/mixedmodule.py +++ b/pypy/interpreter/mixedmodule.py @@ -66,10 +66,10 @@ # overwrite the value of a key already in w_initialdict. (So as to avoid # overriding the builtin value with a user-provided value) if not self.space.is_none(self.w_initialdict): - w_items = self.space.iteriterable(self.space.call_method(self.w_dict,'items')) - for w_item in w_items: + w_items = self.space.call_method(self.w_dict, 'items') + for w_item in self.space.iteriterable(w_items): w_key, w_value = self.space.fixedview(w_item, expected_length=2) - if not self.space.is_true(self.space.contains(self.w_initialdict, w_key)): + if not self.space.contains_w(self.w_initialdict, w_key): self.space.setitem(self.w_initialdict, w_key, w_value) else: self.w_initialdict = self.space.call_method(self.w_dict, 'copy') From pypy.commits at gmail.com Sat Feb 13 10:11:18 2016 From: pypy.commits at gmail.com (arigo) Date: Sat, 13 Feb 2016 07:11:18 -0800 (PST) Subject: [pypy-commit] pypy default: Document this fix here Message-ID: <56bf4796.d62d1c0a.48d97.742f@mx.google.com> Author: Armin Rigo Branch: Changeset: r82215:191e5c069244 Date: 2016-02-12 19:03 +0100 http://bitbucket.org/pypy/pypy/changeset/191e5c069244/ Log: Document 
this fix here diff --git a/pypy/doc/whatsnew-head.rst b/pypy/doc/whatsnew-head.rst --- a/pypy/doc/whatsnew-head.rst +++ b/pypy/doc/whatsnew-head.rst @@ -7,6 +7,9 @@ Fixed ``_PyLong_FromByteArray()``, which was buggy. +Fixed a crash with stacklets (or greenlets) on non-Linux machines +which showed up if you forget stacklets without resuming them. + .. branch: numpy-1.10 Fix tests to run cleanly with -A and start to fix micronumpy for upstream numpy From pypy.commits at gmail.com Sat Feb 13 10:11:20 2016 From: pypy.commits at gmail.com (arigo) Date: Sat, 13 Feb 2016 07:11:20 -0800 (PST) Subject: [pypy-commit] pypy cpyext-gc-support-2: in-progress: rewrite tupleobject.py to be similar to stringobject.py Message-ID: <56bf4798.c615c20a.84ffe.0347@mx.google.com> Author: Armin Rigo Branch: cpyext-gc-support-2 Changeset: r82216:b54a2a1a9efb Date: 2016-02-13 16:09 +0100 http://bitbucket.org/pypy/pypy/changeset/b54a2a1a9efb/ Log: in-progress: rewrite tupleobject.py to be similar to stringobject.py for similar reasons (documented) diff --git a/pypy/module/cpyext/api.py b/pypy/module/cpyext/api.py --- a/pypy/module/cpyext/api.py +++ b/pypy/module/cpyext/api.py @@ -497,7 +497,7 @@ GLOBALS['%s#' % (cpyname, )] = ('PyTypeObject*', pypyexpr) for cpyname in '''PyMethodObject PyListObject PyLongObject - PyDictObject PyTupleObject PyClassObject'''.split(): + PyDictObject PyClassObject'''.split(): FORWARD_DECLS.append('typedef struct { PyObject_HEAD } %s' % (cpyname, )) build_exported_objects() diff --git a/pypy/module/cpyext/include/tupleobject.h b/pypy/module/cpyext/include/tupleobject.h --- a/pypy/module/cpyext/include/tupleobject.h +++ b/pypy/module/cpyext/include/tupleobject.h @@ -7,11 +7,21 @@ extern "C" { #endif +typedef struct { + PyObject_HEAD + Py_ssize_t ob_size; + PyObject **ob_item; /* XXX optimize to ob_item[] */ +} PyTupleObject; + /* defined in varargswrapper.c */ PyAPI_FUNC(PyObject *) PyTuple_Pack(Py_ssize_t, ...); -#define PyTuple_SET_ITEM PyTuple_SetItem -#define 
PyTuple_GET_ITEM PyTuple_GetItem +/* Macro, trading safety for speed */ +#define PyTuple_GET_ITEM(op, i) (((PyTupleObject *)(op))->ob_item[i]) +#define PyTuple_GET_SIZE(op) Py_SIZE(op) + +/* Macro, *only* to be used to fill in brand new tuples */ +#define PyTuple_SET_ITEM(op, i, v) (((PyTupleObject *)(op))->ob_item[i] = v) #ifdef __cplusplus diff --git a/pypy/module/cpyext/pyobject.py b/pypy/module/cpyext/pyobject.py --- a/pypy/module/cpyext/pyobject.py +++ b/pypy/module/cpyext/pyobject.py @@ -185,24 +185,6 @@ assert py_obj rawrefcount.create_link_pypy(w_obj, py_obj) -def make_ref(space, w_obj): - """ - Returns a new reference to an intepreter object. - """ - GOES_AWAY - if w_obj is None: - return lltype.nullptr(PyObject.TO) - assert isinstance(w_obj, W_Root) - state = space.fromcache(RefcountState) - try: - py_obj = state.py_objects_w2r[w_obj] - except KeyError: - py_obj = create_ref(space, w_obj) - track_reference(space, py_obj, w_obj) - else: - Py_IncRef(space, py_obj) - return py_obj - def from_ref(space, ref): """ diff --git a/pypy/module/cpyext/test/test_tupleobject.py b/pypy/module/cpyext/test/test_tupleobject.py --- a/pypy/module/cpyext/test/test_tupleobject.py +++ b/pypy/module/cpyext/test/test_tupleobject.py @@ -1,6 +1,8 @@ import py from pypy.module.cpyext.pyobject import PyObject, PyObjectP, make_ref, from_ref +from pypy.module.cpyext.pyobject import as_pyobj +from pypy.module.cpyext.tupleobject import PyTupleObject from pypy.module.cpyext.test.test_api import BaseApiTest from rpython.rtyper.lltypesystem import rffi, lltype @@ -10,25 +12,35 @@ def test_tupleobject(self, space, api): assert not api.PyTuple_Check(space.w_None) assert api.PyTuple_SetItem(space.w_None, 0, space.w_None) == -1 - atuple = space.newtuple([0, 1, 'yay']) + atuple = space.newtuple([space.wrap(0), space.wrap(1), + space.wrap('yay')]) assert api.PyTuple_Size(atuple) == 3 - assert api.PyTuple_GET_SIZE(atuple) == 3 + #assert api.PyTuple_GET_SIZE(atuple) == 3 --- now a C macro 
raises(TypeError, api.PyTuple_Size(space.newlist([]))) api.PyErr_Clear() def test_tuple_resize(self, space, api): + w_42 = space.wrap(42) + ar = lltype.malloc(PyObjectP.TO, 1, flavor='raw') + py_tuple = api.PyTuple_New(3) - ar = lltype.malloc(PyObjectP.TO, 1, flavor='raw') - ar[0] = rffi.cast(PyObject, make_ref(space, py_tuple)) + rffi.cast(PyTupleObject, py_tuple).c_ob_item[0] = as_pyobj(space, w_42) + ar[0] = py_tuple api._PyTuple_Resize(ar, 2) - py_tuple = from_ref(space, ar[0]) - assert space.int_w(space.len(py_tuple)) == 2 - + w_tuple = from_ref(space, ar[0]) + assert space.int_w(space.len(w_tuple)) == 2 + assert space.int_w(space.getitem(w_tuple, space.wrap(0))) == 42 + api.Py_DecRef(ar[0]) + + py_tuple = api.PyTuple_New(3) + rffi.cast(PyTupleObject, py_tuple).c_ob_item[0] = as_pyobj(space, w_42) + ar[0] = py_tuple api._PyTuple_Resize(ar, 10) - py_tuple = from_ref(space, ar[0]) - assert space.int_w(space.len(py_tuple)) == 10 - + w_tuple = from_ref(space, ar[0]) + assert space.int_w(space.len(w_tuple)) == 10 + assert space.int_w(space.getitem(w_tuple, space.wrap(0))) == 42 api.Py_DecRef(ar[0]) + lltype.free(ar, flavor='raw') def test_setitem(self, space, api): diff --git a/pypy/module/cpyext/tupleobject.py b/pypy/module/cpyext/tupleobject.py --- a/pypy/module/cpyext/tupleobject.py +++ b/pypy/module/cpyext/tupleobject.py @@ -1,59 +1,166 @@ from pypy.interpreter.error import OperationError from rpython.rtyper.lltypesystem import rffi, lltype from pypy.module.cpyext.api import (cpython_api, Py_ssize_t, CANNOT_FAIL, - build_type_checkers) + build_type_checkers, PyObjectFields, + cpython_struct, bootstrap_function) from pypy.module.cpyext.pyobject import (PyObject, PyObjectP, Py_DecRef, - borrow_from, make_ref, from_ref) + borrow_from, make_ref, from_ref, decref, + track_reference, make_typedescr, get_typedescr) from pypy.module.cpyext.pyerrors import PyErr_BadInternalCall from pypy.objspace.std.tupleobject import W_TupleObject +## +## Implementation of 
PyTupleObject +## =============================== +## +## Similar to stringobject.py. The reason is only the existance of +## W_SpecialisedTupleObject_ii and W_SpecialisedTupleObject_ff. +## These two PyPy classes implement getitem() by returning a freshly +## constructed W_IntObject or W_FloatObject. This is not compatible +## with PyTuple_GetItem, which returns a borrowed reference. +## +## So we use this more advanced (but also likely faster) solution: +## tuple_attach makes a real PyTupleObject with an array of N +## 'PyObject *', which are created immediately and own a reference. +## Then the macro PyTuple_GET_ITEM can be implemented like CPython. +## + +PyTupleObjectStruct = lltype.ForwardReference() +PyTupleObject = lltype.Ptr(PyTupleObjectStruct) +ObjectItems = rffi.CArray(PyObject) +PyTupleObjectFields = PyObjectFields + \ + (("ob_size", Py_ssize_t), ("ob_item", lltype.Ptr(ObjectItems))) +cpython_struct("PyTupleObject", PyTupleObjectFields, PyTupleObjectStruct) + + at bootstrap_function +def init_stringobject(space): + "Type description of PyTupleObject" + make_typedescr(space.w_tuple.instancetypedef, + basestruct=PyTupleObject.TO, + attach=tuple_attach, + dealloc=tuple_dealloc, + realize=tuple_realize) + PyTuple_Check, PyTuple_CheckExact = build_type_checkers("Tuple") +def tuple_check_ref(space, ref): + w_type = from_ref(space, rffi.cast(PyObject, ref.c_ob_type)) + return (w_type is space.w_tuple or + space.is_true(space.issubtype(w_type, space.w_tuple))) + +def new_empty_tuple(space, length): + """ + Allocate a PyTupleObject and its array of PyObject *, but without a + corresponding interpreter object. The array may be mutated, until + tuple_realize() is called. 
+ """ + typedescr = get_typedescr(space.w_tuple.instancetypedef) + py_obj = typedescr.allocate(space, space.w_tuple) + py_tup = rffi.cast(PyTupleObject, py_obj) + + py_tup.c_ob_item = lltype.malloc(ObjectItems, length, + flavor='raw', zero=True) + py_tup.c_ob_size = length + return py_tup + +def tuple_attach(space, py_obj, w_obj): + """ + Fills a newly allocated PyTupleObject with the given tuple object. The + buffer must not be modified. + """ + items_w = space.fixedview(w_obj) + l = len(items_w) + p = lltype.malloc(ObjectItems, l, flavor='raw') + i = 0 + try: + while i < l: + p[i] = make_ref(space, items_w[i]) + i += 1 + except: + while i > 0: + i -= 1 + decref(space, p[i]) + lltype.free(p, flavor='raw') + raise + py_tup = rffi.cast(PyTupleObject, py_obj) + py_tup.c_ob_size = l + py_tup.c_ob_item = p + +def tuple_realize(space, py_obj): + """ + Creates the tuple in the interpreter. The PyTupleObject must not + be modified after this call. + """ + py_tup = rffi.cast(PyTupleObject, py_obj) + l = py_tup.c_ob_size + p = py_tup.c_ob_item + items_w = [None] * l + for i in range(l): + items_w[i] = from_ref(space, p[i]) + w_obj = space.newtuple(items_w) + track_reference(space, py_obj, w_obj) + return w_obj + + at cpython_api([PyObject], lltype.Void, external=False) +def tuple_dealloc(space, py_obj): + """Frees allocated PyTupleObject resources. 
+ """ + py_tup = rffi.cast(PyTupleObject, py_obj) + p = py_tup.c_ob_item + if p: + for i in range(py_tup.c_ob_size): + decref(space, p[i]) + lltype.free(p, flavor="raw") + from pypy.module.cpyext.object import PyObject_dealloc + PyObject_dealloc(space, py_obj) + +#_______________________________________________________________________ + @cpython_api([Py_ssize_t], PyObject) def PyTuple_New(space, size): - return W_TupleObject([space.w_None] * size) + return rffi.cast(PyObject, new_empty_tuple(space, size)) @cpython_api([PyObject, Py_ssize_t, PyObject], rffi.INT_real, error=-1) -def PyTuple_SetItem(space, w_t, pos, w_obj): - if not PyTuple_Check(space, w_t): - # XXX this should also steal a reference, test it!!! +def PyTuple_SetItem(space, ref, index, py_obj): + # XXX this will not complain when changing tuples that have + # already been realized as a W_TupleObject, but won't update the + # W_TupleObject + if not tuple_check_ref(space, ref): + decref(space, py_obj) PyErr_BadInternalCall(space) - _setitem_tuple(w_t, pos, w_obj) - Py_DecRef(space, w_obj) # SetItem steals a reference! + ref = rffi.cast(PyTupleObject, ref) + size = ref.c_ob_size + if index < 0 or index >= size: + raise OperationError(space.w_IndexError, + space.wrap("tuple assignment index out of range")) + old_ref = ref.c_ob_item[index] + ref.c_ob_item[index] = py_obj # consumes a reference + if old_ref: + decref(space, old_ref) return 0 -def _setitem_tuple(w_t, pos, w_obj): - # this function checks that w_t is really a W_TupleObject. It - # should only ever be called with a freshly built tuple from - # PyTuple_New(), which always return a W_TupleObject, even if there - # are also other implementations of tuples. 
- assert isinstance(w_t, W_TupleObject) - w_t.wrappeditems[pos] = w_obj - - at cpython_api([PyObject, Py_ssize_t], PyObject) -def PyTuple_GetItem(space, w_t, pos): - if not PyTuple_Check(space, w_t): + at cpython_api([PyObject, Py_ssize_t], PyObject, result_borrowed=True) +def PyTuple_GetItem(space, ref, index): + if not tuple_check_ref(space, ref): PyErr_BadInternalCall(space) - w_obj = space.getitem(w_t, space.wrap(pos)) - return borrow_from(w_t, w_obj) - - at cpython_api([PyObject], Py_ssize_t, error=CANNOT_FAIL) -def PyTuple_GET_SIZE(space, w_t): - """Return the size of the tuple p, which must be non-NULL and point to a tuple; - no error checking is performed. """ - return space.int_w(space.len(w_t)) + ref = rffi.cast(PyTupleObject, ref) + size = ref.c_ob_size + if index < 0 or index >= size: + raise OperationError(space.w_IndexError, + space.wrap("tuple index out of range")) + return ref.c_ob_item[index] @cpython_api([PyObject], Py_ssize_t, error=-1) def PyTuple_Size(space, ref): """Take a pointer to a tuple object, and return the size of that tuple.""" - if not PyTuple_Check(space, ref): - raise OperationError(space.w_TypeError, - space.wrap("expected tuple object")) - return PyTuple_GET_SIZE(space, ref) + if not tuple_check_ref(space, ref): + PyErr_BadInternalCall(space) + ref = rffi.cast(PyTupleObject, ref) + return ref.c_ob_size @cpython_api([PyObjectP, Py_ssize_t], rffi.INT_real, error=-1) -def _PyTuple_Resize(space, ref, newsize): +def _PyTuple_Resize(space, p_ref, newsize): """Can be used to resize a tuple. newsize will be the new length of the tuple. Because tuples are supposed to be immutable, this should only be used if there is only one reference to the object. Do not use this if the tuple may already @@ -64,19 +171,27 @@ this function. If the object referenced by *p is replaced, the original *p is destroyed. 
On failure, returns -1 and sets *p to NULL, and raises MemoryError or SystemError.""" - py_tuple = from_ref(space, ref[0]) - if not PyTuple_Check(space, py_tuple): + ref = p_ref[0] + if not tuple_check_ref(space, ref): PyErr_BadInternalCall(space) - py_newtuple = PyTuple_New(space, newsize) - - to_cp = newsize - oldsize = space.int_w(space.len(py_tuple)) - if oldsize < newsize: - to_cp = oldsize - for i in range(to_cp): - _setitem_tuple(py_newtuple, i, space.getitem(py_tuple, space.wrap(i))) - Py_DecRef(space, ref[0]) - ref[0] = make_ref(space, py_newtuple) + ref = rffi.cast(PyTupleObject, ref) + oldsize = ref.c_ob_size + oldp = ref.c_ob_item + newp = lltype.malloc(ObjectItems, newsize, zero=True, flavor='raw') + try: + if oldsize < newsize: + to_cp = oldsize + else: + to_cp = newsize + for i in range(to_cp): + newp[i] = oldp[i] + except: + lltype.free(newp, flavor='raw') + raise + ref.c_ob_item = newp + ref.c_ob_size = newsize + lltype.free(oldp, flavor='raw') + # in this version, p_ref[0] never needs to be updated return 0 @cpython_api([PyObject, Py_ssize_t, Py_ssize_t], PyObject) From pypy.commits at gmail.com Sat Feb 13 10:11:22 2016 From: pypy.commits at gmail.com (arigo) Date: Sat, 13 Feb 2016 07:11:22 -0800 (PST) Subject: [pypy-commit] pypy default: merge heads Message-ID: <56bf479a.50371c0a.a238a.7532@mx.google.com> Author: Armin Rigo Branch: Changeset: r82217:df6d9eeb2d42 Date: 2016-02-13 16:10 +0100 http://bitbucket.org/pypy/pypy/changeset/df6d9eeb2d42/ Log: merge heads diff --git a/pypy/config/pypyoption.py b/pypy/config/pypyoption.py --- a/pypy/config/pypyoption.py +++ b/pypy/config/pypyoption.py @@ -36,13 +36,13 @@ "cStringIO", "thread", "itertools", "pyexpat", "_ssl", "cpyext", "array", "binascii", "_multiprocessing", '_warnings', "_collections", "_multibytecodec", "micronumpy", "_continuation", "_cffi_backend", - "_csv", "cppyy", "_pypyjson" + "_csv", "cppyy", "_pypyjson", "_vmprof", ]) -if ((sys.platform.startswith('linux') or sys.platform == 
'darwin') - and os.uname()[4] == 'x86_64' and sys.maxint > 2**32): +#if ((sys.platform.startswith('linux') or sys.platform == 'darwin') +# and os.uname()[4] == 'x86_64' and sys.maxint > 2**32): # it's not enough that we get x86_64 - working_modules.add('_vmprof') +# working_modules.add('_vmprof') translation_modules = default_modules.copy() translation_modules.update([ diff --git a/pypy/interpreter/baseobjspace.py b/pypy/interpreter/baseobjspace.py --- a/pypy/interpreter/baseobjspace.py +++ b/pypy/interpreter/baseobjspace.py @@ -306,7 +306,7 @@ return None -class W_InterpIterable(W_Root): +class InterpIterable(object): def __init__(self, space, w_iterable): self.w_iter = space.iter(w_iterable) self.space = space @@ -745,9 +745,13 @@ return self.int_w(self.hash(w_obj)) def len_w(self, w_obj): - """shotcut for space.int_w(space.len(w_obj))""" + """shortcut for space.int_w(space.len(w_obj))""" return self.int_w(self.len(w_obj)) + def contains_w(self, w_container, w_item): + """shortcut for space.is_true(space.contains(w_container, w_item))""" + return self.is_true(self.contains(w_container, w_item)) + def setitem_str(self, w_obj, key, w_value): return self.setitem(w_obj, self.wrap(key), w_value) @@ -846,7 +850,7 @@ return lst_w[:] # make the resulting list resizable def iteriterable(self, w_iterable): - return W_InterpIterable(self, w_iterable) + return InterpIterable(self, w_iterable) def _unpackiterable_unknown_length(self, w_iterator, w_iterable): """Unpack an iterable of unknown length into an interp-level @@ -1237,7 +1241,7 @@ if not isinstance(statement, PyCode): raise TypeError('space.exec_(): expected a string, code or PyCode object') w_key = self.wrap('__builtins__') - if not self.is_true(self.contains(w_globals, w_key)): + if not self.contains_w(w_globals, w_key): self.setitem(w_globals, w_key, self.wrap(self.builtin)) return statement.exec_code(self, w_globals, w_locals) diff --git a/pypy/module/__builtin__/interp_classobj.py 
b/pypy/module/__builtin__/interp_classobj.py --- a/pypy/module/__builtin__/interp_classobj.py +++ b/pypy/module/__builtin__/interp_classobj.py @@ -20,7 +20,7 @@ if not space.isinstance_w(w_dict, space.w_dict): raise_type_err(space, 'bases', 'tuple', w_bases) - if not space.is_true(space.contains(w_dict, space.wrap("__doc__"))): + if not space.contains_w(w_dict, space.wrap("__doc__")): space.setitem(w_dict, space.wrap("__doc__"), space.w_None) # XXX missing: lengthy and obscure logic about "__module__" diff --git a/pypy/module/_demo/test/test_import.py b/pypy/module/_demo/test/test_import.py --- a/pypy/module/_demo/test/test_import.py +++ b/pypy/module/_demo/test/test_import.py @@ -12,8 +12,7 @@ w_modules = space.sys.get('modules') assert _demo.Module.demo_events == ['setup'] - assert not space.is_true(space.contains(w_modules, - space.wrap('_demo'))) + assert not space.contains_w(w_modules, space.wrap('_demo')) # first import w_import = space.builtin.get('__import__') diff --git a/pypy/module/_vmprof/interp_vmprof.py b/pypy/module/_vmprof/interp_vmprof.py --- a/pypy/module/_vmprof/interp_vmprof.py +++ b/pypy/module/_vmprof/interp_vmprof.py @@ -60,7 +60,7 @@ Must be smaller than 1.0 """ w_modules = space.sys.get('modules') - if space.is_true(space.contains(w_modules, space.wrap('_continuation'))): + if space.contains_w(w_modules, space.wrap('_continuation')): space.warn(space.wrap("Using _continuation/greenlet/stacklet together " "with vmprof will crash"), space.w_RuntimeWarning) diff --git a/pypy/module/cpyext/test/test_dictobject.py b/pypy/module/cpyext/test/test_dictobject.py --- a/pypy/module/cpyext/test/test_dictobject.py +++ b/pypy/module/cpyext/test/test_dictobject.py @@ -146,7 +146,7 @@ def test_dictproxy(self, space, api): w_dict = space.sys.get('modules') w_proxy = api.PyDictProxy_New(w_dict) - assert space.is_true(space.contains(w_proxy, space.wrap('sys'))) + assert space.contains_w(w_proxy, space.wrap('sys')) raises(OperationError, space.setitem, 
w_proxy, space.wrap('sys'), space.w_None) raises(OperationError, space.delitem, diff --git a/pypy/module/cpyext/test/test_import.py b/pypy/module/cpyext/test/test_import.py --- a/pypy/module/cpyext/test/test_import.py +++ b/pypy/module/cpyext/test/test_import.py @@ -21,7 +21,7 @@ def test_getmoduledict(self, space, api): testmod = "_functools" w_pre_dict = api.PyImport_GetModuleDict() - assert not space.is_true(space.contains(w_pre_dict, space.wrap(testmod))) + assert not space.contains_w(w_pre_dict, space.wrap(testmod)) with rffi.scoped_str2charp(testmod) as modname: w_module = api.PyImport_ImportModule(modname) @@ -29,7 +29,7 @@ assert w_module w_dict = api.PyImport_GetModuleDict() - assert space.is_true(space.contains(w_dict, space.wrap(testmod))) + assert space.contains_w(w_dict, space.wrap(testmod)) def test_reload(self, space, api): stat = api.PyImport_Import(space.wrap("stat")) diff --git a/pypy/module/cpyext/test/test_object.py b/pypy/module/cpyext/test/test_object.py --- a/pypy/module/cpyext/test/test_object.py +++ b/pypy/module/cpyext/test/test_object.py @@ -202,7 +202,7 @@ def test_dir(self, space, api): w_dir = api.PyObject_Dir(space.sys) assert space.isinstance_w(w_dir, space.w_list) - assert space.is_true(space.contains(w_dir, space.wrap('modules'))) + assert space.contains_w(w_dir, space.wrap('modules')) class AppTestObject(AppTestCpythonExtensionBase): def setup_class(cls): diff --git a/pypy/objspace/std/dictmultiobject.py b/pypy/objspace/std/dictmultiobject.py --- a/pypy/objspace/std/dictmultiobject.py +++ b/pypy/objspace/std/dictmultiobject.py @@ -1419,9 +1419,8 @@ return space.len(self.w_dict) def _all_contained_in(space, w_dictview, w_other): - w_iter = space.iter(w_dictview) - for w_item in space.iteriterable(w_iter): - if not space.is_true(space.contains(w_other, w_item)): + for w_item in space.iteriterable(w_dictview): + if not space.contains_w(w_other, w_item): return space.w_False return space.w_True diff --git 
a/rpython/jit/backend/llsupport/test/zrpy_vmprof_test.py b/rpython/jit/backend/llsupport/test/zrpy_vmprof_test.py --- a/rpython/jit/backend/llsupport/test/zrpy_vmprof_test.py +++ b/rpython/jit/backend/llsupport/test/zrpy_vmprof_test.py @@ -3,16 +3,13 @@ from rpython.jit.backend.test.support import CCompiledMixin from rpython.rlib.jit import JitDriver from rpython.tool.udir import udir +from rpython.rlib import rthread from rpython.translator.translator import TranslationContext from rpython.jit.backend.detect_cpu import getcpuclass class CompiledVmprofTest(CCompiledMixin): CPUClass = getcpuclass() - def setup(self): - if self.CPUClass.backend_name != 'x86_64': - py.test.skip("vmprof only supports x86-64 CPUs at the moment") - def _get_TranslationContext(self): t = TranslationContext() t.config.translation.gc = 'incminimark' @@ -62,6 +59,7 @@ tmpfilename = str(udir.join('test_rvmprof')) def f(num): + rthread.get_ident() # register TLOFS_thread_ident code = MyCode("py:x:foo:3") rvmprof.register_code(code, get_name) fd = os.open(tmpfilename, os.O_WRONLY | os.O_CREAT, 0666) diff --git a/rpython/rlib/rvmprof/cintf.py b/rpython/rlib/rvmprof/cintf.py --- a/rpython/rlib/rvmprof/cintf.py +++ b/rpython/rlib/rvmprof/cintf.py @@ -30,11 +30,11 @@ def setup(): + compile_extra = ['-DRPYTHON_LL2CTYPES'] platform.verify_eci(ExternalCompilationInfo( - compile_extra=['-DRPYTHON_LL2CTYPES'], + compile_extra=compile_extra, **eci_kwds)) - eci = global_eci vmprof_init = rffi.llexternal("vmprof_init", [rffi.INT, rffi.DOUBLE, rffi.CCHARP], diff --git a/rpython/rlib/rvmprof/src/rvmprof.c b/rpython/rlib/rvmprof/src/rvmprof.c --- a/rpython/rlib/rvmprof/src/rvmprof.c +++ b/rpython/rlib/rvmprof/src/rvmprof.c @@ -1,23 +1,21 @@ #define _GNU_SOURCE 1 - #ifdef RPYTHON_LL2CTYPES /* only for testing: ll2ctypes sets RPY_EXTERN from the command-line */ -# ifndef RPY_EXTERN -# define RPY_EXTERN RPY_EXPORTED -# endif -# define RPY_EXPORTED extern __attribute__((visibility("default"))) -# define 
VMPROF_ADDR_OF_TRAMPOLINE(addr) 0 +#ifndef RPY_EXTERN +#define RPY_EXTERN RPY_EXPORTED +#endif +#ifdef _WIN32 +#define RPY_EXPORTED __declspec(dllexport) +#else +#define RPY_EXPORTED extern __attribute__((visibility("default"))) +#endif #else - # include "common_header.h" # include "structdef.h" # include "src/threadlocal.h" # include "rvmprof.h" -/*# ifndef VMPROF_ADDR_OF_TRAMPOLINE -# error "RPython program using rvmprof, but not calling vmprof_execute_code()" -# endif*/ #endif diff --git a/rpython/rlib/rvmprof/src/vmprof_common.h b/rpython/rlib/rvmprof/src/vmprof_common.h --- a/rpython/rlib/rvmprof/src/vmprof_common.h +++ b/rpython/rlib/rvmprof/src/vmprof_common.h @@ -7,9 +7,6 @@ static long profile_interval_usec = 0; static int opened_profile(char *interp_name); -#define MAX_STACK_DEPTH \ - ((SINGLE_BUF_SIZE - sizeof(struct prof_stacktrace_s)) / sizeof(void *)) - #define MARKER_STACKTRACE '\x01' #define MARKER_VIRTUAL_IP '\x02' #define MARKER_TRAILER '\x03' @@ -20,6 +17,9 @@ #define VERSION_THREAD_ID '\x01' #define VERSION_TAG '\x02' +#define MAX_STACK_DEPTH \ + ((SINGLE_BUF_SIZE - sizeof(struct prof_stacktrace_s)) / sizeof(void *)) + typedef struct prof_stacktrace_s { char padding[sizeof(long) - 1]; char marker; @@ -71,6 +71,43 @@ return _write_all((char*)&header, 5 * sizeof(long) + 4 + namelen); } +/* ************************************************************* + * functions to dump the stack trace + * ************************************************************* + */ + + +static int get_stack_trace(vmprof_stack_t* stack, intptr_t *result, int max_depth, intptr_t pc) +{ + int n = 0; + intptr_t addr = 0; + int bottom_jitted = 0; + // check if the pc is in JIT +#ifdef PYPY_JIT_CODEMAP + if (pypy_find_codemap_at_addr((intptr_t)pc, &addr)) { + // the bottom part is jitted, means we can fill up the first part + // from the JIT + n = vmprof_write_header_for_jit_addr(result, n, pc, max_depth); + stack = stack->next; // skip the first item as it contains garbage + } 
+#endif + while (n < max_depth - 1 && stack) { + if (stack->kind == VMPROF_CODE_TAG) { + result[n] = stack->kind; + result[n + 1] = stack->value; + n += 2; + } +#ifdef PYPY_JIT_CODEMAP + else if (stack->kind == VMPROF_JITTED_TAG) { + pc = ((intptr_t*)(stack->value - sizeof(intptr_t)))[0]; + n = vmprof_write_header_for_jit_addr(result, n, pc, max_depth); + } +#endif + stack = stack->next; + } + return n; +} + #ifndef RPYTHON_LL2CTYPES static vmprof_stack_t *get_vmprof_stack(void) { diff --git a/rpython/rlib/rvmprof/src/vmprof_main.h b/rpython/rlib/rvmprof/src/vmprof_main.h --- a/rpython/rlib/rvmprof/src/vmprof_main.h +++ b/rpython/rlib/rvmprof/src/vmprof_main.h @@ -35,6 +35,7 @@ #include "vmprof_stack.h" #include "vmprof_getpc.h" #include "vmprof_mt.h" +#include "vmprof_get_custom_offset.h" #include "vmprof_common.h" /************************************************************/ @@ -78,46 +79,6 @@ static char atfork_hook_installed = 0; -#include "vmprof_get_custom_offset.h" - -/* ************************************************************* - * functions to dump the stack trace - * ************************************************************* - */ - - -static int get_stack_trace(intptr_t *result, int max_depth, intptr_t pc, ucontext_t *ucontext) -{ - vmprof_stack_t* stack = get_vmprof_stack(); - int n = 0; - intptr_t addr = 0; - int bottom_jitted = 0; - // check if the pc is in JIT -#ifdef PYPY_JIT_CODEMAP - if (pypy_find_codemap_at_addr((intptr_t)pc, &addr)) { - // the bottom part is jitted, means we can fill up the first part - // from the JIT - n = vmprof_write_header_for_jit_addr(result, n, pc, max_depth); - stack = stack->next; // skip the first item as it contains garbage - } -#endif - while (n < max_depth - 1 && stack) { - if (stack->kind == VMPROF_CODE_TAG) { - result[n] = stack->kind; - result[n + 1] = stack->value; - n += 2; - } -#ifdef PYPY_JIT_CODEMAP - else if (stack->kind == VMPROF_JITTED_TAG) { - pc = ((intptr_t*)(stack->value - sizeof(intptr_t)))[0]; 
- n = vmprof_write_header_for_jit_addr(result, n, pc, max_depth); - } -#endif - stack = stack->next; - } - return n; -} - static intptr_t get_current_thread_id(void) { /* xxx This function is a hack on two fronts: @@ -194,8 +155,8 @@ struct prof_stacktrace_s *st = (struct prof_stacktrace_s *)p->data; st->marker = MARKER_STACKTRACE; st->count = 1; - depth = get_stack_trace(st->stack, - MAX_STACK_DEPTH-2, GetPC((ucontext_t*)ucontext), ucontext); + depth = get_stack_trace(get_vmprof_stack(), st->stack, + MAX_STACK_DEPTH-2, GetPC((ucontext_t*)ucontext)); st->depth = depth; st->stack[depth++] = get_current_thread_id(); p->data_offset = offsetof(struct prof_stacktrace_s, marker); diff --git a/rpython/rlib/rvmprof/src/vmprof_main_win32.h b/rpython/rlib/rvmprof/src/vmprof_main_win32.h --- a/rpython/rlib/rvmprof/src/vmprof_main_win32.h +++ b/rpython/rlib/rvmprof/src/vmprof_main_win32.h @@ -10,13 +10,30 @@ return 0; } +#if defined(_MSC_VER) +#include +typedef SSIZE_T ssize_t; +#endif + +#include +#include +#include +#include +#include +#include +#include +#include +#include #include "vmprof_stack.h" +#include "vmprof_get_custom_offset.h" #include "vmprof_common.h" #include // This file has been inspired (but not copied from since the LICENSE // would not allow it) from verysleepy profiler +#define SINGLE_BUF_SIZE 8192 + volatile int thread_started = 0; volatile int enabled = 0; @@ -55,52 +72,75 @@ return 0; } -int vmprof_snapshot_thread(DWORD thread_id, PyThreadState *tstate, prof_stacktrace_s *stack) +int vmprof_snapshot_thread(struct pypy_threadlocal_s *p, prof_stacktrace_s *stack) { - HRESULT result; - HANDLE hThread = OpenThread(THREAD_ALL_ACCESS, FALSE, thread_id); - int depth; + void *addr; + vmprof_stack_t *cur; + long tid; + HANDLE hThread; + long depth; + DWORD result; + CONTEXT ctx; + +#ifdef RPYTHON_LL2CTYPES + return 0; // not much we can do +#else +#ifndef RPY_TLOFS_thread_ident + return 0; // we can't freeze threads, unsafe +#else + hThread = 
OpenThread(THREAD_ALL_ACCESS, FALSE, p->thread_ident); if (!hThread) { return -1; } result = SuspendThread(hThread); if(result == 0xffffffff) return -1; // possible, e.g. attached debugger or thread alread suspended - // find the correct thread - depth = read_trace_from_cpy_frame(tstate->frame, stack->stack, - MAX_STACK_DEPTH); + ctx.ContextFlags = CONTEXT_FULL; + if (!GetThreadContext(hThread, &ctx)) + return -1; + depth = get_stack_trace(p->vmprof_tl_stack, + stack->stack, MAX_STACK_DEPTH-2, ctx.Eip); stack->depth = depth; - stack->stack[depth++] = (void*)thread_id; + stack->stack[depth++] = (void*)p->thread_ident; stack->count = 1; stack->marker = MARKER_STACKTRACE; ResumeThread(hThread); return depth; +#endif +#endif } long __stdcall vmprof_mainloop(void *arg) { +#ifndef RPYTHON_LL2CTYPES + struct pypy_threadlocal_s *p; prof_stacktrace_s *stack = (prof_stacktrace_s*)malloc(SINGLE_BUF_SIZE); - HANDLE hThreadSnap = INVALID_HANDLE_VALUE; int depth; - PyThreadState *tstate; while (1) { - Sleep(profile_interval_usec * 1000); + //Sleep(profile_interval_usec * 1000); + Sleep(10); if (!enabled) { continue; } - tstate = PyInterpreterState_Head()->tstate_head; - while (tstate) { - depth = vmprof_snapshot_thread(tstate->thread_id, tstate, stack); - if (depth > 0) { - _write_all((char*)stack + offsetof(prof_stacktrace_s, marker), - depth * sizeof(void *) + - sizeof(struct prof_stacktrace_s) - - offsetof(struct prof_stacktrace_s, marker)); + _RPython_ThreadLocals_Acquire(); + p = _RPython_ThreadLocals_Head(); // the first one is one behind head + p = _RPython_ThreadLocals_Enum(p); + while (p) { + if (p->ready == 42) { + depth = vmprof_snapshot_thread(p, stack); + if (depth > 0) { + _write_all((char*)stack + offsetof(prof_stacktrace_s, marker), + depth * sizeof(void *) + + sizeof(struct prof_stacktrace_s) - + offsetof(struct prof_stacktrace_s, marker)); + } } - tstate = tstate->next; + p = _RPython_ThreadLocals_Enum(p); } + _RPython_ThreadLocals_Release(); } +#endif } 
RPY_EXTERN diff --git a/rpython/rlib/rvmprof/src/vmprof_stack.h b/rpython/rlib/rvmprof/src/vmprof_stack.h --- a/rpython/rlib/rvmprof/src/vmprof_stack.h +++ b/rpython/rlib/rvmprof/src/vmprof_stack.h @@ -1,7 +1,11 @@ #ifndef _VMPROF_STACK_H_ #define _VMPROF_STACK_H_ +#ifdef _WIN32 +#define intptr_t long // XXX windows VC++ 2008 lacks stdint.h +#else #include +#endif #define VMPROF_CODE_TAG 1 /* <- also in cintf.py */ #define VMPROF_BLACKHOLE_TAG 2 diff --git a/rpython/rlib/rvmprof/test/test_ztranslation.py b/rpython/rlib/rvmprof/test/test_ztranslation.py --- a/rpython/rlib/rvmprof/test/test_ztranslation.py +++ b/rpython/rlib/rvmprof/test/test_ztranslation.py @@ -3,11 +3,10 @@ sys.path += ['../../../..'] # for subprocess in test_interpreted import py from rpython.tool.udir import udir -from rpython.rlib import rvmprof +from rpython.rlib import rvmprof, rthread from rpython.translator.c.test.test_genc import compile from rpython.rlib.nonconst import NonConstant - class MyCode: def __init__(self, count): self.count = count @@ -39,6 +38,7 @@ PROF_FILE = str(udir.join('test_ztranslation.prof')) def main(argv=[]): + rthread.get_ident() # force TLOFS_thread_ident if NonConstant(False): # Hack to give os.open() the correct annotation os.open('foo', 1, 1) diff --git a/rpython/translator/c/src/threadlocal.c b/rpython/translator/c/src/threadlocal.c --- a/rpython/translator/c/src/threadlocal.c +++ b/rpython/translator/c/src/threadlocal.c @@ -85,6 +85,11 @@ return prev->next; } +struct pypy_threadlocal_s *_RPython_ThreadLocals_Head(void) +{ + return &linkedlist_head; +} + static void _RPy_ThreadLocals_Init(void *p) { struct pypy_threadlocal_s *tls = (struct pypy_threadlocal_s *)p; diff --git a/rpython/translator/c/src/threadlocal.h b/rpython/translator/c/src/threadlocal.h --- a/rpython/translator/c/src/threadlocal.h +++ b/rpython/translator/c/src/threadlocal.h @@ -27,6 +27,9 @@ RPY_EXTERN struct pypy_threadlocal_s * _RPython_ThreadLocals_Enum(struct pypy_threadlocal_s *prev); +/* 
will return the head of the list */ +RPY_EXTERN struct pypy_threadlocal_s *_RPython_ThreadLocals_Head(); + #define OP_THREADLOCALREF_ACQUIRE(r) _RPython_ThreadLocals_Acquire() #define OP_THREADLOCALREF_RELEASE(r) _RPython_ThreadLocals_Release() #define OP_THREADLOCALREF_ENUM(p, r) r = _RPython_ThreadLocals_Enum(p) From pypy.commits at gmail.com Sat Feb 13 10:15:57 2016 From: pypy.commits at gmail.com (arigo) Date: Sat, 13 Feb 2016 07:15:57 -0800 (PST) Subject: [pypy-commit] pypy cpyext-gc-support-2: test_tuple_resize passes again Message-ID: <56bf48ad.046f1c0a.665af.76d8@mx.google.com> Author: Armin Rigo Branch: cpyext-gc-support-2 Changeset: r82218:400bb271c619 Date: 2016-02-13 16:15 +0100 http://bitbucket.org/pypy/pypy/changeset/400bb271c619/ Log: test_tuple_resize passes again diff --git a/pypy/module/cpyext/api.py b/pypy/module/cpyext/api.py --- a/pypy/module/cpyext/api.py +++ b/pypy/module/cpyext/api.py @@ -828,6 +828,7 @@ modulename = py.path.local(eci.libraries[-1]) def dealloc_trigger(): + from pypy.module.cpyext.pyobject import _Py_Dealloc print 'dealloc_trigger...' 
while True: ob = rawrefcount.next_dead(PyObject) diff --git a/pypy/module/cpyext/pyobject.py b/pypy/module/cpyext/pyobject.py --- a/pypy/module/cpyext/pyobject.py +++ b/pypy/module/cpyext/pyobject.py @@ -317,7 +317,7 @@ @cpython_api([PyObject], lltype.Void) def Py_IncRef(space, obj): - incref(obj) + incref(space, obj) @cpython_api([PyObject], lltype.Void) def Py_DecRef(space, obj): @@ -332,11 +332,11 @@ @cpython_api([PyObject], lltype.Void) def _Py_Dealloc(space, obj): - from pypy.module.cpyext.api import generic_cpy_call_dont_decref + from pypy.module.cpyext.api import generic_cpy_call pto = obj.c_ob_type #print >>sys.stderr, "Calling dealloc slot", pto.c_tp_dealloc, "of", obj, \ # "'s type which is", rffi.charp2str(pto.c_tp_name) - generic_cpy_call_dont_decref(space, pto.c_tp_dealloc, obj) + generic_cpy_call(space, pto.c_tp_dealloc, obj) @cpython_api([rffi.VOIDP], lltype.Signed, error=CANNOT_FAIL) def _Py_HashPointer(space, ptr): From pypy.commits at gmail.com Sat Feb 13 10:28:07 2016 From: pypy.commits at gmail.com (arigo) Date: Sat, 13 Feb 2016 07:28:07 -0800 (PST) Subject: [pypy-commit] pypy cpyext-gc-support-2: fix this test: can't any more call PyTuple_SetItem() on already-realized tuples Message-ID: <56bf4b87.53ad1c0a.6df8c.76b6@mx.google.com> Author: Armin Rigo Branch: cpyext-gc-support-2 Changeset: r82219:4564af2cb7ed Date: 2016-02-13 16:27 +0100 http://bitbucket.org/pypy/pypy/changeset/4564af2cb7ed/ Log: fix this test: can't any more call PyTuple_SetItem() on already- realized tuples diff --git a/pypy/module/cpyext/test/test_tupleobject.py b/pypy/module/cpyext/test/test_tupleobject.py --- a/pypy/module/cpyext/test/test_tupleobject.py +++ b/pypy/module/cpyext/test/test_tupleobject.py @@ -44,16 +44,13 @@ lltype.free(ar, flavor='raw') def test_setitem(self, space, api): - atuple = space.newtuple([space.wrap(0), space.wrap("hello")]) - assert api.PyTuple_Size(atuple) == 2 - assert space.eq_w(space.getitem(atuple, space.wrap(0)), space.wrap(0)) - assert 
space.eq_w(space.getitem(atuple, space.wrap(1)), space.wrap("hello")) - w_obj = space.wrap(1) - api.Py_IncRef(w_obj) - api.PyTuple_SetItem(atuple, 1, w_obj) - assert api.PyTuple_Size(atuple) == 2 - assert space.eq_w(space.getitem(atuple, space.wrap(0)), space.wrap(0)) - assert space.eq_w(space.getitem(atuple, space.wrap(1)), space.wrap(1)) + py_tuple = api.PyTuple_New(2) + api.PyTuple_SetItem(py_tuple, 0, make_ref(space, space.wrap(42))) + api.PyTuple_SetItem(py_tuple, 1, make_ref(space, space.wrap(43))) + + w_tuple = from_ref(space, py_tuple) + assert space.eq_w(w_tuple, space.newtuple([space.wrap(42), + space.wrap(43)])) def test_getslice(self, space, api): w_tuple = space.newtuple([space.wrap(i) for i in range(10)]) From pypy.commits at gmail.com Sat Feb 13 10:33:44 2016 From: pypy.commits at gmail.com (arigo) Date: Sat, 13 Feb 2016 07:33:44 -0800 (PST) Subject: [pypy-commit] cffi default: Update version Message-ID: <56bf4cd8.077bc20a.f4074.033a@mx.google.com> Author: Armin Rigo Branch: Changeset: r2634:5800738d1322 Date: 2016-02-13 16:32 +0100 http://bitbucket.org/cffi/cffi/changeset/5800738d1322/ Log: Update version diff --git a/c/_cffi_backend.c b/c/_cffi_backend.c --- a/c/_cffi_backend.c +++ b/c/_cffi_backend.c @@ -2,7 +2,7 @@ #include #include "structmember.h" -#define CFFI_VERSION "1.5.1" +#define CFFI_VERSION "1.5.2" #ifdef MS_WIN32 #include diff --git a/c/test_c.py b/c/test_c.py --- a/c/test_c.py +++ b/c/test_c.py @@ -12,7 +12,7 @@ # ____________________________________________________________ import sys -assert __version__ == "1.5.1", ("This test_c.py file is for testing a version" +assert __version__ == "1.5.2", ("This test_c.py file is for testing a version" " of cffi that differs from the one that we" " get from 'import _cffi_backend'") if sys.version_info < (3,): diff --git a/cffi/__init__.py b/cffi/__init__.py --- a/cffi/__init__.py +++ b/cffi/__init__.py @@ -4,8 +4,8 @@ from .api import FFI, CDefError, FFIError from .ffiplatform import 
VerificationError, VerificationMissing -__version__ = "1.5.1" -__version_info__ = (1, 5, 1) +__version__ = "1.5.2" +__version_info__ = (1, 5, 2) # The verifier module file names are based on the CRC32 of a string that # contains the following version number. It may be older than __version__ diff --git a/cffi/_embedding.h b/cffi/_embedding.h --- a/cffi/_embedding.h +++ b/cffi/_embedding.h @@ -233,7 +233,7 @@ f = PySys_GetObject((char *)"stderr"); if (f != NULL && f != Py_None) { PyFile_WriteString("\nFrom: " _CFFI_MODULE_NAME - "\ncompiled with cffi version: 1.5.1" + "\ncompiled with cffi version: 1.5.2" "\n_cffi_backend module: ", f); modules = PyImport_GetModuleDict(); mod = PyDict_GetItemString(modules, "_cffi_backend"); diff --git a/doc/source/conf.py b/doc/source/conf.py --- a/doc/source/conf.py +++ b/doc/source/conf.py @@ -47,7 +47,7 @@ # The short X.Y version. version = '1.5' # The full version, including alpha/beta/rc tags. -release = '1.5.1' +release = '1.5.2' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. diff --git a/doc/source/installation.rst b/doc/source/installation.rst --- a/doc/source/installation.rst +++ b/doc/source/installation.rst @@ -51,11 +51,11 @@ Download and Installation: -* http://pypi.python.org/packages/source/c/cffi/cffi-1.5.1.tar.gz +* http://pypi.python.org/packages/source/c/cffi/cffi-1.5.2.tar.gz - - MD5: ac9a3c7724bd7a4f3c0d4f6ffef2c6c7 + - MD5: ... - - SHA: 933f7ea6f48ebbaf4794a1a807ae6f5ec232c83b + - SHA: ... * Or grab the most current version from the `Bitbucket page`_: ``hg clone https://bitbucket.org/cffi/cffi`` diff --git a/doc/source/whatsnew.rst b/doc/source/whatsnew.rst --- a/doc/source/whatsnew.rst +++ b/doc/source/whatsnew.rst @@ -3,6 +3,12 @@ ====================== +v1.5.2 +====== + +* Fix 1.5.1 for Python 2.6. 
+ + v1.5.1 ====== diff --git a/setup.py b/setup.py --- a/setup.py +++ b/setup.py @@ -144,7 +144,7 @@ `Mailing list `_ """, - version='1.5.1', + version='1.5.2', packages=['cffi'] if cpython else [], package_data={'cffi': ['_cffi_include.h', 'parse_c_type.h', '_embedding.h']} From pypy.commits at gmail.com Sat Feb 13 10:33:45 2016 From: pypy.commits at gmail.com (arigo) Date: Sat, 13 Feb 2016 07:33:45 -0800 (PST) Subject: [pypy-commit] cffi release-1.5: hg merge default Message-ID: <56bf4cd9.8205c20a.fec93.0a3f@mx.google.com> Author: Armin Rigo Branch: release-1.5 Changeset: r2635:b52caed9d9cf Date: 2016-02-13 16:32 +0100 http://bitbucket.org/cffi/cffi/changeset/b52caed9d9cf/ Log: hg merge default diff --git a/c/_cffi_backend.c b/c/_cffi_backend.c --- a/c/_cffi_backend.c +++ b/c/_cffi_backend.c @@ -2,7 +2,7 @@ #include #include "structmember.h" -#define CFFI_VERSION "1.5.1" +#define CFFI_VERSION "1.5.2" #ifdef MS_WIN32 #include diff --git a/c/test_c.py b/c/test_c.py --- a/c/test_c.py +++ b/c/test_c.py @@ -12,7 +12,7 @@ # ____________________________________________________________ import sys -assert __version__ == "1.5.1", ("This test_c.py file is for testing a version" +assert __version__ == "1.5.2", ("This test_c.py file is for testing a version" " of cffi that differs from the one that we" " get from 'import _cffi_backend'") if sys.version_info < (3,): diff --git a/cffi/__init__.py b/cffi/__init__.py --- a/cffi/__init__.py +++ b/cffi/__init__.py @@ -4,8 +4,8 @@ from .api import FFI, CDefError, FFIError from .ffiplatform import VerificationError, VerificationMissing -__version__ = "1.5.1" -__version_info__ = (1, 5, 1) +__version__ = "1.5.2" +__version_info__ = (1, 5, 2) # The verifier module file names are based on the CRC32 of a string that # contains the following version number. 
It may be older than __version__ diff --git a/cffi/_embedding.h b/cffi/_embedding.h --- a/cffi/_embedding.h +++ b/cffi/_embedding.h @@ -233,7 +233,7 @@ f = PySys_GetObject((char *)"stderr"); if (f != NULL && f != Py_None) { PyFile_WriteString("\nFrom: " _CFFI_MODULE_NAME - "\ncompiled with cffi version: 1.5.1" + "\ncompiled with cffi version: 1.5.2" "\n_cffi_backend module: ", f); modules = PyImport_GetModuleDict(); mod = PyDict_GetItemString(modules, "_cffi_backend"); diff --git a/cffi/api.py b/cffi/api.py --- a/cffi/api.py +++ b/cffi/api.py @@ -1,4 +1,4 @@ -import sys, sysconfig, types +import sys, types from .lock import allocate_lock try: @@ -571,6 +571,10 @@ if hasattr(sys, 'gettotalrefcount'): template += '_d' else: + try: + import sysconfig + except ImportError: # 2.6 + from distutils import sysconfig template = "python%d.%d" if sysconfig.get_config_var('DEBUG_EXT'): template += sysconfig.get_config_var('DEBUG_EXT') diff --git a/doc/source/conf.py b/doc/source/conf.py --- a/doc/source/conf.py +++ b/doc/source/conf.py @@ -47,7 +47,7 @@ # The short X.Y version. version = '1.5' # The full version, including alpha/beta/rc tags. -release = '1.5.1' +release = '1.5.2' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. diff --git a/doc/source/installation.rst b/doc/source/installation.rst --- a/doc/source/installation.rst +++ b/doc/source/installation.rst @@ -51,11 +51,11 @@ Download and Installation: -* http://pypi.python.org/packages/source/c/cffi/cffi-1.5.1.tar.gz +* http://pypi.python.org/packages/source/c/cffi/cffi-1.5.2.tar.gz - - MD5: ac9a3c7724bd7a4f3c0d4f6ffef2c6c7 + - MD5: ... - - SHA: 933f7ea6f48ebbaf4794a1a807ae6f5ec232c83b + - SHA: ... 
* Or grab the most current version from the `Bitbucket page`_: ``hg clone https://bitbucket.org/cffi/cffi`` diff --git a/doc/source/whatsnew.rst b/doc/source/whatsnew.rst --- a/doc/source/whatsnew.rst +++ b/doc/source/whatsnew.rst @@ -3,6 +3,12 @@ ====================== +v1.5.2 +====== + +* Fix 1.5.1 for Python 2.6. + + v1.5.1 ====== diff --git a/setup.py b/setup.py --- a/setup.py +++ b/setup.py @@ -144,7 +144,7 @@ `Mailing list `_ """, - version='1.5.1', + version='1.5.2', packages=['cffi'] if cpython else [], package_data={'cffi': ['_cffi_include.h', 'parse_c_type.h', '_embedding.h']} diff --git a/testing/cffi0/test_zintegration.py b/testing/cffi0/test_zintegration.py --- a/testing/cffi0/test_zintegration.py +++ b/testing/cffi0/test_zintegration.py @@ -4,6 +4,9 @@ if sys.platform == 'win32': py.test.skip('snippets do not run on win32') +if sys.version_info < (2, 7): + py.test.skip('fails e.g. on a Debian/Ubuntu which patches virtualenv' + ' in a non-2.6-friendly way') def create_venv(name): tmpdir = udir.join(name) From pypy.commits at gmail.com Sat Feb 13 10:33:42 2016 From: pypy.commits at gmail.com (arigo) Date: Sat, 13 Feb 2016 07:33:42 -0800 (PST) Subject: [pypy-commit] cffi default: Fix for Python 2.6 Message-ID: <56bf4cd6.a3f6c20a.6ba4d.0ce8@mx.google.com> Author: Armin Rigo Branch: Changeset: r2633:97b191f401bd Date: 2016-02-13 16:32 +0100 http://bitbucket.org/cffi/cffi/changeset/97b191f401bd/ Log: Fix for Python 2.6 diff --git a/cffi/api.py b/cffi/api.py --- a/cffi/api.py +++ b/cffi/api.py @@ -1,4 +1,4 @@ -import sys, sysconfig, types +import sys, types from .lock import allocate_lock try: @@ -571,6 +571,10 @@ if hasattr(sys, 'gettotalrefcount'): template += '_d' else: + try: + import sysconfig + except ImportError: # 2.6 + from distutils import sysconfig template = "python%d.%d" if sysconfig.get_config_var('DEBUG_EXT'): template += sysconfig.get_config_var('DEBUG_EXT') diff --git a/testing/cffi0/test_zintegration.py b/testing/cffi0/test_zintegration.py 
--- a/testing/cffi0/test_zintegration.py +++ b/testing/cffi0/test_zintegration.py @@ -4,6 +4,9 @@ if sys.platform == 'win32': py.test.skip('snippets do not run on win32') +if sys.version_info < (2, 7): + py.test.skip('fails e.g. on a Debian/Ubuntu which patches virtualenv' + ' in a non-2.6-friendly way') def create_venv(name): tmpdir = udir.join(name) From pypy.commits at gmail.com Sat Feb 13 10:35:06 2016 From: pypy.commits at gmail.com (arigo) Date: Sat, 13 Feb 2016 07:35:06 -0800 (PST) Subject: [pypy-commit] cffi release-1.5: md5/sha1 Message-ID: <56bf4d2a.07811c0a.89504.7910@mx.google.com> Author: Armin Rigo Branch: release-1.5 Changeset: r2636:815923bd8af3 Date: 2016-02-13 16:34 +0100 http://bitbucket.org/cffi/cffi/changeset/815923bd8af3/ Log: md5/sha1 diff --git a/doc/source/installation.rst b/doc/source/installation.rst --- a/doc/source/installation.rst +++ b/doc/source/installation.rst @@ -53,9 +53,9 @@ * http://pypi.python.org/packages/source/c/cffi/cffi-1.5.2.tar.gz - - MD5: ... + - MD5: fa766133f7299464c8bf857e0c966a82 - - SHA: ... + - SHA: 5239b3aa4f67eed3559c09778096ecd4faeca876 * Or grab the most current version from the `Bitbucket page`_: ``hg clone https://bitbucket.org/cffi/cffi`` From pypy.commits at gmail.com Sat Feb 13 10:35:08 2016 From: pypy.commits at gmail.com (arigo) Date: Sat, 13 Feb 2016 07:35:08 -0800 (PST) Subject: [pypy-commit] cffi default: hg merge release-1.5 Message-ID: <56bf4d2c.45941c0a.770c3.7cd8@mx.google.com> Author: Armin Rigo Branch: Changeset: r2637:4640c7bfb316 Date: 2016-02-13 16:34 +0100 http://bitbucket.org/cffi/cffi/changeset/4640c7bfb316/ Log: hg merge release-1.5 diff --git a/doc/source/installation.rst b/doc/source/installation.rst --- a/doc/source/installation.rst +++ b/doc/source/installation.rst @@ -53,9 +53,9 @@ * http://pypi.python.org/packages/source/c/cffi/cffi-1.5.2.tar.gz - - MD5: ... + - MD5: fa766133f7299464c8bf857e0c966a82 - - SHA: ... 
+ - SHA: 5239b3aa4f67eed3559c09778096ecd4faeca876 * Or grab the most current version from the `Bitbucket page`_: ``hg clone https://bitbucket.org/cffi/cffi`` From pypy.commits at gmail.com Sat Feb 13 10:43:05 2016 From: pypy.commits at gmail.com (arigo) Date: Sat, 13 Feb 2016 07:43:05 -0800 (PST) Subject: [pypy-commit] pypy cpyext-gc-support-2: fix test_pyerrors Message-ID: <56bf4f09.11301c0a.92238.7d32@mx.google.com> Author: Armin Rigo Branch: cpyext-gc-support-2 Changeset: r82220:f19a490ae47f Date: 2016-02-13 16:42 +0100 http://bitbucket.org/pypy/pypy/changeset/f19a490ae47f/ Log: fix test_pyerrors diff --git a/pypy/module/cpyext/pyerrors.py b/pypy/module/cpyext/pyerrors.py --- a/pypy/module/cpyext/pyerrors.py +++ b/pypy/module/cpyext/pyerrors.py @@ -28,12 +28,12 @@ """This is a shorthand for PyErr_SetObject(type, Py_None).""" PyErr_SetObject(space, w_type, space.w_None) - at cpython_api([], PyObject) + at cpython_api([], PyObject, result_borrowed=True) def PyErr_Occurred(space): state = space.fromcache(State) if state.operror is None: return None - return borrow_from(None, state.operror.w_type) + return state.operror.w_type @cpython_api([], lltype.Void) def PyErr_Clear(space): From pypy.commits at gmail.com Sat Feb 13 11:22:18 2016 From: pypy.commits at gmail.com (arigo) Date: Sat, 13 Feb 2016 08:22:18 -0800 (PST) Subject: [pypy-commit] pypy cpyext-gc-support-2: next fix Message-ID: <56bf583a.ca56c20a.75c8d.104b@mx.google.com> Author: Armin Rigo Branch: cpyext-gc-support-2 Changeset: r82221:5eb12aee635c Date: 2016-02-13 16:43 +0100 http://bitbucket.org/pypy/pypy/changeset/5eb12aee635c/ Log: next fix diff --git a/pypy/module/cpyext/pyobject.py b/pypy/module/cpyext/pyobject.py --- a/pypy/module/cpyext/pyobject.py +++ b/pypy/module/cpyext/pyobject.py @@ -61,9 +61,9 @@ w_type = from_ref(space, rffi.cast(PyObject, obj.c_ob_type)) w_obj = space.allocate_instance(self.W_BaseObject, w_type) track_reference(space, obj, w_obj) - if w_type is not 
space.gettypefor(self.W_BaseObject): - state = space.fromcache(RefcountState) - state.set_lifeline(w_obj, obj) + #if w_type is not space.gettypefor(self.W_BaseObject): + # state = space.fromcache(RefcountState) + # state.set_lifeline(w_obj, obj) return w_obj typedescr_cache = {} From pypy.commits at gmail.com Sat Feb 13 11:22:20 2016 From: pypy.commits at gmail.com (arigo) Date: Sat, 13 Feb 2016 08:22:20 -0800 (PST) Subject: [pypy-commit] pypy cpyext-gc-support-2: fix Message-ID: <56bf583c.25fac20a.7006c.129f@mx.google.com> Author: Armin Rigo Branch: cpyext-gc-support-2 Changeset: r82222:0656f5bc1381 Date: 2016-02-13 16:59 +0100 http://bitbucket.org/pypy/pypy/changeset/0656f5bc1381/ Log: fix diff --git a/pypy/module/cpyext/slotdefs.py b/pypy/module/cpyext/slotdefs.py --- a/pypy/module/cpyext/slotdefs.py +++ b/pypy/module/cpyext/slotdefs.py @@ -31,17 +31,16 @@ Py_GE = 5 -def check_num_args(space, ob, n): - from pypy.module.cpyext.tupleobject import PyTuple_CheckExact, \ - PyTuple_GET_SIZE - if not PyTuple_CheckExact(space, ob): +def check_num_args(space, w_ob, n): + from pypy.module.cpyext.tupleobject import PyTuple_CheckExact + if not PyTuple_CheckExact(space, w_ob): raise OperationError(space.w_SystemError, space.wrap("PyArg_UnpackTuple() argument list is not a tuple")) - if n == PyTuple_GET_SIZE(space, ob): + if n == space.len_w(w_ob): return raise oefmt(space.w_TypeError, "expected %d arguments, got %d", - n, PyTuple_GET_SIZE(space, ob)) + n, space.len_w(w_ob)) def wrap_init(space, w_self, w_args, func, w_kwargs): func_init = rffi.cast(initproc, func) From pypy.commits at gmail.com Sat Feb 13 11:22:22 2016 From: pypy.commits at gmail.com (arigo) Date: Sat, 13 Feb 2016 08:22:22 -0800 (PST) Subject: [pypy-commit] pypy cpyext-gc-support-2: kill test, borrowing as such no longer exists Message-ID: <56bf583e.d4e41c0a.29b09.ffff8393@mx.google.com> Author: Armin Rigo Branch: cpyext-gc-support-2 Changeset: r82223:ecb4ebcdb9e4 Date: 2016-02-13 16:59 +0100 
http://bitbucket.org/pypy/pypy/changeset/ecb4ebcdb9e4/ Log: kill test, borrowing as such no longer exists diff --git a/pypy/module/cpyext/test/test_borrow.py b/pypy/module/cpyext/test/test_borrow.py --- a/pypy/module/cpyext/test/test_borrow.py +++ b/pypy/module/cpyext/test/test_borrow.py @@ -4,17 +4,6 @@ from pypy.module.cpyext.pyobject import make_ref, borrow_from, RefcountState -class TestBorrowing(BaseApiTest): - def test_borrowing(self, space, api): - w_int = space.wrap(1) - w_tuple = space.newtuple([w_int]) - api.Py_IncRef(w_tuple) - one_pyo = borrow_from(w_tuple, w_int).get_ref(space) - api.Py_DecRef(w_tuple) - state = space.fromcache(RefcountState) - state.print_refcounts() - py.test.raises(AssertionError, api.Py_DecRef, one_pyo) - class AppTestBorrow(AppTestCpythonExtensionBase): def test_tuple_borrowing(self): module = self.import_extension('foo', [ From pypy.commits at gmail.com Sat Feb 13 11:22:24 2016 From: pypy.commits at gmail.com (arigo) Date: Sat, 13 Feb 2016 08:22:24 -0800 (PST) Subject: [pypy-commit] pypy cpyext-gc-support-2: A direct test for the refcount management of tuples Message-ID: <56bf5840.cb371c0a.4fc4.ffff8979@mx.google.com> Author: Armin Rigo Branch: cpyext-gc-support-2 Changeset: r82224:973d9b955039 Date: 2016-02-13 17:08 +0100 http://bitbucket.org/pypy/pypy/changeset/973d9b955039/ Log: A direct test for the refcount management of tuples diff --git a/pypy/module/cpyext/test/test_tupleobject.py b/pypy/module/cpyext/test/test_tupleobject.py --- a/pypy/module/cpyext/test/test_tupleobject.py +++ b/pypy/module/cpyext/test/test_tupleobject.py @@ -4,6 +4,7 @@ from pypy.module.cpyext.pyobject import as_pyobj from pypy.module.cpyext.tupleobject import PyTupleObject from pypy.module.cpyext.test.test_api import BaseApiTest +from pypy.module.cpyext.test.test_cpyext import AppTestCpythonExtensionBase from rpython.rtyper.lltypesystem import rffi, lltype @@ -58,3 +59,45 @@ assert space.eq_w(w_slice, space.newtuple([space.wrap(i) for i in range(3, 
7)])) + +class AppTestTuple(AppTestCpythonExtensionBase): + def test_refcounts(self): + module = self.import_extension('foo', [ + ("run", "METH_NOARGS", + """ + PyObject *item = PyTuple_New(0); + PyObject *t = PyTuple_New(1); + if (t->ob_refcnt != 1 || item->ob_refcnt != 1) { + PyErr_SetString(PyExc_SystemError, "bad initial refcnt"); + return NULL; + } + + PyTuple_SetItem(t, 0, item); + if (t->ob_refcnt != 1) { + PyErr_SetString(PyExc_SystemError, "SetItem: t refcnt != 1"); + return NULL; + } + if (item->ob_refcnt != 1) { + PyErr_SetString(PyExc_SystemError, "SetItem: item refcnt != 1"); + return NULL; + } + + if (PyTuple_GetItem(t, 0) != item || + PyTuple_GetItem(t, 0) != item) { + PyErr_SetString(PyExc_SystemError, "GetItem: bogus item"); + return NULL; + } + + if (t->ob_refcnt != 1) { + PyErr_SetString(PyExc_SystemError, "GetItem: t refcnt != 1"); + return NULL; + } + if (item->ob_refcnt != 1) { + PyErr_SetString(PyExc_SystemError, "GetItem: item refcnt != 1"); + return NULL; + } + return t; + """), + ]) + x = module.run() + assert x == ((),) From pypy.commits at gmail.com Sat Feb 13 11:22:26 2016 From: pypy.commits at gmail.com (arigo) Date: Sat, 13 Feb 2016 08:22:26 -0800 (PST) Subject: [pypy-commit] pypy cpyext-gc-support-2: App-level assertion details don't always work. In one case, I could Message-ID: <56bf5842.c1b3c20a.51d9f.19a3@mx.google.com> Author: Armin Rigo Branch: cpyext-gc-support-2 Changeset: r82225:77fd0fb746fe Date: 2016-02-13 17:21 +0100 http://bitbucket.org/pypy/pypy/changeset/77fd0fb746fe/ Log: App-level assertion details don't always work. 
In one case, I could restore support for them with this fix, caused probably by some long-ago refactoring diff --git a/pypy/tool/pytest/appsupport.py b/pypy/tool/pytest/appsupport.py --- a/pypy/tool/pytest/appsupport.py +++ b/pypy/tool/pytest/appsupport.py @@ -58,6 +58,9 @@ self.w_locals = space.getattr(pyframe, space.wrap('f_locals')) self.f_locals = self.w_locals # for py.test's recursion detection + def get_w_globals(self): + return self.w_globals + def eval(self, code, **vars): space = self.space for key, w_value in vars.items(): From pypy.commits at gmail.com Sat Feb 13 11:29:36 2016 From: pypy.commits at gmail.com (arigo) Date: Sat, 13 Feb 2016 08:29:36 -0800 (PST) Subject: [pypy-commit] pypy cpyext-gc-support-2: Small fix to avoid being thrown into pdb when running test_complexobject Message-ID: <56bf59f0.890bc30a.7fea6.14f0@mx.google.com> Author: Armin Rigo Branch: cpyext-gc-support-2 Changeset: r82226:373f38b1e4ac Date: 2016-02-13 17:29 +0100 http://bitbucket.org/pypy/pypy/changeset/373f38b1e4ac/ Log: Small fix to avoid being thrown into pdb when running test_complexobject with "-s" (the test passes anyway) diff --git a/pypy/module/cpyext/complexobject.py b/pypy/module/cpyext/complexobject.py --- a/pypy/module/cpyext/complexobject.py +++ b/pypy/module/cpyext/complexobject.py @@ -43,7 +43,7 @@ # lltype does not handle functions returning a structure. This implements a # helper function, which takes as argument a reference to the return value. - at cpython_api([PyObject, Py_complex_ptr], lltype.Void) + at cpython_api([PyObject, Py_complex_ptr], rffi.INT_real, error=-1) def _PyComplex_AsCComplex(space, w_obj, result): """Return the Py_complex value of the complex number op. @@ -60,7 +60,7 @@ # if the above did not work, interpret obj as a float giving the # real part of the result, and fill in the imaginary part as 0. 
result.c_real = PyFloat_AsDouble(space, w_obj) # -1 on failure - return + return 0 if not PyComplex_Check(space, w_obj): raise OperationError(space.w_TypeError, space.wrap( @@ -69,3 +69,4 @@ assert isinstance(w_obj, W_ComplexObject) result.c_real = w_obj.realval result.c_imag = w_obj.imagval + return 0 diff --git a/pypy/module/cpyext/include/complexobject.h b/pypy/module/cpyext/include/complexobject.h --- a/pypy/module/cpyext/include/complexobject.h +++ b/pypy/module/cpyext/include/complexobject.h @@ -15,7 +15,7 @@ } Py_complex; /* generated function */ -PyAPI_FUNC(void) _PyComplex_AsCComplex(PyObject *, Py_complex *); +PyAPI_FUNC(int) _PyComplex_AsCComplex(PyObject *, Py_complex *); PyAPI_FUNC(PyObject *) _PyComplex_FromCComplex(Py_complex *); Py_LOCAL_INLINE(Py_complex) PyComplex_AsCComplex(PyObject *obj) From pypy.commits at gmail.com Sat Feb 13 11:32:30 2016 From: pypy.commits at gmail.com (arigo) Date: Sat, 13 Feb 2016 08:32:30 -0800 (PST) Subject: [pypy-commit] pypy cpyext-gc-support-2: Avoid "if obj:" where obj is potentially a W_Root. Notably, if it is a Message-ID: <56bf5a9e.01adc20a.cbec7.1754@mx.google.com> Author: Armin Rigo Branch: cpyext-gc-support-2 Changeset: r82227:9ead1cf8ed7e Date: 2016-02-13 17:31 +0100 http://bitbucket.org/pypy/pypy/changeset/9ead1cf8ed7e/ Log: Avoid "if obj:" where obj is potentially a W_Root. Notably, if it is a W_BoolObject then this complains. diff --git a/pypy/module/cpyext/pyobject.py b/pypy/module/cpyext/pyobject.py --- a/pypy/module/cpyext/pyobject.py +++ b/pypy/module/cpyext/pyobject.py @@ -260,18 +260,16 @@ """Increment the reference counter of the PyObject and return it. Can be called with either a PyObject or a W_Root. 
""" - if obj: - if is_pyobj(obj): - pyobj = rffi.cast(PyObject, obj) - else: - pyobj = as_pyobj(space, obj) + if is_pyobj(obj): + pyobj = rffi.cast(PyObject, obj) + else: + pyobj = as_pyobj(space, obj) + if pyobj: assert pyobj.c_ob_refcnt > 0 pyobj.c_ob_refcnt += 1 if not is_pyobj(obj): keepalive_until_here(obj) - return pyobj - else: - return lltype.nullptr(PyObject.TO) + return pyobj INTERPLEVEL_API['make_ref'] = make_ref From pypy.commits at gmail.com Sat Feb 13 11:38:22 2016 From: pypy.commits at gmail.com (arigo) Date: Sat, 13 Feb 2016 08:38:22 -0800 (PST) Subject: [pypy-commit] pypy cpyext-gc-support-2: fix with comment Message-ID: <56bf5bfe.cf0b1c0a.8b231.ffff8f46@mx.google.com> Author: Armin Rigo Branch: cpyext-gc-support-2 Changeset: r82228:72c05e2cc492 Date: 2016-02-13 17:37 +0100 http://bitbucket.org/pypy/pypy/changeset/72c05e2cc492/ Log: fix with comment diff --git a/pypy/module/cpyext/dictobject.py b/pypy/module/cpyext/dictobject.py --- a/pypy/module/cpyext/dictobject.py +++ b/pypy/module/cpyext/dictobject.py @@ -14,13 +14,16 @@ PyDict_Check, PyDict_CheckExact = build_type_checkers("Dict") - at cpython_api([PyObject, PyObject], PyObject, error=CANNOT_FAIL) + at cpython_api([PyObject, PyObject], PyObject, error=CANNOT_FAIL, result_borrowed=True) def PyDict_GetItem(space, w_dict, w_key): try: w_res = space.getitem(w_dict, w_key) except: return None - return borrow_from(w_dict, w_res) + # NOTE: this works so far because all our dict strategies store + # *values* as full objects, which stay alive as long as the dict is + # alive and not modified. 
+ return w_res @cpython_api([PyObject, PyObject, PyObject], rffi.INT_real, error=-1) def PyDict_SetItem(space, w_dict, w_key, w_obj): From pypy.commits at gmail.com Sun Feb 14 04:44:32 2016 From: pypy.commits at gmail.com (arigo) Date: Sun, 14 Feb 2016 01:44:32 -0800 (PST) Subject: [pypy-commit] pypy cpyext-gc-support-2: fix weakrefobject Message-ID: <56c04c80.e7bec20a.39b65.0af2@mx.google.com> Author: Armin Rigo Branch: cpyext-gc-support-2 Changeset: r82229:cba976d16588 Date: 2016-02-14 10:43 +0100 http://bitbucket.org/pypy/pypy/changeset/cba976d16588/ Log: fix weakrefobject diff --git a/pypy/module/cpyext/weakrefobject.py b/pypy/module/cpyext/weakrefobject.py --- a/pypy/module/cpyext/weakrefobject.py +++ b/pypy/module/cpyext/weakrefobject.py @@ -37,17 +37,19 @@ """ return PyWeakref_GET_OBJECT(space, w_ref) - at cpython_api([PyObject], PyObject) + at cpython_api([PyObject], PyObject, result_borrowed=True) def PyWeakref_GET_OBJECT(space, w_ref): """Similar to PyWeakref_GetObject(), but implemented as a macro that does no error checking. """ - return borrow_from(w_ref, space.call_function(w_ref)) + return space.call_function(w_ref) @cpython_api([PyObject], PyObject) def PyWeakref_LockObject(space, w_ref): """Return the referenced object from a weak reference. If the referent is no longer live, returns None. This function returns a new reference. + + (A PyPy extension that may not be useful any more: use + PyWeakref_GetObject() and Py_INCREF().) 
""" return space.call_function(w_ref) - From pypy.commits at gmail.com Sun Feb 14 05:08:33 2016 From: pypy.commits at gmail.com (arigo) Date: Sun, 14 Feb 2016 02:08:33 -0800 (PST) Subject: [pypy-commit] pypy cpyext-gc-support-2: Compile with "-g -O0" the examples Message-ID: <56c05221.c711c30a.e9bd9.13f0@mx.google.com> Author: Armin Rigo Branch: cpyext-gc-support-2 Changeset: r82230:c55583423936 Date: 2016-02-14 11:07 +0100 http://bitbucket.org/pypy/pypy/changeset/c55583423936/ Log: Compile with "-g -O0" the examples diff --git a/pypy/module/cpyext/test/test_cpyext.py b/pypy/module/cpyext/test/test_cpyext.py --- a/pypy/module/cpyext/test/test_cpyext.py +++ b/pypy/module/cpyext/test/test_cpyext.py @@ -73,7 +73,9 @@ else: kwds["link_files"] = [str(api_library + '.so')] if sys.platform.startswith('linux'): - kwds["compile_extra"]=["-Werror=implicit-function-declaration"] + kwds["compile_extra"]=["-Werror=implicit-function-declaration", + "-g", "-O0"] + kwds["link_extra"]=["-g"] modname = modname.split('.')[-1] eci = ExternalCompilationInfo( From pypy.commits at gmail.com Sun Feb 14 05:08:35 2016 From: pypy.commits at gmail.com (arigo) Date: Sun, 14 Feb 2016 02:08:35 -0800 (PST) Subject: [pypy-commit] pypy cpyext-gc-support-2: fix Message-ID: <56c05223.463f1c0a.58fd3.ffff8203@mx.google.com> Author: Armin Rigo Branch: cpyext-gc-support-2 Changeset: r82231:29943ebc709d Date: 2016-02-14 11:07 +0100 http://bitbucket.org/pypy/pypy/changeset/29943ebc709d/ Log: fix diff --git a/pypy/module/cpyext/pyobject.py b/pypy/module/cpyext/pyobject.py --- a/pypy/module/cpyext/pyobject.py +++ b/pypy/module/cpyext/pyobject.py @@ -160,11 +160,11 @@ """ #state = space.fromcache(RefcountState) w_type = space.type(w_obj) - if w_type.is_cpytype(): - ZZZ # py_obj = state.get_from_lifeline(w_obj) - if py_obj: - Py_IncRef(space, py_obj) - return py_obj + #if w_type.is_cpytype(): + # py_obj = state.get_from_lifeline(w_obj) + # if py_obj: + # Py_IncRef(space, py_obj) + # return py_obj typedescr = 
get_typedescr(w_obj.typedef) py_obj = typedescr.allocate(space, w_type, itemcount=itemcount) From pypy.commits at gmail.com Sun Feb 14 05:27:47 2016 From: pypy.commits at gmail.com (arigo) Date: Sun, 14 Feb 2016 02:27:47 -0800 (PST) Subject: [pypy-commit] pypy cpyext-gc-support-2: Return a borrowed result from PyList_GetItem by simply switching to the object strategy Message-ID: <56c056a3.0c2d1c0a.3495f.ffff8ad8@mx.google.com> Author: Armin Rigo Branch: cpyext-gc-support-2 Changeset: r82232:e42c71fc80cc Date: 2016-02-14 11:27 +0100 http://bitbucket.org/pypy/pypy/changeset/e42c71fc80cc/ Log: Return a borrowed result from PyList_GetItem by simply switching to the object strategy diff --git a/pypy/module/cpyext/listobject.py b/pypy/module/cpyext/listobject.py --- a/pypy/module/cpyext/listobject.py +++ b/pypy/module/cpyext/listobject.py @@ -38,7 +38,7 @@ w_list.setitem(index, w_item) return 0 - at cpython_api([PyObject, Py_ssize_t], PyObject) + at cpython_api([PyObject, Py_ssize_t], PyObject, result_borrowed=True) def PyList_GetItem(space, w_list, index): """Return the object at position pos in the list pointed to by p. The position must be positive, indexing from the end of the list is not @@ -49,8 +49,9 @@ if index < 0 or index >= w_list.length(): raise OperationError(space.w_IndexError, space.wrap( "list index out of range")) - w_item = w_list.getitem(index) - return borrow_from(w_list, w_item) + w_list.switch_to_object_strategy() # make sure we can return a borrowed obj + # XXX ^^^ how does this interact with CPyListStrategy? 
+ return w_list.getitem(index) @cpython_api([PyObject, PyObject], rffi.INT_real, error=-1) From pypy.commits at gmail.com Sun Feb 14 05:30:06 2016 From: pypy.commits at gmail.com (arigo) Date: Sun, 14 Feb 2016 02:30:06 -0800 (PST) Subject: [pypy-commit] pypy cpyext-gc-support-2: fix for test_sequence Message-ID: <56c0572e.02931c0a.5f6f8.ffff896e@mx.google.com> Author: Armin Rigo Branch: cpyext-gc-support-2 Changeset: r82233:103bd85674e7 Date: 2016-02-14 11:29 +0100 http://bitbucket.org/pypy/pypy/changeset/103bd85674e7/ Log: fix for test_sequence diff --git a/pypy/module/cpyext/sequence.py b/pypy/module/cpyext/sequence.py --- a/pypy/module/cpyext/sequence.py +++ b/pypy/module/cpyext/sequence.py @@ -2,7 +2,7 @@ from pypy.interpreter.error import OperationError, oefmt from pypy.module.cpyext.api import ( cpython_api, CANNOT_FAIL, CONST_STRING, Py_ssize_t) -from pypy.module.cpyext.pyobject import PyObject, borrow_from +from pypy.module.cpyext.pyobject import PyObject from rpython.rtyper.lltypesystem import rffi, lltype from pypy.objspace.std import listobject, tupleobject @@ -42,15 +42,19 @@ which case o is returned. Use PySequence_Fast_GET_ITEM() to access the members of the result. Returns NULL on failure. If the object is not a sequence, raises TypeError with m as the message text.""" - if (isinstance(w_obj, listobject.W_ListObject) or - isinstance(w_obj, tupleobject.W_TupleObject)): + if isinstance(w_obj, listobject.W_ListObject): + # make sure we can return a borrowed obj from PySequence_Fast_GET_ITEM + # XXX how does this interact with CPyListStrategy? 
+ w_obj.switch_to_object_strategy() + return w_obj + if isinstance(w_obj, tupleobject.W_TupleObject): return w_obj try: return tupleobject.W_TupleObject(space.fixedview(w_obj)) except OperationError: raise OperationError(space.w_TypeError, space.wrap(rffi.charp2str(m))) - at cpython_api([PyObject, Py_ssize_t], PyObject) + at cpython_api([PyObject, Py_ssize_t], PyObject, result_borrowed=True) def PySequence_Fast_GET_ITEM(space, w_obj, index): """Return the ith element of o, assuming that o was returned by PySequence_Fast(), o is not NULL, and that i is within bounds. @@ -60,7 +64,7 @@ else: assert isinstance(w_obj, tupleobject.W_TupleObject) w_res = w_obj.wrappeditems[index] - return borrow_from(w_obj, w_res) + return w_res @cpython_api([PyObject], Py_ssize_t, error=CANNOT_FAIL) def PySequence_Fast_GET_SIZE(space, w_obj): From pypy.commits at gmail.com Sun Feb 14 05:42:46 2016 From: pypy.commits at gmail.com (arigo) Date: Sun, 14 Feb 2016 02:42:46 -0800 (PST) Subject: [pypy-commit] pypy cpyext-gc-support-2: port some dict logic from cpyext-gc-support Message-ID: <56c05a26.0772c20a.b088e.1b8c@mx.google.com> Author: Armin Rigo Branch: cpyext-gc-support-2 Changeset: r82234:439d7f2b1660 Date: 2016-02-14 11:40 +0100 http://bitbucket.org/pypy/pypy/changeset/439d7f2b1660/ Log: port some dict logic from cpyext-gc-support diff --git a/pypy/module/cpyext/dictobject.py b/pypy/module/cpyext/dictobject.py --- a/pypy/module/cpyext/dictobject.py +++ b/pypy/module/cpyext/dictobject.py @@ -2,7 +2,7 @@ from pypy.module.cpyext.api import ( cpython_api, CANNOT_FAIL, build_type_checkers, Py_ssize_t, Py_ssize_tP, CONST_STRING) -from pypy.module.cpyext.pyobject import PyObject, PyObjectP, borrow_from +from pypy.module.cpyext.pyobject import PyObject, PyObjectP, as_pyobj from pypy.module.cpyext.pyobject import RefcountState from pypy.module.cpyext.pyerrors import PyErr_BadInternalCall from pypy.interpreter.error import OperationError @@ -50,7 +50,8 @@ else: PyErr_BadInternalCall(space) - 
at cpython_api([PyObject, CONST_STRING], PyObject, error=CANNOT_FAIL) + at cpython_api([PyObject, CONST_STRING], PyObject, error=CANNOT_FAIL, + result_borrowed=True) def PyDict_GetItemString(space, w_dict, key): """This is the same as PyDict_GetItem(), but key is specified as a char*, rather than a PyObject*.""" @@ -58,9 +59,10 @@ w_res = space.finditem_str(w_dict, rffi.charp2str(key)) except: w_res = None - if w_res is None: - return None - return borrow_from(w_dict, w_res) + # NOTE: this works so far because all our dict strategies store + # *values* as full objects, which stay alive as long as the dict is + # alive and not modified. + return w_res @cpython_api([PyObject, CONST_STRING], rffi.INT_real, error=-1) def PyDict_DelItemString(space, w_dict, key_ptr): @@ -173,10 +175,13 @@ if w_dict is None: return 0 - # Note: this is not efficient. Storing an iterator would probably + # XXX XXX PyDict_Next is not efficient. Storing an iterator would probably # work, but we can't work out how to not leak it if iteration does - # not complete. + # not complete. Alternatively, we could add some RPython-only + # dict-iterator method to move forward by N steps. 
+ w_dict.ensure_object_strategy() # make sure both keys and values can + # be borrwed try: w_iter = space.call_method(space.w_dict, "iteritems", w_dict) pos = ppos[0] @@ -186,11 +191,10 @@ w_item = space.call_method(w_iter, "next") w_key, w_value = space.fixedview(w_item, 2) - state = space.fromcache(RefcountState) if pkey: - pkey[0] = state.make_borrowed(w_dict, w_key) + pkey[0] = as_pyobj(space, w_key) if pvalue: - pvalue[0] = state.make_borrowed(w_dict, w_value) + pvalue[0] = as_pyobj(space, w_value) ppos[0] += 1 except OperationError, e: if not e.match(space, space.w_StopIteration): diff --git a/pypy/objspace/std/dictmultiobject.py b/pypy/objspace/std/dictmultiobject.py --- a/pypy/objspace/std/dictmultiobject.py +++ b/pypy/objspace/std/dictmultiobject.py @@ -350,6 +350,12 @@ F: D[k] = F[k]""" init_or_update(space, self, __args__, 'dict.update') + def ensure_object_strategy(self): # for cpyext + object_strategy = self.space.fromcache(ObjectDictStrategy) + strategy = self.get_strategy() + if strategy is not object_strategy: + strategy.switch_to_object_strategy(self) + class W_DictObject(W_DictMultiObject): """ a regular dict object """ From pypy.commits at gmail.com Sun Feb 14 05:42:53 2016 From: pypy.commits at gmail.com (arigo) Date: Sun, 14 Feb 2016 02:42:53 -0800 (PST) Subject: [pypy-commit] pypy cpyext-gc-support-2: oops, W_ListObject.switch_to_object_strategy() will always make Message-ID: <56c05a2d.cf821c0a.509f2.ffff9745@mx.google.com> Author: Armin Rigo Branch: cpyext-gc-support-2 Changeset: r82235:52cda7675796 Date: 2016-02-14 11:42 +0100 http://bitbucket.org/pypy/pypy/changeset/52cda7675796/ Log: oops, W_ListObject.switch_to_object_strategy() will always make an internal copy, even if the strategy is already 'object'. 
diff --git a/pypy/module/cpyext/listobject.py b/pypy/module/cpyext/listobject.py --- a/pypy/module/cpyext/listobject.py +++ b/pypy/module/cpyext/listobject.py @@ -49,7 +49,7 @@ if index < 0 or index >= w_list.length(): raise OperationError(space.w_IndexError, space.wrap( "list index out of range")) - w_list.switch_to_object_strategy() # make sure we can return a borrowed obj + w_list.ensure_object_strategy() # make sure we can return a borrowed obj # XXX ^^^ how does this interact with CPyListStrategy? return w_list.getitem(index) diff --git a/pypy/module/cpyext/sequence.py b/pypy/module/cpyext/sequence.py --- a/pypy/module/cpyext/sequence.py +++ b/pypy/module/cpyext/sequence.py @@ -45,7 +45,7 @@ if isinstance(w_obj, listobject.W_ListObject): # make sure we can return a borrowed obj from PySequence_Fast_GET_ITEM # XXX how does this interact with CPyListStrategy? - w_obj.switch_to_object_strategy() + w_obj.ensure_object_strategy() return w_obj if isinstance(w_obj, tupleobject.W_TupleObject): return w_obj diff --git a/pypy/objspace/std/listobject.py b/pypy/objspace/std/listobject.py --- a/pypy/objspace/std/listobject.py +++ b/pypy/objspace/std/listobject.py @@ -222,6 +222,10 @@ self.strategy = object_strategy object_strategy.init_from_list_w(self, list_w) + def ensure_object_strategy(self): # for cpyext + if self.strategy is not self.space.fromcache(ObjectListStrategy): + self.switch_to_object_strategy() + def _temporarily_as_objects(self): if self.strategy is self.space.fromcache(ObjectListStrategy): return self From pypy.commits at gmail.com Sun Feb 14 05:48:50 2016 From: pypy.commits at gmail.com (arigo) Date: Sun, 14 Feb 2016 02:48:50 -0800 (PST) Subject: [pypy-commit] pypy cpyext-gc-support-2: next fix Message-ID: <56c05b92.8205c20a.fec93.2175@mx.google.com> Author: Armin Rigo Branch: cpyext-gc-support-2 Changeset: r82236:e1849ada0ba5 Date: 2016-02-14 11:48 +0100 http://bitbucket.org/pypy/pypy/changeset/e1849ada0ba5/ Log: next fix diff --git 
a/pypy/module/cpyext/typeobject.py b/pypy/module/cpyext/typeobject.py --- a/pypy/module/cpyext/typeobject.py +++ b/pypy/module/cpyext/typeobject.py @@ -21,7 +21,7 @@ from pypy.module.cpyext.modsupport import convert_method_defs from pypy.module.cpyext.pyobject import ( PyObject, make_ref, create_ref, from_ref, get_typedescr, make_typedescr, - track_reference, RefcountState, borrow_from, Py_DecRef, as_pyobj) + track_reference, RefcountState, Py_DecRef, as_pyobj) from pypy.module.cpyext.slotdefs import ( slotdefs_for_tp_slots, slotdefs_for_wrappers, get_slot_tp_function) from pypy.module.cpyext.state import State @@ -633,7 +633,8 @@ return generic_cpy_call( space, type.c_tp_alloc, type, 0) - at cpython_api([PyTypeObjectPtr, PyObject], PyObject, error=CANNOT_FAIL) + at cpython_api([PyTypeObjectPtr, PyObject], PyObject, error=CANNOT_FAIL, + result_borrowed=True) def _PyType_Lookup(space, type, w_name): """Internal API to look for a name through the MRO. This returns a borrowed reference, and doesn't set an exception!""" @@ -644,7 +645,9 @@ return None name = space.str_w(w_name) w_obj = w_type.lookup(name) - return borrow_from(w_type, w_obj) + # this assumes that w_obj is not dynamically created, but will stay alive + # until w_type is modified or dies + return w_obj @cpython_api([PyTypeObjectPtr], lltype.Void) def PyType_Modified(space, w_obj): From pypy.commits at gmail.com Sun Feb 14 06:15:22 2016 From: pypy.commits at gmail.com (arigo) Date: Sun, 14 Feb 2016 03:15:22 -0800 (PST) Subject: [pypy-commit] pypy default: update to cffi 1.5.2 Message-ID: <56c061ca.8378c20a.fcb23.223e@mx.google.com> Author: Armin Rigo Branch: Changeset: r82237:c8eb59d57470 Date: 2016-02-14 12:12 +0100 http://bitbucket.org/pypy/pypy/changeset/c8eb59d57470/ Log: update to cffi 1.5.2 diff --git a/lib_pypy/cffi.egg-info/PKG-INFO b/lib_pypy/cffi.egg-info/PKG-INFO --- a/lib_pypy/cffi.egg-info/PKG-INFO +++ b/lib_pypy/cffi.egg-info/PKG-INFO @@ -1,6 +1,6 @@ Metadata-Version: 1.1 Name: cffi -Version: 
1.5.1 +Version: 1.5.2 Summary: Foreign Function Interface for Python calling C code. Home-page: http://cffi.readthedocs.org Author: Armin Rigo, Maciej Fijalkowski diff --git a/lib_pypy/cffi/__init__.py b/lib_pypy/cffi/__init__.py --- a/lib_pypy/cffi/__init__.py +++ b/lib_pypy/cffi/__init__.py @@ -4,8 +4,8 @@ from .api import FFI, CDefError, FFIError from .ffiplatform import VerificationError, VerificationMissing -__version__ = "1.5.1" -__version_info__ = (1, 5, 1) +__version__ = "1.5.2" +__version_info__ = (1, 5, 2) # The verifier module file names are based on the CRC32 of a string that # contains the following version number. It may be older than __version__ diff --git a/lib_pypy/cffi/_embedding.h b/lib_pypy/cffi/_embedding.h --- a/lib_pypy/cffi/_embedding.h +++ b/lib_pypy/cffi/_embedding.h @@ -233,7 +233,7 @@ f = PySys_GetObject((char *)"stderr"); if (f != NULL && f != Py_None) { PyFile_WriteString("\nFrom: " _CFFI_MODULE_NAME - "\ncompiled with cffi version: 1.5.1" + "\ncompiled with cffi version: 1.5.2" "\n_cffi_backend module: ", f); modules = PyImport_GetModuleDict(); mod = PyDict_GetItemString(modules, "_cffi_backend"); diff --git a/lib_pypy/cffi/api.py b/lib_pypy/cffi/api.py --- a/lib_pypy/cffi/api.py +++ b/lib_pypy/cffi/api.py @@ -1,4 +1,4 @@ -import sys, sysconfig, types +import sys, types from .lock import allocate_lock try: @@ -550,16 +550,31 @@ lst.append(value) # if '__pypy__' in sys.builtin_module_names: - if hasattr(sys, 'prefix'): - import os - ensure('library_dirs', os.path.join(sys.prefix, 'bin')) - pythonlib = "pypy-c" + if sys.platform == "win32": + # we need 'libpypy-c.lib'. Right now, distributions of + # pypy contain it as 'include/python27.lib'. You need + # to manually copy it back to 'libpypy-c.lib'. XXX Will + # be fixed in the next pypy release. 
+ pythonlib = "libpypy-c" + if hasattr(sys, 'prefix'): + ensure('library_dirs', sys.prefix) + else: + # we need 'libpypy-c.{so,dylib}', which should be by + # default located in 'sys.prefix/bin' + pythonlib = "pypy-c" + if hasattr(sys, 'prefix'): + import os + ensure('library_dirs', os.path.join(sys.prefix, 'bin')) else: if sys.platform == "win32": template = "python%d%d" if hasattr(sys, 'gettotalrefcount'): template += '_d' else: + try: + import sysconfig + except ImportError: # 2.6 + from distutils import sysconfig template = "python%d.%d" if sysconfig.get_config_var('DEBUG_EXT'): template += sysconfig.get_config_var('DEBUG_EXT') diff --git a/lib_pypy/cffi/vengine_cpy.py b/lib_pypy/cffi/vengine_cpy.py --- a/lib_pypy/cffi/vengine_cpy.py +++ b/lib_pypy/cffi/vengine_cpy.py @@ -1,3 +1,6 @@ +# +# DEPRECATED: implementation for ffi.verify() +# import sys, imp from . import model, ffiplatform diff --git a/lib_pypy/cffi/vengine_gen.py b/lib_pypy/cffi/vengine_gen.py --- a/lib_pypy/cffi/vengine_gen.py +++ b/lib_pypy/cffi/vengine_gen.py @@ -1,3 +1,6 @@ +# +# DEPRECATED: implementation for ffi.verify() +# import sys, os import types diff --git a/lib_pypy/cffi/verifier.py b/lib_pypy/cffi/verifier.py --- a/lib_pypy/cffi/verifier.py +++ b/lib_pypy/cffi/verifier.py @@ -1,3 +1,6 @@ +# +# DEPRECATED: implementation for ffi.verify() +# import sys, os, binascii, shutil, io from . import __version_verifier_modules__ from . 
import ffiplatform diff --git a/pypy/module/_cffi_backend/__init__.py b/pypy/module/_cffi_backend/__init__.py --- a/pypy/module/_cffi_backend/__init__.py +++ b/pypy/module/_cffi_backend/__init__.py @@ -3,7 +3,7 @@ from rpython.rlib import rdynload, clibffi, entrypoint from rpython.rtyper.lltypesystem import rffi -VERSION = "1.5.1" +VERSION = "1.5.2" FFI_DEFAULT_ABI = clibffi.FFI_DEFAULT_ABI try: diff --git a/pypy/module/_cffi_backend/test/_backend_test_c.py b/pypy/module/_cffi_backend/test/_backend_test_c.py --- a/pypy/module/_cffi_backend/test/_backend_test_c.py +++ b/pypy/module/_cffi_backend/test/_backend_test_c.py @@ -1,7 +1,7 @@ # ____________________________________________________________ import sys -assert __version__ == "1.5.1", ("This test_c.py file is for testing a version" +assert __version__ == "1.5.2", ("This test_c.py file is for testing a version" " of cffi that differs from the one that we" " get from 'import _cffi_backend'") if sys.version_info < (3,): diff --git a/pypy/module/test_lib_pypy/cffi_tests/cffi0/test_zintegration.py b/pypy/module/test_lib_pypy/cffi_tests/cffi0/test_zintegration.py --- a/pypy/module/test_lib_pypy/cffi_tests/cffi0/test_zintegration.py +++ b/pypy/module/test_lib_pypy/cffi_tests/cffi0/test_zintegration.py @@ -5,6 +5,9 @@ if sys.platform == 'win32': py.test.skip('snippets do not run on win32') +if sys.version_info < (2, 7): + py.test.skip('fails e.g. 
on a Debian/Ubuntu which patches virtualenv' + ' in a non-2.6-friendly way') def create_venv(name): tmpdir = udir.join(name) diff --git a/pypy/module/test_lib_pypy/cffi_tests/embedding/test_basic.py b/pypy/module/test_lib_pypy/cffi_tests/embedding/test_basic.py --- a/pypy/module/test_lib_pypy/cffi_tests/embedding/test_basic.py +++ b/pypy/module/test_lib_pypy/cffi_tests/embedding/test_basic.py @@ -101,6 +101,7 @@ c = distutils.ccompiler.new_compiler() print('compiling %s with %r' % (name, modules)) extra_preargs = [] + debug = True if sys.platform == 'win32': libfiles = [] for m in modules: @@ -109,9 +110,12 @@ libfiles.append('Release\\%s.lib' % m[:-4]) modules = libfiles extra_preargs.append('/MANIFEST') + debug = False # you need to install extra stuff + # for this to work elif threads: extra_preargs.append('-pthread') - objects = c.compile([filename], macros=sorted(defines.items()), debug=True) + objects = c.compile([filename], macros=sorted(defines.items()), + debug=debug) c.link_executable(objects + modules, name, extra_preargs=extra_preargs) finally: os.chdir(curdir) @@ -119,12 +123,18 @@ def execute(self, name): path = self.get_path() env_extra = {'PYTHONPATH': prefix_pythonpath()} - libpath = os.environ.get('LD_LIBRARY_PATH') - if libpath: - libpath = path + ':' + libpath + if sys.platform == 'win32': + _path = os.environ.get('PATH') + # for libpypy-c.dll or Python27.dll + _path = os.path.split(sys.executable)[0] + ';' + _path + env_extra['PATH'] = _path else: - libpath = path - env_extra['LD_LIBRARY_PATH'] = libpath + libpath = os.environ.get('LD_LIBRARY_PATH') + if libpath: + libpath = path + ':' + libpath + else: + libpath = path + env_extra['LD_LIBRARY_PATH'] = libpath print('running %r in %r' % (name, path)) executable_name = name if sys.platform == 'win32': From pypy.commits at gmail.com Sun Feb 14 06:18:06 2016 From: pypy.commits at gmail.com (arigo) Date: Sun, 14 Feb 2016 03:18:06 -0800 (PST) Subject: [pypy-commit] pypy cpyext-gc-support-2: fix 
intobject.py Message-ID: <56c0626e.c13fc20a.8e7a6.252e@mx.google.com> Author: Armin Rigo Branch: cpyext-gc-support-2 Changeset: r82238:8a49781b1d09 Date: 2016-02-14 12:17 +0100 http://bitbucket.org/pypy/pypy/changeset/8a49781b1d09/ Log: fix intobject.py diff --git a/pypy/module/cpyext/intobject.py b/pypy/module/cpyext/intobject.py --- a/pypy/module/cpyext/intobject.py +++ b/pypy/module/cpyext/intobject.py @@ -38,8 +38,8 @@ w_obj = space.allocate_instance(W_IntObject, w_type) w_obj.__init__(intval) track_reference(space, obj, w_obj) - state = space.fromcache(RefcountState) - state.set_lifeline(w_obj, obj) + #state = space.fromcache(RefcountState) + #state.set_lifeline(w_obj, obj) return w_obj PyInt_Check, PyInt_CheckExact = build_type_checkers("Int") From pypy.commits at gmail.com Sun Feb 14 06:18:50 2016 From: pypy.commits at gmail.com (fijal) Date: Sun, 14 Feb 2016 03:18:50 -0800 (PST) Subject: [pypy-commit] pypy default: expand this point Message-ID: <56c0629a.c2351c0a.708ae.ffff964b@mx.google.com> Author: fijal Branch: Changeset: r82239:63df3933561b Date: 2016-02-14 12:18 +0100 http://bitbucket.org/pypy/pypy/changeset/63df3933561b/ Log: expand this point diff --git a/pypy/doc/whatsnew-head.rst b/pypy/doc/whatsnew-head.rst --- a/pypy/doc/whatsnew-head.rst +++ b/pypy/doc/whatsnew-head.rst @@ -41,7 +41,8 @@ .. branch: compress-numbering -Improve the memory signature of numbering instances in the JIT. +Improve the memory signature of numbering instances in the JIT. This should massively +decrease the amount of memory consumed by the JIT, which is significant for most programs. .. 
branch: fix-trace-too-long-heuristic From pypy.commits at gmail.com Sun Feb 14 06:43:33 2016 From: pypy.commits at gmail.com (arigo) Date: Sun, 14 Feb 2016 03:43:33 -0800 (PST) Subject: [pypy-commit] pypy cpyext-gc-support-2: Kill PyPy_Borrow() Message-ID: <56c06865.45611c0a.c357.ffff9cf7@mx.google.com> Author: Armin Rigo Branch: cpyext-gc-support-2 Changeset: r82240:9c732bb2412b Date: 2016-02-14 12:42 +0100 http://bitbucket.org/pypy/pypy/changeset/9c732bb2412b/ Log: Kill PyPy_Borrow() diff --git a/pypy/module/cpyext/__init__.py b/pypy/module/cpyext/__init__.py --- a/pypy/module/cpyext/__init__.py +++ b/pypy/module/cpyext/__init__.py @@ -61,7 +61,6 @@ import pypy.module.cpyext.funcobject import pypy.module.cpyext.frameobject import pypy.module.cpyext.classobject -import pypy.module.cpyext.pypyintf import pypy.module.cpyext.memoryobject import pypy.module.cpyext.codecs import pypy.module.cpyext.pyfile diff --git a/pypy/module/cpyext/include/patchlevel.h b/pypy/module/cpyext/include/patchlevel.h --- a/pypy/module/cpyext/include/patchlevel.h +++ b/pypy/module/cpyext/include/patchlevel.h @@ -31,6 +31,12 @@ /* PyPy version as a string */ #define PYPY_VERSION "4.1.0-alpha0" +/* Defined to mean a PyPy where cpyext holds more regular references + to PyObjects, e.g. staying alive as long as the internal PyPy object + stays alive. */ +#define PYPY_CPYEXT_GC 1 +#define PyPy_Borrow(a, b) ((void) 0) + /* Subversion Revision number of this file (not of the repository). * Empty since Mercurial migration. 
*/ #define PY_PATCHLEVEL_REVISION "" diff --git a/pypy/module/cpyext/pypyintf.py b/pypy/module/cpyext/pypyintf.py deleted file mode 100644 --- a/pypy/module/cpyext/pypyintf.py +++ /dev/null @@ -1,9 +0,0 @@ -from pypy.module.cpyext.api import cpython_api -from pypy.module.cpyext.pyobject import PyObject, borrow_from - - - at cpython_api([PyObject, PyObject], PyObject) -def PyPy_Borrow(space, w_parentobj, w_obj): - """Returns a borrowed reference to 'obj', borrowing from the 'parentobj'. - """ - return borrow_from(w_parentobj, w_obj) diff --git a/pypy/module/cpyext/src/getargs.c b/pypy/module/cpyext/src/getargs.c --- a/pypy/module/cpyext/src/getargs.c +++ b/pypy/module/cpyext/src/getargs.c @@ -442,7 +442,7 @@ strncpy(msgbuf, "is not retrievable", bufsize); return msgbuf; } - PyPy_Borrow(arg, item); + //PyPy_Borrow(arg, item); msg = convertitem(item, &format, p_va, flags, levels+1, msgbuf, bufsize, freelist); /* PySequence_GetItem calls tp->sq_item, which INCREFs */ From pypy.commits at gmail.com Sun Feb 14 11:23:27 2016 From: pypy.commits at gmail.com (alex_gaynor) Date: Sun, 14 Feb 2016 08:23:27 -0800 (PST) Subject: [pypy-commit] pypy default: Merged in HawkOwl/pypy (pull request #400) Message-ID: <56c0a9ff.077bc20a.f4074.7ab0@mx.google.com> Author: Alex Gaynor Branch: Changeset: r82242:f5bb0b58e857 Date: 2016-02-14 11:22 -0500 http://bitbucket.org/pypy/pypy/changeset/f5bb0b58e857/ Log: Merged in HawkOwl/pypy (pull request #400) Check for the existence of CODESET, not that it's a truthy value. 
diff --git a/pypy/module/sys/interp_encoding.py b/pypy/module/sys/interp_encoding.py --- a/pypy/module/sys/interp_encoding.py +++ b/pypy/module/sys/interp_encoding.py @@ -42,7 +42,7 @@ def _getfilesystemencoding(space): encoding = base_encoding - if rlocale.HAVE_LANGINFO and rlocale.CODESET: + if rlocale.HAVE_LANGINFO and hasattr(rlocale, "CODESET"): try: oldlocale = rlocale.setlocale(rlocale.LC_CTYPE, None) rlocale.setlocale(rlocale.LC_CTYPE, "") From pypy.commits at gmail.com Sun Feb 14 11:23:41 2016 From: pypy.commits at gmail.com (HawkOwl) Date: Sun, 14 Feb 2016 08:23:41 -0800 (PST) Subject: [pypy-commit] pypy default: Check for the existence of CODESET, not that it's a truthy value. CODESET is set from langinfo.h -- the value on Linux is 14, but the value on BSD is 0, so this should have prevented PyPy from ever working on FreeBSD. Message-ID: <56c0aa0d.96941c0a.178ff.ffffe952@mx.google.com> Author: Amber Brown Branch: Changeset: r82241:261c7622b920 Date: 2016-02-15 00:20 +0800 http://bitbucket.org/pypy/pypy/changeset/261c7622b920/ Log: Check for the existence of CODESET, not that it's a truthy value. CODESET is set from langinfo.h -- the value on Linux is 14, but the value on BSD is 0, so this should have prevented PyPy from ever working on FreeBSD. 
diff --git a/pypy/module/sys/interp_encoding.py b/pypy/module/sys/interp_encoding.py --- a/pypy/module/sys/interp_encoding.py +++ b/pypy/module/sys/interp_encoding.py @@ -42,7 +42,7 @@ def _getfilesystemencoding(space): encoding = base_encoding - if rlocale.HAVE_LANGINFO and rlocale.CODESET: + if rlocale.HAVE_LANGINFO and hasattr(rlocale, "CODESET"): try: oldlocale = rlocale.setlocale(rlocale.LC_CTYPE, None) rlocale.setlocale(rlocale.LC_CTYPE, "") From pypy.commits at gmail.com Sun Feb 14 13:16:45 2016 From: pypy.commits at gmail.com (fijal) Date: Sun, 14 Feb 2016 10:16:45 -0800 (PST) Subject: [pypy-commit] pypy jit-leaner-frontend: start working on a much simplified encoder Message-ID: <56c0c48d.654fc20a.a67a1.ffff89e9@mx.google.com> Author: fijal Branch: jit-leaner-frontend Changeset: r82243:1ce0ba36d602 Date: 2016-02-14 15:34 +0100 http://bitbucket.org/pypy/pypy/changeset/1ce0ba36d602/ Log: start working on a much simplified encoder diff --git a/rpython/jit/metainterp/opencoder.py b/rpython/jit/metainterp/opencoder.py --- a/rpython/jit/metainterp/opencoder.py +++ b/rpython/jit/metainterp/opencoder.py @@ -2,10 +2,10 @@ from rpython.jit.metainterp.history import ConstInt, Const, AbstractDescr,\ AbstractValue from rpython.jit.metainterp.resoperation import AbstractResOp, AbstractInputArg,\ - ResOperation, oparity, opname, rop + ResOperation, oparity, opname, rop, ResOperation, opwithdescr from rpython.rlib.rarithmetic import intmask -TAGINT, TAGCONST, TAGBOX, TAGOUTPUT = range(4) +TAGINT, TAGCONST, TAGBOX = range(3) TAGMASK = 0x3 TAGSHIFT = 2 MAXINT = 65536 @@ -13,8 +13,18 @@ class TraceIterator(object): def __init__(self, trace, end): self.trace = trace - self.pos = trace._start + self.inpargs = trace._inpargs + self.pos = 0 + self._count = 0 self.end = end + self._cache = [None] * trace._count + + def _get(self, i): + if i < 0: + return self.inpargs[-i-1] + res = self._cache[i] + assert res is not None + return res def done(self): return self.pos >= self.end @@ 
-24,69 +34,46 @@ self.pos += 1 return res + def _untag(self, tagged): + tag, v = untag(tagged) + if tag == TAGBOX: + return self._get(v) + elif tag == TAGINT: + return ConstInt(v) + else: + yyyy + def next(self): pos = self.pos opnum = self._next() - self._next() # forwarding if oparity[opnum] == -1: argnum = self._next() else: argnum = oparity[opnum] args = [] for i in range(argnum): - args.append(self._next()) - return RecordedOp(pos, opnum, args) - -class RecordedOp(AbstractValue): - def __init__(self, pos, opnum, args, descr=None): - self.opnum = opnum - self.args = args - self._pos = pos - self.descr = descr - - def get_tag(self): - return tag(TAGBOX, self._pos) - - def getarglist(self): - return self.args - - def getdescr(self): - return self.descr - - def numargs(self): - return len(self.args) - - def getopnum(self): - return self.opnum - - def getarg(self, i): - return self.args[i] - - def getopname(self): - try: - return opname[self.getopnum()].lower() - except KeyError: - return '<%d>' % self.getopnum() - - def __hash__(self): - raise NotImplementedError - + args.append(self._untag(self._next())) + if opwithdescr[opnum]: + xxx + else: + descr = None + res = ResOperation(opnum, args, -1, descr=descr) + self._cache[self._count] = res + self._count += 1 + return res class Trace(object): def __init__(self, inputargs): - self._ops = [0] * (2 * len(inputargs)) # place for forwarding inputargs - # plus infos + self._ops = [] for i, inparg in enumerate(inputargs): - self._ops[i * 2 + i] = i - inparg.position = i * 2 - self._start = len(inputargs) * 2 - self._count = len(inputargs) + inparg.position = -i - 1 + self._count = 0 + self._inpargs = inputargs def _record_op(self, opnum, argboxes, descr=None): operations = self._ops pos = len(operations) operations.append(opnum) - operations.append(self._count) # here we keep the index into infos if oparity[opnum] == -1: operations.append(len(argboxes)) operations.extend([encode(box) for box in argboxes]) @@ -99,7 +86,6 
@@ operations = self._ops pos = len(operations) operations.append(opnum) - operations.append(self._count) # here we keep the index into infos if oparity[opnum] == -1: operations.append(len(tagged_args)) operations.extend(tagged_args) @@ -120,20 +106,18 @@ def record_op_tag(self, opnum, tagged_args, descr=None): return tag(TAGBOX, self._record_raw(opnum, tagged_args, descr)) - def record_op_output_tag(self, opnum, tagged_args, descr=None): - return tag(TAGOUTPUT, self._record_raw(opnum, tagged_args, descr)) - - def get_info(self, infos, pos): - index = self._ops[pos + 1] - return infos[index] - - def set_info(self, infos, pos, info): - index = self._ops[pos + 1] - infos[index] = info - def get_iter(self): return TraceIterator(self, len(self._ops)) + def _get_operations(self): + """ NOT_RPYTHON + """ + l = [] + i = self.get_iter() + while not i.done(): + l.append(i.next()) + return l + def tag(kind, pos): return (pos << TAGSHIFT) | kind diff --git a/rpython/jit/metainterp/resoperation.py b/rpython/jit/metainterp/resoperation.py --- a/rpython/jit/metainterp/resoperation.py +++ b/rpython/jit/metainterp/resoperation.py @@ -1448,13 +1448,6 @@ opnum == rop.CALL_N) @staticmethod - def is_call_assembler(opnum): - return (opnum == rop.CALL_ASSEMBLER_I or - opnum == rop.CALL_ASSEMBLER_R or - opnum == rop.CALL_ASSEMBLER_F or - opnum == rop.CALL_ASSEMBLER_N) - - @staticmethod def is_call_loopinvariant(opnum): return (opnum == rop.CALL_LOOPINVARIANT_I or opnum == rop.CALL_LOOPINVARIANT_R or @@ -1462,20 +1455,6 @@ opnum == rop.CALL_LOOPINVARIANT_N) @staticmethod - def is_call_may_force(opnum): - return (opnum == rop.CALL_MAY_FORCE_I or - opnum == rop.CALL_MAY_FORCE_R or - opnum == rop.CALL_MAY_FORCE_F or - opnum == rop.CALL_MAY_FORCE_N) - - @staticmethod - def is_call_release_gil(opnum): - # no R returning call_release_gil - return (opnum == rop.CALL_RELEASE_GIL_I or - opnum == rop.CALL_RELEASE_GIL_F or - opnum == rop.CALL_RELEASE_GIL_N) - - @staticmethod def 
inputarg_from_tp(tp): if tp == 'i': return InputArgInt() diff --git a/rpython/jit/metainterp/test/test_opencoder.py b/rpython/jit/metainterp/test/test_opencoder.py --- a/rpython/jit/metainterp/test/test_opencoder.py +++ b/rpython/jit/metainterp/test/test_opencoder.py @@ -34,35 +34,7 @@ assert len(l) == 2 assert l[0].opnum == rop.INT_ADD assert l[1].opnum == rop.INT_ADD - assert untag(l[1].args[1]) == (TAGINT, 1) - assert untag(l[1].args[0]) == (TAGBOX, l[0]._pos) - assert untag(l[0].args[0]) == (TAGBOX, 0) - assert untag(l[0].args[1]) == (TAGBOX, 1) - - def test_forwarding(self): - i0, i1 = InputArgInt(), InputArgInt() - t = Trace([i0, i1]) - add = t.record_op(rop.INT_ADD, [i0, i1]) - t.record_op(rop.INT_ADD, [add, ConstInt(1)]) - opt = SimpleOptimizer(t) - add, add2 = self.unpack(t) - assert (untag(opt.get_box_replacement(add.get_tag())) == TAGBOX, add._pos) - newtag = opt.replace_op_with(add, rop.INT_NEG, [i0]) - assert opt.get_box_replacement(add.get_tag()) == newtag - - def test_infos(self): - i0 = InputArgInt() - t = Trace([i0]) - t.record_op(rop.INT_ADD, [i0, ConstInt(1)]) - opt = SimpleOptimizer(t) - add, = self.unpack(t) - assert opt.getintbound(add.get_tag()) - - def test_output(self): - i0 = InputArgInt() - t = Trace([i0]) - t.record_op(rop.INT_ADD, [i0, ConstInt(1)]) - opt = SimpleOptimizer(t) - add, = self.unpack(t) - opt.emit_operation(add) -# xxx \ No newline at end of file + assert l[1].getarg(1).getint() == 1 + assert l[1].getarg(0) is l[0] + assert l[0].getarg(0) is i0 + assert l[0].getarg(1) is i1 diff --git a/rpython/jit/tool/test/test_oparser.py b/rpython/jit/tool/test/test_oparser.py --- a/rpython/jit/tool/test/test_oparser.py +++ b/rpython/jit/tool/test/test_oparser.py @@ -24,8 +24,9 @@ finish() # (tricky) """ loop = self.parse(x) - assert len(loop.operations) == 3 - assert [op.getopnum() for op in loop.operations] == [rop.INT_ADD, rop.INT_SUB, + ops = loop._get_operations() + assert len(ops) == 3 + assert [op.getopnum() for op in ops] == 
[rop.INT_ADD, rop.INT_SUB, rop.FINISH] assert len(loop.inputargs) == 2 assert loop.operations[-1].getdescr() From pypy.commits at gmail.com Sun Feb 14 13:16:47 2016 From: pypy.commits at gmail.com (fijal) Date: Sun, 14 Feb 2016 10:16:47 -0800 (PST) Subject: [pypy-commit] pypy jit-leaner-frontend: fix oparser up to the point. I'm not completely convinced what's the point of MockLoopModel any more Message-ID: <56c0c48f.0cb81c0a.17fa0.113b@mx.google.com> Author: fijal Branch: jit-leaner-frontend Changeset: r82244:86783301e2bd Date: 2016-02-14 16:15 +0100 http://bitbucket.org/pypy/pypy/changeset/86783301e2bd/ Log: fix oparser up to the point. I'm not completely convinced what's the point of MockLoopModel any more diff --git a/rpython/jit/metainterp/opencoder.py b/rpython/jit/metainterp/opencoder.py --- a/rpython/jit/metainterp/opencoder.py +++ b/rpython/jit/metainterp/opencoder.py @@ -13,7 +13,6 @@ class TraceIterator(object): def __init__(self, trace, end): self.trace = trace - self.inpargs = trace._inpargs self.pos = 0 self._count = 0 self.end = end @@ -21,7 +20,7 @@ def _get(self, i): if i < 0: - return self.inpargs[-i-1] + return self.trace.inputargs[-i - 1] res = self._cache[i] assert res is not None return res @@ -40,6 +39,8 @@ return self._get(v) elif tag == TAGINT: return ConstInt(v) + elif tag == TAGCONST: + return self.trace._consts[v] else: yyyy @@ -54,7 +55,11 @@ for i in range(argnum): args.append(self._untag(self._next())) if opwithdescr[opnum]: - xxx + descr_index = self._next() + if descr_index == -1: + descr = None + else: + descr = self.trace._descrs[descr_index] else: descr = None res = ResOperation(opnum, args, -1, descr=descr) @@ -65,10 +70,26 @@ class Trace(object): def __init__(self, inputargs): self._ops = [] + self._descrs = [None] + self._consts = [None] for i, inparg in enumerate(inputargs): inparg.position = -i - 1 self._count = 0 - self._inpargs = inputargs + self.inputargs = inputargs + + def _encode(self, box): + if isinstance(box, 
Const): + if isinstance(box, ConstInt) and box.getint() < MAXINT: + return tag(TAGINT, box.getint()) + else: + self._consts.append(box) + return tag(TAGCONST, len(self._consts) - 1) + elif isinstance(box, AbstractResOp): + return tag(TAGBOX, box.position) + elif isinstance(box, AbstractInputArg): + return tag(TAGBOX, box.position) + else: + assert False, "unreachable code" def _record_op(self, opnum, argboxes, descr=None): operations = self._ops @@ -76,9 +97,12 @@ operations.append(opnum) if oparity[opnum] == -1: operations.append(len(argboxes)) - operations.extend([encode(box) for box in argboxes]) - if descr is not None: - operations.append(encode(descr)) + operations.extend([self._encode(box) for box in argboxes]) + if opwithdescr[opnum]: + if descr is None: + operations.append(-1) + else: + operations.append(self._encode_descr(descr)) self._count += 1 return pos @@ -94,6 +118,12 @@ self._count += 1 return pos + def _encode_descr(self, descr): + # XXX provide a global cache for prebuilt descrs so we don't + # have to repeat them here + self._descrs.append(descr) + return len(self._descrs) - 1 + def record_forwarding(self, op, newtag): index = op._pos self._ops[index] = -newtag - 1 @@ -123,18 +153,3 @@ def untag(tagged): return intmask(tagged) & TAGMASK, intmask(tagged) >> TAGSHIFT - -def encode(box): - if isinstance(box, Const): - if isinstance(box, ConstInt) and box.getint() < MAXINT: - return tag(TAGINT, box.getint()) - else: - yyy - elif isinstance(box, AbstractResOp): - return tag(TAGBOX, box.position) - elif isinstance(box, AbstractInputArg): - return tag(TAGBOX, box.position) - elif isinstance(box, AbstractDescr): - pass - else: - yyy diff --git a/rpython/jit/metainterp/resoperation.py b/rpython/jit/metainterp/resoperation.py --- a/rpython/jit/metainterp/resoperation.py +++ b/rpython/jit/metainterp/resoperation.py @@ -95,7 +95,7 @@ assert isinstance(op, ResOpWithDescr) if opnum == rop.FINISH: assert descr.final_descr - elif op.is_guard(): + elif 
OpHelpers.is_guard(opnum): assert not descr.final_descr op.setdescr(descr) op.inittype() diff --git a/rpython/jit/tool/oparser.py b/rpython/jit/tool/oparser.py --- a/rpython/jit/tool/oparser.py +++ b/rpython/jit/tool/oparser.py @@ -408,10 +408,7 @@ if num < len(newlines): raise ParseError("unexpected dedent at line: %s" % newlines[num]) self.trace.comment = first_comment - #self.trace.original_jitcell_token = self.original_jitcell_token - #loop.operations = ops - #loop.inputargs = inpargs - #loop.last_offset = last_offset + self.trace.original_jitcell_token = self.original_jitcell_token return self.trace def record(self, opnum, args, descr): diff --git a/rpython/jit/tool/oparser_model.py b/rpython/jit/tool/oparser_model.py --- a/rpython/jit/tool/oparser_model.py +++ b/rpython/jit/tool/oparser_model.py @@ -29,7 +29,6 @@ def get_mock_model(): class MockLoopModel(object): - class TreeLoop(object): def __init__(self, name): self.name = name diff --git a/rpython/jit/tool/test/test_oparser.py b/rpython/jit/tool/test/test_oparser.py --- a/rpython/jit/tool/test/test_oparser.py +++ b/rpython/jit/tool/test/test_oparser.py @@ -29,7 +29,7 @@ assert [op.getopnum() for op in ops] == [rop.INT_ADD, rop.INT_SUB, rop.FINISH] assert len(loop.inputargs) == 2 - assert loop.operations[-1].getdescr() + assert ops[-1].getdescr() def test_const_ptr_subops(self): x = """ @@ -39,9 +39,10 @@ S = lltype.Struct('S') vtable = lltype.nullptr(S) loop = self.parse(x, None, locals()) - assert len(loop.operations) == 1 - assert loop.operations[0].getdescr() - assert loop.operations[0].getfailargs() == [] + ops = loop._get_operations() + assert len(ops) == 1 + assert ops[0].getdescr() + assert not ops[0].getfailargs() def test_descr(self): class Xyz(AbstractDescr): @@ -53,7 +54,8 @@ """ stuff = Xyz() loop = self.parse(x, None, locals()) - assert loop.operations[0].getdescr() is stuff + ops = loop._get_operations() + assert ops[0].getdescr() is stuff def test_after_fail(self): x = """ @@ -62,7 +64,7 @@ 
i1 = int_add(1, 2) """ loop = self.parse(x, None, {}) - assert len(loop.operations) == 2 + assert len(loop._get_operations()) == 2 def test_descr_setfield(self): class Xyz(AbstractDescr): @@ -74,7 +76,7 @@ """ stuff = Xyz() loop = self.parse(x, None, locals()) - assert loop.operations[0].getdescr() is stuff + assert loop._get_operations()[0].getdescr() is stuff def test_getvar_const_ptr(self): x = ''' @@ -84,7 +86,7 @@ TP = lltype.GcArray(lltype.Signed) NULL = lltype.cast_opaque_ptr(llmemory.GCREF, lltype.nullptr(TP)) loop = self.parse(x, None, {'func_ptr' : NULL}) - assert loop.operations[0].getarg(0).value == NULL + assert loop._get_operations()[0].getarg(0).value == NULL def test_jump_target(self): x = ''' @@ -92,7 +94,7 @@ jump() ''' loop = self.parse(x) - assert loop.operations[0].getdescr() is loop.original_jitcell_token + assert loop._get_operations()[0].getdescr() is loop.original_jitcell_token def test_jump_target_other(self): looptoken = JitCellToken() @@ -102,7 +104,7 @@ jump(descr=looptoken) ''' loop = self.parse(x, namespace=locals()) - assert loop.operations[0].getdescr() is looptoken + assert loop._get_operations()[0].getdescr() is looptoken def test_floats(self): x = ''' @@ -110,7 +112,7 @@ f1 = float_add(f0, 3.5) ''' loop = self.parse(x) - box = loop.operations[0].getarg(0) + box = loop._get_operations()[0].getarg(0) # we cannot use isinstance, because in case of mock the class will be # constructed on the fly assert box.__class__.__name__ == 'InputArgFloat' @@ -124,12 +126,13 @@ debug_merge_point(0, 0, '(stuff) #1') ''' loop = self.parse(x) - assert loop.operations[0].getarg(2)._get_str() == 'info' - assert loop.operations[0].getarg(1).value == 0 - assert loop.operations[1].getarg(2)._get_str() == 'info' - assert loop.operations[2].getarg(2)._get_str() == " info" - assert loop.operations[2].getarg(1).value == 1 - assert loop.operations[3].getarg(2)._get_str() == "(stuff) #1" + ops = loop._get_operations() + assert ops[0].getarg(2)._get_str() == 
'info' + assert ops[0].getarg(1).value == 0 + assert ops[1].getarg(2)._get_str() == 'info' + assert ops[2].getarg(2)._get_str() == " info" + assert ops[2].getarg(1).value == 1 + assert ops[3].getarg(2)._get_str() == "(stuff) #1" def test_descr_with_obj_print(self): @@ -165,7 +168,7 @@ p0 = new(, descr=) ''' loop = self.parse(x) - assert loop.operations[0].getopname() == 'new' + assert loop._get_operations()[0].getopname() == 'new' def test_no_fail_args(self): x = ''' @@ -173,7 +176,7 @@ guard_true(i0, descr=) ''' loop = self.parse(x, nonstrict=True) - assert loop.operations[0].getfailargs() == [] + assert not loop._get_operations()[0].getfailargs() def test_offsets(self): x = """ @@ -209,8 +212,9 @@ jump(i0, descr=1) """ loop = self.parse(x) - assert loop.operations[0].getdescr() is loop.operations[1].getdescr() - assert isinstance(loop.operations[0].getdescr(), TargetToken) + ops = loop._get_operations() + assert ops[0].getdescr() is ops[1].getdescr() + assert isinstance(ops[0].getdescr(), TargetToken) class ForbiddenModule(object): From pypy.commits at gmail.com Sun Feb 14 13:16:49 2016 From: pypy.commits at gmail.com (fijal) Date: Sun, 14 Feb 2016 10:16:49 -0800 (PST) Subject: [pypy-commit] pypy jit-leaner-frontend: small fix Message-ID: <56c0c491.2457c20a.460fe.ffffaa1a@mx.google.com> Author: fijal Branch: jit-leaner-frontend Changeset: r82245:5f6bc638d07b Date: 2016-02-14 16:25 +0100 http://bitbucket.org/pypy/pypy/changeset/5f6bc638d07b/ Log: small fix diff --git a/rpython/jit/metainterp/optimizeopt/test/test_optimizebasic.py b/rpython/jit/metainterp/optimizeopt/test/test_optimizebasic.py --- a/rpython/jit/metainterp/optimizeopt/test/test_optimizebasic.py +++ b/rpython/jit/metainterp/optimizeopt/test/test_optimizebasic.py @@ -18,6 +18,7 @@ index = 0 def test_store_final_boxes_in_guard(): + py.test.skip("needs to be rewritten") from rpython.jit.metainterp.compile import ResumeGuardDescr from rpython.jit.metainterp.resume import tag, TAGBOX b0 = InputArgInt() @@ 
-33,13 +34,13 @@ # opt.store_final_boxes_in_guard(op, []) fdescr = op.getdescr() - #if op.getfailargs() == [b0, b1]: - # assert list(fdescr.rd_numb.nums) == [tag(1, TAGBOX)] - # assert list(fdescr.rd_numb.prev.nums) == [tag(0, TAGBOX)] - #else: - # assert op.getfailargs() == [b1, b0] - # assert list(fdescr.rd_numb.nums) == [tag(0, TAGBOX)] - # assert list(fdescr.rd_numb.prev.nums) == [tag(1, TAGBOX)] + if op.getfailargs() == [b0, b1]: + assert list(fdescr.rd_numb.nums) == [tag(1, TAGBOX)] + assert list(fdescr.rd_numb.prev.nums) == [tag(0, TAGBOX)] + else: + assert op.getfailargs() == [b1, b0] + assert list(fdescr.rd_numb.nums) == [tag(0, TAGBOX)] + assert list(fdescr.rd_numb.prev.nums) == [tag(1, TAGBOX)] assert fdescr.rd_virtuals is None assert fdescr.rd_consts == [] diff --git a/rpython/jit/metainterp/optimizeopt/test/test_util.py b/rpython/jit/metainterp/optimizeopt/test/test_util.py --- a/rpython/jit/metainterp/optimizeopt/test/test_util.py +++ b/rpython/jit/metainterp/optimizeopt/test/test_util.py @@ -19,7 +19,7 @@ from rpython.jit.metainterp.counter import DeterministicJitCounter from rpython.config.translationoption import get_combined_translation_config from rpython.jit.metainterp.resoperation import (rop, ResOperation, - InputArgRef, AbstractValue) + InputArgRef, AbstractValue, OpHelpers) from rpython.jit.metainterp.optimizeopt.util import args_dict @@ -55,11 +55,11 @@ loop2 = pure_parse(ops, namespace=namespace) loop3 = pure_parse(ops.replace("i2 = int_add", "i2 = int_sub"), namespace=namespace) - assert equaloplists(loop1.operations, loop2.operations, + assert equaloplists(loop1._get_operations(), loop2._get_operations(), remap=make_remap(loop1.inputargs, loop2.inputargs)) py.test.raises(AssertionError, - "equaloplists(loop1.operations, loop3.operations," + "equaloplists(loop1._get_operations(), loop3._get_operations()," "remap=make_remap(loop1.inputargs, loop3.inputargs))") def test_equaloplists_fail_args(): @@ -484,7 +484,7 @@ class 
FakeJitCode(object): index = 0 - if op.is_guard(): + if OpHelpers.is_guard(op.getopnum()): op.rd_snapshot = resume.Snapshot(None, op.getfailargs()) op.rd_frame_info_list = resume.FrameInfo(None, FakeJitCode(), 11) diff --git a/rpython/jit/metainterp/optimizeopt/util.py b/rpython/jit/metainterp/optimizeopt/util.py --- a/rpython/jit/metainterp/optimizeopt/util.py +++ b/rpython/jit/metainterp/optimizeopt/util.py @@ -7,7 +7,7 @@ from rpython.rlib.objectmodel import we_are_translated from rpython.jit.metainterp import resoperation from rpython.rlib.debug import make_sure_not_resized -from rpython.jit.metainterp.resoperation import rop +from rpython.jit.metainterp.resoperation import rop, OpHelpers from rpython.jit.metainterp.resume import Snapshot, AccumInfo # ____________________________________________________________ @@ -154,7 +154,8 @@ else: if op1.type != 'v': remap[op2] = op1 - if op1.getopnum() not in [rop.JUMP, rop.LABEL, rop.FINISH] and not op1.is_guard(): + if (op1.getopnum() not in [rop.JUMP, rop.LABEL, rop.FINISH] and + not OpHelpers.is_guard(op1.getopnum())): assert op1.getdescr() == op2.getdescr() if op1.getfailargs() or op2.getfailargs(): assert len(op1.getfailargs()) == len(op2.getfailargs()) From pypy.commits at gmail.com Sun Feb 14 13:16:52 2016 From: pypy.commits at gmail.com (fijal) Date: Sun, 14 Feb 2016 10:16:52 -0800 (PST) Subject: [pypy-commit] pypy jit-leaner-frontend: merge default Message-ID: <56c0c494.c7d8c20a.d42cd.ffffae09@mx.google.com> Author: fijal Branch: jit-leaner-frontend Changeset: r82246:2ed3235da4bc Date: 2016-02-14 16:44 +0100 http://bitbucket.org/pypy/pypy/changeset/2ed3235da4bc/ Log: merge default diff too long, truncating to 2000 out of 35347 lines diff --git a/.gitignore b/.gitignore --- a/.gitignore +++ b/.gitignore @@ -29,4 +29,4 @@ release/ !pypy/tool/release/ rpython/_cache/ -__pycache__/ +.cache/ diff --git a/.hgignore b/.hgignore --- a/.hgignore +++ b/.hgignore @@ -75,6 +75,7 @@ ^lib_pypy/__pycache__$ 
^lib_pypy/ctypes_config_cache/_.+_cache\.py$ ^lib_pypy/ctypes_config_cache/_.+_.+_\.py$ +^lib_pypy/_libmpdec/.+.o$ ^rpython/translator/cli/query-descriptions$ ^pypy/doc/discussion/.+\.html$ ^include/.+\.h$ diff --git a/LICENSE b/LICENSE --- a/LICENSE +++ b/LICENSE @@ -28,7 +28,7 @@ DEALINGS IN THE SOFTWARE. -PyPy Copyright holders 2003-2015 +PyPy Copyright holders 2003-2016 ----------------------------------- Except when otherwise stated (look for LICENSE files or information at diff --git a/Makefile b/Makefile --- a/Makefile +++ b/Makefile @@ -1,5 +1,5 @@ -all: pypy-c +all: pypy-c cffi_imports PYPY_EXECUTABLE := $(shell which pypy) URAM := $(shell python -c "import sys; print 4.5 if sys.maxint>1<<32 else 2.5") @@ -10,6 +10,8 @@ RUNINTERP = $(PYPY_EXECUTABLE) endif +.PHONY: cffi_imports + pypy-c: @echo @echo "====================================================================" @@ -36,3 +38,6 @@ # replaced with an opaque --jobserver option by the time this Makefile # runs. We cannot get their original value either: # http://lists.gnu.org/archive/html/help-make/2010-08/msg00106.html + +cffi_imports: pypy-c + PYTHONPATH=. 
./pypy-c pypy/tool/build_cffi_imports.py diff --git a/dotviewer/drawgraph.py b/dotviewer/drawgraph.py --- a/dotviewer/drawgraph.py +++ b/dotviewer/drawgraph.py @@ -14,12 +14,661 @@ FONT = os.path.join(this_dir, 'font', 'DroidSans.ttf') FIXEDFONT = os.path.join(this_dir, 'font', 'DroidSansMono.ttf') COLOR = { - 'black': (0,0,0), - 'white': (255,255,255), - 'red': (255,0,0), - 'green': (0,255,0), - 'blue': (0,0,255), - 'yellow': (255,255,0), + 'aliceblue': (240, 248, 255), + 'antiquewhite': (250, 235, 215), + 'antiquewhite1': (255, 239, 219), + 'antiquewhite2': (238, 223, 204), + 'antiquewhite3': (205, 192, 176), + 'antiquewhite4': (139, 131, 120), + 'aquamarine': (127, 255, 212), + 'aquamarine1': (127, 255, 212), + 'aquamarine2': (118, 238, 198), + 'aquamarine3': (102, 205, 170), + 'aquamarine4': (69, 139, 116), + 'azure': (240, 255, 255), + 'azure1': (240, 255, 255), + 'azure2': (224, 238, 238), + 'azure3': (193, 205, 205), + 'azure4': (131, 139, 139), + 'beige': (245, 245, 220), + 'bisque': (255, 228, 196), + 'bisque1': (255, 228, 196), + 'bisque2': (238, 213, 183), + 'bisque3': (205, 183, 158), + 'bisque4': (139, 125, 107), + 'black': (0, 0, 0), + 'blanchedalmond': (255, 235, 205), + 'blue': (0, 0, 255), + 'blue1': (0, 0, 255), + 'blue2': (0, 0, 238), + 'blue3': (0, 0, 205), + 'blue4': (0, 0, 139), + 'blueviolet': (138, 43, 226), + 'brown': (165, 42, 42), + 'brown1': (255, 64, 64), + 'brown2': (238, 59, 59), + 'brown3': (205, 51, 51), + 'brown4': (139, 35, 35), + 'burlywood': (222, 184, 135), + 'burlywood1': (255, 211, 155), + 'burlywood2': (238, 197, 145), + 'burlywood3': (205, 170, 125), + 'burlywood4': (139, 115, 85), + 'cadetblue': (95, 158, 160), + 'cadetblue1': (152, 245, 255), + 'cadetblue2': (142, 229, 238), + 'cadetblue3': (122, 197, 205), + 'cadetblue4': (83, 134, 139), + 'chartreuse': (127, 255, 0), + 'chartreuse1': (127, 255, 0), + 'chartreuse2': (118, 238, 0), + 'chartreuse3': (102, 205, 0), + 'chartreuse4': (69, 139, 0), + 'chocolate': (210, 105, 
30), + 'chocolate1': (255, 127, 36), + 'chocolate2': (238, 118, 33), + 'chocolate3': (205, 102, 29), + 'chocolate4': (139, 69, 19), + 'coral': (255, 127, 80), + 'coral1': (255, 114, 86), + 'coral2': (238, 106, 80), + 'coral3': (205, 91, 69), + 'coral4': (139, 62, 47), + 'cornflowerblue': (100, 149, 237), + 'cornsilk': (255, 248, 220), + 'cornsilk1': (255, 248, 220), + 'cornsilk2': (238, 232, 205), + 'cornsilk3': (205, 200, 177), + 'cornsilk4': (139, 136, 120), + 'crimson': (220, 20, 60), + 'cyan': (0, 255, 255), + 'cyan1': (0, 255, 255), + 'cyan2': (0, 238, 238), + 'cyan3': (0, 205, 205), + 'cyan4': (0, 139, 139), + 'darkgoldenrod': (184, 134, 11), + 'darkgoldenrod1': (255, 185, 15), + 'darkgoldenrod2': (238, 173, 14), + 'darkgoldenrod3': (205, 149, 12), + 'darkgoldenrod4': (139, 101, 8), + 'darkgreen': (0, 100, 0), + 'darkkhaki': (189, 183, 107), + 'darkolivegreen': (85, 107, 47), + 'darkolivegreen1': (202, 255, 112), + 'darkolivegreen2': (188, 238, 104), + 'darkolivegreen3': (162, 205, 90), + 'darkolivegreen4': (110, 139, 61), + 'darkorange': (255, 140, 0), + 'darkorange1': (255, 127, 0), + 'darkorange2': (238, 118, 0), + 'darkorange3': (205, 102, 0), + 'darkorange4': (139, 69, 0), + 'darkorchid': (153, 50, 204), + 'darkorchid1': (191, 62, 255), + 'darkorchid2': (178, 58, 238), + 'darkorchid3': (154, 50, 205), + 'darkorchid4': (104, 34, 139), + 'darksalmon': (233, 150, 122), + 'darkseagreen': (143, 188, 143), + 'darkseagreen1': (193, 255, 193), + 'darkseagreen2': (180, 238, 180), + 'darkseagreen3': (155, 205, 155), + 'darkseagreen4': (105, 139, 105), + 'darkslateblue': (72, 61, 139), + 'darkslategray': (47, 79, 79), + 'darkslategray1': (151, 255, 255), + 'darkslategray2': (141, 238, 238), + 'darkslategray3': (121, 205, 205), + 'darkslategray4': (82, 139, 139), + 'darkslategrey': (47, 79, 79), + 'darkturquoise': (0, 206, 209), + 'darkviolet': (148, 0, 211), + 'deeppink': (255, 20, 147), + 'deeppink1': (255, 20, 147), + 'deeppink2': (238, 18, 137), + 'deeppink3': 
(205, 16, 118), + 'deeppink4': (139, 10, 80), + 'deepskyblue': (0, 191, 255), + 'deepskyblue1': (0, 191, 255), + 'deepskyblue2': (0, 178, 238), + 'deepskyblue3': (0, 154, 205), + 'deepskyblue4': (0, 104, 139), + 'dimgray': (105, 105, 105), + 'dimgrey': (105, 105, 105), + 'dodgerblue': (30, 144, 255), + 'dodgerblue1': (30, 144, 255), + 'dodgerblue2': (28, 134, 238), + 'dodgerblue3': (24, 116, 205), + 'dodgerblue4': (16, 78, 139), + 'firebrick': (178, 34, 34), + 'firebrick1': (255, 48, 48), + 'firebrick2': (238, 44, 44), + 'firebrick3': (205, 38, 38), + 'firebrick4': (139, 26, 26), + 'floralwhite': (255, 250, 240), + 'forestgreen': (34, 139, 34), + 'gainsboro': (220, 220, 220), + 'ghostwhite': (248, 248, 255), + 'gold': (255, 215, 0), + 'gold1': (255, 215, 0), + 'gold2': (238, 201, 0), + 'gold3': (205, 173, 0), + 'gold4': (139, 117, 0), + 'goldenrod': (218, 165, 32), + 'goldenrod1': (255, 193, 37), + 'goldenrod2': (238, 180, 34), + 'goldenrod3': (205, 155, 29), + 'goldenrod4': (139, 105, 20), + 'gray': (192, 192, 192), + 'gray0': (0, 0, 0), + 'gray1': (3, 3, 3), + 'gray10': (26, 26, 26), + 'gray100': (255, 255, 255), + 'gray11': (28, 28, 28), + 'gray12': (31, 31, 31), + 'gray13': (33, 33, 33), + 'gray14': (36, 36, 36), + 'gray15': (38, 38, 38), + 'gray16': (41, 41, 41), + 'gray17': (43, 43, 43), + 'gray18': (46, 46, 46), + 'gray19': (48, 48, 48), + 'gray2': (5, 5, 5), + 'gray20': (51, 51, 51), + 'gray21': (54, 54, 54), + 'gray22': (56, 56, 56), + 'gray23': (59, 59, 59), + 'gray24': (61, 61, 61), + 'gray25': (64, 64, 64), + 'gray26': (66, 66, 66), + 'gray27': (69, 69, 69), + 'gray28': (71, 71, 71), + 'gray29': (74, 74, 74), + 'gray3': (8, 8, 8), + 'gray30': (77, 77, 77), + 'gray31': (79, 79, 79), + 'gray32': (82, 82, 82), + 'gray33': (84, 84, 84), + 'gray34': (87, 87, 87), + 'gray35': (89, 89, 89), + 'gray36': (92, 92, 92), + 'gray37': (94, 94, 94), + 'gray38': (97, 97, 97), + 'gray39': (99, 99, 99), + 'gray4': (10, 10, 10), + 'gray40': (102, 102, 102), + 'gray41': 
(105, 105, 105), + 'gray42': (107, 107, 107), + 'gray43': (110, 110, 110), + 'gray44': (112, 112, 112), + 'gray45': (115, 115, 115), + 'gray46': (117, 117, 117), + 'gray47': (120, 120, 120), + 'gray48': (122, 122, 122), + 'gray49': (125, 125, 125), + 'gray5': (13, 13, 13), + 'gray50': (127, 127, 127), + 'gray51': (130, 130, 130), + 'gray52': (133, 133, 133), + 'gray53': (135, 135, 135), + 'gray54': (138, 138, 138), + 'gray55': (140, 140, 140), + 'gray56': (143, 143, 143), + 'gray57': (145, 145, 145), + 'gray58': (148, 148, 148), + 'gray59': (150, 150, 150), + 'gray6': (15, 15, 15), + 'gray60': (153, 153, 153), + 'gray61': (156, 156, 156), + 'gray62': (158, 158, 158), + 'gray63': (161, 161, 161), + 'gray64': (163, 163, 163), + 'gray65': (166, 166, 166), + 'gray66': (168, 168, 168), + 'gray67': (171, 171, 171), + 'gray68': (173, 173, 173), + 'gray69': (176, 176, 176), + 'gray7': (18, 18, 18), + 'gray70': (179, 179, 179), + 'gray71': (181, 181, 181), + 'gray72': (184, 184, 184), + 'gray73': (186, 186, 186), + 'gray74': (189, 189, 189), + 'gray75': (191, 191, 191), + 'gray76': (194, 194, 194), + 'gray77': (196, 196, 196), + 'gray78': (199, 199, 199), + 'gray79': (201, 201, 201), + 'gray8': (20, 20, 20), + 'gray80': (204, 204, 204), + 'gray81': (207, 207, 207), + 'gray82': (209, 209, 209), + 'gray83': (212, 212, 212), + 'gray84': (214, 214, 214), + 'gray85': (217, 217, 217), + 'gray86': (219, 219, 219), + 'gray87': (222, 222, 222), + 'gray88': (224, 224, 224), + 'gray89': (227, 227, 227), + 'gray9': (23, 23, 23), + 'gray90': (229, 229, 229), + 'gray91': (232, 232, 232), + 'gray92': (235, 235, 235), + 'gray93': (237, 237, 237), + 'gray94': (240, 240, 240), + 'gray95': (242, 242, 242), + 'gray96': (245, 245, 245), + 'gray97': (247, 247, 247), + 'gray98': (250, 250, 250), + 'gray99': (252, 252, 252), + 'green': (0, 255, 0), + 'green1': (0, 255, 0), + 'green2': (0, 238, 0), + 'green3': (0, 205, 0), + 'green4': (0, 139, 0), + 'greenyellow': (173, 255, 47), + 'grey': (192, 
192, 192), + 'grey0': (0, 0, 0), + 'grey1': (3, 3, 3), + 'grey10': (26, 26, 26), + 'grey100': (255, 255, 255), + 'grey11': (28, 28, 28), + 'grey12': (31, 31, 31), + 'grey13': (33, 33, 33), + 'grey14': (36, 36, 36), + 'grey15': (38, 38, 38), + 'grey16': (41, 41, 41), + 'grey17': (43, 43, 43), + 'grey18': (46, 46, 46), + 'grey19': (48, 48, 48), + 'grey2': (5, 5, 5), + 'grey20': (51, 51, 51), + 'grey21': (54, 54, 54), + 'grey22': (56, 56, 56), + 'grey23': (59, 59, 59), + 'grey24': (61, 61, 61), + 'grey25': (64, 64, 64), + 'grey26': (66, 66, 66), + 'grey27': (69, 69, 69), + 'grey28': (71, 71, 71), + 'grey29': (74, 74, 74), + 'grey3': (8, 8, 8), + 'grey30': (77, 77, 77), + 'grey31': (79, 79, 79), + 'grey32': (82, 82, 82), + 'grey33': (84, 84, 84), + 'grey34': (87, 87, 87), + 'grey35': (89, 89, 89), + 'grey36': (92, 92, 92), + 'grey37': (94, 94, 94), + 'grey38': (97, 97, 97), + 'grey39': (99, 99, 99), + 'grey4': (10, 10, 10), + 'grey40': (102, 102, 102), + 'grey41': (105, 105, 105), + 'grey42': (107, 107, 107), + 'grey43': (110, 110, 110), + 'grey44': (112, 112, 112), + 'grey45': (115, 115, 115), + 'grey46': (117, 117, 117), + 'grey47': (120, 120, 120), + 'grey48': (122, 122, 122), + 'grey49': (125, 125, 125), + 'grey5': (13, 13, 13), + 'grey50': (127, 127, 127), + 'grey51': (130, 130, 130), + 'grey52': (133, 133, 133), + 'grey53': (135, 135, 135), + 'grey54': (138, 138, 138), + 'grey55': (140, 140, 140), + 'grey56': (143, 143, 143), + 'grey57': (145, 145, 145), + 'grey58': (148, 148, 148), + 'grey59': (150, 150, 150), + 'grey6': (15, 15, 15), + 'grey60': (153, 153, 153), + 'grey61': (156, 156, 156), + 'grey62': (158, 158, 158), + 'grey63': (161, 161, 161), + 'grey64': (163, 163, 163), + 'grey65': (166, 166, 166), + 'grey66': (168, 168, 168), + 'grey67': (171, 171, 171), + 'grey68': (173, 173, 173), + 'grey69': (176, 176, 176), + 'grey7': (18, 18, 18), + 'grey70': (179, 179, 179), + 'grey71': (181, 181, 181), + 'grey72': (184, 184, 184), + 'grey73': (186, 186, 186), + 
'grey74': (189, 189, 189), + 'grey75': (191, 191, 191), + 'grey76': (194, 194, 194), + 'grey77': (196, 196, 196), + 'grey78': (199, 199, 199), + 'grey79': (201, 201, 201), + 'grey8': (20, 20, 20), + 'grey80': (204, 204, 204), + 'grey81': (207, 207, 207), + 'grey82': (209, 209, 209), + 'grey83': (212, 212, 212), + 'grey84': (214, 214, 214), + 'grey85': (217, 217, 217), + 'grey86': (219, 219, 219), + 'grey87': (222, 222, 222), + 'grey88': (224, 224, 224), + 'grey89': (227, 227, 227), + 'grey9': (23, 23, 23), + 'grey90': (229, 229, 229), + 'grey91': (232, 232, 232), + 'grey92': (235, 235, 235), + 'grey93': (237, 237, 237), + 'grey94': (240, 240, 240), + 'grey95': (242, 242, 242), + 'grey96': (245, 245, 245), + 'grey97': (247, 247, 247), + 'grey98': (250, 250, 250), + 'grey99': (252, 252, 252), + 'honeydew': (240, 255, 240), + 'honeydew1': (240, 255, 240), + 'honeydew2': (224, 238, 224), + 'honeydew3': (193, 205, 193), + 'honeydew4': (131, 139, 131), + 'hotpink': (255, 105, 180), + 'hotpink1': (255, 110, 180), + 'hotpink2': (238, 106, 167), + 'hotpink3': (205, 96, 144), + 'hotpink4': (139, 58, 98), + 'indianred': (205, 92, 92), + 'indianred1': (255, 106, 106), + 'indianred2': (238, 99, 99), + 'indianred3': (205, 85, 85), + 'indianred4': (139, 58, 58), + 'indigo': (75, 0, 130), + 'invis': (255, 255, 254), + 'ivory': (255, 255, 240), + 'ivory1': (255, 255, 240), + 'ivory2': (238, 238, 224), + 'ivory3': (205, 205, 193), + 'ivory4': (139, 139, 131), + 'khaki': (240, 230, 140), + 'khaki1': (255, 246, 143), + 'khaki2': (238, 230, 133), + 'khaki3': (205, 198, 115), + 'khaki4': (139, 134, 78), + 'lavender': (230, 230, 250), + 'lavenderblush': (255, 240, 245), + 'lavenderblush1': (255, 240, 245), + 'lavenderblush2': (238, 224, 229), + 'lavenderblush3': (205, 193, 197), + 'lavenderblush4': (139, 131, 134), + 'lawngreen': (124, 252, 0), + 'lemonchiffon': (255, 250, 205), + 'lemonchiffon1': (255, 250, 205), + 'lemonchiffon2': (238, 233, 191), + 'lemonchiffon3': (205, 201, 165), + 
'lemonchiffon4': (139, 137, 112), + 'lightblue': (173, 216, 230), + 'lightblue1': (191, 239, 255), + 'lightblue2': (178, 223, 238), + 'lightblue3': (154, 192, 205), + 'lightblue4': (104, 131, 139), + 'lightcoral': (240, 128, 128), + 'lightcyan': (224, 255, 255), + 'lightcyan1': (224, 255, 255), + 'lightcyan2': (209, 238, 238), + 'lightcyan3': (180, 205, 205), + 'lightcyan4': (122, 139, 139), + 'lightgoldenrod': (238, 221, 130), + 'lightgoldenrod1': (255, 236, 139), + 'lightgoldenrod2': (238, 220, 130), + 'lightgoldenrod3': (205, 190, 112), + 'lightgoldenrod4': (139, 129, 76), + 'lightgoldenrodyellow': (250, 250, 210), + 'lightgray': (211, 211, 211), + 'lightgrey': (211, 211, 211), + 'lightpink': (255, 182, 193), + 'lightpink1': (255, 174, 185), + 'lightpink2': (238, 162, 173), + 'lightpink3': (205, 140, 149), + 'lightpink4': (139, 95, 101), + 'lightsalmon': (255, 160, 122), + 'lightsalmon1': (255, 160, 122), + 'lightsalmon2': (238, 149, 114), + 'lightsalmon3': (205, 129, 98), + 'lightsalmon4': (139, 87, 66), + 'lightseagreen': (32, 178, 170), + 'lightskyblue': (135, 206, 250), + 'lightskyblue1': (176, 226, 255), + 'lightskyblue2': (164, 211, 238), + 'lightskyblue3': (141, 182, 205), + 'lightskyblue4': (96, 123, 139), + 'lightslateblue': (132, 112, 255), + 'lightslategray': (119, 136, 153), + 'lightslategrey': (119, 136, 153), + 'lightsteelblue': (176, 196, 222), + 'lightsteelblue1': (202, 225, 255), + 'lightsteelblue2': (188, 210, 238), + 'lightsteelblue3': (162, 181, 205), + 'lightsteelblue4': (110, 123, 139), + 'lightyellow': (255, 255, 224), + 'lightyellow1': (255, 255, 224), + 'lightyellow2': (238, 238, 209), + 'lightyellow3': (205, 205, 180), + 'lightyellow4': (139, 139, 122), + 'limegreen': (50, 205, 50), + 'linen': (250, 240, 230), + 'magenta': (255, 0, 255), + 'magenta1': (255, 0, 255), + 'magenta2': (238, 0, 238), + 'magenta3': (205, 0, 205), + 'magenta4': (139, 0, 139), + 'maroon': (176, 48, 96), + 'maroon1': (255, 52, 179), + 'maroon2': (238, 48, 167), + 
'maroon3': (205, 41, 144), + 'maroon4': (139, 28, 98), + 'mediumaquamarine': (102, 205, 170), + 'mediumblue': (0, 0, 205), + 'mediumorchid': (186, 85, 211), + 'mediumorchid1': (224, 102, 255), + 'mediumorchid2': (209, 95, 238), + 'mediumorchid3': (180, 82, 205), + 'mediumorchid4': (122, 55, 139), + 'mediumpurple': (147, 112, 219), + 'mediumpurple1': (171, 130, 255), + 'mediumpurple2': (159, 121, 238), + 'mediumpurple3': (137, 104, 205), + 'mediumpurple4': (93, 71, 139), + 'mediumseagreen': (60, 179, 113), + 'mediumslateblue': (123, 104, 238), + 'mediumspringgreen': (0, 250, 154), + 'mediumturquoise': (72, 209, 204), + 'mediumvioletred': (199, 21, 133), + 'midnightblue': (25, 25, 112), + 'mintcream': (245, 255, 250), + 'mistyrose': (255, 228, 225), + 'mistyrose1': (255, 228, 225), + 'mistyrose2': (238, 213, 210), + 'mistyrose3': (205, 183, 181), + 'mistyrose4': (139, 125, 123), + 'moccasin': (255, 228, 181), + 'navajowhite': (255, 222, 173), + 'navajowhite1': (255, 222, 173), + 'navajowhite2': (238, 207, 161), + 'navajowhite3': (205, 179, 139), + 'navajowhite4': (139, 121, 94), + 'navy': (0, 0, 128), + 'navyblue': (0, 0, 128), + 'none': (255, 255, 254), + 'oldlace': (253, 245, 230), + 'olivedrab': (107, 142, 35), + 'olivedrab1': (192, 255, 62), + 'olivedrab2': (179, 238, 58), + 'olivedrab3': (154, 205, 50), + 'olivedrab4': (105, 139, 34), + 'orange': (255, 165, 0), + 'orange1': (255, 165, 0), + 'orange2': (238, 154, 0), + 'orange3': (205, 133, 0), + 'orange4': (139, 90, 0), + 'orangered': (255, 69, 0), + 'orangered1': (255, 69, 0), + 'orangered2': (238, 64, 0), + 'orangered3': (205, 55, 0), + 'orangered4': (139, 37, 0), + 'orchid': (218, 112, 214), + 'orchid1': (255, 131, 250), + 'orchid2': (238, 122, 233), + 'orchid3': (205, 105, 201), + 'orchid4': (139, 71, 137), + 'palegoldenrod': (238, 232, 170), + 'palegreen': (152, 251, 152), + 'palegreen1': (154, 255, 154), + 'palegreen2': (144, 238, 144), + 'palegreen3': (124, 205, 124), + 'palegreen4': (84, 139, 84), + 
'paleturquoise': (175, 238, 238), + 'paleturquoise1': (187, 255, 255), + 'paleturquoise2': (174, 238, 238), + 'paleturquoise3': (150, 205, 205), + 'paleturquoise4': (102, 139, 139), + 'palevioletred': (219, 112, 147), + 'palevioletred1': (255, 130, 171), + 'palevioletred2': (238, 121, 159), + 'palevioletred3': (205, 104, 137), + 'palevioletred4': (139, 71, 93), + 'papayawhip': (255, 239, 213), + 'peachpuff': (255, 218, 185), + 'peachpuff1': (255, 218, 185), + 'peachpuff2': (238, 203, 173), + 'peachpuff3': (205, 175, 149), + 'peachpuff4': (139, 119, 101), + 'peru': (205, 133, 63), + 'pink': (255, 192, 203), + 'pink1': (255, 181, 197), + 'pink2': (238, 169, 184), + 'pink3': (205, 145, 158), + 'pink4': (139, 99, 108), + 'plum': (221, 160, 221), + 'plum1': (255, 187, 255), + 'plum2': (238, 174, 238), + 'plum3': (205, 150, 205), + 'plum4': (139, 102, 139), + 'powderblue': (176, 224, 230), + 'purple': (160, 32, 240), + 'purple1': (155, 48, 255), + 'purple2': (145, 44, 238), + 'purple3': (125, 38, 205), + 'purple4': (85, 26, 139), + 'red': (255, 0, 0), + 'red1': (255, 0, 0), + 'red2': (238, 0, 0), + 'red3': (205, 0, 0), + 'red4': (139, 0, 0), + 'rosybrown': (188, 143, 143), + 'rosybrown1': (255, 193, 193), + 'rosybrown2': (238, 180, 180), + 'rosybrown3': (205, 155, 155), + 'rosybrown4': (139, 105, 105), + 'royalblue': (65, 105, 225), + 'royalblue1': (72, 118, 255), + 'royalblue2': (67, 110, 238), + 'royalblue3': (58, 95, 205), + 'royalblue4': (39, 64, 139), + 'saddlebrown': (139, 69, 19), + 'salmon': (250, 128, 114), + 'salmon1': (255, 140, 105), + 'salmon2': (238, 130, 98), + 'salmon3': (205, 112, 84), + 'salmon4': (139, 76, 57), + 'sandybrown': (244, 164, 96), + 'seagreen': (46, 139, 87), + 'seagreen1': (84, 255, 159), + 'seagreen2': (78, 238, 148), + 'seagreen3': (67, 205, 128), + 'seagreen4': (46, 139, 87), + 'seashell': (255, 245, 238), + 'seashell1': (255, 245, 238), + 'seashell2': (238, 229, 222), + 'seashell3': (205, 197, 191), + 'seashell4': (139, 134, 130), + 
'sienna': (160, 82, 45), + 'sienna1': (255, 130, 71), + 'sienna2': (238, 121, 66), + 'sienna3': (205, 104, 57), + 'sienna4': (139, 71, 38), + 'skyblue': (135, 206, 235), + 'skyblue1': (135, 206, 255), + 'skyblue2': (126, 192, 238), + 'skyblue3': (108, 166, 205), + 'skyblue4': (74, 112, 139), + 'slateblue': (106, 90, 205), + 'slateblue1': (131, 111, 255), + 'slateblue2': (122, 103, 238), + 'slateblue3': (105, 89, 205), + 'slateblue4': (71, 60, 139), + 'slategray': (112, 128, 144), + 'slategray1': (198, 226, 255), + 'slategray2': (185, 211, 238), + 'slategray3': (159, 182, 205), + 'slategray4': (108, 123, 139), + 'slategrey': (112, 128, 144), + 'snow': (255, 250, 250), + 'snow1': (255, 250, 250), + 'snow2': (238, 233, 233), + 'snow3': (205, 201, 201), + 'snow4': (139, 137, 137), + 'springgreen': (0, 255, 127), + 'springgreen1': (0, 255, 127), + 'springgreen2': (0, 238, 118), + 'springgreen3': (0, 205, 102), + 'springgreen4': (0, 139, 69), + 'steelblue': (70, 130, 180), + 'steelblue1': (99, 184, 255), + 'steelblue2': (92, 172, 238), + 'steelblue3': (79, 148, 205), + 'steelblue4': (54, 100, 139), + 'tan': (210, 180, 140), + 'tan1': (255, 165, 79), + 'tan2': (238, 154, 73), + 'tan3': (205, 133, 63), + 'tan4': (139, 90, 43), + 'thistle': (216, 191, 216), + 'thistle1': (255, 225, 255), + 'thistle2': (238, 210, 238), + 'thistle3': (205, 181, 205), + 'thistle4': (139, 123, 139), + 'tomato': (255, 99, 71), + 'tomato1': (255, 99, 71), + 'tomato2': (238, 92, 66), + 'tomato3': (205, 79, 57), + 'tomato4': (139, 54, 38), + 'transparent': (255, 255, 254), + 'turquoise': (64, 224, 208), + 'turquoise1': (0, 245, 255), + 'turquoise2': (0, 229, 238), + 'turquoise3': (0, 197, 205), + 'turquoise4': (0, 134, 139), + 'violet': (238, 130, 238), + 'violetred': (208, 32, 144), + 'violetred1': (255, 62, 150), + 'violetred2': (238, 58, 140), + 'violetred3': (205, 50, 120), + 'violetred4': (139, 34, 82), + 'wheat': (245, 222, 179), + 'wheat1': (255, 231, 186), + 'wheat2': (238, 216, 174), + 
'wheat3': (205, 186, 150), + 'wheat4': (139, 126, 102), + 'white': (255, 255, 255), + 'whitesmoke': (245, 245, 245), + 'yellow': (255, 255, 0), + 'yellow1': (255, 255, 0), + 'yellow2': (238, 238, 0), + 'yellow3': (205, 205, 0), + 'yellow4': (139, 139, 0), + 'yellowgreen': (154, 205, 50), } re_nonword=re.compile(r'([^0-9a-zA-Z_.]+)') re_linewidth=re.compile(r'setlinewidth\((\d+(\.\d*)?|\.\d+)\)') diff --git a/lib-python/2.7/distutils/command/build_ext.py b/lib-python/2.7/distutils/command/build_ext.py --- a/lib-python/2.7/distutils/command/build_ext.py +++ b/lib-python/2.7/distutils/command/build_ext.py @@ -685,13 +685,17 @@ # the previous version of this code did. This should work for # CPython too. The point is that on PyPy with cpyext, the # config var 'SO' is just ".so" but we want to return - # ".pypy-VERSION.so" instead. - so_ext = _get_c_extension_suffix() + # ".pypy-VERSION.so" instead. Note a further tweak for cffi's + # embedding mode: if EXT_SUFFIX is also defined, use that + # directly. + so_ext = get_config_var('EXT_SUFFIX') if so_ext is None: - so_ext = get_config_var('SO') # fall-back - # extensions in debug_mode are named 'module_d.pyd' under windows - if os.name == 'nt' and self.debug: - so_ext = '_d.pyd' + so_ext = _get_c_extension_suffix() + if so_ext is None: + so_ext = get_config_var('SO') # fall-back + # extensions in debug_mode are named 'module_d.pyd' under windows + if os.name == 'nt' and self.debug: + so_ext = '_d.pyd' return os.path.join(*ext_path) + so_ext def get_export_symbols (self, ext): diff --git a/lib-python/2.7/pickle.py b/lib-python/2.7/pickle.py --- a/lib-python/2.7/pickle.py +++ b/lib-python/2.7/pickle.py @@ -1376,6 +1376,7 @@ def decode_long(data): r"""Decode a long from a two's complement little-endian binary string. + This is overriden on PyPy by a RPython version that has linear complexity. 
>>> decode_long('') 0L @@ -1402,6 +1403,11 @@ n -= 1L << (nbytes * 8) return n +try: + from __pypy__ import decode_long +except ImportError: + pass + # Shorthands try: diff --git a/lib-python/2.7/sysconfig.py b/lib-python/2.7/sysconfig.py --- a/lib-python/2.7/sysconfig.py +++ b/lib-python/2.7/sysconfig.py @@ -524,6 +524,13 @@ import _osx_support _osx_support.customize_config_vars(_CONFIG_VARS) + # PyPy: + import imp + for suffix, mode, type_ in imp.get_suffixes(): + if type_ == imp.C_EXTENSION: + _CONFIG_VARS['SOABI'] = suffix.split('.')[1] + break + if args: vals = [] for name in args: diff --git a/lib-python/2.7/test/capath/0e4015b9.0 b/lib-python/2.7/test/capath/0e4015b9.0 new file mode 100644 --- /dev/null +++ b/lib-python/2.7/test/capath/0e4015b9.0 @@ -0,0 +1,16 @@ +-----BEGIN CERTIFICATE----- +MIIClTCCAf6gAwIBAgIJAKGU95wKR8pTMA0GCSqGSIb3DQEBBQUAMHAxCzAJBgNV +BAYTAlhZMRcwFQYDVQQHDA5DYXN0bGUgQW50aHJheDEjMCEGA1UECgwaUHl0aG9u +IFNvZnR3YXJlIEZvdW5kYXRpb24xIzAhBgNVBAMMGnNlbGYtc2lnbmVkLnB5dGhv +bnRlc3QubmV0MB4XDTE0MTEwMjE4MDkyOVoXDTI0MTAzMDE4MDkyOVowcDELMAkG +A1UEBhMCWFkxFzAVBgNVBAcMDkNhc3RsZSBBbnRocmF4MSMwIQYDVQQKDBpQeXRo +b24gU29mdHdhcmUgRm91bmRhdGlvbjEjMCEGA1UEAwwac2VsZi1zaWduZWQucHl0 +aG9udGVzdC5uZXQwgZ8wDQYJKoZIhvcNAQEBBQADgY0AMIGJAoGBANDXQXW9tjyZ +Xt0Iv2tLL1+jinr4wGg36ioLDLFkMf+2Y1GL0v0BnKYG4N1OKlAU15LXGeGer8vm +Sv/yIvmdrELvhAbbo3w4a9TMYQA4XkIVLdvu3mvNOAet+8PMJxn26dbDhG809ALv +EHY57lQsBS3G59RZyBPVqAqmImWNJnVzAgMBAAGjNzA1MCUGA1UdEQQeMByCGnNl +bGYtc2lnbmVkLnB5dGhvbnRlc3QubmV0MAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcN +AQEFBQADgYEAIuzAhgMouJpNdf3URCHIineyoSt6WK/9+eyUcjlKOrDoXNZaD72h +TXMeKYoWvJyVcSLKL8ckPtDobgP2OTt0UkyAaj0n+ZHaqq1lH2yVfGUA1ILJv515 +C8BqbvVZuqm3i7ygmw3bqE/lYMgOrYtXXnqOrz6nvsE6Yc9V9rFflOM= +-----END CERTIFICATE----- diff --git a/lib-python/2.7/test/capath/ce7b8643.0 b/lib-python/2.7/test/capath/ce7b8643.0 new file mode 100644 --- /dev/null +++ b/lib-python/2.7/test/capath/ce7b8643.0 @@ -0,0 +1,16 @@ +-----BEGIN CERTIFICATE----- 
+MIIClTCCAf6gAwIBAgIJAKGU95wKR8pTMA0GCSqGSIb3DQEBBQUAMHAxCzAJBgNV +BAYTAlhZMRcwFQYDVQQHDA5DYXN0bGUgQW50aHJheDEjMCEGA1UECgwaUHl0aG9u +IFNvZnR3YXJlIEZvdW5kYXRpb24xIzAhBgNVBAMMGnNlbGYtc2lnbmVkLnB5dGhv +bnRlc3QubmV0MB4XDTE0MTEwMjE4MDkyOVoXDTI0MTAzMDE4MDkyOVowcDELMAkG +A1UEBhMCWFkxFzAVBgNVBAcMDkNhc3RsZSBBbnRocmF4MSMwIQYDVQQKDBpQeXRo +b24gU29mdHdhcmUgRm91bmRhdGlvbjEjMCEGA1UEAwwac2VsZi1zaWduZWQucHl0 +aG9udGVzdC5uZXQwgZ8wDQYJKoZIhvcNAQEBBQADgY0AMIGJAoGBANDXQXW9tjyZ +Xt0Iv2tLL1+jinr4wGg36ioLDLFkMf+2Y1GL0v0BnKYG4N1OKlAU15LXGeGer8vm +Sv/yIvmdrELvhAbbo3w4a9TMYQA4XkIVLdvu3mvNOAet+8PMJxn26dbDhG809ALv +EHY57lQsBS3G59RZyBPVqAqmImWNJnVzAgMBAAGjNzA1MCUGA1UdEQQeMByCGnNl +bGYtc2lnbmVkLnB5dGhvbnRlc3QubmV0MAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcN +AQEFBQADgYEAIuzAhgMouJpNdf3URCHIineyoSt6WK/9+eyUcjlKOrDoXNZaD72h +TXMeKYoWvJyVcSLKL8ckPtDobgP2OTt0UkyAaj0n+ZHaqq1lH2yVfGUA1ILJv515 +C8BqbvVZuqm3i7ygmw3bqE/lYMgOrYtXXnqOrz6nvsE6Yc9V9rFflOM= +-----END CERTIFICATE----- diff --git a/lib-python/2.7/test/https_svn_python_org_root.pem b/lib-python/2.7/test/https_svn_python_org_root.pem deleted file mode 100644 --- a/lib-python/2.7/test/https_svn_python_org_root.pem +++ /dev/null @@ -1,41 +0,0 @@ ------BEGIN CERTIFICATE----- -MIIHPTCCBSWgAwIBAgIBADANBgkqhkiG9w0BAQQFADB5MRAwDgYDVQQKEwdSb290 -IENBMR4wHAYDVQQLExVodHRwOi8vd3d3LmNhY2VydC5vcmcxIjAgBgNVBAMTGUNB -IENlcnQgU2lnbmluZyBBdXRob3JpdHkxITAfBgkqhkiG9w0BCQEWEnN1cHBvcnRA -Y2FjZXJ0Lm9yZzAeFw0wMzAzMzAxMjI5NDlaFw0zMzAzMjkxMjI5NDlaMHkxEDAO -BgNVBAoTB1Jvb3QgQ0ExHjAcBgNVBAsTFWh0dHA6Ly93d3cuY2FjZXJ0Lm9yZzEi -MCAGA1UEAxMZQ0EgQ2VydCBTaWduaW5nIEF1dGhvcml0eTEhMB8GCSqGSIb3DQEJ -ARYSc3VwcG9ydEBjYWNlcnQub3JnMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIIC -CgKCAgEAziLA4kZ97DYoB1CW8qAzQIxL8TtmPzHlawI229Z89vGIj053NgVBlfkJ -8BLPRoZzYLdufujAWGSuzbCtRRcMY/pnCujW0r8+55jE8Ez64AO7NV1sId6eINm6 -zWYyN3L69wj1x81YyY7nDl7qPv4coRQKFWyGhFtkZip6qUtTefWIonvuLwphK42y -fk1WpRPs6tqSnqxEQR5YYGUFZvjARL3LlPdCfgv3ZWiYUQXw8wWRBB0bF4LsyFe7 -w2t6iPGwcswlWyCR7BYCEo8y6RcYSNDHBS4CMEK4JZwFaz+qOqfrU0j36NK2B5jc 
-G8Y0f3/JHIJ6BVgrCFvzOKKrF11myZjXnhCLotLddJr3cQxyYN/Nb5gznZY0dj4k -epKwDpUeb+agRThHqtdB7Uq3EvbXG4OKDy7YCbZZ16oE/9KTfWgu3YtLq1i6L43q -laegw1SJpfvbi1EinbLDvhG+LJGGi5Z4rSDTii8aP8bQUWWHIbEZAWV/RRyH9XzQ -QUxPKZgh/TMfdQwEUfoZd9vUFBzugcMd9Zi3aQaRIt0AUMyBMawSB3s42mhb5ivU -fslfrejrckzzAeVLIL+aplfKkQABi6F1ITe1Yw1nPkZPcCBnzsXWWdsC4PDSy826 -YreQQejdIOQpvGQpQsgi3Hia/0PsmBsJUUtaWsJx8cTLc6nloQsCAwEAAaOCAc4w -ggHKMB0GA1UdDgQWBBQWtTIb1Mfz4OaO873SsDrusjkY0TCBowYDVR0jBIGbMIGY -gBQWtTIb1Mfz4OaO873SsDrusjkY0aF9pHsweTEQMA4GA1UEChMHUm9vdCBDQTEe -MBwGA1UECxMVaHR0cDovL3d3dy5jYWNlcnQub3JnMSIwIAYDVQQDExlDQSBDZXJ0 -IFNpZ25pbmcgQXV0aG9yaXR5MSEwHwYJKoZIhvcNAQkBFhJzdXBwb3J0QGNhY2Vy -dC5vcmeCAQAwDwYDVR0TAQH/BAUwAwEB/zAyBgNVHR8EKzApMCegJaAjhiFodHRw -czovL3d3dy5jYWNlcnQub3JnL3Jldm9rZS5jcmwwMAYJYIZIAYb4QgEEBCMWIWh0 -dHBzOi8vd3d3LmNhY2VydC5vcmcvcmV2b2tlLmNybDA0BglghkgBhvhCAQgEJxYl -aHR0cDovL3d3dy5jYWNlcnQub3JnL2luZGV4LnBocD9pZD0xMDBWBglghkgBhvhC -AQ0ESRZHVG8gZ2V0IHlvdXIgb3duIGNlcnRpZmljYXRlIGZvciBGUkVFIGhlYWQg -b3ZlciB0byBodHRwOi8vd3d3LmNhY2VydC5vcmcwDQYJKoZIhvcNAQEEBQADggIB -ACjH7pyCArpcgBLKNQodgW+JapnM8mgPf6fhjViVPr3yBsOQWqy1YPaZQwGjiHCc -nWKdpIevZ1gNMDY75q1I08t0AoZxPuIrA2jxNGJARjtT6ij0rPtmlVOKTV39O9lg -18p5aTuxZZKmxoGCXJzN600BiqXfEVWqFcofN8CCmHBh22p8lqOOLlQ+TyGpkO/c -gr/c6EWtTZBzCDyUZbAEmXZ/4rzCahWqlwQ3JNgelE5tDlG+1sSPypZt90Pf6DBl -Jzt7u0NDY8RD97LsaMzhGY4i+5jhe1o+ATc7iwiwovOVThrLm82asduycPAtStvY -sONvRUgzEv/+PDIqVPfE94rwiCPCR/5kenHA0R6mY7AHfqQv0wGP3J8rtsYIqQ+T -SCX8Ev2fQtzzxD72V7DX3WnRBnc0CkvSyqD/HMaMyRa+xMwyN2hzXwj7UfdJUzYF -CpUCTPJ5GhD22Dp1nPMd8aINcGeGG7MW9S/lpOt5hvk9C8JzC6WZrG/8Z7jlLwum -GCSNe9FINSkYQKyTYOGWhlC0elnYjyELn8+CkcY7v2vcB5G5l1YjqrZslMZIBjzk -zk6q5PYvCdxTby78dOs6Y5nCpqyJvKeyRKANihDjbPIky/qbn3BHLt4Ui9SyIAmW -omTxJBzcoTWcFbLUvFUufQb1nA5V9FrWk9p2rSVzTMVD ------END CERTIFICATE----- diff --git a/lib-python/2.7/test/selfsigned_pythontestdotnet.pem b/lib-python/2.7/test/selfsigned_pythontestdotnet.pem --- a/lib-python/2.7/test/selfsigned_pythontestdotnet.pem +++ 
b/lib-python/2.7/test/selfsigned_pythontestdotnet.pem @@ -1,5 +1,5 @@ -----BEGIN CERTIFICATE----- -MIIChzCCAfCgAwIBAgIJAKGU95wKR8pSMA0GCSqGSIb3DQEBBQUAMHAxCzAJBgNV +MIIClTCCAf6gAwIBAgIJAKGU95wKR8pTMA0GCSqGSIb3DQEBBQUAMHAxCzAJBgNV BAYTAlhZMRcwFQYDVQQHDA5DYXN0bGUgQW50aHJheDEjMCEGA1UECgwaUHl0aG9u IFNvZnR3YXJlIEZvdW5kYXRpb24xIzAhBgNVBAMMGnNlbGYtc2lnbmVkLnB5dGhv bnRlc3QubmV0MB4XDTE0MTEwMjE4MDkyOVoXDTI0MTAzMDE4MDkyOVowcDELMAkG @@ -8,9 +8,9 @@ aG9udGVzdC5uZXQwgZ8wDQYJKoZIhvcNAQEBBQADgY0AMIGJAoGBANDXQXW9tjyZ Xt0Iv2tLL1+jinr4wGg36ioLDLFkMf+2Y1GL0v0BnKYG4N1OKlAU15LXGeGer8vm Sv/yIvmdrELvhAbbo3w4a9TMYQA4XkIVLdvu3mvNOAet+8PMJxn26dbDhG809ALv -EHY57lQsBS3G59RZyBPVqAqmImWNJnVzAgMBAAGjKTAnMCUGA1UdEQQeMByCGnNl -bGYtc2lnbmVkLnB5dGhvbnRlc3QubmV0MA0GCSqGSIb3DQEBBQUAA4GBAIOXmdtM -eG9qzP9TiXW/Gc/zI4cBfdCpC+Y4gOfC9bQUC7hefix4iO3+iZjgy3X/FaRxUUoV -HKiXcXIaWqTSUWp45cSh0MbwZXudp6JIAptzdAhvvCrPKeC9i9GvxsPD4LtDAL97 -vSaxQBezA7hdxZd90/EeyMgVZgAnTCnvAWX9 +EHY57lQsBS3G59RZyBPVqAqmImWNJnVzAgMBAAGjNzA1MCUGA1UdEQQeMByCGnNl +bGYtc2lnbmVkLnB5dGhvbnRlc3QubmV0MAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcN +AQEFBQADgYEAIuzAhgMouJpNdf3URCHIineyoSt6WK/9+eyUcjlKOrDoXNZaD72h +TXMeKYoWvJyVcSLKL8ckPtDobgP2OTt0UkyAaj0n+ZHaqq1lH2yVfGUA1ILJv515 +C8BqbvVZuqm3i7ygmw3bqE/lYMgOrYtXXnqOrz6nvsE6Yc9V9rFflOM= -----END CERTIFICATE----- diff --git a/lib-python/2.7/test/test_ssl.py b/lib-python/2.7/test/test_ssl.py --- a/lib-python/2.7/test/test_ssl.py +++ b/lib-python/2.7/test/test_ssl.py @@ -57,7 +57,8 @@ SIGNED_CERTFILE2 = data_file("keycert4.pem") SIGNING_CA = data_file("pycacert.pem") -SVN_PYTHON_ORG_ROOT_CERT = data_file("https_svn_python_org_root.pem") +REMOTE_HOST = "self-signed.pythontest.net" +REMOTE_ROOT_CERT = data_file("selfsigned_pythontestdotnet.pem") EMPTYCERT = data_file("nullcert.pem") BADCERT = data_file("badcert.pem") @@ -244,7 +245,7 @@ self.assertEqual(p['subjectAltName'], san) def test_DER_to_PEM(self): - with open(SVN_PYTHON_ORG_ROOT_CERT, 'r') as f: + with open(CAFILE_CACERT, 'r') as f: pem = f.read() d1 = 
ssl.PEM_cert_to_DER_cert(pem) p2 = ssl.DER_cert_to_PEM_cert(d1) @@ -792,7 +793,7 @@ # Mismatching key and cert ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1) with self.assertRaisesRegexp(ssl.SSLError, "key values mismatch"): - ctx.load_cert_chain(SVN_PYTHON_ORG_ROOT_CERT, ONLYKEY) + ctx.load_cert_chain(CAFILE_CACERT, ONLYKEY) # Password protected key and cert ctx.load_cert_chain(CERTFILE_PROTECTED, password=KEY_PASSWORD) ctx.load_cert_chain(CERTFILE_PROTECTED, password=KEY_PASSWORD.encode()) @@ -1013,7 +1014,7 @@ ctx.load_verify_locations(CERTFILE) self.assertEqual(ctx.cert_store_stats(), {'x509_ca': 0, 'crl': 0, 'x509': 1}) - ctx.load_verify_locations(SVN_PYTHON_ORG_ROOT_CERT) + ctx.load_verify_locations(CAFILE_CACERT) self.assertEqual(ctx.cert_store_stats(), {'x509_ca': 1, 'crl': 0, 'x509': 2}) @@ -1023,8 +1024,8 @@ # CERTFILE is not flagged as X509v3 Basic Constraints: CA:TRUE ctx.load_verify_locations(CERTFILE) self.assertEqual(ctx.get_ca_certs(), []) - # but SVN_PYTHON_ORG_ROOT_CERT is a CA cert - ctx.load_verify_locations(SVN_PYTHON_ORG_ROOT_CERT) + # but CAFILE_CACERT is a CA cert + ctx.load_verify_locations(CAFILE_CACERT) self.assertEqual(ctx.get_ca_certs(), [{'issuer': ((('organizationName', 'Root CA'),), (('organizationalUnitName', 'http://www.cacert.org'),), @@ -1040,7 +1041,7 @@ (('emailAddress', 'support at cacert.org'),)), 'version': 3}]) - with open(SVN_PYTHON_ORG_ROOT_CERT) as f: + with open(CAFILE_CACERT) as f: pem = f.read() der = ssl.PEM_cert_to_DER_cert(pem) self.assertEqual(ctx.get_ca_certs(True), [der]) @@ -1215,11 +1216,11 @@ class NetworkedTests(unittest.TestCase): def test_connect(self): - with support.transient_internet("svn.python.org"): + with support.transient_internet(REMOTE_HOST): s = ssl.wrap_socket(socket.socket(socket.AF_INET), cert_reqs=ssl.CERT_NONE) try: - s.connect(("svn.python.org", 443)) + s.connect((REMOTE_HOST, 443)) self.assertEqual({}, s.getpeercert()) finally: s.close() @@ -1228,27 +1229,27 @@ s = 
ssl.wrap_socket(socket.socket(socket.AF_INET), cert_reqs=ssl.CERT_REQUIRED) self.assertRaisesRegexp(ssl.SSLError, "certificate verify failed", - s.connect, ("svn.python.org", 443)) + s.connect, (REMOTE_HOST, 443)) s.close() # this should succeed because we specify the root cert s = ssl.wrap_socket(socket.socket(socket.AF_INET), cert_reqs=ssl.CERT_REQUIRED, - ca_certs=SVN_PYTHON_ORG_ROOT_CERT) + ca_certs=REMOTE_ROOT_CERT) try: - s.connect(("svn.python.org", 443)) + s.connect((REMOTE_HOST, 443)) self.assertTrue(s.getpeercert()) finally: s.close() def test_connect_ex(self): # Issue #11326: check connect_ex() implementation - with support.transient_internet("svn.python.org"): + with support.transient_internet(REMOTE_HOST): s = ssl.wrap_socket(socket.socket(socket.AF_INET), cert_reqs=ssl.CERT_REQUIRED, - ca_certs=SVN_PYTHON_ORG_ROOT_CERT) + ca_certs=REMOTE_ROOT_CERT) try: - self.assertEqual(0, s.connect_ex(("svn.python.org", 443))) + self.assertEqual(0, s.connect_ex((REMOTE_HOST, 443))) self.assertTrue(s.getpeercert()) finally: s.close() @@ -1256,14 +1257,14 @@ def test_non_blocking_connect_ex(self): # Issue #11326: non-blocking connect_ex() should allow handshake # to proceed after the socket gets ready. - with support.transient_internet("svn.python.org"): + with support.transient_internet(REMOTE_HOST): s = ssl.wrap_socket(socket.socket(socket.AF_INET), cert_reqs=ssl.CERT_REQUIRED, - ca_certs=SVN_PYTHON_ORG_ROOT_CERT, + ca_certs=REMOTE_ROOT_CERT, do_handshake_on_connect=False) try: s.setblocking(False) - rc = s.connect_ex(('svn.python.org', 443)) + rc = s.connect_ex((REMOTE_HOST, 443)) # EWOULDBLOCK under Windows, EINPROGRESS elsewhere self.assertIn(rc, (0, errno.EINPROGRESS, errno.EWOULDBLOCK)) # Wait for connect to finish @@ -1285,58 +1286,62 @@ def test_timeout_connect_ex(self): # Issue #12065: on a timeout, connect_ex() should return the original # errno (mimicking the behaviour of non-SSL sockets). 
- with support.transient_internet("svn.python.org"): + with support.transient_internet(REMOTE_HOST): s = ssl.wrap_socket(socket.socket(socket.AF_INET), cert_reqs=ssl.CERT_REQUIRED, - ca_certs=SVN_PYTHON_ORG_ROOT_CERT, + ca_certs=REMOTE_ROOT_CERT, do_handshake_on_connect=False) try: s.settimeout(0.0000001) - rc = s.connect_ex(('svn.python.org', 443)) + rc = s.connect_ex((REMOTE_HOST, 443)) if rc == 0: - self.skipTest("svn.python.org responded too quickly") + self.skipTest("REMOTE_HOST responded too quickly") self.assertIn(rc, (errno.EAGAIN, errno.EWOULDBLOCK)) finally: s.close() def test_connect_ex_error(self): - with support.transient_internet("svn.python.org"): + with support.transient_internet(REMOTE_HOST): s = ssl.wrap_socket(socket.socket(socket.AF_INET), cert_reqs=ssl.CERT_REQUIRED, - ca_certs=SVN_PYTHON_ORG_ROOT_CERT) + ca_certs=REMOTE_ROOT_CERT) try: - rc = s.connect_ex(("svn.python.org", 444)) + rc = s.connect_ex((REMOTE_HOST, 444)) # Issue #19919: Windows machines or VMs hosted on Windows # machines sometimes return EWOULDBLOCK. 
- self.assertIn(rc, (errno.ECONNREFUSED, errno.EWOULDBLOCK)) + errors = ( + errno.ECONNREFUSED, errno.EHOSTUNREACH, errno.ETIMEDOUT, + errno.EWOULDBLOCK, + ) + self.assertIn(rc, errors) finally: s.close() def test_connect_with_context(self): - with support.transient_internet("svn.python.org"): + with support.transient_internet(REMOTE_HOST): # Same as test_connect, but with a separately created context ctx = ssl.SSLContext(ssl.PROTOCOL_SSLv23) s = ctx.wrap_socket(socket.socket(socket.AF_INET)) - s.connect(("svn.python.org", 443)) + s.connect((REMOTE_HOST, 443)) try: self.assertEqual({}, s.getpeercert()) finally: s.close() # Same with a server hostname s = ctx.wrap_socket(socket.socket(socket.AF_INET), - server_hostname="svn.python.org") - s.connect(("svn.python.org", 443)) + server_hostname=REMOTE_HOST) + s.connect((REMOTE_HOST, 443)) s.close() # This should fail because we have no verification certs ctx.verify_mode = ssl.CERT_REQUIRED s = ctx.wrap_socket(socket.socket(socket.AF_INET)) self.assertRaisesRegexp(ssl.SSLError, "certificate verify failed", - s.connect, ("svn.python.org", 443)) + s.connect, (REMOTE_HOST, 443)) s.close() # This should succeed because we specify the root cert - ctx.load_verify_locations(SVN_PYTHON_ORG_ROOT_CERT) + ctx.load_verify_locations(REMOTE_ROOT_CERT) s = ctx.wrap_socket(socket.socket(socket.AF_INET)) - s.connect(("svn.python.org", 443)) + s.connect((REMOTE_HOST, 443)) try: cert = s.getpeercert() self.assertTrue(cert) @@ -1349,12 +1354,12 @@ # OpenSSL 0.9.8n and 1.0.0, as a result the capath directory must # contain both versions of each certificate (same content, different # filename) for this test to be portable across OpenSSL releases. 
- with support.transient_internet("svn.python.org"): + with support.transient_internet(REMOTE_HOST): ctx = ssl.SSLContext(ssl.PROTOCOL_SSLv23) ctx.verify_mode = ssl.CERT_REQUIRED ctx.load_verify_locations(capath=CAPATH) s = ctx.wrap_socket(socket.socket(socket.AF_INET)) - s.connect(("svn.python.org", 443)) + s.connect((REMOTE_HOST, 443)) try: cert = s.getpeercert() self.assertTrue(cert) @@ -1365,7 +1370,7 @@ ctx.verify_mode = ssl.CERT_REQUIRED ctx.load_verify_locations(capath=BYTES_CAPATH) s = ctx.wrap_socket(socket.socket(socket.AF_INET)) - s.connect(("svn.python.org", 443)) + s.connect((REMOTE_HOST, 443)) try: cert = s.getpeercert() self.assertTrue(cert) @@ -1373,15 +1378,15 @@ s.close() def test_connect_cadata(self): - with open(CAFILE_CACERT) as f: + with open(REMOTE_ROOT_CERT) as f: pem = f.read().decode('ascii') der = ssl.PEM_cert_to_DER_cert(pem) - with support.transient_internet("svn.python.org"): + with support.transient_internet(REMOTE_HOST): ctx = ssl.SSLContext(ssl.PROTOCOL_SSLv23) ctx.verify_mode = ssl.CERT_REQUIRED ctx.load_verify_locations(cadata=pem) with closing(ctx.wrap_socket(socket.socket(socket.AF_INET))) as s: - s.connect(("svn.python.org", 443)) + s.connect((REMOTE_HOST, 443)) cert = s.getpeercert() self.assertTrue(cert) @@ -1390,7 +1395,7 @@ ctx.verify_mode = ssl.CERT_REQUIRED ctx.load_verify_locations(cadata=der) with closing(ctx.wrap_socket(socket.socket(socket.AF_INET))) as s: - s.connect(("svn.python.org", 443)) + s.connect((REMOTE_HOST, 443)) cert = s.getpeercert() self.assertTrue(cert) @@ -1399,9 +1404,9 @@ # Issue #5238: creating a file-like object with makefile() shouldn't # delay closing the underlying "real socket" (here tested with its # file descriptor, hence skipping the test under Windows). 
- with support.transient_internet("svn.python.org"): + with support.transient_internet(REMOTE_HOST): ss = ssl.wrap_socket(socket.socket(socket.AF_INET)) - ss.connect(("svn.python.org", 443)) + ss.connect((REMOTE_HOST, 443)) fd = ss.fileno() f = ss.makefile() f.close() @@ -1415,9 +1420,9 @@ self.assertEqual(e.exception.errno, errno.EBADF) def test_non_blocking_handshake(self): - with support.transient_internet("svn.python.org"): + with support.transient_internet(REMOTE_HOST): s = socket.socket(socket.AF_INET) - s.connect(("svn.python.org", 443)) + s.connect((REMOTE_HOST, 443)) s.setblocking(False) s = ssl.wrap_socket(s, cert_reqs=ssl.CERT_NONE, @@ -1460,12 +1465,12 @@ if support.verbose: sys.stdout.write("\nVerified certificate for %s:%s is\n%s\n" % (host, port ,pem)) - _test_get_server_certificate('svn.python.org', 443, SVN_PYTHON_ORG_ROOT_CERT) + _test_get_server_certificate(REMOTE_HOST, 443, REMOTE_ROOT_CERT) if support.IPV6_ENABLED: _test_get_server_certificate('ipv6.google.com', 443) def test_ciphers(self): - remote = ("svn.python.org", 443) + remote = (REMOTE_HOST, 443) with support.transient_internet(remote[0]): with closing(ssl.wrap_socket(socket.socket(socket.AF_INET), cert_reqs=ssl.CERT_NONE, ciphers="ALL")) as s: @@ -1510,13 +1515,13 @@ def test_get_ca_certs_capath(self): # capath certs are loaded on request - with support.transient_internet("svn.python.org"): + with support.transient_internet(REMOTE_HOST): ctx = ssl.SSLContext(ssl.PROTOCOL_SSLv23) ctx.verify_mode = ssl.CERT_REQUIRED ctx.load_verify_locations(capath=CAPATH) self.assertEqual(ctx.get_ca_certs(), []) s = ctx.wrap_socket(socket.socket(socket.AF_INET)) - s.connect(("svn.python.org", 443)) + s.connect((REMOTE_HOST, 443)) try: cert = s.getpeercert() self.assertTrue(cert) @@ -1527,12 +1532,12 @@ @needs_sni def test_context_setget(self): # Check that the context of a connected socket can be replaced. 
- with support.transient_internet("svn.python.org"): + with support.transient_internet(REMOTE_HOST): ctx1 = ssl.SSLContext(ssl.PROTOCOL_TLSv1) ctx2 = ssl.SSLContext(ssl.PROTOCOL_SSLv23) s = socket.socket(socket.AF_INET) with closing(ctx1.wrap_socket(s)) as ss: - ss.connect(("svn.python.org", 443)) + ss.connect((REMOTE_HOST, 443)) self.assertIs(ss.context, ctx1) self.assertIs(ss._sslobj.context, ctx1) ss.context = ctx2 @@ -3026,7 +3031,7 @@ pass for filename in [ - CERTFILE, SVN_PYTHON_ORG_ROOT_CERT, BYTES_CERTFILE, + CERTFILE, REMOTE_ROOT_CERT, BYTES_CERTFILE, ONLYCERT, ONLYKEY, BYTES_ONLYCERT, BYTES_ONLYKEY, SIGNED_CERTFILE, SIGNED_CERTFILE2, SIGNING_CA, BADCERT, BADKEY, EMPTYCERT]: diff --git a/lib_pypy/cPickle.py b/lib_pypy/cPickle.py --- a/lib_pypy/cPickle.py +++ b/lib_pypy/cPickle.py @@ -167,7 +167,11 @@ try: key = ord(self.read(1)) while key != STOP: - self.dispatch[key](self) + try: + meth = self.dispatch[key] + except KeyError: + raise UnpicklingError("invalid load key, %r." % chr(key)) + meth(self) key = ord(self.read(1)) except TypeError: if self.read(1) == '': @@ -559,6 +563,7 @@ def decode_long(data): r"""Decode a long from a two's complement little-endian binary string. + This is overriden on PyPy by a RPython version that has linear complexity. >>> decode_long('') 0L @@ -592,6 +597,11 @@ n -= 1L << (nbytes << 3) return n +try: + from __pypy__ import decode_long +except ImportError: + pass + def load(f): return Unpickler(f).load() diff --git a/lib_pypy/cffi.egg-info/PKG-INFO b/lib_pypy/cffi.egg-info/PKG-INFO --- a/lib_pypy/cffi.egg-info/PKG-INFO +++ b/lib_pypy/cffi.egg-info/PKG-INFO @@ -1,6 +1,6 @@ Metadata-Version: 1.1 Name: cffi -Version: 1.3.1 +Version: 1.5.2 Summary: Foreign Function Interface for Python calling C code. 
Home-page: http://cffi.readthedocs.org Author: Armin Rigo, Maciej Fijalkowski diff --git a/lib_pypy/cffi/__init__.py b/lib_pypy/cffi/__init__.py --- a/lib_pypy/cffi/__init__.py +++ b/lib_pypy/cffi/__init__.py @@ -4,8 +4,8 @@ from .api import FFI, CDefError, FFIError from .ffiplatform import VerificationError, VerificationMissing -__version__ = "1.3.1" -__version_info__ = (1, 3, 1) +__version__ = "1.5.2" +__version_info__ = (1, 5, 2) # The verifier module file names are based on the CRC32 of a string that # contains the following version number. It may be older than __version__ diff --git a/lib_pypy/cffi/_cffi_include.h b/lib_pypy/cffi/_cffi_include.h --- a/lib_pypy/cffi/_cffi_include.h +++ b/lib_pypy/cffi/_cffi_include.h @@ -146,7 +146,10 @@ ((Py_ssize_t(*)(CTypeDescrObject *, PyObject *, char **))_cffi_exports[23]) #define _cffi_convert_array_from_object \ ((int(*)(char *, CTypeDescrObject *, PyObject *))_cffi_exports[24]) -#define _CFFI_NUM_EXPORTS 25 +#define _CFFI_CPIDX 25 +#define _cffi_call_python \ + ((void(*)(struct _cffi_externpy_s *, char *))_cffi_exports[_CFFI_CPIDX]) +#define _CFFI_NUM_EXPORTS 26 typedef struct _ctypedescr CTypeDescrObject; @@ -201,8 +204,12 @@ the others follow */ } +/********** end CPython-specific section **********/ +#else +_CFFI_UNUSED_FN +static void (*_cffi_call_python_org)(struct _cffi_externpy_s *, char *); +# define _cffi_call_python _cffi_call_python_org #endif -/********** end CPython-specific section **********/ #define _cffi_array_len(array) (sizeof(array) / sizeof((array)[0])) @@ -224,6 +231,12 @@ ((got_nonpos) == (expected <= 0) && \ (got) == (unsigned long long)expected) +#ifdef MS_WIN32 +# define _cffi_stdcall __stdcall +#else +# define _cffi_stdcall /* nothing */ +#endif + #ifdef __cplusplus } #endif diff --git a/lib_pypy/cffi/_embedding.h b/lib_pypy/cffi/_embedding.h new file mode 100644 --- /dev/null +++ b/lib_pypy/cffi/_embedding.h @@ -0,0 +1,517 @@ + +/***** Support code for embedding *****/ + +#if 
defined(_MSC_VER) +# define CFFI_DLLEXPORT __declspec(dllexport) +#elif defined(__GNUC__) +# define CFFI_DLLEXPORT __attribute__((visibility("default"))) +#else +# define CFFI_DLLEXPORT /* nothing */ +#endif + + +/* There are two global variables of type _cffi_call_python_fnptr: + + * _cffi_call_python, which we declare just below, is the one called + by ``extern "Python"`` implementations. + + * _cffi_call_python_org, which on CPython is actually part of the + _cffi_exports[] array, is the function pointer copied from + _cffi_backend. + + After initialization is complete, both are equal. However, the + first one remains equal to &_cffi_start_and_call_python until the + very end of initialization, when we are (or should be) sure that + concurrent threads also see a completely initialized world, and + only then is it changed. +*/ +#undef _cffi_call_python +typedef void (*_cffi_call_python_fnptr)(struct _cffi_externpy_s *, char *); +static void _cffi_start_and_call_python(struct _cffi_externpy_s *, char *); +static _cffi_call_python_fnptr _cffi_call_python = &_cffi_start_and_call_python; + + +#ifndef _MSC_VER + /* --- Assuming a GCC not infinitely old --- */ +# define cffi_compare_and_swap(l,o,n) __sync_bool_compare_and_swap(l,o,n) +# define cffi_write_barrier() __sync_synchronize() +# if !defined(__amd64__) && !defined(__x86_64__) && \ + !defined(__i386__) && !defined(__i386) +# define cffi_read_barrier() __sync_synchronize() +# else +# define cffi_read_barrier() (void)0 +# endif +#else + /* --- Windows threads version --- */ +# include +# define cffi_compare_and_swap(l,o,n) \ + (InterlockedCompareExchangePointer(l,n,o) == (o)) +# define cffi_write_barrier() InterlockedCompareExchange(&_cffi_dummy,0,0) +# define cffi_read_barrier() (void)0 +static volatile LONG _cffi_dummy; +#endif + +#ifdef WITH_THREAD +# ifndef _MSC_VER +# include + static pthread_mutex_t _cffi_embed_startup_lock; +# else + static CRITICAL_SECTION _cffi_embed_startup_lock; +# endif + static char 
_cffi_embed_startup_lock_ready = 0; +#endif + +static void _cffi_acquire_reentrant_mutex(void) +{ + static void *volatile lock = NULL; + + while (!cffi_compare_and_swap(&lock, NULL, (void *)1)) { + /* should ideally do a spin loop instruction here, but + hard to do it portably and doesn't really matter I + think: pthread_mutex_init() should be very fast, and + this is only run at start-up anyway. */ + } + +#ifdef WITH_THREAD + if (!_cffi_embed_startup_lock_ready) { +# ifndef _MSC_VER + pthread_mutexattr_t attr; + pthread_mutexattr_init(&attr); + pthread_mutexattr_settype(&attr, PTHREAD_MUTEX_RECURSIVE); + pthread_mutex_init(&_cffi_embed_startup_lock, &attr); +# else + InitializeCriticalSection(&_cffi_embed_startup_lock); +# endif + _cffi_embed_startup_lock_ready = 1; + } +#endif + + while (!cffi_compare_and_swap(&lock, (void *)1, NULL)) + ; + +#ifndef _MSC_VER + pthread_mutex_lock(&_cffi_embed_startup_lock); +#else + EnterCriticalSection(&_cffi_embed_startup_lock); +#endif +} + +static void _cffi_release_reentrant_mutex(void) +{ +#ifndef _MSC_VER + pthread_mutex_unlock(&_cffi_embed_startup_lock); +#else + LeaveCriticalSection(&_cffi_embed_startup_lock); +#endif +} + + +/********** CPython-specific section **********/ +#ifndef PYPY_VERSION + + +#define _cffi_call_python_org _cffi_exports[_CFFI_CPIDX] + +PyMODINIT_FUNC _CFFI_PYTHON_STARTUP_FUNC(void); /* forward */ + +static void _cffi_py_initialize(void) +{ + /* XXX use initsigs=0, which "skips initialization registration of + signal handlers, which might be useful when Python is + embedded" according to the Python docs. But review and think + if it should be a user-controllable setting. + + XXX we should also give a way to write errors to a buffer + instead of to stderr. + + XXX if importing 'site' fails, CPython (any version) calls + exit(). Should we try to work around this behavior here? 
+ */ + Py_InitializeEx(0); +} + +static int _cffi_initialize_python(void) +{ + /* This initializes Python, imports _cffi_backend, and then the + present .dll/.so is set up as a CPython C extension module. + */ + int result; + PyGILState_STATE state; + PyObject *pycode=NULL, *global_dict=NULL, *x; + +#if PY_MAJOR_VERSION >= 3 + /* see comments in _cffi_carefully_make_gil() about the + Python2/Python3 difference + */ +#else + /* Acquire the GIL. We have no threadstate here. If Python is + already initialized, it is possible that there is already one + existing for this thread, but it is not made current now. + */ + PyEval_AcquireLock(); + + _cffi_py_initialize(); + + /* The Py_InitializeEx() sometimes made a threadstate for us, but + not always. Indeed Py_InitializeEx() could be called and do + nothing. So do we have a threadstate, or not? We don't know, + but we can replace it with NULL in all cases. + */ + (void)PyThreadState_Swap(NULL); + + /* Now we can release the GIL and re-acquire immediately using the + logic of PyGILState(), which handles making or installing the + correct threadstate. + */ + PyEval_ReleaseLock(); +#endif + state = PyGILState_Ensure(); + + /* Call the initxxx() function from the present module. It will + create and initialize us as a CPython extension module, instead + of letting the startup Python code do it---it might reimport + the same .dll/.so and get maybe confused on some platforms. + It might also have troubles locating the .dll/.so again for all + I know. + */ + (void)_CFFI_PYTHON_STARTUP_FUNC(); + if (PyErr_Occurred()) + goto error; + + /* Now run the Python code provided to ffi.embedding_init_code(). 
+ */ + pycode = Py_CompileString(_CFFI_PYTHON_STARTUP_CODE, + "", + Py_file_input); + if (pycode == NULL) + goto error; + global_dict = PyDict_New(); + if (global_dict == NULL) + goto error; + if (PyDict_SetItemString(global_dict, "__builtins__", + PyThreadState_GET()->interp->builtins) < 0) + goto error; + x = PyEval_EvalCode( +#if PY_MAJOR_VERSION < 3 + (PyCodeObject *) +#endif + pycode, global_dict, global_dict); + if (x == NULL) + goto error; + Py_DECREF(x); + + /* Done! Now if we've been called from + _cffi_start_and_call_python() in an ``extern "Python"``, we can + only hope that the Python code did correctly set up the + corresponding @ffi.def_extern() function. Otherwise, the + general logic of ``extern "Python"`` functions (inside the + _cffi_backend module) will find that the reference is still + missing and print an error. + */ + result = 0; + done: + Py_XDECREF(pycode); + Py_XDECREF(global_dict); + PyGILState_Release(state); + return result; + + error:; + { + /* Print as much information as potentially useful. 
+ Debugging load-time failures with embedding is not fun + */ + PyObject *exception, *v, *tb, *f, *modules, *mod; + PyErr_Fetch(&exception, &v, &tb); + if (exception != NULL) { + PyErr_NormalizeException(&exception, &v, &tb); + PyErr_Display(exception, v, tb); + } + Py_XDECREF(exception); + Py_XDECREF(v); + Py_XDECREF(tb); + + f = PySys_GetObject((char *)"stderr"); + if (f != NULL && f != Py_None) { + PyFile_WriteString("\nFrom: " _CFFI_MODULE_NAME + "\ncompiled with cffi version: 1.5.2" + "\n_cffi_backend module: ", f); + modules = PyImport_GetModuleDict(); + mod = PyDict_GetItemString(modules, "_cffi_backend"); + if (mod == NULL) { + PyFile_WriteString("not loaded", f); + } + else { + v = PyObject_GetAttrString(mod, "__file__"); + PyFile_WriteObject(v, f, 0); + Py_XDECREF(v); + } + PyFile_WriteString("\nsys.path: ", f); + PyFile_WriteObject(PySys_GetObject((char *)"path"), f, 0); + PyFile_WriteString("\n\n", f); + } + } + result = -1; + goto done; +} + +PyAPI_DATA(char *) _PyParser_TokenNames[]; /* from CPython */ + +static int _cffi_carefully_make_gil(void) +{ + /* This does the basic initialization of Python. It can be called + completely concurrently from unrelated threads. It assumes + that we don't hold the GIL before (if it exists), and we don't + hold it afterwards. + + What it really does is completely different in Python 2 and + Python 3. + + Python 2 + ======== + + Initialize the GIL, without initializing the rest of Python, + by calling PyEval_InitThreads(). + + PyEval_InitThreads() must not be called concurrently at all. + So we use a global variable as a simple spin lock. This global + variable must be from 'libpythonX.Y.so', not from this + cffi-based extension module, because it must be shared from + different cffi-based extension modules. We choose + _PyParser_TokenNames[0] as a completely arbitrary pointer value + that is never written to. The default is to point to the + string "ENDMARKER". 
We change it temporarily to point to the + next character in that string. (Yes, I know it's REALLY + obscure.) + + Python 3 + ======== + + In Python 3, PyEval_InitThreads() cannot be called before + Py_InitializeEx() any more. So this function calls + Py_InitializeEx() first. It uses the same obscure logic to + make sure we never call it concurrently. + + Arguably, this is less good on the spinlock, because + Py_InitializeEx() takes much longer to run than + PyEval_InitThreads(). But I didn't find a way around it. + */ + +#ifdef WITH_THREAD + char *volatile *lock = (char *volatile *)_PyParser_TokenNames; + char *old_value; + + while (1) { /* spin loop */ + old_value = *lock; + if (old_value[0] == 'E') { + assert(old_value[1] == 'N'); + if (cffi_compare_and_swap(lock, old_value, old_value + 1)) + break; + } + else { + assert(old_value[0] == 'N'); + /* should ideally do a spin loop instruction here, but + hard to do it portably and doesn't really matter I + think: PyEval_InitThreads() should be very fast, and + this is only run at start-up anyway. 
*/ + } + } +#endif + +#if PY_MAJOR_VERSION >= 3 + /* Python 3: call Py_InitializeEx() */ + { + PyGILState_STATE state = PyGILState_UNLOCKED; + if (!Py_IsInitialized()) + _cffi_py_initialize(); + else + state = PyGILState_Ensure(); + + PyEval_InitThreads(); + PyGILState_Release(state); + } +#else + /* Python 2: call PyEval_InitThreads() */ +# ifdef WITH_THREAD + if (!PyEval_ThreadsInitialized()) { + PyEval_InitThreads(); /* makes the GIL */ + PyEval_ReleaseLock(); /* then release it */ + } + /* else: there is already a GIL, but we still needed to do the + spinlock dance to make sure that we see it as fully ready */ +# endif +#endif + +#ifdef WITH_THREAD + /* release the lock */ + while (!cffi_compare_and_swap(lock, old_value + 1, old_value)) + ; +#endif + + return 0; +} + +/********** end CPython-specific section **********/ + + +#else + + +/********** PyPy-specific section **********/ + +PyMODINIT_FUNC _CFFI_PYTHON_STARTUP_FUNC(const void *[]); /* forward */ + +static struct _cffi_pypy_init_s { + const char *name; + void (*func)(const void *[]); + const char *code; +} _cffi_pypy_init = { + _CFFI_MODULE_NAME, + _CFFI_PYTHON_STARTUP_FUNC, + _CFFI_PYTHON_STARTUP_CODE, +}; + +extern int pypy_carefully_make_gil(const char *); +extern int pypy_init_embedded_cffi_module(int, struct _cffi_pypy_init_s *); + +static int _cffi_carefully_make_gil(void) +{ + return pypy_carefully_make_gil(_CFFI_MODULE_NAME); +} + +static int _cffi_initialize_python(void) +{ + return pypy_init_embedded_cffi_module(0xB011, &_cffi_pypy_init); +} + +/********** end PyPy-specific section **********/ + + +#endif + + +#ifdef __GNUC__ +__attribute__((noinline)) +#endif +static _cffi_call_python_fnptr _cffi_start_python(void) +{ + /* Delicate logic to initialize Python. This function can be + called multiple times concurrently, e.g. when the process calls + its first ``extern "Python"`` functions in multiple threads at + once. It can also be called recursively, in which case we must + ignore it. 
We also have to consider what occurs if several + different cffi-based extensions reach this code in parallel + threads---it is a different copy of the code, then, and we + can't have any shared global variable unless it comes from + 'libpythonX.Y.so'. + + Idea: + + * _cffi_carefully_make_gil(): "carefully" call + PyEval_InitThreads() (possibly with Py_InitializeEx() first). + + * then we use a (local) custom lock to make sure that a call to this + cffi-based extension will wait if another call to the *same* + extension is running the initialization in another thread. + It is reentrant, so that a recursive call will not block, but + only one from a different thread. + + * then we grab the GIL and (Python 2) we call Py_InitializeEx(). + At this point, concurrent calls to Py_InitializeEx() are not + possible: we have the GIL. + + * do the rest of the specific initialization, which may + temporarily release the GIL but not the custom lock. + Only release the custom lock when we are done. + */ + static char called = 0; + + if (_cffi_carefully_make_gil() != 0) + return NULL; + + _cffi_acquire_reentrant_mutex(); + + /* Here the GIL exists, but we don't have it. We're only protected + from concurrency by the reentrant mutex. */ + + /* This file only initializes the embedded module once, the first + time this is called, even if there are subinterpreters. */ + if (!called) { + called = 1; /* invoke _cffi_initialize_python() only once, + but don't set '_cffi_call_python' right now, + otherwise concurrent threads won't call + this function at all (we need them to wait) */ + if (_cffi_initialize_python() == 0) { + /* now initialization is finished. Switch to the fast-path. */ + + /* We would like nobody to see the new value of + '_cffi_call_python' without also seeing the rest of the + data initialized. However, this is not possible. But + the new value of '_cffi_call_python' is the function + 'cffi_call_python()' from _cffi_backend. 
So: */ + cffi_write_barrier(); + /* ^^^ we put a write barrier here, and a corresponding + read barrier at the start of cffi_call_python(). This + ensures that after that read barrier, we see everything + done here before the write barrier. + */ + + assert(_cffi_call_python_org != NULL); + _cffi_call_python = (_cffi_call_python_fnptr)_cffi_call_python_org; + } + else { + /* initialization failed. Reset this to NULL, even if it was + already set to some other value. Future calls to + _cffi_start_python() are still forced to occur, and will + always return NULL from now on. */ + _cffi_call_python_org = NULL; + } + } + + _cffi_release_reentrant_mutex(); + + return (_cffi_call_python_fnptr)_cffi_call_python_org; +} + +static +void _cffi_start_and_call_python(struct _cffi_externpy_s *externpy, char *args) +{ + _cffi_call_python_fnptr fnptr; + int current_err = errno; +#ifdef _MSC_VER + int current_lasterr = GetLastError(); +#endif + fnptr = _cffi_start_python(); + if (fnptr == NULL) { + fprintf(stderr, "function %s() called, but initialization code " + "failed. Returning 0.\n", externpy->name); + memset(args, 0, externpy->size_of_result); + } +#ifdef _MSC_VER + SetLastError(current_lasterr); +#endif + errno = current_err; + + if (fnptr != NULL) + fnptr(externpy, args); +} + + +/* The cffi_start_python() function makes sure Python is initialized + and our cffi module is set up. It can be called manually from the + user C code. The same effect is obtained automatically from any + dll-exported ``extern "Python"`` function. This function returns + -1 if initialization failed, 0 if all is OK. 
*/ +_CFFI_UNUSED_FN +static int cffi_start_python(void) +{ + if (_cffi_call_python == &_cffi_start_and_call_python) { + if (_cffi_start_python() == NULL) + return -1; + } + cffi_read_barrier(); + return 0; +} + +#undef cffi_compare_and_swap +#undef cffi_write_barrier +#undef cffi_read_barrier diff --git a/lib_pypy/cffi/api.py b/lib_pypy/cffi/api.py --- a/lib_pypy/cffi/api.py +++ b/lib_pypy/cffi/api.py @@ -73,6 +73,8 @@ self._included_ffis = [] self._windows_unicode = None self._init_once_cache = {} + self._cdef_version = None + self._embedding = None if hasattr(backend, 'set_ffi'): backend.set_ffi(self) for name in backend.__dict__: @@ -100,12 +102,21 @@ If 'packed' is specified as True, all structs declared inside this cdef are packed, i.e. laid out without any field alignment at all. """ + self._cdef(csource, override=override, packed=packed) + + def embedding_api(self, csource, packed=False): + self._cdef(csource, packed=packed, dllexport=True) + if self._embedding is None: + self._embedding = '' + + def _cdef(self, csource, override=False, **options): if not isinstance(csource, str): # unicode, on Python 2 if not isinstance(csource, basestring): raise TypeError("cdef() argument must be a string") csource = csource.encode('ascii') with self._lock: - self._parser.parse(csource, override=override, packed=packed) + self._cdef_version = object() + self._parser.parse(csource, override=override, **options) self._cdefsources.append(csource) if override: for cache in self._function_caches: @@ -531,6 +542,50 @@ ('_UNICODE', '1')] kwds['define_macros'] = defmacros + def _apply_embedding_fix(self, kwds): + # must include an argument like "-lpython2.7" for the compiler + def ensure(key, value): + lst = kwds.setdefault(key, []) + if value not in lst: + lst.append(value) + # + if '__pypy__' in sys.builtin_module_names: + if sys.platform == "win32": + # we need 'libpypy-c.lib'. Right now, distributions of + # pypy contain it as 'include/python27.lib'. 
You need + # to manually copy it back to 'libpypy-c.lib'. XXX Will + # be fixed in the next pypy release. + pythonlib = "libpypy-c" + if hasattr(sys, 'prefix'): + ensure('library_dirs', sys.prefix) + else: + # we need 'libpypy-c.{so,dylib}', which should be by + # default located in 'sys.prefix/bin' + pythonlib = "pypy-c" + if hasattr(sys, 'prefix'): + import os + ensure('library_dirs', os.path.join(sys.prefix, 'bin')) + else: + if sys.platform == "win32": + template = "python%d%d" + if hasattr(sys, 'gettotalrefcount'): + template += '_d' + else: + try: + import sysconfig + except ImportError: # 2.6 + from distutils import sysconfig + template = "python%d.%d" + if sysconfig.get_config_var('DEBUG_EXT'): + template += sysconfig.get_config_var('DEBUG_EXT') + pythonlib = (template % + (sys.hexversion >> 24, (sys.hexversion >> 16) & 0xff)) + if hasattr(sys, 'abiflags'): + pythonlib += sys.abiflags + ensure('libraries', pythonlib) + if sys.platform == "win32": + ensure('extra_link_args', '/MANIFEST') + def set_source(self, module_name, source, source_extension='.c', **kwds): if hasattr(self, '_assigned_source'): raise ValueError("set_source() cannot be called several times " @@ -590,14 +645,24 @@ recompile(self, module_name, source, c_file=filename, call_c_compiler=False, **kwds) - def compile(self, tmpdir='.'): + def compile(self, tmpdir='.', verbose=0, target=None): + """The 'target' argument gives the final file name of the + compiled DLL. Use '*' to force distutils' choice, suitable for + regular CPython C API modules. Use a file name ending in '.*' + to ask for the system's default extension for dynamic libraries + (.so/.dll/.dylib). + + The default is '*' when building a non-embedded C API extension, + and (module_name + '.*') when building an embedded library. 
+ """ from .recompiler import recompile # if not hasattr(self, '_assigned_source'): raise ValueError("set_source() must be called before compile()") module_name, source, source_extension, kwds = self._assigned_source return recompile(self, module_name, source, tmpdir=tmpdir, - source_extension=source_extension, **kwds) + target=target, source_extension=source_extension, + compiler_verbose=verbose, **kwds) def init_once(self, func, tag): # Read _init_once_cache[tag], which is either (False, lock) if @@ -623,6 +688,36 @@ self._init_once_cache[tag] = (True, result) return result + def embedding_init_code(self, pysource): + if self._embedding: + raise ValueError("embedding_init_code() can only be called once") + # fix 'pysource' before it gets dumped into the C file: + # - remove empty lines at the beginning, so it starts at "line 1" + # - dedent, if all non-empty lines are indented + # - check for SyntaxErrors + import re + match = re.match(r'\s*\n', pysource) + if match: + pysource = pysource[match.end():] + lines = pysource.splitlines() or [''] + prefix = re.match(r'\s*', lines[0]).group() + for i in range(1, len(lines)): From pypy.commits at gmail.com Sun Feb 14 20:58:40 2016 From: pypy.commits at gmail.com (rlamy) Date: Sun, 14 Feb 2016 17:58:40 -0800 (PST) Subject: [pypy-commit] pypy llimpl: Fix --sandbox translation Message-ID: <56c130d0.2179c20a.c368f.1497@mx.google.com> Author: Ronan Lamy Branch: llimpl Changeset: r82247:44df4ef6c185 Date: 2016-02-14 16:39 +0000 http://bitbucket.org/pypy/pypy/changeset/44df4ef6c185/ Log: Fix --sandbox translation diff --git a/rpython/annotator/policy.py b/rpython/annotator/policy.py --- a/rpython/annotator/policy.py +++ b/rpython/annotator/policy.py @@ -3,6 +3,9 @@ from rpython.annotator.specialize import ( specialize_argvalue, specialize_argtype, specialize_arglistitemtype, specialize_arg_or_var, memo, specialize_call_location) +from rpython.flowspace.operation import op +from rpython.flowspace.model import Constant +from 
rpython.annotator.model import SomeTuple class AnnotatorPolicy(object): @@ -64,7 +67,35 @@ return LowLevelAnnotatorPolicy.specialize__ll_and_arg(*args) def no_more_blocks_to_annotate(pol, annotator): + bk = annotator.bookkeeper # hint to all pending specializers that we are done - for callback in annotator.bookkeeper.pending_specializations: + for callback in bk.pending_specializations: callback() - del annotator.bookkeeper.pending_specializations[:] + del bk.pending_specializations[:] + if annotator.added_blocks is not None: + all_blocks = annotator.added_blocks + else: + all_blocks = annotator.annotated + for block in list(all_blocks): + for i, instr in enumerate(block.operations): + if not isinstance(instr, (op.simple_call, op.call_args)): + continue + v_func = instr.args[0] + s_func = annotator.annotation(v_func) + if not hasattr(s_func, 'needs_sandboxing'): + continue + key = ('sandboxing', s_func.const) + if key not in bk.emulated_pbc_calls: + entry = s_func.entry + params_s = entry.signature_args + s_result = entry.signature_result + from rpython.translator.sandbox.rsandbox import make_sandbox_trampoline + sandbox_trampoline = make_sandbox_trampoline( + entry.name, params_s, s_result) + sandbox_trampoline._signature_ = [SomeTuple(items=params_s)], s_result + bk.emulate_pbc_call(key, bk.immutablevalue(sandbox_trampoline), params_s) + else: + s_trampoline = bk.emulated_pbc_calls[key][0] + sandbox_trampoline = s_trampoline.const + new = instr.replace({instr.args[0]: Constant(sandbox_trampoline)}) + block.operations[i] = new diff --git a/rpython/annotator/unaryop.py b/rpython/annotator/unaryop.py --- a/rpython/annotator/unaryop.py +++ b/rpython/annotator/unaryop.py @@ -113,8 +113,9 @@ @op.simple_call.register(SomeObject) def simple_call_SomeObject(annotator, func, *args): - return annotator.annotation(func).call( - simple_args([annotator.annotation(arg) for arg in args])) + s_func = annotator.annotation(func) + argspec = simple_args([annotator.annotation(arg) 
for arg in args]) + return s_func.call(argspec) @op.call_args.register_transform(SomeObject) def transform_varargs(annotator, v_func, v_shape, *data_v): diff --git a/rpython/rtyper/extfunc.py b/rpython/rtyper/extfunc.py --- a/rpython/rtyper/extfunc.py +++ b/rpython/rtyper/extfunc.py @@ -25,13 +25,12 @@ return self.signature_result def compute_annotation(self): + s_result = super(ExtFuncEntry, self).compute_annotation() if (self.bookkeeper.annotator.translator.config.translation.sandbox and not self.safe_not_sandboxed): - from rpython.translator.sandbox.rsandbox import make_sandbox_trampoline - impl = make_sandbox_trampoline(self.name, self.signature_args, - self.signature_result) - return self.bookkeeper.immutablevalue(impl) - return super(ExtFuncEntry, self).compute_annotation() + s_result.needs_sandboxing = True + s_result.entry = self + return s_result def specialize_call(self, hop): rtyper = hop.rtyper diff --git a/rpython/translator/sandbox/test/test_sandbox.py b/rpython/translator/sandbox/test/test_sandbox.py --- a/rpython/translator/sandbox/test/test_sandbox.py +++ b/rpython/translator/sandbox/test/test_sandbox.py @@ -292,6 +292,21 @@ rescode = pipe.wait() assert rescode == 0 +def test_environ_items(): + def entry_point(argv): + print os.environ.items() + return 0 + + exe = compile(entry_point) + g, f = run_in_subprocess(exe) + expect(f, g, "ll_os.ll_os_envitems", (), []) + expect(f, g, "ll_os.ll_os_write", (1, "[]\n"), 3) + g.close() + tail = f.read() + f.close() + assert tail == "" + + class TestPrintedResults: def run(self, entry_point, args, expected): From pypy.commits at gmail.com Mon Feb 15 03:40:57 2016 From: pypy.commits at gmail.com (arigo) Date: Mon, 15 Feb 2016 00:40:57 -0800 (PST) Subject: [pypy-commit] pypy default: backout f5bb0b58e857: breaks translation (this should be RPython) Message-ID: <56c18f19.12871c0a.fb87b.73e5@mx.google.com> Author: Armin Rigo Branch: Changeset: r82248:bfa51a294a82 Date: 2016-02-15 09:40 +0100 
http://bitbucket.org/pypy/pypy/changeset/bfa51a294a82/ Log: backout f5bb0b58e857: breaks translation (this should be RPython) diff --git a/pypy/module/sys/interp_encoding.py b/pypy/module/sys/interp_encoding.py --- a/pypy/module/sys/interp_encoding.py +++ b/pypy/module/sys/interp_encoding.py @@ -42,7 +42,7 @@ def _getfilesystemencoding(space): encoding = base_encoding - if rlocale.HAVE_LANGINFO and hasattr(rlocale, "CODESET"): + if rlocale.HAVE_LANGINFO and rlocale.CODESET: try: oldlocale = rlocale.setlocale(rlocale.LC_CTYPE, None) rlocale.setlocale(rlocale.LC_CTYPE, "") From pypy.commits at gmail.com Mon Feb 15 03:48:13 2016 From: pypy.commits at gmail.com (arigo) Date: Mon, 15 Feb 2016 00:48:13 -0800 (PST) Subject: [pypy-commit] pypy default: Redo f5bb0b58e857 (thanks hawkowl on irc) Message-ID: <56c190cd.0cb81c0a.17fa0.ffffeccc@mx.google.com> Author: Armin Rigo Branch: Changeset: r82249:2dc4fd1e911d Date: 2016-02-15 09:47 +0100 http://bitbucket.org/pypy/pypy/changeset/2dc4fd1e911d/ Log: Redo f5bb0b58e857 (thanks hawkowl on irc) diff --git a/pypy/module/sys/interp_encoding.py b/pypy/module/sys/interp_encoding.py --- a/pypy/module/sys/interp_encoding.py +++ b/pypy/module/sys/interp_encoding.py @@ -42,7 +42,7 @@ def _getfilesystemencoding(space): encoding = base_encoding - if rlocale.HAVE_LANGINFO and rlocale.CODESET: + if rlocale.HAVE_LANGINFO: try: oldlocale = rlocale.setlocale(rlocale.LC_CTYPE, None) rlocale.setlocale(rlocale.LC_CTYPE, "") From pypy.commits at gmail.com Mon Feb 15 03:54:34 2016 From: pypy.commits at gmail.com (plan_rich) Date: Mon, 15 Feb 2016 00:54:34 -0800 (PST) Subject: [pypy-commit] extradoc extradoc: booked train ticket now, added one day for skiing Message-ID: <56c1924a.cf821c0a.509f2.ffffff28@mx.google.com> Author: Richard Plangger Branch: extradoc Changeset: r5606:f23ab745f652 Date: 2016-02-15 09:54 +0100 http://bitbucket.org/pypy/extradoc/changeset/f23ab745f652/ Log: booked train ticket now, added one day for skiing diff --git 
a/sprintinfo/leysin-winter-2016/people.txt b/sprintinfo/leysin-winter-2016/people.txt --- a/sprintinfo/leysin-winter-2016/people.txt +++ b/sprintinfo/leysin-winter-2016/people.txt @@ -14,7 +14,7 @@ Carl Friedrich Bolz 20-27 Ermina (individual room) Matti Picus 20-25 Ermina Manuel Jacob 20-27 Ermina -Richard Plangger 20-28 Ermina +Richard Plangger 20-29 Ermina Maciej Fijalkowski 20-? Ermina (big room preferred) Ronan Lamy 20-27 Ermina? Pierre-Yves David 20-27 Ermina From pypy.commits at gmail.com Mon Feb 15 04:25:18 2016 From: pypy.commits at gmail.com (arigo) Date: Mon, 15 Feb 2016 01:25:18 -0800 (PST) Subject: [pypy-commit] pypy default: Windows: add a translation config to give the name of the .lib file. Message-ID: <56c1997e.ccaa1c0a.ae391.ffffff2b@mx.google.com> Author: Armin Rigo Branch: Changeset: r82250:c6d3343b23cc Date: 2016-02-15 10:08 +0100 http://bitbucket.org/pypy/pypy/changeset/c6d3343b23cc/ Log: Windows: add a translation config to give the name of the .lib file. Use it to change "libpypy-c.lib" into "..\..\libs\python27.lib". Other related fixes to package and use the result. (thanks matti for starting) diff --git a/lib-python/2.7/distutils/command/build_ext.py b/lib-python/2.7/distutils/command/build_ext.py --- a/lib-python/2.7/distutils/command/build_ext.py +++ b/lib-python/2.7/distutils/command/build_ext.py @@ -188,7 +188,7 @@ # the 'libs' directory is for binary installs - we assume that # must be the *native* platform. But we don't really support # cross-compiling via a binary install anyway, so we let it go. 
- self.library_dirs.append(os.path.join(sys.exec_prefix, 'include')) + self.library_dirs.append(os.path.join(sys.exec_prefix, 'libs')) if self.debug: self.build_temp = os.path.join(self.build_temp, "Debug") else: diff --git a/pypy/goal/targetpypystandalone.py b/pypy/goal/targetpypystandalone.py --- a/pypy/goal/targetpypystandalone.py +++ b/pypy/goal/targetpypystandalone.py @@ -239,6 +239,9 @@ raise Exception("Cannot use the --output option with PyPy " "when --shared is on (it is by default). " "See issue #1971.") + if sys.platform == 'win32': + config.translation.libname = '..\\..\\libs\\python27.lib' + thisdir.join('..', '..', 'libs').ensure(dir=1) if config.translation.thread: config.objspace.usemodules.thread = True diff --git a/pypy/tool/release/package.py b/pypy/tool/release/package.py --- a/pypy/tool/release/package.py +++ b/pypy/tool/release/package.py @@ -108,13 +108,8 @@ # builddir = py.path.local(options.builddir) pypydir = builddir.ensure(name, dir=True) + includedir = basedir.join('include') - # Recursively copy all headers, shutil has only ignore - # so we do a double-negative to include what we want - def copyonly(dirpath, contents): - return set(contents) - set( # XXX function not used? 
- shutil.ignore_patterns('*.h', '*.incl')(dirpath, contents), - ) shutil.copytree(str(includedir), str(pypydir.join('include'))) pypydir.ensure('include', dir=True) @@ -129,9 +124,6 @@ win_extras = ['libpypy-c.dll', 'sqlite3.dll'] if not options.no_tk: win_extras += ['tcl85.dll', 'tk85.dll'] - # add the .lib too, which is convenient to compile other programs - # that use the .dll (and for cffi's embedding mode) - win_extras.append('libpypy-c.lib') for extra in win_extras: p = pypy_c.dirpath().join(extra) @@ -142,22 +134,27 @@ continue print "Picking %s" % p binaries.append((p, p.basename)) - importlib_name = 'python27.lib' - if pypy_c.dirpath().join(importlib_name).check(): - shutil.copyfile(str(pypy_c.dirpath().join(importlib_name)), - str(pypydir.join('include/python27.lib'))) - print "Picking %s as %s" % (pypy_c.dirpath().join(importlib_name), - pypydir.join('include/python27.lib')) + libsdir = basedir.join('libs') + if libsdir.exists(): + print 'Picking %s (and contents)' % libsdir + shutil.copytree(str(libsdir), str(pypydir.join('libs'))) else: - pass - # XXX users will complain that they cannot compile cpyext - # modules for windows, has the lib moved or are there no - # exported functions in the dll so no import library is created? + print '"libs" dir with import library not found.' + print 'You have to create %r' % (str(libsdir),) + print 'and copy libpypy-c.lib in there, renamed to python27.lib' + # XXX users will complain that they cannot compile capi (cpyext) + # modules for windows, also embedding pypy (i.e. in cffi) + # will fail. + # Has the lib moved, was translation not 'shared', or are + # there no exported functions in the dll so no import + # library was created? 
if not options.no_tk: try: p = pypy_c.dirpath().join('tcl85.dll') if not p.check(): p = py.path.local.sysfind('tcl85.dll') + if p is None: + raise WindowsError("tcl85.dll not found") tktcldir = p.dirpath().join('..').join('lib') shutil.copytree(str(tktcldir), str(pypydir.join('tcl'))) except WindowsError: diff --git a/rpython/config/translationoption.py b/rpython/config/translationoption.py --- a/rpython/config/translationoption.py +++ b/rpython/config/translationoption.py @@ -192,6 +192,8 @@ "If true, makes an lldebug0 build", default=False, cmdline="--lldebug0"), StrOption("icon", "Path to the (Windows) icon to use for the executable"), + StrOption("libname", + "Windows: name and possibly location of the lib file to create"), OptionDescription("backendopt", "Backend Optimization Options", [ # control inlining diff --git a/rpython/translator/driver.py b/rpython/translator/driver.py --- a/rpython/translator/driver.py +++ b/rpython/translator/driver.py @@ -487,13 +487,14 @@ exe = py.path.local(exename) exename = exe.new(purebasename=exe.purebasename + 'w') shutil_copy(str(exename), str(newexename)) - # the import library is named python27.lib, according - # to the pragma in pyconfig.h - libname = str(newsoname.dirpath().join('python27.lib')) + # for pypy, the import library is renamed and moved to + # libs/python27.lib, according to the pragma in pyconfig.h + libname = self.config.translation.libname + libname = libname or soname.new(ext='lib').basename + libname = str(newsoname.dirpath().join(libname)) shutil.copyfile(str(soname.new(ext='lib')), libname) self.log.info("copied: %s" % (libname,)) - # XXX TODO : replace the nonsense above with - # ext_to_copy = ['lib', 'pdb'] + # the pdb file goes in the same place as pypy(w).exe ext_to_copy = ['pdb',] for ext in ext_to_copy: name = soname.new(ext=ext) From pypy.commits at gmail.com Mon Feb 15 04:25:20 2016 From: pypy.commits at gmail.com (arigo) Date: Mon, 15 Feb 2016 01:25:20 -0800 (PST) Subject: [pypy-commit] pypy 
default: hg merge heads Message-ID: <56c19980.45611c0a.c357.fffffffa@mx.google.com> Author: Armin Rigo Branch: Changeset: r82251:7ab8c02fe793 Date: 2016-02-15 10:19 +0100 http://bitbucket.org/pypy/pypy/changeset/7ab8c02fe793/ Log: hg merge heads diff --git a/lib_pypy/cffi.egg-info/PKG-INFO b/lib_pypy/cffi.egg-info/PKG-INFO --- a/lib_pypy/cffi.egg-info/PKG-INFO +++ b/lib_pypy/cffi.egg-info/PKG-INFO @@ -1,6 +1,6 @@ Metadata-Version: 1.1 Name: cffi -Version: 1.5.1 +Version: 1.5.2 Summary: Foreign Function Interface for Python calling C code. Home-page: http://cffi.readthedocs.org Author: Armin Rigo, Maciej Fijalkowski diff --git a/lib_pypy/cffi/__init__.py b/lib_pypy/cffi/__init__.py --- a/lib_pypy/cffi/__init__.py +++ b/lib_pypy/cffi/__init__.py @@ -4,8 +4,8 @@ from .api import FFI, CDefError, FFIError from .ffiplatform import VerificationError, VerificationMissing -__version__ = "1.5.1" -__version_info__ = (1, 5, 1) +__version__ = "1.5.2" +__version_info__ = (1, 5, 2) # The verifier module file names are based on the CRC32 of a string that # contains the following version number. 
It may be older than __version__ diff --git a/lib_pypy/cffi/_embedding.h b/lib_pypy/cffi/_embedding.h --- a/lib_pypy/cffi/_embedding.h +++ b/lib_pypy/cffi/_embedding.h @@ -233,7 +233,7 @@ f = PySys_GetObject((char *)"stderr"); if (f != NULL && f != Py_None) { PyFile_WriteString("\nFrom: " _CFFI_MODULE_NAME - "\ncompiled with cffi version: 1.5.1" + "\ncompiled with cffi version: 1.5.2" "\n_cffi_backend module: ", f); modules = PyImport_GetModuleDict(); mod = PyDict_GetItemString(modules, "_cffi_backend"); diff --git a/lib_pypy/cffi/api.py b/lib_pypy/cffi/api.py --- a/lib_pypy/cffi/api.py +++ b/lib_pypy/cffi/api.py @@ -1,4 +1,4 @@ -import sys, sysconfig, types +import sys, types from .lock import allocate_lock try: @@ -550,16 +550,31 @@ lst.append(value) # if '__pypy__' in sys.builtin_module_names: - if hasattr(sys, 'prefix'): - import os - ensure('library_dirs', os.path.join(sys.prefix, 'bin')) - pythonlib = "pypy-c" + if sys.platform == "win32": + # we need 'libpypy-c.lib'. Right now, distributions of + # pypy contain it as 'include/python27.lib'. You need + # to manually copy it back to 'libpypy-c.lib'. XXX Will + # be fixed in the next pypy release. 
+ pythonlib = "libpypy-c" + if hasattr(sys, 'prefix'): + ensure('library_dirs', sys.prefix) + else: + # we need 'libpypy-c.{so,dylib}', which should be by + # default located in 'sys.prefix/bin' + pythonlib = "pypy-c" + if hasattr(sys, 'prefix'): + import os + ensure('library_dirs', os.path.join(sys.prefix, 'bin')) else: if sys.platform == "win32": template = "python%d%d" if hasattr(sys, 'gettotalrefcount'): template += '_d' else: + try: + import sysconfig + except ImportError: # 2.6 + from distutils import sysconfig template = "python%d.%d" if sysconfig.get_config_var('DEBUG_EXT'): template += sysconfig.get_config_var('DEBUG_EXT') diff --git a/lib_pypy/cffi/vengine_cpy.py b/lib_pypy/cffi/vengine_cpy.py --- a/lib_pypy/cffi/vengine_cpy.py +++ b/lib_pypy/cffi/vengine_cpy.py @@ -1,3 +1,6 @@ +# +# DEPRECATED: implementation for ffi.verify() +# import sys, imp from . import model, ffiplatform diff --git a/lib_pypy/cffi/vengine_gen.py b/lib_pypy/cffi/vengine_gen.py --- a/lib_pypy/cffi/vengine_gen.py +++ b/lib_pypy/cffi/vengine_gen.py @@ -1,3 +1,6 @@ +# +# DEPRECATED: implementation for ffi.verify() +# import sys, os import types diff --git a/lib_pypy/cffi/verifier.py b/lib_pypy/cffi/verifier.py --- a/lib_pypy/cffi/verifier.py +++ b/lib_pypy/cffi/verifier.py @@ -1,3 +1,6 @@ +# +# DEPRECATED: implementation for ffi.verify() +# import sys, os, binascii, shutil, io from . import __version_verifier_modules__ from . 
import ffiplatform diff --git a/pypy/config/pypyoption.py b/pypy/config/pypyoption.py --- a/pypy/config/pypyoption.py +++ b/pypy/config/pypyoption.py @@ -36,13 +36,13 @@ "cStringIO", "thread", "itertools", "pyexpat", "_ssl", "cpyext", "array", "binascii", "_multiprocessing", '_warnings', "_collections", "_multibytecodec", "micronumpy", "_continuation", "_cffi_backend", - "_csv", "cppyy", "_pypyjson" + "_csv", "cppyy", "_pypyjson", "_vmprof", ]) -if ((sys.platform.startswith('linux') or sys.platform == 'darwin') - and os.uname()[4] == 'x86_64' and sys.maxint > 2**32): +#if ((sys.platform.startswith('linux') or sys.platform == 'darwin') +# and os.uname()[4] == 'x86_64' and sys.maxint > 2**32): # it's not enough that we get x86_64 - working_modules.add('_vmprof') +# working_modules.add('_vmprof') translation_modules = default_modules.copy() translation_modules.update([ diff --git a/pypy/doc/whatsnew-head.rst b/pypy/doc/whatsnew-head.rst --- a/pypy/doc/whatsnew-head.rst +++ b/pypy/doc/whatsnew-head.rst @@ -7,6 +7,9 @@ Fixed ``_PyLong_FromByteArray()``, which was buggy. +Fixed a crash with stacklets (or greenlets) on non-Linux machines +which showed up if you forget stacklets without resuming them. + .. branch: numpy-1.10 Fix tests to run cleanly with -A and start to fix micronumpy for upstream numpy @@ -38,7 +41,8 @@ .. branch: compress-numbering -Improve the memory signature of numbering instances in the JIT. +Improve the memory signature of numbering instances in the JIT. This should massively +decrease the amount of memory consumed by the JIT, which is significant for most programs. .. 
branch: fix-trace-too-long-heuristic diff --git a/pypy/interpreter/baseobjspace.py b/pypy/interpreter/baseobjspace.py --- a/pypy/interpreter/baseobjspace.py +++ b/pypy/interpreter/baseobjspace.py @@ -306,7 +306,7 @@ return None -class W_InterpIterable(W_Root): +class InterpIterable(object): def __init__(self, space, w_iterable): self.w_iter = space.iter(w_iterable) self.space = space @@ -745,9 +745,13 @@ return self.int_w(self.hash(w_obj)) def len_w(self, w_obj): - """shotcut for space.int_w(space.len(w_obj))""" + """shortcut for space.int_w(space.len(w_obj))""" return self.int_w(self.len(w_obj)) + def contains_w(self, w_container, w_item): + """shortcut for space.is_true(space.contains(w_container, w_item))""" + return self.is_true(self.contains(w_container, w_item)) + def setitem_str(self, w_obj, key, w_value): return self.setitem(w_obj, self.wrap(key), w_value) @@ -846,7 +850,7 @@ return lst_w[:] # make the resulting list resizable def iteriterable(self, w_iterable): - return W_InterpIterable(self, w_iterable) + return InterpIterable(self, w_iterable) def _unpackiterable_unknown_length(self, w_iterator, w_iterable): """Unpack an iterable of unknown length into an interp-level @@ -1237,7 +1241,7 @@ if not isinstance(statement, PyCode): raise TypeError('space.exec_(): expected a string, code or PyCode object') w_key = self.wrap('__builtins__') - if not self.is_true(self.contains(w_globals, w_key)): + if not self.contains_w(w_globals, w_key): self.setitem(w_globals, w_key, self.wrap(self.builtin)) return statement.exec_code(self, w_globals, w_locals) diff --git a/pypy/module/__builtin__/interp_classobj.py b/pypy/module/__builtin__/interp_classobj.py --- a/pypy/module/__builtin__/interp_classobj.py +++ b/pypy/module/__builtin__/interp_classobj.py @@ -20,7 +20,7 @@ if not space.isinstance_w(w_dict, space.w_dict): raise_type_err(space, 'bases', 'tuple', w_bases) - if not space.is_true(space.contains(w_dict, space.wrap("__doc__"))): + if not space.contains_w(w_dict, 
space.wrap("__doc__")): space.setitem(w_dict, space.wrap("__doc__"), space.w_None) # XXX missing: lengthy and obscure logic about "__module__" diff --git a/pypy/module/_cffi_backend/__init__.py b/pypy/module/_cffi_backend/__init__.py --- a/pypy/module/_cffi_backend/__init__.py +++ b/pypy/module/_cffi_backend/__init__.py @@ -3,7 +3,7 @@ from rpython.rlib import rdynload, clibffi, entrypoint from rpython.rtyper.lltypesystem import rffi -VERSION = "1.5.1" +VERSION = "1.5.2" FFI_DEFAULT_ABI = clibffi.FFI_DEFAULT_ABI try: diff --git a/pypy/module/_cffi_backend/test/_backend_test_c.py b/pypy/module/_cffi_backend/test/_backend_test_c.py --- a/pypy/module/_cffi_backend/test/_backend_test_c.py +++ b/pypy/module/_cffi_backend/test/_backend_test_c.py @@ -1,7 +1,7 @@ # ____________________________________________________________ import sys -assert __version__ == "1.5.1", ("This test_c.py file is for testing a version" +assert __version__ == "1.5.2", ("This test_c.py file is for testing a version" " of cffi that differs from the one that we" " get from 'import _cffi_backend'") if sys.version_info < (3,): diff --git a/pypy/module/_demo/test/test_import.py b/pypy/module/_demo/test/test_import.py --- a/pypy/module/_demo/test/test_import.py +++ b/pypy/module/_demo/test/test_import.py @@ -12,8 +12,7 @@ w_modules = space.sys.get('modules') assert _demo.Module.demo_events == ['setup'] - assert not space.is_true(space.contains(w_modules, - space.wrap('_demo'))) + assert not space.contains_w(w_modules, space.wrap('_demo')) # first import w_import = space.builtin.get('__import__') diff --git a/pypy/module/_vmprof/interp_vmprof.py b/pypy/module/_vmprof/interp_vmprof.py --- a/pypy/module/_vmprof/interp_vmprof.py +++ b/pypy/module/_vmprof/interp_vmprof.py @@ -60,7 +60,7 @@ Must be smaller than 1.0 """ w_modules = space.sys.get('modules') - if space.is_true(space.contains(w_modules, space.wrap('_continuation'))): + if space.contains_w(w_modules, space.wrap('_continuation')): 
space.warn(space.wrap("Using _continuation/greenlet/stacklet together " "with vmprof will crash"), space.w_RuntimeWarning) diff --git a/pypy/module/cpyext/test/test_dictobject.py b/pypy/module/cpyext/test/test_dictobject.py --- a/pypy/module/cpyext/test/test_dictobject.py +++ b/pypy/module/cpyext/test/test_dictobject.py @@ -146,7 +146,7 @@ def test_dictproxy(self, space, api): w_dict = space.sys.get('modules') w_proxy = api.PyDictProxy_New(w_dict) - assert space.is_true(space.contains(w_proxy, space.wrap('sys'))) + assert space.contains_w(w_proxy, space.wrap('sys')) raises(OperationError, space.setitem, w_proxy, space.wrap('sys'), space.w_None) raises(OperationError, space.delitem, diff --git a/pypy/module/cpyext/test/test_import.py b/pypy/module/cpyext/test/test_import.py --- a/pypy/module/cpyext/test/test_import.py +++ b/pypy/module/cpyext/test/test_import.py @@ -21,7 +21,7 @@ def test_getmoduledict(self, space, api): testmod = "_functools" w_pre_dict = api.PyImport_GetModuleDict() - assert not space.is_true(space.contains(w_pre_dict, space.wrap(testmod))) + assert not space.contains_w(w_pre_dict, space.wrap(testmod)) with rffi.scoped_str2charp(testmod) as modname: w_module = api.PyImport_ImportModule(modname) @@ -29,7 +29,7 @@ assert w_module w_dict = api.PyImport_GetModuleDict() - assert space.is_true(space.contains(w_dict, space.wrap(testmod))) + assert space.contains_w(w_dict, space.wrap(testmod)) def test_reload(self, space, api): stat = api.PyImport_Import(space.wrap("stat")) diff --git a/pypy/module/cpyext/test/test_object.py b/pypy/module/cpyext/test/test_object.py --- a/pypy/module/cpyext/test/test_object.py +++ b/pypy/module/cpyext/test/test_object.py @@ -202,7 +202,7 @@ def test_dir(self, space, api): w_dir = api.PyObject_Dir(space.sys) assert space.isinstance_w(w_dir, space.w_list) - assert space.is_true(space.contains(w_dir, space.wrap('modules'))) + assert space.contains_w(w_dir, space.wrap('modules')) class 
AppTestObject(AppTestCpythonExtensionBase): def setup_class(cls): diff --git a/pypy/module/sys/interp_encoding.py b/pypy/module/sys/interp_encoding.py --- a/pypy/module/sys/interp_encoding.py +++ b/pypy/module/sys/interp_encoding.py @@ -42,7 +42,7 @@ def _getfilesystemencoding(space): encoding = base_encoding - if rlocale.HAVE_LANGINFO and rlocale.CODESET: + if rlocale.HAVE_LANGINFO: try: oldlocale = rlocale.setlocale(rlocale.LC_CTYPE, None) rlocale.setlocale(rlocale.LC_CTYPE, "") diff --git a/pypy/module/test_lib_pypy/cffi_tests/cffi0/test_zintegration.py b/pypy/module/test_lib_pypy/cffi_tests/cffi0/test_zintegration.py --- a/pypy/module/test_lib_pypy/cffi_tests/cffi0/test_zintegration.py +++ b/pypy/module/test_lib_pypy/cffi_tests/cffi0/test_zintegration.py @@ -5,6 +5,9 @@ if sys.platform == 'win32': py.test.skip('snippets do not run on win32') +if sys.version_info < (2, 7): + py.test.skip('fails e.g. on a Debian/Ubuntu which patches virtualenv' + ' in a non-2.6-friendly way') def create_venv(name): tmpdir = udir.join(name) diff --git a/pypy/module/test_lib_pypy/cffi_tests/embedding/test_basic.py b/pypy/module/test_lib_pypy/cffi_tests/embedding/test_basic.py --- a/pypy/module/test_lib_pypy/cffi_tests/embedding/test_basic.py +++ b/pypy/module/test_lib_pypy/cffi_tests/embedding/test_basic.py @@ -101,6 +101,7 @@ c = distutils.ccompiler.new_compiler() print('compiling %s with %r' % (name, modules)) extra_preargs = [] + debug = True if sys.platform == 'win32': libfiles = [] for m in modules: @@ -109,9 +110,12 @@ libfiles.append('Release\\%s.lib' % m[:-4]) modules = libfiles extra_preargs.append('/MANIFEST') + debug = False # you need to install extra stuff + # for this to work elif threads: extra_preargs.append('-pthread') - objects = c.compile([filename], macros=sorted(defines.items()), debug=True) + objects = c.compile([filename], macros=sorted(defines.items()), + debug=debug) c.link_executable(objects + modules, name, extra_preargs=extra_preargs) finally: 
os.chdir(curdir) @@ -119,12 +123,18 @@ def execute(self, name): path = self.get_path() env_extra = {'PYTHONPATH': prefix_pythonpath()} - libpath = os.environ.get('LD_LIBRARY_PATH') - if libpath: - libpath = path + ':' + libpath + if sys.platform == 'win32': + _path = os.environ.get('PATH') + # for libpypy-c.dll or Python27.dll + _path = os.path.split(sys.executable)[0] + ';' + _path + env_extra['PATH'] = _path else: - libpath = path - env_extra['LD_LIBRARY_PATH'] = libpath + libpath = os.environ.get('LD_LIBRARY_PATH') + if libpath: + libpath = path + ':' + libpath + else: + libpath = path + env_extra['LD_LIBRARY_PATH'] = libpath print('running %r in %r' % (name, path)) executable_name = name if sys.platform == 'win32': diff --git a/pypy/objspace/std/dictmultiobject.py b/pypy/objspace/std/dictmultiobject.py --- a/pypy/objspace/std/dictmultiobject.py +++ b/pypy/objspace/std/dictmultiobject.py @@ -1419,9 +1419,8 @@ return space.len(self.w_dict) def _all_contained_in(space, w_dictview, w_other): - w_iter = space.iter(w_dictview) - for w_item in space.iteriterable(w_iter): - if not space.is_true(space.contains(w_other, w_item)): + for w_item in space.iteriterable(w_dictview): + if not space.contains_w(w_other, w_item): return space.w_False return space.w_True diff --git a/rpython/doc/rlib.rst b/rpython/doc/rlib.rst --- a/rpython/doc/rlib.rst +++ b/rpython/doc/rlib.rst @@ -52,7 +52,7 @@ backend emits code, the function is called to determine the value. ``CDefinedIntSymbolic``: - Instances of ``ComputedIntSymbolic`` are also treated like integers of + Instances of ``CDefinedIntSymbolic`` are also treated like integers of unknown value by the annotator. When C code is emitted they will be represented by the attribute ``expr`` of the symbolic (which is also the first argument of the constructor). 
diff --git a/rpython/jit/backend/llsupport/test/zrpy_vmprof_test.py b/rpython/jit/backend/llsupport/test/zrpy_vmprof_test.py --- a/rpython/jit/backend/llsupport/test/zrpy_vmprof_test.py +++ b/rpython/jit/backend/llsupport/test/zrpy_vmprof_test.py @@ -3,16 +3,13 @@ from rpython.jit.backend.test.support import CCompiledMixin from rpython.rlib.jit import JitDriver from rpython.tool.udir import udir +from rpython.rlib import rthread from rpython.translator.translator import TranslationContext from rpython.jit.backend.detect_cpu import getcpuclass class CompiledVmprofTest(CCompiledMixin): CPUClass = getcpuclass() - def setup(self): - if self.CPUClass.backend_name != 'x86_64': - py.test.skip("vmprof only supports x86-64 CPUs at the moment") - def _get_TranslationContext(self): t = TranslationContext() t.config.translation.gc = 'incminimark' @@ -62,6 +59,7 @@ tmpfilename = str(udir.join('test_rvmprof')) def f(num): + rthread.get_ident() # register TLOFS_thread_ident code = MyCode("py:x:foo:3") rvmprof.register_code(code, get_name) fd = os.open(tmpfilename, os.O_WRONLY | os.O_CREAT, 0666) diff --git a/rpython/rlib/_stacklet_shadowstack.py b/rpython/rlib/_stacklet_shadowstack.py --- a/rpython/rlib/_stacklet_shadowstack.py +++ b/rpython/rlib/_stacklet_shadowstack.py @@ -30,6 +30,11 @@ mixlevelannotator.finish() lltype.attachRuntimeTypeInfo(STACKLET, destrptr=destrptr) +# Note: it's important that this is a light finalizer, otherwise +# the GC will call it but still expect the object to stay around for +# a while---and it can't stay around, because s_sscopy points to +# freed nonsense and customtrace() will crash + at rgc.must_be_light_finalizer def stacklet_destructor(stacklet): sscopy = stacklet.s_sscopy if sscopy: diff --git a/rpython/rlib/rvmprof/cintf.py b/rpython/rlib/rvmprof/cintf.py --- a/rpython/rlib/rvmprof/cintf.py +++ b/rpython/rlib/rvmprof/cintf.py @@ -30,11 +30,11 @@ def setup(): + compile_extra = ['-DRPYTHON_LL2CTYPES'] 
platform.verify_eci(ExternalCompilationInfo( - compile_extra=['-DRPYTHON_LL2CTYPES'], + compile_extra=compile_extra, **eci_kwds)) - eci = global_eci vmprof_init = rffi.llexternal("vmprof_init", [rffi.INT, rffi.DOUBLE, rffi.CCHARP], diff --git a/rpython/rlib/rvmprof/src/rvmprof.c b/rpython/rlib/rvmprof/src/rvmprof.c --- a/rpython/rlib/rvmprof/src/rvmprof.c +++ b/rpython/rlib/rvmprof/src/rvmprof.c @@ -1,23 +1,21 @@ #define _GNU_SOURCE 1 - #ifdef RPYTHON_LL2CTYPES /* only for testing: ll2ctypes sets RPY_EXTERN from the command-line */ -# ifndef RPY_EXTERN -# define RPY_EXTERN RPY_EXPORTED -# endif -# define RPY_EXPORTED extern __attribute__((visibility("default"))) -# define VMPROF_ADDR_OF_TRAMPOLINE(addr) 0 +#ifndef RPY_EXTERN +#define RPY_EXTERN RPY_EXPORTED +#endif +#ifdef _WIN32 +#define RPY_EXPORTED __declspec(dllexport) +#else +#define RPY_EXPORTED extern __attribute__((visibility("default"))) +#endif #else - # include "common_header.h" # include "structdef.h" # include "src/threadlocal.h" # include "rvmprof.h" -/*# ifndef VMPROF_ADDR_OF_TRAMPOLINE -# error "RPython program using rvmprof, but not calling vmprof_execute_code()" -# endif*/ #endif diff --git a/rpython/rlib/rvmprof/src/vmprof_common.h b/rpython/rlib/rvmprof/src/vmprof_common.h --- a/rpython/rlib/rvmprof/src/vmprof_common.h +++ b/rpython/rlib/rvmprof/src/vmprof_common.h @@ -7,9 +7,6 @@ static long profile_interval_usec = 0; static int opened_profile(char *interp_name); -#define MAX_STACK_DEPTH \ - ((SINGLE_BUF_SIZE - sizeof(struct prof_stacktrace_s)) / sizeof(void *)) - #define MARKER_STACKTRACE '\x01' #define MARKER_VIRTUAL_IP '\x02' #define MARKER_TRAILER '\x03' @@ -20,6 +17,9 @@ #define VERSION_THREAD_ID '\x01' #define VERSION_TAG '\x02' +#define MAX_STACK_DEPTH \ + ((SINGLE_BUF_SIZE - sizeof(struct prof_stacktrace_s)) / sizeof(void *)) + typedef struct prof_stacktrace_s { char padding[sizeof(long) - 1]; char marker; @@ -71,6 +71,43 @@ return _write_all((char*)&header, 5 * sizeof(long) + 4 + 
namelen); } +/* ************************************************************* + * functions to dump the stack trace + * ************************************************************* + */ + + +static int get_stack_trace(vmprof_stack_t* stack, intptr_t *result, int max_depth, intptr_t pc) +{ + int n = 0; + intptr_t addr = 0; + int bottom_jitted = 0; + // check if the pc is in JIT +#ifdef PYPY_JIT_CODEMAP + if (pypy_find_codemap_at_addr((intptr_t)pc, &addr)) { + // the bottom part is jitted, means we can fill up the first part + // from the JIT + n = vmprof_write_header_for_jit_addr(result, n, pc, max_depth); + stack = stack->next; // skip the first item as it contains garbage + } +#endif + while (n < max_depth - 1 && stack) { + if (stack->kind == VMPROF_CODE_TAG) { + result[n] = stack->kind; + result[n + 1] = stack->value; + n += 2; + } +#ifdef PYPY_JIT_CODEMAP + else if (stack->kind == VMPROF_JITTED_TAG) { + pc = ((intptr_t*)(stack->value - sizeof(intptr_t)))[0]; + n = vmprof_write_header_for_jit_addr(result, n, pc, max_depth); + } +#endif + stack = stack->next; + } + return n; +} + #ifndef RPYTHON_LL2CTYPES static vmprof_stack_t *get_vmprof_stack(void) { diff --git a/rpython/rlib/rvmprof/src/vmprof_main.h b/rpython/rlib/rvmprof/src/vmprof_main.h --- a/rpython/rlib/rvmprof/src/vmprof_main.h +++ b/rpython/rlib/rvmprof/src/vmprof_main.h @@ -35,6 +35,7 @@ #include "vmprof_stack.h" #include "vmprof_getpc.h" #include "vmprof_mt.h" +#include "vmprof_get_custom_offset.h" #include "vmprof_common.h" /************************************************************/ @@ -78,46 +79,6 @@ static char atfork_hook_installed = 0; -#include "vmprof_get_custom_offset.h" - -/* ************************************************************* - * functions to dump the stack trace - * ************************************************************* - */ - - -static int get_stack_trace(intptr_t *result, int max_depth, intptr_t pc, ucontext_t *ucontext) -{ - vmprof_stack_t* stack = 
get_vmprof_stack(); - int n = 0; - intptr_t addr = 0; - int bottom_jitted = 0; - // check if the pc is in JIT -#ifdef PYPY_JIT_CODEMAP - if (pypy_find_codemap_at_addr((intptr_t)pc, &addr)) { - // the bottom part is jitted, means we can fill up the first part - // from the JIT - n = vmprof_write_header_for_jit_addr(result, n, pc, max_depth); - stack = stack->next; // skip the first item as it contains garbage - } -#endif - while (n < max_depth - 1 && stack) { - if (stack->kind == VMPROF_CODE_TAG) { - result[n] = stack->kind; - result[n + 1] = stack->value; - n += 2; - } -#ifdef PYPY_JIT_CODEMAP - else if (stack->kind == VMPROF_JITTED_TAG) { - pc = ((intptr_t*)(stack->value - sizeof(intptr_t)))[0]; - n = vmprof_write_header_for_jit_addr(result, n, pc, max_depth); - } -#endif - stack = stack->next; - } - return n; -} - static intptr_t get_current_thread_id(void) { /* xxx This function is a hack on two fronts: @@ -194,8 +155,8 @@ struct prof_stacktrace_s *st = (struct prof_stacktrace_s *)p->data; st->marker = MARKER_STACKTRACE; st->count = 1; - depth = get_stack_trace(st->stack, - MAX_STACK_DEPTH-2, GetPC((ucontext_t*)ucontext), ucontext); + depth = get_stack_trace(get_vmprof_stack(), st->stack, + MAX_STACK_DEPTH-2, GetPC((ucontext_t*)ucontext)); st->depth = depth; st->stack[depth++] = get_current_thread_id(); p->data_offset = offsetof(struct prof_stacktrace_s, marker); diff --git a/rpython/rlib/rvmprof/src/vmprof_main_win32.h b/rpython/rlib/rvmprof/src/vmprof_main_win32.h --- a/rpython/rlib/rvmprof/src/vmprof_main_win32.h +++ b/rpython/rlib/rvmprof/src/vmprof_main_win32.h @@ -10,13 +10,30 @@ return 0; } +#if defined(_MSC_VER) +#include +typedef SSIZE_T ssize_t; +#endif + +#include +#include +#include +#include +#include +#include +#include +#include +#include #include "vmprof_stack.h" +#include "vmprof_get_custom_offset.h" #include "vmprof_common.h" #include // This file has been inspired (but not copied from since the LICENSE // would not allow it) from verysleepy 
profiler +#define SINGLE_BUF_SIZE 8192 + volatile int thread_started = 0; volatile int enabled = 0; @@ -55,52 +72,75 @@ return 0; } -int vmprof_snapshot_thread(DWORD thread_id, PyThreadState *tstate, prof_stacktrace_s *stack) +int vmprof_snapshot_thread(struct pypy_threadlocal_s *p, prof_stacktrace_s *stack) { - HRESULT result; - HANDLE hThread = OpenThread(THREAD_ALL_ACCESS, FALSE, thread_id); - int depth; + void *addr; + vmprof_stack_t *cur; + long tid; + HANDLE hThread; + long depth; + DWORD result; + CONTEXT ctx; + +#ifdef RPYTHON_LL2CTYPES + return 0; // not much we can do +#else +#ifndef RPY_TLOFS_thread_ident + return 0; // we can't freeze threads, unsafe +#else + hThread = OpenThread(THREAD_ALL_ACCESS, FALSE, p->thread_ident); if (!hThread) { return -1; } result = SuspendThread(hThread); if(result == 0xffffffff) return -1; // possible, e.g. attached debugger or thread alread suspended - // find the correct thread - depth = read_trace_from_cpy_frame(tstate->frame, stack->stack, - MAX_STACK_DEPTH); + ctx.ContextFlags = CONTEXT_FULL; + if (!GetThreadContext(hThread, &ctx)) + return -1; + depth = get_stack_trace(p->vmprof_tl_stack, + stack->stack, MAX_STACK_DEPTH-2, ctx.Eip); stack->depth = depth; - stack->stack[depth++] = (void*)thread_id; + stack->stack[depth++] = (void*)p->thread_ident; stack->count = 1; stack->marker = MARKER_STACKTRACE; ResumeThread(hThread); return depth; +#endif +#endif } long __stdcall vmprof_mainloop(void *arg) { +#ifndef RPYTHON_LL2CTYPES + struct pypy_threadlocal_s *p; prof_stacktrace_s *stack = (prof_stacktrace_s*)malloc(SINGLE_BUF_SIZE); - HANDLE hThreadSnap = INVALID_HANDLE_VALUE; int depth; - PyThreadState *tstate; while (1) { - Sleep(profile_interval_usec * 1000); + //Sleep(profile_interval_usec * 1000); + Sleep(10); if (!enabled) { continue; } - tstate = PyInterpreterState_Head()->tstate_head; - while (tstate) { - depth = vmprof_snapshot_thread(tstate->thread_id, tstate, stack); - if (depth > 0) { - _write_all((char*)stack + 
offsetof(prof_stacktrace_s, marker), - depth * sizeof(void *) + - sizeof(struct prof_stacktrace_s) - - offsetof(struct prof_stacktrace_s, marker)); + _RPython_ThreadLocals_Acquire(); + p = _RPython_ThreadLocals_Head(); // the first one is one behind head + p = _RPython_ThreadLocals_Enum(p); + while (p) { + if (p->ready == 42) { + depth = vmprof_snapshot_thread(p, stack); + if (depth > 0) { + _write_all((char*)stack + offsetof(prof_stacktrace_s, marker), + depth * sizeof(void *) + + sizeof(struct prof_stacktrace_s) - + offsetof(struct prof_stacktrace_s, marker)); + } } - tstate = tstate->next; + p = _RPython_ThreadLocals_Enum(p); } + _RPython_ThreadLocals_Release(); } +#endif } RPY_EXTERN diff --git a/rpython/rlib/rvmprof/src/vmprof_stack.h b/rpython/rlib/rvmprof/src/vmprof_stack.h --- a/rpython/rlib/rvmprof/src/vmprof_stack.h +++ b/rpython/rlib/rvmprof/src/vmprof_stack.h @@ -1,7 +1,11 @@ #ifndef _VMPROF_STACK_H_ #define _VMPROF_STACK_H_ +#ifdef _WIN32 +#define intptr_t long // XXX windows VC++ 2008 lacks stdint.h +#else #include +#endif #define VMPROF_CODE_TAG 1 /* <- also in cintf.py */ #define VMPROF_BLACKHOLE_TAG 2 diff --git a/rpython/rlib/rvmprof/test/test_ztranslation.py b/rpython/rlib/rvmprof/test/test_ztranslation.py --- a/rpython/rlib/rvmprof/test/test_ztranslation.py +++ b/rpython/rlib/rvmprof/test/test_ztranslation.py @@ -3,11 +3,10 @@ sys.path += ['../../../..'] # for subprocess in test_interpreted import py from rpython.tool.udir import udir -from rpython.rlib import rvmprof +from rpython.rlib import rvmprof, rthread from rpython.translator.c.test.test_genc import compile from rpython.rlib.nonconst import NonConstant - class MyCode: def __init__(self, count): self.count = count @@ -39,6 +38,7 @@ PROF_FILE = str(udir.join('test_ztranslation.prof')) def main(argv=[]): + rthread.get_ident() # force TLOFS_thread_ident if NonConstant(False): # Hack to give os.open() the correct annotation os.open('foo', 1, 1) diff --git 
a/rpython/translator/backendopt/finalizer.py b/rpython/translator/backendopt/finalizer.py --- a/rpython/translator/backendopt/finalizer.py +++ b/rpython/translator/backendopt/finalizer.py @@ -18,7 +18,7 @@ """ ok_operations = ['ptr_nonzero', 'ptr_eq', 'ptr_ne', 'free', 'same_as', 'direct_ptradd', 'force_cast', 'track_alloc_stop', - 'raw_free'] + 'raw_free', 'adr_eq', 'adr_ne'] def analyze_light_finalizer(self, graph): result = self.analyze_direct_call(graph) diff --git a/rpython/translator/c/src/threadlocal.c b/rpython/translator/c/src/threadlocal.c --- a/rpython/translator/c/src/threadlocal.c +++ b/rpython/translator/c/src/threadlocal.c @@ -85,6 +85,11 @@ return prev->next; } +struct pypy_threadlocal_s *_RPython_ThreadLocals_Head(void) +{ + return &linkedlist_head; +} + static void _RPy_ThreadLocals_Init(void *p) { struct pypy_threadlocal_s *tls = (struct pypy_threadlocal_s *)p; diff --git a/rpython/translator/c/src/threadlocal.h b/rpython/translator/c/src/threadlocal.h --- a/rpython/translator/c/src/threadlocal.h +++ b/rpython/translator/c/src/threadlocal.h @@ -27,6 +27,9 @@ RPY_EXTERN struct pypy_threadlocal_s * _RPython_ThreadLocals_Enum(struct pypy_threadlocal_s *prev); +/* will return the head of the list */ +RPY_EXTERN struct pypy_threadlocal_s *_RPython_ThreadLocals_Head(); + #define OP_THREADLOCALREF_ACQUIRE(r) _RPython_ThreadLocals_Acquire() #define OP_THREADLOCALREF_RELEASE(r) _RPython_ThreadLocals_Release() #define OP_THREADLOCALREF_ENUM(p, r) r = _RPython_ThreadLocals_Enum(p) From pypy.commits at gmail.com Mon Feb 15 04:31:20 2016 From: pypy.commits at gmail.com (arigo) Date: Mon, 15 Feb 2016 01:31:20 -0800 (PST) Subject: [pypy-commit] cffi default: pypy on windows now stores "python27.lib" in a directory "libs" Message-ID: <56c19ae8.2457c20a.460fe.ffff984d@mx.google.com> Author: Armin Rigo Branch: Changeset: r2638:2711f96bf410 Date: 2016-02-15 10:30 +0100 http://bitbucket.org/cffi/cffi/changeset/2711f96bf410/ Log: pypy on windows now stores 
"python27.lib" in a directory "libs" diff --git a/cffi/api.py b/cffi/api.py --- a/cffi/api.py +++ b/cffi/api.py @@ -551,13 +551,11 @@ # if '__pypy__' in sys.builtin_module_names: if sys.platform == "win32": - # we need 'libpypy-c.lib'. Right now, distributions of - # pypy contain it as 'include/python27.lib'. You need - # to manually copy it back to 'libpypy-c.lib'. XXX Will - # be fixed in the next pypy release. - pythonlib = "libpypy-c" + # we need 'libpypy-c.lib'. Current distributions of + # pypy (>= 4.1) contain it as 'libs/python27.lib'. + pythonlib = "python27" if hasattr(sys, 'prefix'): - ensure('library_dirs', sys.prefix) + ensure('library_dirs', os.path.join(sys.prefix, 'libs')) else: # we need 'libpypy-c.{so,dylib}', which should be by # default located in 'sys.prefix/bin' From pypy.commits at gmail.com Mon Feb 15 04:32:17 2016 From: pypy.commits at gmail.com (arigo) Date: Mon, 15 Feb 2016 01:32:17 -0800 (PST) Subject: [pypy-commit] pypy default: import cffi/2711f96bf410 Message-ID: <56c19b21.96941c0a.178ff.fffffd84@mx.google.com> Author: Armin Rigo Branch: Changeset: r82252:5b6ed997837d Date: 2016-02-15 10:31 +0100 http://bitbucket.org/pypy/pypy/changeset/5b6ed997837d/ Log: import cffi/2711f96bf410 diff --git a/lib_pypy/cffi/api.py b/lib_pypy/cffi/api.py --- a/lib_pypy/cffi/api.py +++ b/lib_pypy/cffi/api.py @@ -551,13 +551,11 @@ # if '__pypy__' in sys.builtin_module_names: if sys.platform == "win32": - # we need 'libpypy-c.lib'. Right now, distributions of - # pypy contain it as 'include/python27.lib'. You need - # to manually copy it back to 'libpypy-c.lib'. XXX Will - # be fixed in the next pypy release. - pythonlib = "libpypy-c" + # we need 'libpypy-c.lib'. Current distributions of + # pypy (>= 4.1) contain it as 'libs/python27.lib'. 
+ pythonlib = "python27" if hasattr(sys, 'prefix'): - ensure('library_dirs', sys.prefix) + ensure('library_dirs', os.path.join(sys.prefix, 'libs')) else: # we need 'libpypy-c.{so,dylib}', which should be by # default located in 'sys.prefix/bin' From pypy.commits at gmail.com Mon Feb 15 04:53:44 2016 From: pypy.commits at gmail.com (plan_rich) Date: Mon, 15 Feb 2016 01:53:44 -0800 (PST) Subject: [pypy-commit] pypy s390x-backend: separated two needs in the regalloc, ensure_reg always returns a register (a pool location is never returned), ensure_reg_or_pool now either returns a pool loc or register Message-ID: <56c1a028.41df1c0a.d1f8b.0bf5@mx.google.com> Author: Richard Plangger Branch: s390x-backend Changeset: r82253:13c8dfe611d6 Date: 2016-02-15 10:52 +0100 http://bitbucket.org/pypy/pypy/changeset/13c8dfe611d6/ Log: separated two needs in the regalloc, ensure_reg always returns a register (a pool location is never returned), ensure_reg_or_pool now either returns a pool loc or register diff --git a/rpython/jit/backend/zarch/helper/regalloc.py b/rpython/jit/backend/zarch/helper/regalloc.py --- a/rpython/jit/backend/zarch/helper/regalloc.py +++ b/rpython/jit/backend/zarch/helper/regalloc.py @@ -27,7 +27,7 @@ if check_imm32(a1): l1 = imm(a1.getint()) else: - l1 = self.ensure_reg(a1) + l1 = self.ensure_reg_or_pool(a1) self.force_result_in_reg(op, a0) self.free_op_vars() return [l0, l1] @@ -41,7 +41,7 @@ if check_imm32(a1): l1 = imm(a1.getint()) else: - l1 = self.ensure_reg(a1) + l1 = self.ensure_reg_or_pool(a1) self.force_result_in_reg(op, a0) self.free_op_vars() return [l0, l1] @@ -54,7 +54,7 @@ if check_imm32(a1): l1 = imm(a1.getint()) else: - l1 = self.ensure_reg(a1) + l1 = self.ensure_reg_or_pool(a1) lr,lq = self.rm.ensure_even_odd_pair(a0, op, bind_first=False) self.free_op_vars() return [lr, lq, l1] @@ -65,9 +65,9 @@ a1 = op.getarg(1) l1 = self.ensure_reg(a1) if isinstance(a0, Const): - poolloc = self.ensure_reg(a0) + poolloc = self.ensure_reg_or_pool(a0) lr,lq = 
self.rm.ensure_even_odd_pair(a0, op, bind_first=modulus, must_exist=False) - self.assembler.mc.LG(lq, poolloc) + self.assembler.regalloc_mov(poolloc, lq) else: lr,lq = self.rm.ensure_even_odd_pair(a0, op, bind_first=modulus) self.free_op_vars() diff --git a/rpython/jit/backend/zarch/opassembler.py b/rpython/jit/backend/zarch/opassembler.py --- a/rpython/jit/backend/zarch/opassembler.py +++ b/rpython/jit/backend/zarch/opassembler.py @@ -955,24 +955,20 @@ (base_loc, index_loc, value_loc, size_loc) = arglocs assert not base_loc.is_in_pool() assert not index_loc.is_in_pool() + assert not value_loc.is_in_pool() if index_loc.is_imm() and self._mem_offset_supported(index_loc.value): addr_loc = l.addr(index_loc.value, base_loc) else: self.mc.LGR(r.SCRATCH, index_loc) addr_loc = l.addr(0, base_loc, r.SCRATCH) - if value_loc.is_in_pool(): - self.mc.LG(r.SCRATCH2, value_loc) - value_loc = r.SCRATCH2 self._memory_store(value_loc, addr_loc, size_loc) def emit_gc_store_indexed(self, op, arglocs, regalloc): (base_loc, index_loc, value_loc, offset_loc, size_loc) = arglocs assert not base_loc.is_in_pool() assert not index_loc.is_in_pool() + assert not value_loc.is_in_pool() addr_loc = self._load_address(base_loc, index_loc, offset_loc, r.SCRATCH) - if value_loc.is_in_pool(): - self.mc.LG(r.SCRATCH2, value_loc) - value_loc = r.SCRATCH2 self._memory_store(value_loc, addr_loc, size_loc) def _load_address(self, base_loc, index_loc, offset_loc, helper_reg): diff --git a/rpython/jit/backend/zarch/regalloc.py b/rpython/jit/backend/zarch/regalloc.py --- a/rpython/jit/backend/zarch/regalloc.py +++ b/rpython/jit/backend/zarch/regalloc.py @@ -83,17 +83,24 @@ offset = self.assembler.pool.get_offset(var) return l.pool(offset, float=True) - def ensure_reg(self, box, force_in_reg): + def ensure_reg_or_pool(self, box): + if isinstance(box, Const): + return self.place_in_pool(box) + else: + assert box in self.temp_boxes + loc = self.make_sure_var_in_reg(box, + forbidden_vars=self.temp_boxes) + 
return loc + + def ensure_reg(self, box): if isinstance(box, Const): poolloc = self.place_in_pool(box) - if force_in_reg: - tmp = TempVar() - self.temp_boxes.append(tmp) - reg = self.force_allocate_reg(tmp) - assert poolloc.displace > 0 - self.assembler.mc.LD(reg, poolloc) - return reg - return poolloc + tmp = TempVar() + reg = self.force_allocate_reg(tmp, self.temp_boxes) + self.temp_boxes.append(tmp) + assert poolloc.displace > 0 + self.assembler.mc.LD(reg, poolloc) + return reg else: assert box in self.temp_boxes loc = self.make_sure_var_in_reg(box, @@ -133,18 +140,26 @@ off = self.pool.get_offset(c) return l.pool(off) - def ensure_reg(self, box, force_in_reg, selected_reg=None): + def ensure_reg_or_pool(self, box): + if isinstance(box, Const): + offset = self.assembler.pool.get_offset(box) + return l.pool(offset) + else: + assert box in self.temp_boxes + loc = self.make_sure_var_in_reg(box, + forbidden_vars=self.temp_boxes, + selected_reg=selected_reg) + return loc + + def ensure_reg(self, box): if isinstance(box, Const): offset = self.assembler.pool.get_offset(box) poolloc = l.pool(offset) - if force_in_reg: - if selected_reg is None: - tmp = TempInt() - selected_reg = self.force_allocate_reg(tmp, forbidden_vars=self.temp_boxes) - self.temp_boxes.append(tmp) - self.assembler.mc.LG(selected_reg, poolloc) - return selected_reg - return poolloc + tmp = TempInt() + reg = self.force_allocate_reg(tmp, forbidden_vars=self.temp_boxes) + self.temp_boxes.append(tmp) + self.assembler.mc.LG(reg, poolloc) + return reg else: assert box in self.temp_boxes loc = self.make_sure_var_in_reg(box, @@ -586,37 +601,35 @@ else: return self.rm.call_result_location(v) - def ensure_reg(self, box, force_in_reg=False): + def ensure_reg(self, box)a: if box.type == FLOAT: - return self.fprm.ensure_reg(box, force_in_reg) + return self.fprm.ensure_reg(box) else: - return self.rm.ensure_reg(box, force_in_reg) + return self.rm.ensure_reg(box) - def ensure_reg_or_16bit_imm(self, box, 
selected_reg=None): + def ensure_reg_or_16bit_imm(self, box): if box.type == FLOAT: return self.fprm.ensure_reg(box, True) else: if helper.check_imm(box): return imm(box.getint()) - return self.rm.ensure_reg(box, force_in_reg=True, selected_reg=selected_reg) + return self.rm.ensure_reg(box) - def ensure_reg_or_20bit_imm(self, box, selected_reg=None): + def ensure_reg_or_20bit_imm(self, box): if box.type == FLOAT: - return self.fprm.ensure_reg(box, True) + return self.fprm.ensure_reg(box) else: if helper.check_imm20(box): return imm(box.getint()) - return self.rm.ensure_reg(box, force_in_reg=True, selected_reg=selected_reg) + return self.rm.ensure_reg(box) - def ensure_reg_or_any_imm(self, box, selected_reg=None): + def ensure_reg_or_any_imm(self, box): if box.type == FLOAT: - return self.fprm.ensure_reg(box, True, - selected_reg=selected_reg) + return self.fprm.ensure_reg(box): else: if isinstance(box, Const): return imm(box.getint()) - return self.rm.ensure_reg(box, force_in_reg=True, - selected_reg=selected_reg) + return self.rm.ensure_reg(box) def get_scratch_reg(self, type, selected_reg=None): if type == FLOAT: @@ -673,7 +686,6 @@ # ****************************************************** def prepare_increment_debug_counter(self, op): - #poolloc = self.ensure_reg(op.getarg(0)) immvalue = self.convert_to_int(op.getarg(0)) base_loc = r.SCRATCH self.assembler.mc.load_imm(base_loc, immvalue) @@ -810,12 +822,12 @@ # sure it is in a register different from r.RES and r.RSZ. (It # should not be a ConstInt at all.) 
length_box = op.getarg(2) - lengthloc = self.ensure_reg(length_box, force_in_reg=True) + lengthloc = self.ensure_reg(length_box) return [lengthloc] def _prepare_gc_load(self, op): - base_loc = self.ensure_reg(op.getarg(0), force_in_reg=True) + base_loc = self.ensure_reg(op.getarg(0)) index_loc = self.ensure_reg_or_20bit_imm(op.getarg(1)) size_box = op.getarg(2) assert isinstance(size_box, ConstInt) @@ -832,7 +844,7 @@ prepare_gc_load_r = _prepare_gc_load def _prepare_gc_load_indexed(self, op): - base_loc = self.ensure_reg(op.getarg(0), force_in_reg=True) + base_loc = self.ensure_reg(op.getarg(0)) index_loc = self.ensure_reg_or_20bit_imm(op.getarg(1)) scale_box = op.getarg(2) offset_box = op.getarg(3) @@ -858,7 +870,7 @@ prepare_gc_load_indexed_r = _prepare_gc_load_indexed def prepare_gc_store(self, op): - base_loc = self.ensure_reg(op.getarg(0), force_in_reg=True) + base_loc = self.ensure_reg(op.getarg(0)) index_loc = self.ensure_reg_or_20bit_imm(op.getarg(1)) value_loc = self.ensure_reg(op.getarg(2)) size_box = op.getarg(3) @@ -869,7 +881,7 @@ def prepare_gc_store_indexed(self, op): args = op.getarglist() - base_loc = self.ensure_reg(op.getarg(0), force_in_reg=True) + base_loc = self.ensure_reg(op.getarg(0)) index_loc = self.ensure_reg_or_20bit_imm(op.getarg(1)) value_loc = self.ensure_reg(op.getarg(2)) scale_box = op.getarg(3) @@ -893,12 +905,12 @@ return EffectInfo.OS_NONE def prepare_convert_float_bytes_to_longlong(self, op): - loc1 = self.ensure_reg(op.getarg(0), force_in_reg=True) + loc1 = self.ensure_reg(op.getarg(0)) res = self.force_allocate_reg(op) return [loc1, res] def prepare_convert_longlong_bytes_to_float(self, op): - loc1 = self.ensure_reg(op.getarg(0), force_in_reg=True) + loc1 = self.ensure_reg(op.getarg(0)) res = self.force_allocate_reg(op) return [loc1, res] @@ -998,11 +1010,11 @@ return locs def prepare_cond_call_gc_wb(self, op): - arglocs = [self.ensure_reg(op.getarg(0), force_in_reg=True)] + arglocs = [self.ensure_reg(op.getarg(0))] return 
arglocs def prepare_cond_call_gc_wb_array(self, op): - arglocs = [self.ensure_reg(op.getarg(0), force_in_reg=True), + arglocs = [self.ensure_reg(op.getarg(0)), self.ensure_reg_or_16bit_imm(op.getarg(1)), None] if arglocs[1].is_reg(): @@ -1010,7 +1022,7 @@ return arglocs def _prepare_math_sqrt(self, op): - loc = self.ensure_reg(op.getarg(1), force_in_reg=True) + loc = self.ensure_reg(op.getarg(1)) self.free_op_vars() res = self.fprm.force_allocate_reg(op) return [loc, res] @@ -1060,7 +1072,7 @@ prepare_guard_overflow = _prepare_guard_cc def prepare_guard_class(self, op): - x = self.ensure_reg(op.getarg(0), force_in_reg=True) + x = self.ensure_reg(op.getarg(0)) y_val = force_int(op.getarg(1).getint()) arglocs = self._prepare_guard(op, [x, imm(y_val)]) return arglocs From pypy.commits at gmail.com Mon Feb 15 05:11:12 2016 From: pypy.commits at gmail.com (cfbolz) Date: Mon, 15 Feb 2016 02:11:12 -0800 (PST) Subject: [pypy-commit] pypy reorder-map-attributes: a failing test of an example that reorders infinitely often Message-ID: <56c1a440.84b61c0a.82dc2.1386@mx.google.com> Author: Carl Friedrich Bolz Branch: reorder-map-attributes Changeset: r82254:433dbd99e066 Date: 2016-02-14 17:30 +0100 http://bitbucket.org/pypy/pypy/changeset/433dbd99e066/ Log: a failing test of an example that reorders infinitely often diff --git a/pypy/objspace/std/test/test_mapdict.py b/pypy/objspace/std/test/test_mapdict.py --- a/pypy/objspace/std/test/test_mapdict.py +++ b/pypy/objspace/std/test/test_mapdict.py @@ -148,34 +148,35 @@ obj2.setdictvalue(space, "a", 30) obj2.setdictvalue(space, "c", 40) obj2.setdictvalue(space, "b", 50) - + obj3.setdictvalue(space, "c", 30) obj3.setdictvalue(space, "a", 40) obj3.setdictvalue(space, "b", 50) - + obj4.setdictvalue(space, "c", 30) obj4.setdictvalue(space, "b", 40) obj4.setdictvalue(space, "a", 50) - + obj5.setdictvalue(space, "b", 30) obj5.setdictvalue(space, "a", 40) obj5.setdictvalue(space, "c", 50) - + obj6.setdictvalue(space, "b", 30) 
obj6.setdictvalue(space, "c", 40) obj6.setdictvalue(space, "a", 50) - + assert obj.map is obj2.map assert obj.map is obj3.map assert obj.map is obj4.map assert obj.map is obj5.map assert obj.map is obj6.map + def test_insert_different_orders_4(): cls = Class() obj = cls.instantiate() obj2 = cls.instantiate() - + obj.setdictvalue(space, "a", 10) obj.setdictvalue(space, "b", 20) obj.setdictvalue(space, "c", 30) @@ -192,7 +193,7 @@ cls = Class() obj = cls.instantiate() obj2 = cls.instantiate() - + obj.setdictvalue(space, "a", 10) obj.setdictvalue(space, "b", 20) obj.setdictvalue(space, "c", 30) @@ -211,6 +212,7 @@ assert obj.map is obj3.map + def test_bug_stack_overflow_insert_attributes(): cls = Class() obj = cls.instantiate() @@ -218,6 +220,7 @@ for i in range(1000): obj.setdictvalue(space, str(i), i) + def test_insert_different_orders_perm(): from itertools import permutations cls = Class() @@ -238,6 +241,19 @@ print len(seen_maps) + +def test_bug_infinite_loop(): + cls = Class() + obj = cls.instantiate() + obj.setdictvalue(space, "e", 1) + obj2 = cls.instantiate() + obj2.setdictvalue(space, "f", 2) + obj3 = cls.instantiate() + obj3.setdictvalue(space, "a", 3) + obj3.setdictvalue(space, "e", 4) + obj3.setdictvalue(space, "f", 5) + + def test_attr_immutability(monkeypatch): cls = Class() obj = cls.instantiate() @@ -1272,4 +1288,4 @@ def test_setdefault_fast(self): # mapdict can't pass this, which is fine - pass \ No newline at end of file + pass From pypy.commits at gmail.com Mon Feb 15 05:11:14 2016 From: pypy.commits at gmail.com (cfbolz) Date: Mon, 15 Feb 2016 02:11:14 -0800 (PST) Subject: [pypy-commit] pypy reorder-map-attributes: a tweak to the algorithm to solve the problem of infinite reorderings more thoroughly Message-ID: <56c1a442.ca56c20a.75c8d.ffffa228@mx.google.com> Author: Carl Friedrich Bolz Branch: reorder-map-attributes Changeset: r82255:3387e677ff14 Date: 2016-02-15 00:46 +0100 http://bitbucket.org/pypy/pypy/changeset/3387e677ff14/ Log: a tweak to 
the algorithm to solve the problem of infinite reorderings more thoroughly diff --git a/pypy/objspace/std/mapdict.py b/pypy/objspace/std/mapdict.py --- a/pypy/objspace/std/mapdict.py +++ b/pypy/objspace/std/mapdict.py @@ -1,4 +1,4 @@ -import weakref +import weakref, sys from rpython.rlib import jit, objectmodel, debug, rerased from rpython.rlib.rarithmetic import intmask, r_uint @@ -159,7 +159,7 @@ if self.cache_attrs is not None: return self.cache_attrs.get(key, None) return None - + def add_attr(self, obj, name, index, w_value): self._reorder_and_add(obj, name, index, w_value) if not jit.we_are_jitted(): @@ -193,53 +193,71 @@ jit.isconstant(name) and jit.isconstant(index)) def _reorder_and_add(self, obj, name, index, w_value): + # the idea is as follows: the subtrees of any map are ordered by insertion. + # the invariant is that subtrees that are inserted later must not contain + # the name of the attribute of any earlier inserted attribute anywhere + # m______ + # inserted first / \ ... \ further attributes + # attrname a 0/ 1\ n\ + # m a must not appear here anywhere + # + # when inserting a new attribute in an object we check whether any + # parent of lower order has seen that attribute yet. if yes, we follow + # that branch. if not, we normally append that attribute. When we + # follow a prior branch, we necessarily remove some attributes to be + # able to do that. They need to be re-added, which has to follow the + # reordering procedure recusively. 
+ + # we store the to-be-readded attribute in stack_maps and stack_values + # those are lazily initialized to two lists large enough to store all + # current attributes stack_maps = None stack_values = None stack_index = 0 while True: current = self - localstack_index = stack_index + number_to_readd = 0 + current_order = sys.maxint + # walk up the map chain to find an ancestor with lower order that + # already has the current name as a child inserted while True: attr = current._get_cache_attr(name, index) - if attr is None: - # if not found in all ancestors + if attr is None or attr.order > current_order: + # we reached the top, so we didn't find it anywhere, + # just add it if not isinstance(current, PlainAttribute): self._add_attr_without_reordering(obj, name, index, w_value) break # if not found try parent else: - w_self_value = obj._mapdict_read_storage(current.storageindex) + number_to_readd += 1 + current_order = current.order + current = current.back + else: + # we found the attributes further up, need to save the + # previous values of the attributes we passed + if number_to_readd: if stack_maps is None: stack_maps = [None] * self.length() stack_values = [None] * self.length() - stack_maps[localstack_index] = current - stack_values[localstack_index] = w_self_value - localstack_index += 1 - current = current.back - else: + current = self + for i in range(number_to_readd): + assert isinstance(current, PlainAttribute) + w_self_value = obj._mapdict_read_storage( + current.storageindex) + stack_maps[stack_index] = current + stack_values[stack_index] = w_self_value + stack_index += 1 + current = current.back attr._switch_map_and_write_storage(obj, w_value) - if not localstack_index: - return - - if not stack_index: - # add the first attribute of the stack without reordering - # to prevent an endless loop - localstack_index += -1 - next_map = stack_maps[localstack_index] - w_value = stack_values[localstack_index] - 
obj._get_mapdict_map()._add_attr_without_reordering( - obj, next_map.name, next_map.index, w_value) - - stack_index = localstack_index break if not stack_index: return - # readd all other values from the stack (with reordering) - # the last element of the stack will be the new current - stack_index += -1 + # readd the current top of the stack + stack_index -= 1 next_map = stack_maps[stack_index] w_value = stack_values[stack_index] name = next_map.name @@ -350,7 +368,7 @@ return Terminator.set_terminator(self, obj, terminator) class PlainAttribute(AbstractAttribute): - _immutable_fields_ = ['name', 'index', 'storageindex', 'back', 'ever_mutated?'] + _immutable_fields_ = ['name', 'index', 'storageindex', 'back', 'ever_mutated?', 'order'] def __init__(self, name, index, back): AbstractAttribute.__init__(self, back.space, back.terminator) @@ -360,6 +378,7 @@ self.back = back self._size_estimate = self.length() * NUM_DIGITS_POW2 self.ever_mutated = False + self.order = len(back.cache_attrs) if back.cache_attrs else 0 def _copy_attr(self, obj, new_obj): w_value = self.read(obj, self.name, self.index) From pypy.commits at gmail.com Mon Feb 15 05:49:08 2016 From: pypy.commits at gmail.com (plan_rich) Date: Mon, 15 Feb 2016 02:49:08 -0800 (PST) Subject: [pypy-commit] pypy s390x-backend: sync to remote did not work, fixed more calls to ensure_reg Message-ID: <56c1ad24.034cc20a.9ac36.ffffb4a1@mx.google.com> Author: Richard Plangger Branch: s390x-backend Changeset: r82256:4d0391440a29 Date: 2016-02-15 11:48 +0100 http://bitbucket.org/pypy/pypy/changeset/4d0391440a29/ Log: sync to remote did not work, fixed more calls to ensure_reg diff --git a/rpython/jit/backend/zarch/helper/regalloc.py b/rpython/jit/backend/zarch/helper/regalloc.py --- a/rpython/jit/backend/zarch/helper/regalloc.py +++ b/rpython/jit/backend/zarch/helper/regalloc.py @@ -29,7 +29,6 @@ else: l1 = self.ensure_reg_or_pool(a1) self.force_result_in_reg(op, a0) - self.free_op_vars() return [l0, l1] def 
prepare_int_mul(self, op): @@ -43,7 +42,6 @@ else: l1 = self.ensure_reg_or_pool(a1) self.force_result_in_reg(op, a0) - self.free_op_vars() return [l0, l1] def prepare_int_mul_ovf(self, op): @@ -56,7 +54,6 @@ else: l1 = self.ensure_reg_or_pool(a1) lr,lq = self.rm.ensure_even_odd_pair(a0, op, bind_first=False) - self.free_op_vars() return [lr, lq, l1] def generate_div_mod(modulus): @@ -70,7 +67,6 @@ self.assembler.regalloc_mov(poolloc, lq) else: lr,lq = self.rm.ensure_even_odd_pair(a0, op, bind_first=modulus) - self.free_op_vars() return [lr, lq, l1] return f @@ -81,15 +77,8 @@ a0 = op.getarg(0) a1 = op.getarg(1) # sub is not commotative, thus cannot swap operands - l1 = self.ensure_reg(a1) - l0 = self.ensure_reg(a0) - if isinstance(a0, Const): - loc = self.force_allocate_reg(op) - self.assembler.mc.LG(loc, l0) - l0 = loc - else: - self.rm.force_result_in_reg(op, a0) - self.free_op_vars() + l1 = self.ensure_reg_or_pool(a1) + l0 = self.force_result_in_reg(op, a0) return [l0, l1] def prepare_int_logic(self, op): @@ -97,10 +86,8 @@ a1 = op.getarg(1) if a0.is_constant(): a0, a1 = a1, a0 - l0 = self.ensure_reg(a0) - l1 = self.ensure_reg(a1) - self.force_result_in_reg(op, a0) - self.free_op_vars() + l1 = self.ensure_reg_or_pool(a1) + l0 = self.force_result_in_reg(op, a0) return [l0, l1] def prepare_int_shift(self, op): @@ -111,11 +98,10 @@ # in the addr part of the instruction l1 = addr(a1.getint()) else: - tmp = self.rm.ensure_reg(a1, force_in_reg=True) + tmp = self.rm.ensure_reg(a1) l1 = addr(0, tmp) - l0 = self.ensure_reg(a0, force_in_reg=True) + l0 = self.ensure_reg(a0) lr = self.force_allocate_reg(op) - self.free_op_vars() return [lr, l0, l1] def generate_cmp_op(signed=True): @@ -128,21 +114,14 @@ l1 = imm(a1.getint()) else: l1 = self.ensure_reg(a1) - if l0.is_in_pool(): - poolloc = l0 - l0 = self.force_allocate_reg(op) - self.assembler.mc.LG(l0, poolloc) res = self.force_allocate_reg_or_cc(op) - #self.force_result_in_reg(op, a0) - self.free_op_vars() return [l0, l1, 
res, invert] return prepare_cmp_op def prepare_float_cmp_op(self, op): - l0 = self.ensure_reg(op.getarg(0), force_in_reg=True) - l1 = self.ensure_reg(op.getarg(1)) + l0 = self.ensure_reg(op.getarg(0)) + l1 = self.ensure_reg_or_pool(op.getarg(1)) res = self.force_allocate_reg_or_cc(op) - self.free_op_vars() return [l0, l1, res] def prepare_binary_op(self, op): @@ -151,7 +130,6 @@ l0 = self.ensure_reg(a0) l1 = self.ensure_reg(a1) self.force_result_in_reg(op, a0) - self.free_op_vars() return [l0, l1] def generate_prepare_float_binary_op(allow_swap=False): @@ -169,30 +147,24 @@ l0 = newloc else: self.force_result_in_reg(op, a0) - self.free_op_vars() return [l0, l1] return prepare_float_binary_op def prepare_unary_cmp(self, op): a0 = op.getarg(0) - assert not isinstance(a0, ConstInt) l0 = self.ensure_reg(a0) self.force_result_in_reg(op, a0) res = self.force_allocate_reg_or_cc(op) - self.free_op_vars() return [l0, res] def prepare_unary_op(self, op): a0 = op.getarg(0) - assert not isinstance(a0, ConstInt) - l0 = self.ensure_reg(a0, force_in_reg=True) + l0 = self.ensure_reg(a0) res = self.force_result_in_reg(op, a0) - self.free_op_vars() return [l0,] def prepare_same_as(self, op): a0 = op.getarg(0) l0 = self.ensure_reg(a0) res = self.force_allocate_reg(op) - self.free_op_vars() return [l0, res] diff --git a/rpython/jit/backend/zarch/regalloc.py b/rpython/jit/backend/zarch/regalloc.py --- a/rpython/jit/backend/zarch/regalloc.py +++ b/rpython/jit/backend/zarch/regalloc.py @@ -147,8 +147,7 @@ else: assert box in self.temp_boxes loc = self.make_sure_var_in_reg(box, - forbidden_vars=self.temp_boxes, - selected_reg=selected_reg) + forbidden_vars=self.temp_boxes) return loc def ensure_reg(self, box): @@ -163,8 +162,7 @@ else: assert box in self.temp_boxes loc = self.make_sure_var_in_reg(box, - forbidden_vars=self.temp_boxes, - selected_reg=selected_reg) + forbidden_vars=self.temp_boxes) return loc def get_scratch_reg(self, selected_reg=None): @@ -601,7 +599,13 @@ else: return 
self.rm.call_result_location(v) - def ensure_reg(self, box)a: + def ensure_reg_or_pool(self, box): + if box.type == FLOAT: + return self.fprm.ensure_reg_or_pool(box) + else: + return self.rm.ensure_reg_or_pool(box) + + def ensure_reg(self, box): if box.type == FLOAT: return self.fprm.ensure_reg(box) else: @@ -609,7 +613,7 @@ def ensure_reg_or_16bit_imm(self, box): if box.type == FLOAT: - return self.fprm.ensure_reg(box, True) + return self.fprm.ensure_reg(box) else: if helper.check_imm(box): return imm(box.getint()) @@ -625,7 +629,7 @@ def ensure_reg_or_any_imm(self, box): if box.type == FLOAT: - return self.fprm.ensure_reg(box): + return self.fprm.ensure_reg(box) else: if isinstance(box, Const): return imm(box.getint()) @@ -1107,7 +1111,7 @@ return locs def prepare_guard_exception(self, op): - loc = self.ensure_reg(op.getarg(0), force_in_reg=True) + loc = self.ensure_reg(op.getarg(0)) if op in self.longevity: resloc = self.force_allocate_reg(op) else: @@ -1116,7 +1120,7 @@ return arglocs def prepare_guard_is_object(self, op): - loc_object = self.ensure_reg(op.getarg(0), force_in_reg=True) + loc_object = self.ensure_reg(op.getarg(0)) arglocs = self._prepare_guard(op, [loc_object]) return arglocs @@ -1126,8 +1130,8 @@ prepare_save_exc_class = prepare_save_exception def prepare_restore_exception(self, op): - loc0 = self.ensure_reg(op.getarg(0), force_in_reg=True) - loc1 = self.ensure_reg(op.getarg(1), force_in_reg=True) + loc0 = self.ensure_reg(op.getarg(0)) + loc1 = self.ensure_reg(op.getarg(1)) return [loc0, loc1] def prepare_copystrcontent(self, op): @@ -1145,7 +1149,7 @@ must_exist=False, load_loc_odd=False) src_ofs_loc = self.ensure_reg_or_any_imm(op.getarg(2)) self.rm.temp_boxes.append(src_tmp) - dst_ptr_loc = self.ensure_reg(op.getarg(1), force_in_reg=True) + dst_ptr_loc = self.ensure_reg(op.getarg(1)) dst_ofs_loc = self.ensure_reg_or_any_imm(op.getarg(3)) length_loc = self.ensure_reg_or_any_imm(op.getarg(4)) # no need to spill, we do not call memcpy, but we 
use s390x's From pypy.commits at gmail.com Mon Feb 15 06:08:11 2016 From: pypy.commits at gmail.com (plan_rich) Date: Mon, 15 Feb 2016 03:08:11 -0800 (PST) Subject: [pypy-commit] pypy s390x-backend: added te temp_boxes too early, added comments Message-ID: <56c1b19b.654fc20a.a67a1.ffffa5a1@mx.google.com> Author: Richard Plangger Branch: s390x-backend Changeset: r82257:c587ef1d34a4 Date: 2016-02-15 12:07 +0100 http://bitbucket.org/pypy/pypy/changeset/c587ef1d34a4/ Log: added te temp_boxes too early, added comments diff --git a/rpython/jit/backend/zarch/regalloc.py b/rpython/jit/backend/zarch/regalloc.py --- a/rpython/jit/backend/zarch/regalloc.py +++ b/rpython/jit/backend/zarch/regalloc.py @@ -991,10 +991,10 @@ itemsize, ofs, _ = unpack_arraydescr(op.getdescr()) startindex_loc = self.ensure_reg_or_16bit_imm(op.getarg(1)) tempvar = TempInt() - self.rm.temp_boxes.append(tempvar) ofs_loc = self.ensure_reg_or_16bit_imm(ConstInt(ofs)) base_loc, length_loc = self.rm.ensure_even_odd_pair(op.getarg(0), tempvar, bind_first=True, must_exist=False, load_loc_odd=False) + self.rm.temp_boxes.append(tempvar) length_box = op.getarg(2) ll = self.rm.loc(length_box) @@ -1028,17 +1028,20 @@ def _prepare_math_sqrt(self, op): loc = self.ensure_reg(op.getarg(1)) self.free_op_vars() + # can be the same register as loc res = self.fprm.force_allocate_reg(op) return [loc, res] def prepare_cast_int_to_float(self, op): loc1 = self.ensure_reg(op.getarg(0)) + # ok not to use forbidden_vars, parameter is a int box res = self.fprm.force_allocate_reg(op) return [loc1, res] def prepare_cast_float_to_int(self, op): loc1 = self.ensure_reg(op.getarg(0)) self.free_op_vars() + # ok not to use forbidden_vars, parameter is a float box res = self.rm.force_allocate_reg(op) return [loc1, res] From pypy.commits at gmail.com Mon Feb 15 06:26:50 2016 From: pypy.commits at gmail.com (plan_rich) Date: Mon, 15 Feb 2016 03:26:50 -0800 (PST) Subject: [pypy-commit] pypy s390x-backend: stacklet switch removed one 
register move, rescuing f8-f15 Message-ID: <56c1b5fa.c8ac1c0a.f0c4b.2fdb@mx.google.com> Author: Richard Plangger Branch: s390x-backend Changeset: r82258:2d699125ebd7 Date: 2016-02-15 12:26 +0100 http://bitbucket.org/pypy/pypy/changeset/2d699125ebd7/ Log: stacklet switch removed one register move, rescuing f8-f15 diff --git a/rpython/translator/c/src/stacklet/switch_s390x_gcc.h b/rpython/translator/c/src/stacklet/switch_s390x_gcc.h --- a/rpython/translator/c/src/stacklet/switch_s390x_gcc.h +++ b/rpython/translator/c/src/stacklet/switch_s390x_gcc.h @@ -10,6 +10,7 @@ /* The Stackless version by Kristjan Valur Jonsson, ported to s390x by Richard Plangger */ + "lay 15,-64(15)\n" /* additional stack space to store f8-f15 */ "stmg 6,15,48(15)\n" "std 0,128(15)\n" @@ -17,6 +18,15 @@ "std 4,144(15)\n" "std 6,152(15)\n" + "std 8, 160(15)\n" + "std 9, 168(15)\n" + "std 10,176(15)\n" + "std 11,184(15)\n" + "std 12,192(15)\n" + "std 13,200(15)\n" + "std 14,208(15)\n" + "std 15,216(15)\n" + "lgr 10, %[restore_state]\n" /* save 'restore_state' for later */ "lgr 11, %[extra]\n" /* save 'extra' for later */ "lgr 14, %[save_state]\n" /* move 'save_state' into r14 for branching */ @@ -39,8 +49,7 @@ "lay 15, -160(15)\n" /* create temp stack space for callee to use */ - "lgr 14, 10\n" /* load restore_state */ - "basr 14, 14\n" /* call restore_state() */ + "basr 14, 10\n" /* call restore_state() */ "lay 15, 160(15)\n" /* destroy temp stack space */ /* The stack's content is now restored. 
*/ @@ -55,6 +64,15 @@ "ld 4,144(15)\n" "ld 6,152(15)\n" + "ld 8, 160(15)\n" + "ld 9, 168(15)\n" + "ld 10,176(15)\n" + "ld 11,184(15)\n" + "ld 12,192(15)\n" + "ld 13,200(15)\n" + "ld 14,208(15)\n" + "ld 15,216(15)\n" + "lmg 6,15,48(15)\n" : "=r"(result) /* output variable: expected to be r2 */ From pypy.commits at gmail.com Mon Feb 15 06:29:16 2016 From: pypy.commits at gmail.com (plan_rich) Date: Mon, 15 Feb 2016 03:29:16 -0800 (PST) Subject: [pypy-commit] pypy s390x-backend: merged deafult Message-ID: <56c1b68c.89bd1c0a.5e6cc.39c9@mx.google.com> Author: Richard Plangger Branch: s390x-backend Changeset: r82259:39811a0085e6 Date: 2016-02-15 12:28 +0100 http://bitbucket.org/pypy/pypy/changeset/39811a0085e6/ Log: merged deafult diff --git a/lib-python/2.7/distutils/command/build_ext.py b/lib-python/2.7/distutils/command/build_ext.py --- a/lib-python/2.7/distutils/command/build_ext.py +++ b/lib-python/2.7/distutils/command/build_ext.py @@ -188,7 +188,7 @@ # the 'libs' directory is for binary installs - we assume that # must be the *native* platform. But we don't really support # cross-compiling via a binary install anyway, so we let it go. - self.library_dirs.append(os.path.join(sys.exec_prefix, 'include')) + self.library_dirs.append(os.path.join(sys.exec_prefix, 'libs')) if self.debug: self.build_temp = os.path.join(self.build_temp, "Debug") else: diff --git a/lib_pypy/cffi.egg-info/PKG-INFO b/lib_pypy/cffi.egg-info/PKG-INFO --- a/lib_pypy/cffi.egg-info/PKG-INFO +++ b/lib_pypy/cffi.egg-info/PKG-INFO @@ -1,6 +1,6 @@ Metadata-Version: 1.1 Name: cffi -Version: 1.5.1 +Version: 1.5.2 Summary: Foreign Function Interface for Python calling C code. 
Home-page: http://cffi.readthedocs.org Author: Armin Rigo, Maciej Fijalkowski diff --git a/lib_pypy/cffi/__init__.py b/lib_pypy/cffi/__init__.py --- a/lib_pypy/cffi/__init__.py +++ b/lib_pypy/cffi/__init__.py @@ -4,8 +4,8 @@ from .api import FFI, CDefError, FFIError from .ffiplatform import VerificationError, VerificationMissing -__version__ = "1.5.1" -__version_info__ = (1, 5, 1) +__version__ = "1.5.2" +__version_info__ = (1, 5, 2) # The verifier module file names are based on the CRC32 of a string that # contains the following version number. It may be older than __version__ diff --git a/lib_pypy/cffi/_embedding.h b/lib_pypy/cffi/_embedding.h --- a/lib_pypy/cffi/_embedding.h +++ b/lib_pypy/cffi/_embedding.h @@ -233,7 +233,7 @@ f = PySys_GetObject((char *)"stderr"); if (f != NULL && f != Py_None) { PyFile_WriteString("\nFrom: " _CFFI_MODULE_NAME - "\ncompiled with cffi version: 1.5.1" + "\ncompiled with cffi version: 1.5.2" "\n_cffi_backend module: ", f); modules = PyImport_GetModuleDict(); mod = PyDict_GetItemString(modules, "_cffi_backend"); diff --git a/lib_pypy/cffi/api.py b/lib_pypy/cffi/api.py --- a/lib_pypy/cffi/api.py +++ b/lib_pypy/cffi/api.py @@ -1,4 +1,4 @@ -import sys, sysconfig, types +import sys, types from .lock import allocate_lock try: @@ -550,16 +550,29 @@ lst.append(value) # if '__pypy__' in sys.builtin_module_names: - if hasattr(sys, 'prefix'): - import os - ensure('library_dirs', os.path.join(sys.prefix, 'bin')) - pythonlib = "pypy-c" + if sys.platform == "win32": + # we need 'libpypy-c.lib'. Current distributions of + # pypy (>= 4.1) contain it as 'libs/python27.lib'. 
+ pythonlib = "python27" + if hasattr(sys, 'prefix'): + ensure('library_dirs', os.path.join(sys.prefix, 'libs')) + else: + # we need 'libpypy-c.{so,dylib}', which should be by + # default located in 'sys.prefix/bin' + pythonlib = "pypy-c" + if hasattr(sys, 'prefix'): + import os + ensure('library_dirs', os.path.join(sys.prefix, 'bin')) else: if sys.platform == "win32": template = "python%d%d" if hasattr(sys, 'gettotalrefcount'): template += '_d' else: + try: + import sysconfig + except ImportError: # 2.6 + from distutils import sysconfig template = "python%d.%d" if sysconfig.get_config_var('DEBUG_EXT'): template += sysconfig.get_config_var('DEBUG_EXT') diff --git a/lib_pypy/cffi/vengine_cpy.py b/lib_pypy/cffi/vengine_cpy.py --- a/lib_pypy/cffi/vengine_cpy.py +++ b/lib_pypy/cffi/vengine_cpy.py @@ -1,3 +1,6 @@ +# +# DEPRECATED: implementation for ffi.verify() +# import sys, imp from . import model, ffiplatform diff --git a/lib_pypy/cffi/vengine_gen.py b/lib_pypy/cffi/vengine_gen.py --- a/lib_pypy/cffi/vengine_gen.py +++ b/lib_pypy/cffi/vengine_gen.py @@ -1,3 +1,6 @@ +# +# DEPRECATED: implementation for ffi.verify() +# import sys, os import types diff --git a/lib_pypy/cffi/verifier.py b/lib_pypy/cffi/verifier.py --- a/lib_pypy/cffi/verifier.py +++ b/lib_pypy/cffi/verifier.py @@ -1,3 +1,6 @@ +# +# DEPRECATED: implementation for ffi.verify() +# import sys, os, binascii, shutil, io from . import __version_verifier_modules__ from . 
import ffiplatform diff --git a/pypy/config/pypyoption.py b/pypy/config/pypyoption.py --- a/pypy/config/pypyoption.py +++ b/pypy/config/pypyoption.py @@ -36,13 +36,13 @@ "cStringIO", "thread", "itertools", "pyexpat", "_ssl", "cpyext", "array", "binascii", "_multiprocessing", '_warnings', "_collections", "_multibytecodec", "micronumpy", "_continuation", "_cffi_backend", - "_csv", "cppyy", "_pypyjson" + "_csv", "cppyy", "_pypyjson", "_vmprof", ]) -if ((sys.platform.startswith('linux') or sys.platform == 'darwin') - and os.uname()[4] == 'x86_64' and sys.maxint > 2**32): +#if ((sys.platform.startswith('linux') or sys.platform == 'darwin') +# and os.uname()[4] == 'x86_64' and sys.maxint > 2**32): # it's not enough that we get x86_64 - working_modules.add('_vmprof') +# working_modules.add('_vmprof') translation_modules = default_modules.copy() translation_modules.update([ diff --git a/pypy/doc/whatsnew-head.rst b/pypy/doc/whatsnew-head.rst --- a/pypy/doc/whatsnew-head.rst +++ b/pypy/doc/whatsnew-head.rst @@ -7,6 +7,9 @@ Fixed ``_PyLong_FromByteArray()``, which was buggy. +Fixed a crash with stacklets (or greenlets) on non-Linux machines +which showed up if you forget stacklets without resuming them. + .. branch: numpy-1.10 Fix tests to run cleanly with -A and start to fix micronumpy for upstream numpy @@ -38,7 +41,8 @@ .. branch: compress-numbering -Improve the memory signature of numbering instances in the JIT. +Improve the memory signature of numbering instances in the JIT. This should massively +decrease the amount of memory consumed by the JIT, which is significant for most programs. .. branch: fix-trace-too-long-heuristic @@ -144,6 +148,11 @@ Refactor vmprof to work cross-operating-system. +.. branch: seperate-strucmember_h + +Seperate structmember.h from Python.h Also enhance creating api functions +to specify which header file they appear in (previously only pypy_decl.h) + .. 
branch: memop-simplify3 Further simplifying the backend operations malloc_cond_varsize and zero_array. diff --git a/pypy/goal/targetpypystandalone.py b/pypy/goal/targetpypystandalone.py --- a/pypy/goal/targetpypystandalone.py +++ b/pypy/goal/targetpypystandalone.py @@ -239,6 +239,9 @@ raise Exception("Cannot use the --output option with PyPy " "when --shared is on (it is by default). " "See issue #1971.") + if sys.platform == 'win32': + config.translation.libname = '..\\..\\libs\\python27.lib' + thisdir.join('..', '..', 'libs').ensure(dir=1) if config.translation.thread: config.objspace.usemodules.thread = True diff --git a/pypy/interpreter/baseobjspace.py b/pypy/interpreter/baseobjspace.py --- a/pypy/interpreter/baseobjspace.py +++ b/pypy/interpreter/baseobjspace.py @@ -306,7 +306,7 @@ return None -class W_InterpIterable(W_Root): +class InterpIterable(object): def __init__(self, space, w_iterable): self.w_iter = space.iter(w_iterable) self.space = space @@ -745,9 +745,13 @@ return self.int_w(self.hash(w_obj)) def len_w(self, w_obj): - """shotcut for space.int_w(space.len(w_obj))""" + """shortcut for space.int_w(space.len(w_obj))""" return self.int_w(self.len(w_obj)) + def contains_w(self, w_container, w_item): + """shortcut for space.is_true(space.contains(w_container, w_item))""" + return self.is_true(self.contains(w_container, w_item)) + def setitem_str(self, w_obj, key, w_value): return self.setitem(w_obj, self.wrap(key), w_value) @@ -846,7 +850,7 @@ return lst_w[:] # make the resulting list resizable def iteriterable(self, w_iterable): - return W_InterpIterable(self, w_iterable) + return InterpIterable(self, w_iterable) def _unpackiterable_unknown_length(self, w_iterator, w_iterable): """Unpack an iterable of unknown length into an interp-level @@ -1237,7 +1241,7 @@ if not isinstance(statement, PyCode): raise TypeError('space.exec_(): expected a string, code or PyCode object') w_key = self.wrap('__builtins__') - if not self.is_true(self.contains(w_globals, 
w_key)): + if not self.contains_w(w_globals, w_key): self.setitem(w_globals, w_key, self.wrap(self.builtin)) return statement.exec_code(self, w_globals, w_locals) diff --git a/pypy/module/__builtin__/interp_classobj.py b/pypy/module/__builtin__/interp_classobj.py --- a/pypy/module/__builtin__/interp_classobj.py +++ b/pypy/module/__builtin__/interp_classobj.py @@ -20,7 +20,7 @@ if not space.isinstance_w(w_dict, space.w_dict): raise_type_err(space, 'bases', 'tuple', w_bases) - if not space.is_true(space.contains(w_dict, space.wrap("__doc__"))): + if not space.contains_w(w_dict, space.wrap("__doc__")): space.setitem(w_dict, space.wrap("__doc__"), space.w_None) # XXX missing: lengthy and obscure logic about "__module__" diff --git a/pypy/module/_cffi_backend/__init__.py b/pypy/module/_cffi_backend/__init__.py --- a/pypy/module/_cffi_backend/__init__.py +++ b/pypy/module/_cffi_backend/__init__.py @@ -3,7 +3,7 @@ from rpython.rlib import rdynload, clibffi, entrypoint from rpython.rtyper.lltypesystem import rffi -VERSION = "1.5.1" +VERSION = "1.5.2" FFI_DEFAULT_ABI = clibffi.FFI_DEFAULT_ABI try: @@ -69,6 +69,7 @@ def startup(self, space): from pypy.module._cffi_backend import embedding embedding.glob.space = space + embedding.glob.patched_sys = False def get_dict_rtld_constants(): diff --git a/pypy/module/_cffi_backend/embedding.py b/pypy/module/_cffi_backend/embedding.py --- a/pypy/module/_cffi_backend/embedding.py +++ b/pypy/module/_cffi_backend/embedding.py @@ -45,6 +45,26 @@ pass glob = Global() +def patch_sys(space): + # Annoying: CPython would just use the C-level std{in,out,err} as + # configured by the main application, for example in binary mode + # on Windows or with buffering turned off. We can't easily do the + # same. Instead, go for the safest bet (but possibly bad for + # performance) and open sys.std{in,out,err} unbuffered. On + # Windows I guess binary mode is a better default choice. 
+ # + # XXX if needed, we could add support for a flag passed to + # pypy_init_embedded_cffi_module(). + if not glob.patched_sys: + space.appexec([], """(): + import os + sys.stdin = sys.__stdin__ = os.fdopen(0, 'rb', 0) + sys.stdout = sys.__stdout__ = os.fdopen(1, 'wb', 0) + sys.stderr = sys.__stderr__ = os.fdopen(2, 'wb', 0) + """) + glob.patched_sys = True + + def pypy_init_embedded_cffi_module(version, init_struct): # called from __init__.py name = "?" @@ -56,6 +76,7 @@ must_leave = False try: must_leave = space.threadlocals.try_enter_thread(space) + patch_sys(space) load_embedded_cffi_module(space, version, init_struct) res = 0 except OperationError, operr: @@ -84,72 +105,87 @@ return rffi.cast(rffi.INT, res) # ____________________________________________________________ + if os.name == 'nt': - do_startup = r''' -#include -#define WIN32_LEAN_AND_MEAN + + do_includes = r""" +#define _WIN32_WINNT 0x0501 #include -RPY_EXPORTED void rpython_startup_code(void); -RPY_EXPORTED int pypy_setup_home(char *, int); -static unsigned char _cffi_ready = 0; -static const char *volatile _cffi_module_name; +#define CFFI_INIT_HOME_PATH_MAX _MAX_PATH +static void _cffi_init(void); +static void _cffi_init_error(const char *msg, const char *extra); -static void _cffi_init_error(const char *msg, const char *extra) +static int _cffi_init_home(char *output_home_path) { - fprintf(stderr, - "\nPyPy initialization failure when loading module '%s':\n%s%s\n", - _cffi_module_name, msg, extra); -} - -BOOL CALLBACK _cffi_init(PINIT_ONCE InitOnce, PVOID Parameter, PVOID *lpContex) -{ - - HMODULE hModule; - TCHAR home[_MAX_PATH]; - rpython_startup_code(); - RPyGilAllocate(); + HMODULE hModule = 0; + DWORD res; GetModuleHandleEx(GET_MODULE_HANDLE_EX_FLAG_FROM_ADDRESS | GET_MODULE_HANDLE_EX_FLAG_UNCHANGED_REFCOUNT, (LPCTSTR)&_cffi_init, &hModule); + if (hModule == 0 ) { - /* TODO turn the int into a string with FormatMessage */ - - _cffi_init_error("dladdr() failed: ", ""); - return TRUE; + 
_cffi_init_error("GetModuleHandleEx() failed", ""); + return -1; } - GetModuleFileName(hModule, home, _MAX_PATH); - if (pypy_setup_home(home, 1) != 0) { - _cffi_init_error("pypy_setup_home() failed", ""); - return TRUE; + res = GetModuleFileName(hModule, output_home_path, CFFI_INIT_HOME_PATH_MAX); + if (res >= CFFI_INIT_HOME_PATH_MAX) { + return -1; } - _cffi_ready = 1; - fprintf(stderr, "startup succeeded, home %s\n", home); - return TRUE; + return 0; } -RPY_EXPORTED -int pypy_carefully_make_gil(const char *name) +static void _cffi_init_once(void) { - /* For CFFI: this initializes the GIL and loads the home path. - It can be called completely concurrently from unrelated threads. - It assumes that we don't hold the GIL before (if it exists), and we - don't hold it afterwards. - */ - static INIT_ONCE s_init_once; + static LONG volatile lock = 0; + static int _init_called = 0; - _cffi_module_name = name; /* not really thread-safe, but better than - nothing */ - InitOnceExecuteOnce(&s_init_once, _cffi_init, NULL, NULL); - return (int)_cffi_ready - 1; -}''' + while (InterlockedCompareExchange(&lock, 1, 0) != 0) { + SwitchToThread(); /* spin loop */ + } + if (!_init_called) { + _cffi_init(); + _init_called = 1; + } + InterlockedCompareExchange(&lock, 0, 1); +} +""" + else: - do_startup = r""" -#include + + do_includes = r""" #include #include +#define CFFI_INIT_HOME_PATH_MAX PATH_MAX +static void _cffi_init(void); +static void _cffi_init_error(const char *msg, const char *extra); + +static int _cffi_init_home(char *output_home_path) +{ + Dl_info info; + dlerror(); /* reset */ + if (dladdr(&_cffi_init, &info) == 0) { + _cffi_init_error("dladdr() failed: ", dlerror()); + return -1; + } + if (realpath(info.dli_fname, output_home_path) == NULL) { + perror("realpath() failed"); + _cffi_init_error("realpath() failed", ""); + return -1; + } + return 0; +} + +static void _cffi_init_once(void) +{ + static pthread_once_t once_control = PTHREAD_ONCE_INIT; + 
pthread_once(&once_control, _cffi_init); +} +""" + +do_startup = do_includes + r""" RPY_EXPORTED void rpython_startup_code(void); RPY_EXPORTED int pypy_setup_home(char *, int); @@ -165,17 +201,13 @@ static void _cffi_init(void) { - Dl_info info; - char *home; + char home[CFFI_INIT_HOME_PATH_MAX + 1]; rpython_startup_code(); RPyGilAllocate(); - if (dladdr(&_cffi_init, &info) == 0) { - _cffi_init_error("dladdr() failed: ", dlerror()); + if (_cffi_init_home(home) != 0) return; - } - home = realpath(info.dli_fname, NULL); if (pypy_setup_home(home, 1) != 0) { _cffi_init_error("pypy_setup_home() failed", ""); return; @@ -191,11 +223,9 @@ It assumes that we don't hold the GIL before (if it exists), and we don't hold it afterwards. */ - static pthread_once_t once_control = PTHREAD_ONCE_INIT; - _cffi_module_name = name; /* not really thread-safe, but better than nothing */ - pthread_once(&once_control, _cffi_init); + _cffi_init_once(); return (int)_cffi_ready - 1; } """ diff --git a/pypy/module/_cffi_backend/test/_backend_test_c.py b/pypy/module/_cffi_backend/test/_backend_test_c.py --- a/pypy/module/_cffi_backend/test/_backend_test_c.py +++ b/pypy/module/_cffi_backend/test/_backend_test_c.py @@ -1,7 +1,7 @@ # ____________________________________________________________ import sys -assert __version__ == "1.5.1", ("This test_c.py file is for testing a version" +assert __version__ == "1.5.2", ("This test_c.py file is for testing a version" " of cffi that differs from the one that we" " get from 'import _cffi_backend'") if sys.version_info < (3,): diff --git a/pypy/module/_cffi_backend/test/test_ztranslation.py b/pypy/module/_cffi_backend/test/test_ztranslation.py --- a/pypy/module/_cffi_backend/test/test_ztranslation.py +++ b/pypy/module/_cffi_backend/test/test_ztranslation.py @@ -4,15 +4,18 @@ # side-effect: FORMAT_LONGDOUBLE must be built before test_checkmodule() from pypy.module._cffi_backend import misc -from pypy.module._cffi_backend import cffi1_module +from 
pypy.module._cffi_backend import embedding def test_checkmodule(): # prepare_file_argument() is not working without translating the _file # module too def dummy_prepare_file_argument(space, fileobj): - # call load_cffi1_module() too, from a random place like here - cffi1_module.load_cffi1_module(space, "foo", "foo", 42) + # call pypy_init_embedded_cffi_module() from a random place like here + # --- this calls load_cffi1_module(), too + embedding.pypy_init_embedded_cffi_module( + rffi.cast(rffi.INT, embedding.EMBED_VERSION_MIN), + 42) return lltype.nullptr(rffi.CCHARP.TO) old = ctypeptr.prepare_file_argument try: diff --git a/pypy/module/_demo/test/test_import.py b/pypy/module/_demo/test/test_import.py --- a/pypy/module/_demo/test/test_import.py +++ b/pypy/module/_demo/test/test_import.py @@ -12,8 +12,7 @@ w_modules = space.sys.get('modules') assert _demo.Module.demo_events == ['setup'] - assert not space.is_true(space.contains(w_modules, - space.wrap('_demo'))) + assert not space.contains_w(w_modules, space.wrap('_demo')) # first import w_import = space.builtin.get('__import__') diff --git a/pypy/module/_vmprof/interp_vmprof.py b/pypy/module/_vmprof/interp_vmprof.py --- a/pypy/module/_vmprof/interp_vmprof.py +++ b/pypy/module/_vmprof/interp_vmprof.py @@ -60,7 +60,7 @@ Must be smaller than 1.0 """ w_modules = space.sys.get('modules') - if space.is_true(space.contains(w_modules, space.wrap('_continuation'))): + if space.contains_w(w_modules, space.wrap('_continuation')): space.warn(space.wrap("Using _continuation/greenlet/stacklet together " "with vmprof will crash"), space.w_RuntimeWarning) diff --git a/pypy/module/cpyext/api.py b/pypy/module/cpyext/api.py --- a/pypy/module/cpyext/api.py +++ b/pypy/module/cpyext/api.py @@ -59,7 +59,7 @@ class CConfig: _compilation_info_ = ExternalCompilationInfo( include_dirs=include_dirs, - includes=['Python.h', 'stdarg.h'], + includes=['Python.h', 'stdarg.h', 'structmember.h'], compile_extra=['-DPy_BUILD_CORE'], ) @@ -129,6 +129,7 
@@ for name in constant_names: setattr(CConfig_constants, name, rffi_platform.ConstantInteger(name)) udir.join('pypy_decl.h').write("/* Will be filled later */\n") +udir.join('pypy_structmember_decl.h').write("/* Will be filled later */\n") udir.join('pypy_macros.h').write("/* Will be filled later */\n") globals().update(rffi_platform.configure(CConfig_constants)) @@ -147,7 +148,7 @@ # XXX: 20 lines of code to recursively copy a directory, really?? assert dstdir.check(dir=True) headers = include_dir.listdir('*.h') + include_dir.listdir('*.inl') - for name in ("pypy_decl.h", "pypy_macros.h"): + for name in ("pypy_decl.h", "pypy_macros.h", "pypy_structmember_decl.h"): headers.append(udir.join(name)) _copy_header_files(headers, dstdir) @@ -232,7 +233,7 @@ wrapper.c_name = cpyext_namespace.uniquename(self.c_name) return wrapper -def cpython_api(argtypes, restype, error=_NOT_SPECIFIED, external=True, +def cpython_api(argtypes, restype, error=_NOT_SPECIFIED, header='pypy_decl.h', gil=None): """ Declares a function to be exported. @@ -241,8 +242,8 @@ special value 'CANNOT_FAIL' (also when restype is Void) turns an eventual exception into a wrapped SystemError. Unwrapped exceptions also cause a SytemError. - - set `external` to False to get a C function pointer, but not exported by - the API headers. + - `header` is the header file to export the function in, Set to None to get + a C function pointer, but not exported by the API headers. 
- set `gil` to "acquire", "release" or "around" to acquire the GIL, release the GIL, or both """ @@ -263,7 +264,7 @@ def decorate(func): func_name = func.func_name - if external: + if header is not None: c_name = None else: c_name = func_name @@ -271,7 +272,7 @@ c_name=c_name, gil=gil) func.api_func = api_function - if external: + if header is not None: assert func_name not in FUNCTIONS, ( "%s already registered" % func_name) @@ -363,8 +364,9 @@ unwrapper_catch = make_unwrapper(True) unwrapper_raise = make_unwrapper(False) - if external: + if header is not None: FUNCTIONS[func_name] = api_function + FUNCTIONS_BY_HEADER.setdefault(header, {})[func_name] = api_function INTERPLEVEL_API[func_name] = unwrapper_catch # used in tests return unwrapper_raise # used in 'normal' RPython code. return decorate @@ -383,6 +385,7 @@ INTERPLEVEL_API = {} FUNCTIONS = {} +FUNCTIONS_BY_HEADER = {} # These are C symbols which cpyext will export, but which are defined in .c # files somewhere in the implementation of cpyext (rather than being defined in @@ -811,6 +814,7 @@ global_code = '\n'.join(global_objects) prologue = ("#include \n" + "#include \n" "#include \n") code = (prologue + struct_declaration_code + @@ -960,7 +964,8 @@ "NOT_RPYTHON" # implement function callbacks and generate function decls functions = [] - pypy_decls = [] + decls = {} + pypy_decls = decls['pypy_decl.h'] = [] pypy_decls.append("#ifndef _PYPY_PYPY_DECL_H\n") pypy_decls.append("#define _PYPY_PYPY_DECL_H\n") pypy_decls.append("#ifndef PYPY_STANDALONE\n") @@ -973,17 +978,23 @@ for decl in FORWARD_DECLS: pypy_decls.append("%s;" % (decl,)) - for name, func in sorted(FUNCTIONS.iteritems()): - restype, args = c_function_signature(db, func) - pypy_decls.append("PyAPI_FUNC(%s) %s(%s);" % (restype, name, args)) - if api_struct: - callargs = ', '.join('arg%d' % (i,) - for i in range(len(func.argtypes))) - if func.restype is lltype.Void: - body = "{ _pypyAPI.%s(%s); }" % (name, callargs) - else: - body = "{ return 
_pypyAPI.%s(%s); }" % (name, callargs) - functions.append('%s %s(%s)\n%s' % (restype, name, args, body)) + for header_name, header_functions in FUNCTIONS_BY_HEADER.iteritems(): + if header_name not in decls: + header = decls[header_name] = [] + else: + header = decls[header_name] + + for name, func in sorted(header_functions.iteritems()): + restype, args = c_function_signature(db, func) + header.append("PyAPI_FUNC(%s) %s(%s);" % (restype, name, args)) + if api_struct: + callargs = ', '.join('arg%d' % (i,) + for i in range(len(func.argtypes))) + if func.restype is lltype.Void: + body = "{ _pypyAPI.%s(%s); }" % (name, callargs) + else: + body = "{ return _pypyAPI.%s(%s); }" % (name, callargs) + functions.append('%s %s(%s)\n%s' % (restype, name, args, body)) for name in VA_TP_LIST: name_no_star = process_va_name(name) header = ('%s pypy_va_get_%s(va_list* vp)' % @@ -1007,8 +1018,9 @@ pypy_decls.append("#endif /*PYPY_STANDALONE*/\n") pypy_decls.append("#endif /*_PYPY_PYPY_DECL_H*/\n") - pypy_decl_h = udir.join('pypy_decl.h') - pypy_decl_h.write('\n'.join(pypy_decls)) + for header_name, header_decls in decls.iteritems(): + decl_h = udir.join(header_name) + decl_h.write('\n'.join(header_decls)) return functions separate_module_files = [source_dir / "varargwrapper.c", diff --git a/pypy/module/cpyext/bufferobject.py b/pypy/module/cpyext/bufferobject.py --- a/pypy/module/cpyext/bufferobject.py +++ b/pypy/module/cpyext/bufferobject.py @@ -73,7 +73,7 @@ "Don't know how to realize a buffer")) - at cpython_api([PyObject], lltype.Void, external=False) + at cpython_api([PyObject], lltype.Void, header=None) def buffer_dealloc(space, py_obj): py_buf = rffi.cast(PyBufferObject, py_obj) if py_buf.c_b_base: diff --git a/pypy/module/cpyext/frameobject.py b/pypy/module/cpyext/frameobject.py --- a/pypy/module/cpyext/frameobject.py +++ b/pypy/module/cpyext/frameobject.py @@ -39,7 +39,7 @@ py_frame.c_f_locals = make_ref(space, frame.get_w_locals()) rffi.setintfield(py_frame, 'c_f_lineno', 
frame.getorcreatedebug().f_lineno) - at cpython_api([PyObject], lltype.Void, external=False) + at cpython_api([PyObject], lltype.Void, header=None) def frame_dealloc(space, py_obj): py_frame = rffi.cast(PyFrameObject, py_obj) py_code = rffi.cast(PyObject, py_frame.c_f_code) diff --git a/pypy/module/cpyext/funcobject.py b/pypy/module/cpyext/funcobject.py --- a/pypy/module/cpyext/funcobject.py +++ b/pypy/module/cpyext/funcobject.py @@ -56,7 +56,7 @@ assert isinstance(w_obj, Function) py_func.c_func_name = make_ref(space, space.wrap(w_obj.name)) - at cpython_api([PyObject], lltype.Void, external=False) + at cpython_api([PyObject], lltype.Void, header=None) def function_dealloc(space, py_obj): py_func = rffi.cast(PyFunctionObject, py_obj) Py_DecRef(space, py_func.c_func_name) @@ -75,7 +75,7 @@ rffi.setintfield(py_code, 'c_co_flags', co_flags) rffi.setintfield(py_code, 'c_co_argcount', w_obj.co_argcount) - at cpython_api([PyObject], lltype.Void, external=False) + at cpython_api([PyObject], lltype.Void, header=None) def code_dealloc(space, py_obj): py_code = rffi.cast(PyCodeObject, py_obj) Py_DecRef(space, py_code.c_co_name) diff --git a/pypy/module/cpyext/include/Python.h b/pypy/module/cpyext/include/Python.h --- a/pypy/module/cpyext/include/Python.h +++ b/pypy/module/cpyext/include/Python.h @@ -84,6 +84,7 @@ #include "pyconfig.h" #include "object.h" +#include "pymath.h" #include "pyport.h" #include "warnings.h" @@ -115,7 +116,6 @@ #include "compile.h" #include "frameobject.h" #include "eval.h" -#include "pymath.h" #include "pymem.h" #include "pycobject.h" #include "pycapsule.h" @@ -132,9 +132,6 @@ /* Missing definitions */ #include "missing.h" -// XXX This shouldn't be included here -#include "structmember.h" - #include /* Define macros for inline documentation. 
*/ diff --git a/pypy/module/cpyext/include/floatobject.h b/pypy/module/cpyext/include/floatobject.h --- a/pypy/module/cpyext/include/floatobject.h +++ b/pypy/module/cpyext/include/floatobject.h @@ -7,6 +7,18 @@ extern "C" { #endif +#define PyFloat_STR_PRECISION 12 + +#ifdef Py_NAN +#define Py_RETURN_NAN return PyFloat_FromDouble(Py_NAN) +#endif + +#define Py_RETURN_INF(sign) do \ + if (copysign(1., sign) == 1.) { \ + return PyFloat_FromDouble(Py_HUGE_VAL); \ + } else { \ + return PyFloat_FromDouble(-Py_HUGE_VAL); \ + } while(0) #ifdef __cplusplus } diff --git a/pypy/module/cpyext/include/pymath.h b/pypy/module/cpyext/include/pymath.h --- a/pypy/module/cpyext/include/pymath.h +++ b/pypy/module/cpyext/include/pymath.h @@ -17,4 +17,35 @@ #define Py_HUGE_VAL HUGE_VAL #endif +/* Py_NAN + * A value that evaluates to a NaN. On IEEE 754 platforms INF*0 or + * INF/INF works. Define Py_NO_NAN in pyconfig.h if your platform + * doesn't support NaNs. + */ +#if !defined(Py_NAN) && !defined(Py_NO_NAN) +#if !defined(__INTEL_COMPILER) + #define Py_NAN (Py_HUGE_VAL * 0.) 
+#else /* __INTEL_COMPILER */ + #if defined(ICC_NAN_STRICT) + #pragma float_control(push) + #pragma float_control(precise, on) + #pragma float_control(except, on) + #if defined(_MSC_VER) + __declspec(noinline) + #else /* Linux */ + __attribute__((noinline)) + #endif /* _MSC_VER */ + static double __icc_nan() + { + return sqrt(-1.0); + } + #pragma float_control (pop) + #define Py_NAN __icc_nan() + #else /* ICC_NAN_RELAXED as default for Intel Compiler */ + static union { unsigned char buf[8]; double __icc_nan; } __nan_store = {0,0,0,0,0,0,0xf8,0x7f}; + #define Py_NAN (__nan_store.__icc_nan) + #endif /* ICC_NAN_STRICT */ +#endif /* __INTEL_COMPILER */ +#endif + #endif /* Py_PYMATH_H */ diff --git a/pypy/module/cpyext/include/structmember.h b/pypy/module/cpyext/include/structmember.h --- a/pypy/module/cpyext/include/structmember.h +++ b/pypy/module/cpyext/include/structmember.h @@ -4,54 +4,85 @@ extern "C" { #endif + +/* Interface to map C struct members to Python object attributes */ + #include /* For offsetof */ + +/* The offsetof() macro calculates the offset of a structure member + in its structure. Unfortunately this cannot be written down + portably, hence it is provided by a Standard C header file. + For pre-Standard C compilers, here is a version that usually works + (but watch out!): */ + #ifndef offsetof #define offsetof(type, member) ( (int) & ((type*)0) -> member ) #endif +/* An array of memberlist structures defines the name, type and offset + of selected members of a C structure. These can be read by + PyMember_Get() and set by PyMember_Set() (except if their READONLY flag + is set). The array must be terminated with an entry whose name + pointer is NULL. 
*/ + + typedef struct PyMemberDef { - /* Current version, use this */ - char *name; - int type; - Py_ssize_t offset; - int flags; - char *doc; + /* Current version, use this */ + char *name; + int type; + Py_ssize_t offset; + int flags; + char *doc; } PyMemberDef; +/* Types */ +#define T_SHORT 0 +#define T_INT 1 +#define T_LONG 2 +#define T_FLOAT 3 +#define T_DOUBLE 4 +#define T_STRING 5 +#define T_OBJECT 6 +/* XXX the ordering here is weird for binary compatibility */ +#define T_CHAR 7 /* 1-character string */ +#define T_BYTE 8 /* 8-bit signed int */ +/* unsigned variants: */ +#define T_UBYTE 9 +#define T_USHORT 10 +#define T_UINT 11 +#define T_ULONG 12 -/* Types. These constants are also in structmemberdefs.py. */ -#define T_SHORT 0 -#define T_INT 1 -#define T_LONG 2 -#define T_FLOAT 3 -#define T_DOUBLE 4 -#define T_STRING 5 -#define T_OBJECT 6 -#define T_CHAR 7 /* 1-character string */ -#define T_BYTE 8 /* 8-bit signed int */ -#define T_UBYTE 9 -#define T_USHORT 10 -#define T_UINT 11 -#define T_ULONG 12 -#define T_STRING_INPLACE 13 /* Strings contained in the structure */ -#define T_BOOL 14 -#define T_OBJECT_EX 16 /* Like T_OBJECT, but raises AttributeError - when the value is NULL, instead of - converting to None. */ -#define T_LONGLONG 17 -#define T_ULONGLONG 18 -#define T_PYSSIZET 19 +/* Added by Jack: strings contained in the structure */ +#define T_STRING_INPLACE 13 + +/* Added by Lillo: bools contained in the structure (assumed char) */ +#define T_BOOL 14 + +#define T_OBJECT_EX 16 /* Like T_OBJECT, but raises AttributeError + when the value is NULL, instead of + converting to None. */ +#ifdef HAVE_LONG_LONG +#define T_LONGLONG 17 +#define T_ULONGLONG 18 +#endif /* HAVE_LONG_LONG */ + +#define T_PYSSIZET 19 /* Py_ssize_t */ /* Flags. These constants are also in structmemberdefs.py. 
*/ -#define READONLY 1 -#define RO READONLY /* Shorthand */ +#define READONLY 1 +#define RO READONLY /* Shorthand */ #define READ_RESTRICTED 2 #define PY_WRITE_RESTRICTED 4 -#define RESTRICTED (READ_RESTRICTED | PY_WRITE_RESTRICTED) +#define RESTRICTED (READ_RESTRICTED | PY_WRITE_RESTRICTED) + + +/* API functions. */ +#include "pypy_structmember_decl.h" #ifdef __cplusplus } #endif #endif /* !Py_STRUCTMEMBER_H */ + diff --git a/pypy/module/cpyext/methodobject.py b/pypy/module/cpyext/methodobject.py --- a/pypy/module/cpyext/methodobject.py +++ b/pypy/module/cpyext/methodobject.py @@ -50,7 +50,7 @@ py_func.c_m_self = make_ref(space, w_obj.w_self) py_func.c_m_module = make_ref(space, w_obj.w_module) - at cpython_api([PyObject], lltype.Void, external=False) + at cpython_api([PyObject], lltype.Void, header=None) def cfunction_dealloc(space, py_obj): py_func = rffi.cast(PyCFunctionObject, py_obj) Py_DecRef(space, py_func.c_m_self) diff --git a/pypy/module/cpyext/pyobject.py b/pypy/module/cpyext/pyobject.py --- a/pypy/module/cpyext/pyobject.py +++ b/pypy/module/cpyext/pyobject.py @@ -70,7 +70,7 @@ alloc : allocate and basic initialization of a raw PyObject attach : Function called to tie a raw structure to a pypy object realize : Function called to create a pypy object from a raw struct - dealloc : a cpython_api(external=False), similar to PyObject_dealloc + dealloc : a cpython_api(header=None), similar to PyObject_dealloc """ tp_basestruct = kw.pop('basestruct', PyObject.TO) diff --git a/pypy/module/cpyext/pytraceback.py b/pypy/module/cpyext/pytraceback.py --- a/pypy/module/cpyext/pytraceback.py +++ b/pypy/module/cpyext/pytraceback.py @@ -41,7 +41,7 @@ rffi.setintfield(py_traceback, 'c_tb_lasti', traceback.lasti) rffi.setintfield(py_traceback, 'c_tb_lineno',traceback.get_lineno()) - at cpython_api([PyObject], lltype.Void, external=False) + at cpython_api([PyObject], lltype.Void, header=None) def traceback_dealloc(space, py_obj): py_traceback = rffi.cast(PyTracebackObject, 
py_obj) Py_DecRef(space, rffi.cast(PyObject, py_traceback.c_tb_next)) diff --git a/pypy/module/cpyext/sliceobject.py b/pypy/module/cpyext/sliceobject.py --- a/pypy/module/cpyext/sliceobject.py +++ b/pypy/module/cpyext/sliceobject.py @@ -36,7 +36,7 @@ py_slice.c_stop = make_ref(space, w_obj.w_stop) py_slice.c_step = make_ref(space, w_obj.w_step) - at cpython_api([PyObject], lltype.Void, external=False) + at cpython_api([PyObject], lltype.Void, header=None) def slice_dealloc(space, py_obj): """Frees allocated PyStringObject resources. """ diff --git a/pypy/module/cpyext/slotdefs.py b/pypy/module/cpyext/slotdefs.py --- a/pypy/module/cpyext/slotdefs.py +++ b/pypy/module/cpyext/slotdefs.py @@ -309,7 +309,7 @@ return space.wrap(generic_cpy_call(space, func_target, w_self, w_other)) - at cpython_api([PyTypeObjectPtr, PyObject, PyObject], PyObject, external=False) + at cpython_api([PyTypeObjectPtr, PyObject, PyObject], PyObject, header=None) def slot_tp_new(space, type, w_args, w_kwds): from pypy.module.cpyext.tupleobject import PyTuple_Check pyo = rffi.cast(PyObject, type) @@ -320,30 +320,30 @@ w_args_new = space.newtuple(args_w) return space.call(w_func, w_args_new, w_kwds) - at cpython_api([PyObject, PyObject, PyObject], rffi.INT_real, error=-1, external=False) + at cpython_api([PyObject, PyObject, PyObject], rffi.INT_real, error=-1, header=None) def slot_tp_init(space, w_self, w_args, w_kwds): w_descr = space.lookup(w_self, '__init__') args = Arguments.frompacked(space, w_args, w_kwds) space.get_and_call_args(w_descr, w_self, args) return 0 - at cpython_api([PyObject, PyObject, PyObject], PyObject, external=False) + at cpython_api([PyObject, PyObject, PyObject], PyObject, header=None) def slot_tp_call(space, w_self, w_args, w_kwds): return space.call(w_self, w_args, w_kwds) - at cpython_api([PyObject], PyObject, external=False) + at cpython_api([PyObject], PyObject, header=None) def slot_tp_str(space, w_self): return space.str(w_self) - at cpython_api([PyObject], 
PyObject, external=False) + at cpython_api([PyObject], PyObject, header=None) def slot_nb_int(space, w_self): return space.int(w_self) - at cpython_api([PyObject], PyObject, external=False) + at cpython_api([PyObject], PyObject, header=None) def slot_tp_iter(space, w_self): return space.iter(w_self) - at cpython_api([PyObject], PyObject, external=False) + at cpython_api([PyObject], PyObject, header=None) def slot_tp_iternext(space, w_self): return space.next(w_self) @@ -371,7 +371,7 @@ return @cpython_api([PyObject, PyObject, PyObject], rffi.INT_real, - error=-1, external=True) # XXX should not be exported + error=-1) # XXX should be header=None @func_renamer("cpyext_tp_setattro_%s" % (typedef.name,)) def slot_tp_setattro(space, w_self, w_name, w_value): if w_value is not None: @@ -385,8 +385,7 @@ if getattr_fn is None: return - @cpython_api([PyObject, PyObject], PyObject, - external=True) + @cpython_api([PyObject, PyObject], PyObject) @func_renamer("cpyext_tp_getattro_%s" % (typedef.name,)) def slot_tp_getattro(space, w_self, w_name): return space.call_function(getattr_fn, w_self, w_name) diff --git a/pypy/module/cpyext/stringobject.py b/pypy/module/cpyext/stringobject.py --- a/pypy/module/cpyext/stringobject.py +++ b/pypy/module/cpyext/stringobject.py @@ -103,7 +103,7 @@ track_reference(space, py_obj, w_obj) return w_obj - at cpython_api([PyObject], lltype.Void, external=False) + at cpython_api([PyObject], lltype.Void, header=None) def string_dealloc(space, py_obj): """Frees allocated PyStringObject resources. 
""" diff --git a/pypy/module/cpyext/structmember.py b/pypy/module/cpyext/structmember.py --- a/pypy/module/cpyext/structmember.py +++ b/pypy/module/cpyext/structmember.py @@ -31,8 +31,10 @@ (T_PYSSIZET, rffi.SSIZE_T, PyLong_AsSsize_t), ]) +_HEADER = 'pypy_structmember_decl.h' - at cpython_api([PyObject, lltype.Ptr(PyMemberDef)], PyObject) + + at cpython_api([PyObject, lltype.Ptr(PyMemberDef)], PyObject, header=_HEADER) def PyMember_GetOne(space, obj, w_member): addr = rffi.cast(ADDR, obj) addr += w_member.c_offset @@ -83,7 +85,8 @@ return w_result - at cpython_api([PyObject, lltype.Ptr(PyMemberDef), PyObject], rffi.INT_real, error=-1) + at cpython_api([PyObject, lltype.Ptr(PyMemberDef), PyObject], rffi.INT_real, + error=-1, header=_HEADER) def PyMember_SetOne(space, obj, w_member, w_value): addr = rffi.cast(ADDR, obj) addr += w_member.c_offset diff --git a/pypy/module/cpyext/test/test_cpyext.py b/pypy/module/cpyext/test/test_cpyext.py --- a/pypy/module/cpyext/test/test_cpyext.py +++ b/pypy/module/cpyext/test/test_cpyext.py @@ -863,3 +863,15 @@ os.unlink('_imported_already') except OSError: pass + + def test_no_structmember(self): + """structmember.h should not be included by default.""" + mod = self.import_extension('foo', [ + ('bar', 'METH_NOARGS', + ''' + /* reuse a name that is #defined in structmember.h */ + int RO; + Py_RETURN_NONE; + ''' + ), + ]) diff --git a/pypy/module/cpyext/test/test_dictobject.py b/pypy/module/cpyext/test/test_dictobject.py --- a/pypy/module/cpyext/test/test_dictobject.py +++ b/pypy/module/cpyext/test/test_dictobject.py @@ -146,7 +146,7 @@ def test_dictproxy(self, space, api): w_dict = space.sys.get('modules') w_proxy = api.PyDictProxy_New(w_dict) - assert space.is_true(space.contains(w_proxy, space.wrap('sys'))) + assert space.contains_w(w_proxy, space.wrap('sys')) raises(OperationError, space.setitem, w_proxy, space.wrap('sys'), space.w_None) raises(OperationError, space.delitem, diff --git 
a/pypy/module/cpyext/test/test_floatobject.py b/pypy/module/cpyext/test/test_floatobject.py --- a/pypy/module/cpyext/test/test_floatobject.py +++ b/pypy/module/cpyext/test/test_floatobject.py @@ -45,3 +45,35 @@ ]) assert module.from_string() == 1234.56 assert type(module.from_string()) is float + +class AppTestFloatMacros(AppTestCpythonExtensionBase): + def test_return_nan(self): + import math + + module = self.import_extension('foo', [ + ("return_nan", "METH_NOARGS", + "Py_RETURN_NAN;"), + ]) + assert math.isnan(module.return_nan()) + + def test_return_inf(self): + import math + + module = self.import_extension('foo', [ + ("return_inf", "METH_NOARGS", + "Py_RETURN_INF(10);"), + ]) + inf = module.return_inf() + assert inf > 0 + assert math.isinf(inf) + + def test_return_inf_negative(self): + import math + + module = self.import_extension('foo', [ + ("return_neginf", "METH_NOARGS", + "Py_RETURN_INF(-10);"), + ]) + neginf = module.return_neginf() + assert neginf < 0 + assert math.isinf(neginf) diff --git a/pypy/module/cpyext/test/test_import.py b/pypy/module/cpyext/test/test_import.py --- a/pypy/module/cpyext/test/test_import.py +++ b/pypy/module/cpyext/test/test_import.py @@ -21,7 +21,7 @@ def test_getmoduledict(self, space, api): testmod = "_functools" w_pre_dict = api.PyImport_GetModuleDict() - assert not space.is_true(space.contains(w_pre_dict, space.wrap(testmod))) + assert not space.contains_w(w_pre_dict, space.wrap(testmod)) with rffi.scoped_str2charp(testmod) as modname: w_module = api.PyImport_ImportModule(modname) @@ -29,7 +29,7 @@ assert w_module w_dict = api.PyImport_GetModuleDict() - assert space.is_true(space.contains(w_dict, space.wrap(testmod))) + assert space.contains_w(w_dict, space.wrap(testmod)) def test_reload(self, space, api): stat = api.PyImport_Import(space.wrap("stat")) diff --git a/pypy/module/cpyext/test/test_intobject.py b/pypy/module/cpyext/test/test_intobject.py --- a/pypy/module/cpyext/test/test_intobject.py +++ 
b/pypy/module/cpyext/test/test_intobject.py @@ -99,6 +99,7 @@ """), ], prologue=""" + #include "structmember.h" typedef struct { PyObject_HEAD diff --git a/pypy/module/cpyext/test/test_object.py b/pypy/module/cpyext/test/test_object.py --- a/pypy/module/cpyext/test/test_object.py +++ b/pypy/module/cpyext/test/test_object.py @@ -202,7 +202,7 @@ def test_dir(self, space, api): w_dir = api.PyObject_Dir(space.sys) assert space.isinstance_w(w_dir, space.w_list) - assert space.is_true(space.contains(w_dir, space.wrap('modules'))) + assert space.contains_w(w_dir, space.wrap('modules')) class AppTestObject(AppTestCpythonExtensionBase): def setup_class(cls): diff --git a/pypy/module/cpyext/test/test_translate.py b/pypy/module/cpyext/test/test_translate.py --- a/pypy/module/cpyext/test/test_translate.py +++ b/pypy/module/cpyext/test/test_translate.py @@ -19,7 +19,7 @@ @specialize.memo() def get_tp_function(space, typedef): - @cpython_api([], lltype.Signed, error=-1, external=False) + @cpython_api([], lltype.Signed, error=-1, header=None) def slot_tp_function(space): return typedef.value diff --git a/pypy/module/cpyext/typeobject.py b/pypy/module/cpyext/typeobject.py --- a/pypy/module/cpyext/typeobject.py +++ b/pypy/module/cpyext/typeobject.py @@ -183,7 +183,7 @@ if pto.c_tp_new: add_tp_new_wrapper(space, dict_w, pto) - at cpython_api([PyObject, PyObject, PyObject], PyObject, external=False) + at cpython_api([PyObject, PyObject, PyObject], PyObject, header=None) def tp_new_wrapper(space, self, w_args, w_kwds): tp_new = rffi.cast(PyTypeObjectPtr, self).c_tp_new @@ -311,7 +311,7 @@ dealloc=type_dealloc) - at cpython_api([PyObject], lltype.Void, external=False) + at cpython_api([PyObject], lltype.Void, header=None) def subtype_dealloc(space, obj): pto = obj.c_ob_type base = pto @@ -327,7 +327,7 @@ # hopefully this does not clash with the memory model assumed in # extension modules - at cpython_api([PyObject, Py_ssize_tP], lltype.Signed, external=False, + at 
cpython_api([PyObject, Py_ssize_tP], lltype.Signed, header=None, error=CANNOT_FAIL) def str_segcount(space, w_obj, ref): if ref: @@ -335,7 +335,7 @@ return 1 @cpython_api([PyObject, Py_ssize_t, rffi.VOIDPP], lltype.Signed, - external=False, error=-1) + header=None, error=-1) def str_getreadbuffer(space, w_str, segment, ref): from pypy.module.cpyext.stringobject import PyString_AsString if segment != 0: @@ -348,7 +348,7 @@ return space.len_w(w_str) @cpython_api([PyObject, Py_ssize_t, rffi.CCHARPP], lltype.Signed, - external=False, error=-1) + header=None, error=-1) def str_getcharbuffer(space, w_str, segment, ref): from pypy.module.cpyext.stringobject import PyString_AsString if segment != 0: @@ -361,7 +361,7 @@ return space.len_w(w_str) @cpython_api([PyObject, Py_ssize_t, rffi.VOIDPP], lltype.Signed, - external=False, error=-1) + header=None, error=-1) def buf_getreadbuffer(space, pyref, segment, ref): from pypy.module.cpyext.bufferobject import PyBufferObject if segment != 0: @@ -393,7 +393,7 @@ buf_getreadbuffer.api_func.get_wrapper(space)) pto.c_tp_as_buffer = c_buf - at cpython_api([PyObject], lltype.Void, external=False) + at cpython_api([PyObject], lltype.Void, header=None) def type_dealloc(space, obj): from pypy.module.cpyext.object import PyObject_dealloc obj_pto = rffi.cast(PyTypeObjectPtr, obj) diff --git a/pypy/module/cpyext/unicodeobject.py b/pypy/module/cpyext/unicodeobject.py --- a/pypy/module/cpyext/unicodeobject.py +++ b/pypy/module/cpyext/unicodeobject.py @@ -75,7 +75,7 @@ track_reference(space, py_obj, w_obj) return w_obj - at cpython_api([PyObject], lltype.Void, external=False) + at cpython_api([PyObject], lltype.Void, header=None) def unicode_dealloc(space, py_obj): py_unicode = rffi.cast(PyUnicodeObject, py_obj) if py_unicode.c_buffer: diff --git a/pypy/module/sys/interp_encoding.py b/pypy/module/sys/interp_encoding.py --- a/pypy/module/sys/interp_encoding.py +++ b/pypy/module/sys/interp_encoding.py @@ -34,11 +34,15 @@ elif sys.platform == 
"darwin": base_encoding = "utf-8" else: - base_encoding = None + # In CPython, the default base encoding is NULL. This is paired with a + # comment that says "If non-NULL, this is different than the default + # encoding for strings". Therefore, the default filesystem encoding is the + # default encoding for strings, which is ASCII. + base_encoding = "ascii" def _getfilesystemencoding(space): encoding = base_encoding - if rlocale.HAVE_LANGINFO and rlocale.CODESET: + if rlocale.HAVE_LANGINFO: try: oldlocale = rlocale.setlocale(rlocale.LC_CTYPE, None) rlocale.setlocale(rlocale.LC_CTYPE, "") diff --git a/pypy/module/test_lib_pypy/cffi_tests/cffi0/test_zintegration.py b/pypy/module/test_lib_pypy/cffi_tests/cffi0/test_zintegration.py --- a/pypy/module/test_lib_pypy/cffi_tests/cffi0/test_zintegration.py +++ b/pypy/module/test_lib_pypy/cffi_tests/cffi0/test_zintegration.py @@ -5,6 +5,9 @@ if sys.platform == 'win32': py.test.skip('snippets do not run on win32') +if sys.version_info < (2, 7): + py.test.skip('fails e.g. 
on a Debian/Ubuntu which patches virtualenv' + ' in a non-2.6-friendly way') def create_venv(name): tmpdir = udir.join(name) diff --git a/pypy/module/test_lib_pypy/cffi_tests/embedding/test_basic.py b/pypy/module/test_lib_pypy/cffi_tests/embedding/test_basic.py --- a/pypy/module/test_lib_pypy/cffi_tests/embedding/test_basic.py +++ b/pypy/module/test_lib_pypy/cffi_tests/embedding/test_basic.py @@ -101,6 +101,7 @@ c = distutils.ccompiler.new_compiler() print('compiling %s with %r' % (name, modules)) extra_preargs = [] + debug = True if sys.platform == 'win32': libfiles = [] for m in modules: @@ -109,9 +110,12 @@ libfiles.append('Release\\%s.lib' % m[:-4]) modules = libfiles extra_preargs.append('/MANIFEST') + debug = False # you need to install extra stuff + # for this to work elif threads: extra_preargs.append('-pthread') - objects = c.compile([filename], macros=sorted(defines.items()), debug=True) + objects = c.compile([filename], macros=sorted(defines.items()), + debug=debug) c.link_executable(objects + modules, name, extra_preargs=extra_preargs) finally: os.chdir(curdir) @@ -119,12 +123,18 @@ def execute(self, name): path = self.get_path() env_extra = {'PYTHONPATH': prefix_pythonpath()} - libpath = os.environ.get('LD_LIBRARY_PATH') - if libpath: - libpath = path + ':' + libpath + if sys.platform == 'win32': + _path = os.environ.get('PATH') + # for libpypy-c.dll or Python27.dll + _path = os.path.split(sys.executable)[0] + ';' + _path + env_extra['PATH'] = _path else: - libpath = path - env_extra['LD_LIBRARY_PATH'] = libpath + libpath = os.environ.get('LD_LIBRARY_PATH') + if libpath: + libpath = path + ':' + libpath + else: + libpath = path + env_extra['LD_LIBRARY_PATH'] = libpath print('running %r in %r' % (name, path)) executable_name = name if sys.platform == 'win32': diff --git a/pypy/objspace/fake/objspace.py b/pypy/objspace/fake/objspace.py --- a/pypy/objspace/fake/objspace.py +++ b/pypy/objspace/fake/objspace.py @@ -397,9 +397,14 @@ space.wrap(value) class 
FakeCompiler(object): - pass + def compile(self, code, name, mode, flags): + return FakePyCode() FakeObjSpace.default_compiler = FakeCompiler() +class FakePyCode(W_Root): + def exec_code(self, space, w_globals, w_locals): + return W_Root() + class FakeModule(W_Root): def __init__(self): diff --git a/pypy/objspace/std/dictmultiobject.py b/pypy/objspace/std/dictmultiobject.py --- a/pypy/objspace/std/dictmultiobject.py +++ b/pypy/objspace/std/dictmultiobject.py @@ -1419,9 +1419,8 @@ return space.len(self.w_dict) def _all_contained_in(space, w_dictview, w_other): - w_iter = space.iter(w_dictview) - for w_item in space.iteriterable(w_iter): - if not space.is_true(space.contains(w_other, w_item)): + for w_item in space.iteriterable(w_dictview): + if not space.contains_w(w_other, w_item): return space.w_False return space.w_True diff --git a/pypy/tool/release/package.py b/pypy/tool/release/package.py --- a/pypy/tool/release/package.py +++ b/pypy/tool/release/package.py @@ -108,13 +108,8 @@ # builddir = py.path.local(options.builddir) pypydir = builddir.ensure(name, dir=True) + includedir = basedir.join('include') - # Recursively copy all headers, shutil has only ignore - # so we do a double-negative to include what we want - def copyonly(dirpath, contents): - return set(contents) - set( # XXX function not used? 
- shutil.ignore_patterns('*.h', '*.incl')(dirpath, contents), - ) shutil.copytree(str(includedir), str(pypydir.join('include'))) pypydir.ensure('include', dir=True) @@ -139,22 +134,27 @@ continue print "Picking %s" % p binaries.append((p, p.basename)) - importlib_name = 'python27.lib' - if pypy_c.dirpath().join(importlib_name).check(): - shutil.copyfile(str(pypy_c.dirpath().join(importlib_name)), - str(pypydir.join('include/python27.lib'))) - print "Picking %s as %s" % (pypy_c.dirpath().join(importlib_name), - pypydir.join('include/python27.lib')) + libsdir = basedir.join('libs') + if libsdir.exists(): + print 'Picking %s (and contents)' % libsdir + shutil.copytree(str(libsdir), str(pypydir.join('libs'))) else: - pass - # XXX users will complain that they cannot compile cpyext - # modules for windows, has the lib moved or are there no - # exported functions in the dll so no import library is created? + print '"libs" dir with import library not found.' + print 'You have to create %r' % (str(libsdir),) + print 'and copy libpypy-c.lib in there, renamed to python27.lib' + # XXX users will complain that they cannot compile capi (cpyext) + # modules for windows, also embedding pypy (i.e. in cffi) + # will fail. + # Has the lib moved, was translation not 'shared', or are + # there no exported functions in the dll so no import + # library was created? 
if not options.no_tk: try: p = pypy_c.dirpath().join('tcl85.dll') if not p.check(): p = py.path.local.sysfind('tcl85.dll') + if p is None: + raise WindowsError("tcl85.dll not found") tktcldir = p.dirpath().join('..').join('lib') shutil.copytree(str(tktcldir), str(pypydir.join('tcl'))) except WindowsError: diff --git a/rpython/config/translationoption.py b/rpython/config/translationoption.py --- a/rpython/config/translationoption.py +++ b/rpython/config/translationoption.py @@ -192,6 +192,8 @@ "If true, makes an lldebug0 build", default=False, cmdline="--lldebug0"), StrOption("icon", "Path to the (Windows) icon to use for the executable"), + StrOption("libname", + "Windows: name and possibly location of the lib file to create"), OptionDescription("backendopt", "Backend Optimization Options", [ # control inlining diff --git a/rpython/doc/rlib.rst b/rpython/doc/rlib.rst --- a/rpython/doc/rlib.rst +++ b/rpython/doc/rlib.rst @@ -52,7 +52,7 @@ backend emits code, the function is called to determine the value. ``CDefinedIntSymbolic``: - Instances of ``ComputedIntSymbolic`` are also treated like integers of + Instances of ``CDefinedIntSymbolic`` are also treated like integers of unknown value by the annotator. When C code is emitted they will be represented by the attribute ``expr`` of the symbolic (which is also the first argument of the constructor). 
diff --git a/rpython/jit/backend/llsupport/test/zrpy_vmprof_test.py b/rpython/jit/backend/llsupport/test/zrpy_vmprof_test.py --- a/rpython/jit/backend/llsupport/test/zrpy_vmprof_test.py +++ b/rpython/jit/backend/llsupport/test/zrpy_vmprof_test.py @@ -3,16 +3,13 @@ from rpython.jit.backend.test.support import CCompiledMixin from rpython.rlib.jit import JitDriver from rpython.tool.udir import udir +from rpython.rlib import rthread from rpython.translator.translator import TranslationContext from rpython.jit.backend.detect_cpu import getcpuclass class CompiledVmprofTest(CCompiledMixin): CPUClass = getcpuclass() - def setup(self): - if self.CPUClass.backend_name != 'x86_64': - py.test.skip("vmprof only supports x86-64 CPUs at the moment") - def _get_TranslationContext(self): t = TranslationContext() t.config.translation.gc = 'incminimark' @@ -62,6 +59,7 @@ tmpfilename = str(udir.join('test_rvmprof')) def f(num): + rthread.get_ident() # register TLOFS_thread_ident code = MyCode("py:x:foo:3") rvmprof.register_code(code, get_name) fd = os.open(tmpfilename, os.O_WRONLY | os.O_CREAT, 0666) diff --git a/rpython/rlib/_stacklet_shadowstack.py b/rpython/rlib/_stacklet_shadowstack.py --- a/rpython/rlib/_stacklet_shadowstack.py +++ b/rpython/rlib/_stacklet_shadowstack.py @@ -30,6 +30,11 @@ mixlevelannotator.finish() lltype.attachRuntimeTypeInfo(STACKLET, destrptr=destrptr) +# Note: it's important that this is a light finalizer, otherwise +# the GC will call it but still expect the object to stay around for +# a while---and it can't stay around, because s_sscopy points to +# freed nonsense and customtrace() will crash + at rgc.must_be_light_finalizer def stacklet_destructor(stacklet): sscopy = stacklet.s_sscopy if sscopy: diff --git a/rpython/rlib/rvmprof/cintf.py b/rpython/rlib/rvmprof/cintf.py --- a/rpython/rlib/rvmprof/cintf.py +++ b/rpython/rlib/rvmprof/cintf.py @@ -33,11 +33,11 @@ if detect_cpu.autodetect().startswith(detect_cpu.MODEL_S390_64): raise 
VMProfPlatformUnsupported("rvmprof not supported on" " s390x CPUs for now") + compile_extra = ['-DRPYTHON_LL2CTYPES'] platform.verify_eci(ExternalCompilationInfo( - compile_extra=['-DRPYTHON_LL2CTYPES'], + compile_extra=compile_extra, **eci_kwds)) - eci = global_eci vmprof_init = rffi.llexternal("vmprof_init", [rffi.INT, rffi.DOUBLE, rffi.CCHARP], diff --git a/rpython/rlib/rvmprof/src/rvmprof.c b/rpython/rlib/rvmprof/src/rvmprof.c --- a/rpython/rlib/rvmprof/src/rvmprof.c +++ b/rpython/rlib/rvmprof/src/rvmprof.c @@ -1,23 +1,21 @@ #define _GNU_SOURCE 1 - #ifdef RPYTHON_LL2CTYPES /* only for testing: ll2ctypes sets RPY_EXTERN from the command-line */ -# ifndef RPY_EXTERN -# define RPY_EXTERN RPY_EXPORTED -# endif -# define RPY_EXPORTED extern __attribute__((visibility("default"))) -# define VMPROF_ADDR_OF_TRAMPOLINE(addr) 0 +#ifndef RPY_EXTERN +#define RPY_EXTERN RPY_EXPORTED +#endif +#ifdef _WIN32 +#define RPY_EXPORTED __declspec(dllexport) +#else +#define RPY_EXPORTED extern __attribute__((visibility("default"))) +#endif #else - # include "common_header.h" # include "structdef.h" # include "src/threadlocal.h" # include "rvmprof.h" -/*# ifndef VMPROF_ADDR_OF_TRAMPOLINE -# error "RPython program using rvmprof, but not calling vmprof_execute_code()" -# endif*/ #endif diff --git a/rpython/rlib/rvmprof/src/vmprof_common.h b/rpython/rlib/rvmprof/src/vmprof_common.h --- a/rpython/rlib/rvmprof/src/vmprof_common.h +++ b/rpython/rlib/rvmprof/src/vmprof_common.h @@ -7,9 +7,6 @@ static long profile_interval_usec = 0; static int opened_profile(char *interp_name); -#define MAX_STACK_DEPTH \ - ((SINGLE_BUF_SIZE - sizeof(struct prof_stacktrace_s)) / sizeof(void *)) - #define MARKER_STACKTRACE '\x01' #define MARKER_VIRTUAL_IP '\x02' #define MARKER_TRAILER '\x03' @@ -20,6 +17,9 @@ #define VERSION_THREAD_ID '\x01' #define VERSION_TAG '\x02' +#define MAX_STACK_DEPTH \ + ((SINGLE_BUF_SIZE - sizeof(struct prof_stacktrace_s)) / sizeof(void *)) + typedef struct prof_stacktrace_s { char 
padding[sizeof(long) - 1]; char marker; @@ -71,6 +71,43 @@ return _write_all((char*)&header, 5 * sizeof(long) + 4 + namelen); } +/* ************************************************************* + * functions to dump the stack trace + * ************************************************************* + */ + + +static int get_stack_trace(vmprof_stack_t* stack, intptr_t *result, int max_depth, intptr_t pc) +{ + int n = 0; + intptr_t addr = 0; + int bottom_jitted = 0; + // check if the pc is in JIT +#ifdef PYPY_JIT_CODEMAP + if (pypy_find_codemap_at_addr((intptr_t)pc, &addr)) { + // the bottom part is jitted, means we can fill up the first part + // from the JIT + n = vmprof_write_header_for_jit_addr(result, n, pc, max_depth); + stack = stack->next; // skip the first item as it contains garbage + } +#endif + while (n < max_depth - 1 && stack) { + if (stack->kind == VMPROF_CODE_TAG) { + result[n] = stack->kind; + result[n + 1] = stack->value; + n += 2; + } +#ifdef PYPY_JIT_CODEMAP + else if (stack->kind == VMPROF_JITTED_TAG) { + pc = ((intptr_t*)(stack->value - sizeof(intptr_t)))[0]; + n = vmprof_write_header_for_jit_addr(result, n, pc, max_depth); + } +#endif + stack = stack->next; + } + return n; +} + #ifndef RPYTHON_LL2CTYPES static vmprof_stack_t *get_vmprof_stack(void) { diff --git a/rpython/rlib/rvmprof/src/vmprof_main.h b/rpython/rlib/rvmprof/src/vmprof_main.h --- a/rpython/rlib/rvmprof/src/vmprof_main.h +++ b/rpython/rlib/rvmprof/src/vmprof_main.h @@ -35,6 +35,7 @@ #include "vmprof_stack.h" #include "vmprof_getpc.h" #include "vmprof_mt.h" +#include "vmprof_get_custom_offset.h" #include "vmprof_common.h" /************************************************************/ @@ -78,46 +79,6 @@ static char atfork_hook_installed = 0; -#include "vmprof_get_custom_offset.h" - -/* ************************************************************* - * functions to dump the stack trace - * ************************************************************* - */ - - -static int 
get_stack_trace(intptr_t *result, int max_depth, intptr_t pc, ucontext_t *ucontext) -{ - vmprof_stack_t* stack = get_vmprof_stack(); - int n = 0; - intptr_t addr = 0; - int bottom_jitted = 0; - // check if the pc is in JIT -#ifdef PYPY_JIT_CODEMAP - if (pypy_find_codemap_at_addr((intptr_t)pc, &addr)) { - // the bottom part is jitted, means we can fill up the first part - // from the JIT - n = vmprof_write_header_for_jit_addr(result, n, pc, max_depth); - stack = stack->next; // skip the first item as it contains garbage - } -#endif - while (n < max_depth - 1 && stack) { - if (stack->kind == VMPROF_CODE_TAG) { - result[n] = stack->kind; - result[n + 1] = stack->value; - n += 2; - } -#ifdef PYPY_JIT_CODEMAP - else if (stack->kind == VMPROF_JITTED_TAG) { - pc = ((intptr_t*)(stack->value - sizeof(intptr_t)))[0]; - n = vmprof_write_header_for_jit_addr(result, n, pc, max_depth); - } -#endif - stack = stack->next; - } - return n; -} - static intptr_t get_current_thread_id(void) { /* xxx This function is a hack on two fronts: @@ -194,8 +155,8 @@ struct prof_stacktrace_s *st = (struct prof_stacktrace_s *)p->data; st->marker = MARKER_STACKTRACE; st->count = 1; - depth = get_stack_trace(st->stack, - MAX_STACK_DEPTH-2, GetPC((ucontext_t*)ucontext), ucontext); + depth = get_stack_trace(get_vmprof_stack(), st->stack, + MAX_STACK_DEPTH-2, GetPC((ucontext_t*)ucontext)); st->depth = depth; st->stack[depth++] = get_current_thread_id(); p->data_offset = offsetof(struct prof_stacktrace_s, marker); diff --git a/rpython/rlib/rvmprof/src/vmprof_main_win32.h b/rpython/rlib/rvmprof/src/vmprof_main_win32.h --- a/rpython/rlib/rvmprof/src/vmprof_main_win32.h +++ b/rpython/rlib/rvmprof/src/vmprof_main_win32.h @@ -10,13 +10,30 @@ return 0; } +#if defined(_MSC_VER) +#include +typedef SSIZE_T ssize_t; +#endif + +#include +#include +#include +#include +#include +#include +#include +#include +#include #include "vmprof_stack.h" +#include "vmprof_get_custom_offset.h" #include "vmprof_common.h" 
#include // This file has been inspired (but not copied from since the LICENSE // would not allow it) from verysleepy profiler +#define SINGLE_BUF_SIZE 8192 + volatile int thread_started = 0; volatile int enabled = 0; @@ -55,52 +72,75 @@ return 0; } -int vmprof_snapshot_thread(DWORD thread_id, PyThreadState *tstate, prof_stacktrace_s *stack) +int vmprof_snapshot_thread(struct pypy_threadlocal_s *p, prof_stacktrace_s *stack) { - HRESULT result; - HANDLE hThread = OpenThread(THREAD_ALL_ACCESS, FALSE, thread_id); - int depth; + void *addr; + vmprof_stack_t *cur; + long tid; + HANDLE hThread; + long depth; + DWORD result; + CONTEXT ctx; + +#ifdef RPYTHON_LL2CTYPES + return 0; // not much we can do +#else +#ifndef RPY_TLOFS_thread_ident + return 0; // we can't freeze threads, unsafe +#else + hThread = OpenThread(THREAD_ALL_ACCESS, FALSE, p->thread_ident); if (!hThread) { return -1; } result = SuspendThread(hThread); if(result == 0xffffffff) return -1; // possible, e.g. attached debugger or thread alread suspended - // find the correct thread - depth = read_trace_from_cpy_frame(tstate->frame, stack->stack, - MAX_STACK_DEPTH); + ctx.ContextFlags = CONTEXT_FULL; + if (!GetThreadContext(hThread, &ctx)) + return -1; + depth = get_stack_trace(p->vmprof_tl_stack, + stack->stack, MAX_STACK_DEPTH-2, ctx.Eip); stack->depth = depth; - stack->stack[depth++] = (void*)thread_id; + stack->stack[depth++] = (void*)p->thread_ident; stack->count = 1; stack->marker = MARKER_STACKTRACE; ResumeThread(hThread); return depth; +#endif +#endif } long __stdcall vmprof_mainloop(void *arg) { +#ifndef RPYTHON_LL2CTYPES + struct pypy_threadlocal_s *p; prof_stacktrace_s *stack = (prof_stacktrace_s*)malloc(SINGLE_BUF_SIZE); - HANDLE hThreadSnap = INVALID_HANDLE_VALUE; int depth; - PyThreadState *tstate; while (1) { - Sleep(profile_interval_usec * 1000); + //Sleep(profile_interval_usec * 1000); + Sleep(10); if (!enabled) { continue; } - tstate = PyInterpreterState_Head()->tstate_head; - while (tstate) { 
- depth = vmprof_snapshot_thread(tstate->thread_id, tstate, stack); - if (depth > 0) { - _write_all((char*)stack + offsetof(prof_stacktrace_s, marker), - depth * sizeof(void *) + - sizeof(struct prof_stacktrace_s) - - offsetof(struct prof_stacktrace_s, marker)); + _RPython_ThreadLocals_Acquire(); + p = _RPython_ThreadLocals_Head(); // the first one is one behind head + p = _RPython_ThreadLocals_Enum(p); + while (p) { + if (p->ready == 42) { + depth = vmprof_snapshot_thread(p, stack); + if (depth > 0) { + _write_all((char*)stack + offsetof(prof_stacktrace_s, marker), + depth * sizeof(void *) + + sizeof(struct prof_stacktrace_s) - + offsetof(struct prof_stacktrace_s, marker)); + } } - tstate = tstate->next; + p = _RPython_ThreadLocals_Enum(p); } + _RPython_ThreadLocals_Release(); } +#endif } RPY_EXTERN diff --git a/rpython/rlib/rvmprof/src/vmprof_stack.h b/rpython/rlib/rvmprof/src/vmprof_stack.h --- a/rpython/rlib/rvmprof/src/vmprof_stack.h +++ b/rpython/rlib/rvmprof/src/vmprof_stack.h @@ -1,7 +1,11 @@ #ifndef _VMPROF_STACK_H_ #define _VMPROF_STACK_H_ +#ifdef _WIN32 +#define intptr_t long // XXX windows VC++ 2008 lacks stdint.h +#else #include +#endif #define VMPROF_CODE_TAG 1 /* <- also in cintf.py */ #define VMPROF_BLACKHOLE_TAG 2 diff --git a/rpython/rlib/rvmprof/test/test_ztranslation.py b/rpython/rlib/rvmprof/test/test_ztranslation.py --- a/rpython/rlib/rvmprof/test/test_ztranslation.py +++ b/rpython/rlib/rvmprof/test/test_ztranslation.py @@ -3,11 +3,10 @@ sys.path += ['../../../..'] # for subprocess in test_interpreted import py from rpython.tool.udir import udir -from rpython.rlib import rvmprof +from rpython.rlib import rvmprof, rthread from rpython.translator.c.test.test_genc import compile from rpython.rlib.nonconst import NonConstant - class MyCode: def __init__(self, count): self.count = count @@ -39,6 +38,7 @@ PROF_FILE = str(udir.join('test_ztranslation.prof')) def main(argv=[]): + rthread.get_ident() # force TLOFS_thread_ident if NonConstant(False): # 
Hack to give os.open() the correct annotation os.open('foo', 1, 1) diff --git a/rpython/translator/backendopt/finalizer.py b/rpython/translator/backendopt/finalizer.py --- a/rpython/translator/backendopt/finalizer.py +++ b/rpython/translator/backendopt/finalizer.py @@ -18,7 +18,7 @@ """ ok_operations = ['ptr_nonzero', 'ptr_eq', 'ptr_ne', 'free', 'same_as', 'direct_ptradd', 'force_cast', 'track_alloc_stop', - 'raw_free'] + 'raw_free', 'adr_eq', 'adr_ne'] def analyze_light_finalizer(self, graph): result = self.analyze_direct_call(graph) diff --git a/rpython/translator/c/src/threadlocal.c b/rpython/translator/c/src/threadlocal.c --- a/rpython/translator/c/src/threadlocal.c +++ b/rpython/translator/c/src/threadlocal.c @@ -85,6 +85,11 @@ return prev->next; } +struct pypy_threadlocal_s *_RPython_ThreadLocals_Head(void) +{ + return &linkedlist_head; +} + static void _RPy_ThreadLocals_Init(void *p) { struct pypy_threadlocal_s *tls = (struct pypy_threadlocal_s *)p; diff --git a/rpython/translator/c/src/threadlocal.h b/rpython/translator/c/src/threadlocal.h --- a/rpython/translator/c/src/threadlocal.h +++ b/rpython/translator/c/src/threadlocal.h @@ -27,6 +27,9 @@ RPY_EXTERN struct pypy_threadlocal_s * _RPython_ThreadLocals_Enum(struct pypy_threadlocal_s *prev); +/* will return the head of the list */ +RPY_EXTERN struct pypy_threadlocal_s *_RPython_ThreadLocals_Head(); + #define OP_THREADLOCALREF_ACQUIRE(r) _RPython_ThreadLocals_Acquire() #define OP_THREADLOCALREF_RELEASE(r) _RPython_ThreadLocals_Release() #define OP_THREADLOCALREF_ENUM(p, r) r = _RPython_ThreadLocals_Enum(p) diff --git a/rpython/translator/driver.py b/rpython/translator/driver.py --- a/rpython/translator/driver.py +++ b/rpython/translator/driver.py @@ -487,13 +487,14 @@ exe = py.path.local(exename) exename = exe.new(purebasename=exe.purebasename + 'w') shutil_copy(str(exename), str(newexename)) - # the import library is named python27.lib, according - # to the pragma in pyconfig.h - libname = 
str(newsoname.dirpath().join('python27.lib')) + # for pypy, the import library is renamed and moved to + # libs/python27.lib, according to the pragma in pyconfig.h + libname = self.config.translation.libname + libname = libname or soname.new(ext='lib').basename + libname = str(newsoname.dirpath().join(libname)) shutil.copyfile(str(soname.new(ext='lib')), libname) self.log.info("copied: %s" % (libname,)) - # XXX TODO : replace the nonsense above with - # ext_to_copy = ['lib', 'pdb'] + # the pdb file goes in the same place as pypy(w).exe ext_to_copy = ['pdb',] for ext in ext_to_copy: name = soname.new(ext=ext) From pypy.commits at gmail.com Mon Feb 15 07:26:10 2016 From: pypy.commits at gmail.com (plan_rich) Date: Mon, 15 Feb 2016 04:26:10 -0800 (PST) Subject: [pypy-commit] pypy s390x-backend: remove _vmprof when translating for s390x (for now) Message-ID: <56c1c3e2.45611c0a.c357.4e5f@mx.google.com> Author: Richard Plangger Branch: s390x-backend Changeset: r82260:11fdfd9e852f Date: 2016-02-15 13:25 +0100 http://bitbucket.org/pypy/pypy/changeset/11fdfd9e852f/ Log: remove _vmprof when translating for s390x (for now) diff --git a/pypy/config/pypyoption.py b/pypy/config/pypyoption.py --- a/pypy/config/pypyoption.py +++ b/pypy/config/pypyoption.py @@ -39,10 +39,8 @@ "_csv", "cppyy", "_pypyjson", "_vmprof", ]) -#if ((sys.platform.startswith('linux') or sys.platform == 'darwin') -# and os.uname()[4] == 'x86_64' and sys.maxint > 2**32): - # it's not enough that we get x86_64 -# working_modules.add('_vmprof') +if os.uname()[4] == 's390x': + working_modules.remove("_vmprof") translation_modules = default_modules.copy() translation_modules.update([ From pypy.commits at gmail.com Mon Feb 15 07:29:48 2016 From: pypy.commits at gmail.com (plan_rich) Date: Mon, 15 Feb 2016 04:29:48 -0800 (PST) Subject: [pypy-commit] pypy s390x-backend: extended comment + issue in libc (rposix macro expansion) Message-ID: <56c1c4bc.01adc20a.cbec7.ffffe4e3@mx.google.com> Author: Richard Plangger 
Branch: s390x-backend Changeset: r82261:bb5b1c0fef8d Date: 2016-02-15 13:29 +0100 http://bitbucket.org/pypy/pypy/changeset/bb5b1c0fef8d/ Log: extended comment + issue in libc (rposix macro expansion) diff --git a/rpython/rlib/rposix.py b/rpython/rlib/rposix.py --- a/rpython/rlib/rposix.py +++ b/rpython/rlib/rposix.py @@ -874,6 +874,10 @@ # note that rffi.INT as first parameter type is intentional. # on s390x providing a lltype.Signed as param type, the # macro wrapper function will always return 0 + # reason: legacy code required a union wait. see + # https://sourceware.org/bugzilla/show_bug.cgi?id=19613 + # for more details. If this get's fixed we can use lltype.Signed + # again. c_func = external(name, [rffi.INT], lltype.Signed, macro=_MACRO_ON_POSIX) returning_int = name in ('WEXITSTATUS', 'WSTOPSIG', 'WTERMSIG') From pypy.commits at gmail.com Mon Feb 15 08:12:00 2016 From: pypy.commits at gmail.com (mjacob) Date: Mon, 15 Feb 2016 05:12:00 -0800 (PST) Subject: [pypy-commit] pypy default: Make integer constant fit in 32 bits to make vmprof tests pass on 32-bit again. Message-ID: <56c1cea0.654fc20a.a67a1.ffffd9fd@mx.google.com> Author: Manuel Jacob Branch: Changeset: r82262:02eab80025cb Date: 2016-02-15 14:10 +0100 http://bitbucket.org/pypy/pypy/changeset/02eab80025cb/ Log: Make integer constant fit in 32 bits to make vmprof tests pass on 32-bit again. diff --git a/rpython/rlib/rvmprof/test/test_rvmprof.py b/rpython/rlib/rvmprof/test/test_rvmprof.py --- a/rpython/rlib/rvmprof/test/test_rvmprof.py +++ b/rpython/rlib/rvmprof/test/test_rvmprof.py @@ -101,7 +101,7 @@ s = 0 for i in range(num): s += (i << 1) - if s % 32423423423 == 0: + if s % 2423423423 == 0: print s return s From pypy.commits at gmail.com Mon Feb 15 08:25:44 2016 From: pypy.commits at gmail.com (mjacob) Date: Mon, 15 Feb 2016 05:25:44 -0800 (PST) Subject: [pypy-commit] pypy default: Change constant fit in 31 bits - it's signed. 
Message-ID: <56c1d1d8.8ab71c0a.aacf4.668f@mx.google.com> Author: Manuel Jacob Branch: Changeset: r82263:b6d62c49d868 Date: 2016-02-15 14:24 +0100 http://bitbucket.org/pypy/pypy/changeset/b6d62c49d868/ Log: Change constant fit in 31 bits - it's signed. diff --git a/rpython/rlib/rvmprof/test/test_rvmprof.py b/rpython/rlib/rvmprof/test/test_rvmprof.py --- a/rpython/rlib/rvmprof/test/test_rvmprof.py +++ b/rpython/rlib/rvmprof/test/test_rvmprof.py @@ -101,7 +101,7 @@ s = 0 for i in range(num): s += (i << 1) - if s % 2423423423 == 0: + if s % 2123423423 == 0: print s return s From pypy.commits at gmail.com Mon Feb 15 09:04:38 2016 From: pypy.commits at gmail.com (arigo) Date: Mon, 15 Feb 2016 06:04:38 -0800 (PST) Subject: [pypy-commit] pypy cpyext-gc-support-2: Add one missing 'result_borrowed' to weakrefobject.py. Add comments Message-ID: <56c1daf6.cf0b1c0a.8b231.7c72@mx.google.com> Author: Armin Rigo Branch: cpyext-gc-support-2 Changeset: r82264:2f74cfc0598e Date: 2016-02-15 15:03 +0100 http://bitbucket.org/pypy/pypy/changeset/2f74cfc0598e/ Log: Add one missing 'result_borrowed' to weakrefobject.py. Add comments to various other similar places diff --git a/pypy/module/cpyext/dictobject.py b/pypy/module/cpyext/dictobject.py --- a/pypy/module/cpyext/dictobject.py +++ b/pypy/module/cpyext/dictobject.py @@ -14,7 +14,8 @@ PyDict_Check, PyDict_CheckExact = build_type_checkers("Dict") - at cpython_api([PyObject, PyObject], PyObject, error=CANNOT_FAIL, result_borrowed=True) + at cpython_api([PyObject, PyObject], PyObject, error=CANNOT_FAIL, + result_borrowed=True) def PyDict_GetItem(space, w_dict, w_key): try: w_res = space.getitem(w_dict, w_key) @@ -22,7 +23,7 @@ return None # NOTE: this works so far because all our dict strategies store # *values* as full objects, which stay alive as long as the dict is - # alive and not modified. + # alive and not modified. So we can return a borrowed ref. 
return w_res @cpython_api([PyObject, PyObject, PyObject], rffi.INT_real, error=-1) @@ -61,7 +62,7 @@ w_res = None # NOTE: this works so far because all our dict strategies store # *values* as full objects, which stay alive as long as the dict is - # alive and not modified. + # alive and not modified. So we can return a borrowed ref. return w_res @cpython_api([PyObject, CONST_STRING], rffi.INT_real, error=-1) diff --git a/pypy/module/cpyext/import_.py b/pypy/module/cpyext/import_.py --- a/pypy/module/cpyext/import_.py +++ b/pypy/module/cpyext/import_.py @@ -1,7 +1,6 @@ from pypy.interpreter import module from pypy.module.cpyext.api import ( generic_cpy_call, cpython_api, PyObject, CONST_STRING) -from pypy.module.cpyext.pyobject import borrow_from from rpython.rtyper.lltypesystem import lltype, rffi from pypy.interpreter.error import OperationError from pypy.interpreter.module import Module @@ -56,7 +55,7 @@ from pypy.module.imp.importing import reload return reload(space, w_mod) - at cpython_api([CONST_STRING], PyObject) + at cpython_api([CONST_STRING], PyObject, result_borrowed=True) def PyImport_AddModule(space, name): """Return the module object corresponding to a module name. The name argument may be of the form package.module. First check the modules @@ -74,14 +73,16 @@ w_mod = check_sys_modules_w(space, modulename) if not w_mod or space.is_w(w_mod, space.w_None): w_mod = Module(space, space.wrap(modulename)) - return borrow_from(None, w_mod) + space.setitem(space.sys.get('modules'), space.wrap(modulename), w_mod) + # return a borrowed ref --- assumes one copy in sys.modules + return w_mod - at cpython_api([], PyObject) + at cpython_api([], PyObject, result_borrowed=True) def PyImport_GetModuleDict(space): """Return the dictionary used for the module administration (a.k.a. sys.modules). 
Note that this is a per-interpreter variable.""" w_modulesDict = space.sys.get('modules') - return borrow_from(None, w_modulesDict) + return w_modulesDict # borrowed ref @cpython_api([rffi.CCHARP, PyObject], PyObject) def PyImport_ExecCodeModule(space, name, w_code): diff --git a/pypy/module/cpyext/listobject.py b/pypy/module/cpyext/listobject.py --- a/pypy/module/cpyext/listobject.py +++ b/pypy/module/cpyext/listobject.py @@ -51,7 +51,8 @@ "list index out of range")) w_list.ensure_object_strategy() # make sure we can return a borrowed obj # XXX ^^^ how does this interact with CPyListStrategy? - return w_list.getitem(index) + w_res = w_list.getitem(index) + return w_res # borrowed ref @cpython_api([PyObject, PyObject], rffi.INT_real, error=-1) diff --git a/pypy/module/cpyext/pyerrors.py b/pypy/module/cpyext/pyerrors.py --- a/pypy/module/cpyext/pyerrors.py +++ b/pypy/module/cpyext/pyerrors.py @@ -6,7 +6,7 @@ from pypy.module.cpyext.api import cpython_api, CANNOT_FAIL, CONST_STRING from pypy.module.exceptions.interp_exceptions import W_RuntimeWarning from pypy.module.cpyext.pyobject import ( - PyObject, PyObjectP, make_ref, from_ref, Py_DecRef, borrow_from) + PyObject, PyObjectP, make_ref, from_ref, Py_DecRef) from pypy.module.cpyext.state import State from pypy.module.cpyext.import_ import PyImport_Import from rpython.rlib import rposix, jit @@ -33,7 +33,7 @@ state = space.fromcache(State) if state.operror is None: return None - return state.operror.w_type + return state.operror.w_type # borrowed ref @cpython_api([], lltype.Void) def PyErr_Clear(space): diff --git a/pypy/module/cpyext/sequence.py b/pypy/module/cpyext/sequence.py --- a/pypy/module/cpyext/sequence.py +++ b/pypy/module/cpyext/sequence.py @@ -64,7 +64,7 @@ else: assert isinstance(w_obj, tupleobject.W_TupleObject) w_res = w_obj.wrappeditems[index] - return w_res + return w_res # borrowed ref @cpython_api([PyObject], Py_ssize_t, error=CANNOT_FAIL) def PySequence_Fast_GET_SIZE(space, w_obj): diff --git 
a/pypy/module/cpyext/tupleobject.py b/pypy/module/cpyext/tupleobject.py --- a/pypy/module/cpyext/tupleobject.py +++ b/pypy/module/cpyext/tupleobject.py @@ -4,7 +4,7 @@ build_type_checkers, PyObjectFields, cpython_struct, bootstrap_function) from pypy.module.cpyext.pyobject import (PyObject, PyObjectP, Py_DecRef, - borrow_from, make_ref, from_ref, decref, + make_ref, from_ref, decref, track_reference, make_typedescr, get_typedescr) from pypy.module.cpyext.pyerrors import PyErr_BadInternalCall from pypy.objspace.std.tupleobject import W_TupleObject @@ -148,7 +148,7 @@ if index < 0 or index >= size: raise OperationError(space.w_IndexError, space.wrap("tuple index out of range")) - return ref.c_ob_item[index] + return ref.c_ob_item[index] # borrowed ref @cpython_api([PyObject], Py_ssize_t, error=-1) def PyTuple_Size(space, ref): diff --git a/pypy/module/cpyext/typeobject.py b/pypy/module/cpyext/typeobject.py --- a/pypy/module/cpyext/typeobject.py +++ b/pypy/module/cpyext/typeobject.py @@ -646,7 +646,7 @@ name = space.str_w(w_name) w_obj = w_type.lookup(name) # this assumes that w_obj is not dynamically created, but will stay alive - # until w_type is modified or dies + # until w_type is modified or dies. 
Assuming this, we return a borrowed ref return w_obj @cpython_api([PyTypeObjectPtr], lltype.Void) diff --git a/pypy/module/cpyext/weakrefobject.py b/pypy/module/cpyext/weakrefobject.py --- a/pypy/module/cpyext/weakrefobject.py +++ b/pypy/module/cpyext/weakrefobject.py @@ -1,5 +1,5 @@ from pypy.module.cpyext.api import cpython_api -from pypy.module.cpyext.pyobject import PyObject, borrow_from +from pypy.module.cpyext.pyobject import PyObject from pypy.module._weakref.interp__weakref import W_Weakref, proxy @cpython_api([PyObject, PyObject], PyObject) @@ -30,19 +30,19 @@ """ return proxy(space, w_obj, w_callback) - at cpython_api([PyObject], PyObject) + at cpython_api([PyObject], PyObject, result_borrowed=True) def PyWeakref_GetObject(space, w_ref): """Return the referenced object from a weak reference. If the referent is no longer live, returns None. This function returns a borrowed reference. """ - return PyWeakref_GET_OBJECT(space, w_ref) + return space.call_function(w_ref) # borrowed ref @cpython_api([PyObject], PyObject, result_borrowed=True) def PyWeakref_GET_OBJECT(space, w_ref): """Similar to PyWeakref_GetObject(), but implemented as a macro that does no error checking. 
""" - return space.call_function(w_ref) + return space.call_function(w_ref) # borrowed ref @cpython_api([PyObject], PyObject) def PyWeakref_LockObject(space, w_ref): From pypy.commits at gmail.com Mon Feb 15 09:12:19 2016 From: pypy.commits at gmail.com (arigo) Date: Mon, 15 Feb 2016 06:12:19 -0800 (PST) Subject: [pypy-commit] pypy cpyext-gc-support-2: More borrow_from() -> result_borrowed Message-ID: <56c1dcc3.4d0d1c0a.622cd.ffff801f@mx.google.com> Author: Armin Rigo Branch: cpyext-gc-support-2 Changeset: r82265:9c0cbbbea383 Date: 2016-02-15 15:11 +0100 http://bitbucket.org/pypy/pypy/changeset/9c0cbbbea383/ Log: More borrow_from() -> result_borrowed diff --git a/pypy/module/cpyext/eval.py b/pypy/module/cpyext/eval.py --- a/pypy/module/cpyext/eval.py +++ b/pypy/module/cpyext/eval.py @@ -4,7 +4,7 @@ from pypy.module.cpyext.api import ( cpython_api, CANNOT_FAIL, CONST_STRING, FILEP, fread, feof, Py_ssize_tP, cpython_struct, is_valid_fp) -from pypy.module.cpyext.pyobject import PyObject, borrow_from +from pypy.module.cpyext.pyobject import PyObject from pypy.module.cpyext.pyerrors import PyErr_SetFromErrno from pypy.module.cpyext.funcobject import PyCodeObject from pypy.module.__builtin__ import compiling @@ -23,7 +23,7 @@ def PyEval_CallObjectWithKeywords(space, w_obj, w_arg, w_kwds): return space.call(w_obj, w_arg, w_kwds) - at cpython_api([], PyObject) + at cpython_api([], PyObject, result_borrowed=True) def PyEval_GetBuiltins(space): """Return a dictionary of the builtins in the current execution frame, or the interpreter of the thread state if no frame is @@ -36,25 +36,25 @@ w_builtins = w_builtins.getdict(space) else: w_builtins = space.builtin.getdict(space) - return borrow_from(None, w_builtins) + return w_builtins # borrowed ref in all cases - at cpython_api([], PyObject, error=CANNOT_FAIL) + at cpython_api([], PyObject, error=CANNOT_FAIL, result_borrowed=True) def PyEval_GetLocals(space): """Return a dictionary of the local variables in the current execution 
frame, or NULL if no frame is currently executing.""" caller = space.getexecutioncontext().gettopframe_nohidden() if caller is None: return None - return borrow_from(None, caller.getdictscope()) + return caller.getdictscope() # borrowed ref - at cpython_api([], PyObject, error=CANNOT_FAIL) + at cpython_api([], PyObject, error=CANNOT_FAIL, result_borrowed=True) def PyEval_GetGlobals(space): """Return a dictionary of the global variables in the current execution frame, or NULL if no frame is currently executing.""" caller = space.getexecutioncontext().gettopframe_nohidden() if caller is None: return None - return borrow_from(None, caller.get_w_globals()) + return caller.get_w_globals() # borrowed ref @cpython_api([PyCodeObject, PyObject, PyObject], PyObject) def PyEval_EvalCode(space, w_code, w_globals, w_locals): diff --git a/pypy/module/cpyext/funcobject.py b/pypy/module/cpyext/funcobject.py --- a/pypy/module/cpyext/funcobject.py +++ b/pypy/module/cpyext/funcobject.py @@ -3,7 +3,7 @@ PyObjectFields, generic_cpy_call, CONST_STRING, CANNOT_FAIL, Py_ssize_t, cpython_api, bootstrap_function, cpython_struct, build_type_checkers) from pypy.module.cpyext.pyobject import ( - PyObject, make_ref, from_ref, Py_DecRef, make_typedescr, borrow_from) + PyObject, make_ref, from_ref, Py_DecRef, make_typedescr) from rpython.rlib.unroll import unrolling_iterable from pypy.interpreter.error import OperationError from pypy.interpreter.function import Function, Method @@ -83,12 +83,12 @@ from pypy.module.cpyext.object import PyObject_dealloc PyObject_dealloc(space, py_obj) - at cpython_api([PyObject], PyObject) + at cpython_api([PyObject], PyObject, result_borrowed=True) def PyFunction_GetCode(space, w_func): """Return the code object associated with the function object op.""" func = space.interp_w(Function, w_func) w_code = space.wrap(func.code) - return borrow_from(w_func, w_code) + return w_code # borrowed ref @cpython_api([PyObject, PyObject, PyObject], PyObject) def 
PyMethod_New(space, w_func, w_self, w_cls): @@ -99,25 +99,25 @@ class which provides the unbound method.""" return Method(space, w_func, w_self, w_cls) - at cpython_api([PyObject], PyObject) + at cpython_api([PyObject], PyObject, result_borrowed=True) def PyMethod_Function(space, w_method): """Return the function object associated with the method meth.""" assert isinstance(w_method, Method) - return borrow_from(w_method, w_method.w_function) + return w_method.w_function # borrowed ref - at cpython_api([PyObject], PyObject) + at cpython_api([PyObject], PyObject, result_borrowed=True) def PyMethod_Self(space, w_method): """Return the instance associated with the method meth if it is bound, otherwise return NULL.""" assert isinstance(w_method, Method) - return borrow_from(w_method, w_method.w_instance) + return w_method.w_instance # borrowed ref - at cpython_api([PyObject], PyObject) + at cpython_api([PyObject], PyObject, result_borrowed=True) def PyMethod_Class(space, w_method): """Return the class object from which the method meth was created; if this was created from an instance, it will be the class of the instance.""" assert isinstance(w_method, Method) - return borrow_from(w_method, w_method.w_class) + return w_method.w_class # borrowed ref def unwrap_list_of_strings(space, w_list): return [space.str_w(w_item) for w_item in space.fixedview(w_list)] diff --git a/pypy/module/cpyext/listobject.py b/pypy/module/cpyext/listobject.py --- a/pypy/module/cpyext/listobject.py +++ b/pypy/module/cpyext/listobject.py @@ -3,7 +3,7 @@ from pypy.module.cpyext.api import (cpython_api, CANNOT_FAIL, Py_ssize_t, build_type_checkers) from pypy.module.cpyext.pyerrors import PyErr_BadInternalCall -from pypy.module.cpyext.pyobject import Py_DecRef, PyObject, borrow_from +from pypy.module.cpyext.pyobject import Py_DecRef, PyObject from pypy.objspace.std.listobject import W_ListObject from pypy.interpreter.error import OperationError diff --git a/pypy/module/cpyext/pyfile.py 
b/pypy/module/cpyext/pyfile.py --- a/pypy/module/cpyext/pyfile.py +++ b/pypy/module/cpyext/pyfile.py @@ -1,7 +1,7 @@ -from rpython.rtyper.lltypesystem import rffi, lltype +$from rpython.rtyper.lltypesystem import rffi, lltype from pypy.module.cpyext.api import ( cpython_api, CANNOT_FAIL, CONST_STRING, FILEP, build_type_checkers) -from pypy.module.cpyext.pyobject import PyObject, borrow_from +from pypy.module.cpyext.pyobject import PyObject from pypy.module.cpyext.object import Py_PRINT_RAW from pypy.interpreter.error import OperationError from pypy.module._file.interp_file import W_File @@ -83,7 +83,8 @@ @cpython_api([PyObject], PyObject) def PyFile_Name(space, w_p): """Return the name of the file specified by p as a string object.""" - return borrow_from(w_p, space.getattr(w_p, space.wrap("name"))) + w_name = space.getattr(w_p, space.wrap("name")) + return w_name # borrowed ref, should be a W_StringObject from the file @cpython_api([PyObject, rffi.INT_real], rffi.INT_real, error=CANNOT_FAIL) def PyFile_SoftSpace(space, w_p, newflag): diff --git a/pypy/module/cpyext/pytraceback.py b/pypy/module/cpyext/pytraceback.py --- a/pypy/module/cpyext/pytraceback.py +++ b/pypy/module/cpyext/pytraceback.py @@ -3,7 +3,7 @@ PyObjectFields, generic_cpy_call, CONST_STRING, CANNOT_FAIL, Py_ssize_t, cpython_api, bootstrap_function, cpython_struct, build_type_checkers) from pypy.module.cpyext.pyobject import ( - PyObject, make_ref, from_ref, Py_DecRef, make_typedescr, borrow_from) + PyObject, make_ref, from_ref, Py_DecRef, make_typedescr) from pypy.module.cpyext.frameobject import PyFrameObject from rpython.rlib.unroll import unrolling_iterable from pypy.interpreter.error import OperationError diff --git a/pypy/module/cpyext/setobject.py b/pypy/module/cpyext/setobject.py --- a/pypy/module/cpyext/setobject.py +++ b/pypy/module/cpyext/setobject.py @@ -3,7 +3,7 @@ from pypy.module.cpyext.api import (cpython_api, Py_ssize_t, CANNOT_FAIL, build_type_checkers) from 
pypy.module.cpyext.pyobject import (PyObject, PyObjectP, Py_DecRef, - borrow_from, make_ref, from_ref) + make_ref, from_ref) from pypy.module.cpyext.pyerrors import PyErr_BadInternalCall from pypy.objspace.std.setobject import W_SetObject, newset diff --git a/pypy/module/cpyext/sysmodule.py b/pypy/module/cpyext/sysmodule.py --- a/pypy/module/cpyext/sysmodule.py +++ b/pypy/module/cpyext/sysmodule.py @@ -1,16 +1,16 @@ from pypy.interpreter.error import OperationError from rpython.rtyper.lltypesystem import rffi, lltype from pypy.module.cpyext.api import CANNOT_FAIL, cpython_api, CONST_STRING -from pypy.module.cpyext.pyobject import PyObject, borrow_from +from pypy.module.cpyext.pyobject import PyObject - at cpython_api([CONST_STRING], PyObject, error=CANNOT_FAIL) + at cpython_api([CONST_STRING], PyObject, error=CANNOT_FAIL, result_borrowed=True) def PySys_GetObject(space, name): """Return the object name from the sys module or NULL if it does not exist, without setting an exception.""" name = rffi.charp2str(name) w_dict = space.sys.getdict(space) w_obj = space.finditem_str(w_dict, name) - return borrow_from(None, w_obj) + return w_obj # borrowed ref: kept alive in space.sys.w_dict @cpython_api([CONST_STRING, PyObject], rffi.INT_real, error=-1) def PySys_SetObject(space, name, w_obj): From pypy.commits at gmail.com Mon Feb 15 09:18:10 2016 From: pypy.commits at gmail.com (arigo) Date: Mon, 15 Feb 2016 06:18:10 -0800 (PST) Subject: [pypy-commit] pypy cpyext-gc-support-2: typo Message-ID: <56c1de22.d4e41c0a.29b09.7817@mx.google.com> Author: Armin Rigo Branch: cpyext-gc-support-2 Changeset: r82266:0079bb6a6e6d Date: 2016-02-15 15:17 +0100 http://bitbucket.org/pypy/pypy/changeset/0079bb6a6e6d/ Log: typo diff --git a/pypy/module/cpyext/pyfile.py b/pypy/module/cpyext/pyfile.py --- a/pypy/module/cpyext/pyfile.py +++ b/pypy/module/cpyext/pyfile.py @@ -1,4 +1,4 @@ -$from rpython.rtyper.lltypesystem import rffi, lltype +from rpython.rtyper.lltypesystem import rffi, lltype from 
pypy.module.cpyext.api import ( cpython_api, CANNOT_FAIL, CONST_STRING, FILEP, build_type_checkers) from pypy.module.cpyext.pyobject import PyObject From pypy.commits at gmail.com Mon Feb 15 10:33:49 2016 From: pypy.commits at gmail.com (arigo) Date: Mon, 15 Feb 2016 07:33:49 -0800 (PST) Subject: [pypy-commit] pypy default: Test and fix for another corner case (likely shown by Krakatau) Message-ID: <56c1efdd.d22e1c0a.11b7f.ffff9372@mx.google.com> Author: Armin Rigo Branch: Changeset: r82267:96c2ec82f010 Date: 2016-02-15 16:32 +0100 http://bitbucket.org/pypy/pypy/changeset/96c2ec82f010/ Log: Test and fix for another corner case (likely shown by Krakatau) diff --git a/rpython/jit/metainterp/optimizeopt/rewrite.py b/rpython/jit/metainterp/optimizeopt/rewrite.py --- a/rpython/jit/metainterp/optimizeopt/rewrite.py +++ b/rpython/jit/metainterp/optimizeopt/rewrite.py @@ -380,7 +380,7 @@ raise InvalidLoop("promote of a virtual") old_guard_op = info.get_last_guard(self.optimizer) if old_guard_op is not None: - op = self.replace_guard_class_with_guard_value(op, info, + op = self.replace_old_guard_with_guard_value(op, info, old_guard_op) elif arg0.type == 'f': arg0 = self.get_box_replacement(arg0) @@ -390,11 +390,26 @@ assert isinstance(constbox, Const) self.optimize_guard(op, constbox) - def replace_guard_class_with_guard_value(self, op, info, old_guard_op): - if old_guard_op.opnum != rop.GUARD_NONNULL: - previous_classbox = info.get_known_class(self.optimizer.cpu) - expected_classbox = self.optimizer.cpu.ts.cls_of_box(op.getarg(1)) - assert previous_classbox is not None + def replace_old_guard_with_guard_value(self, op, info, old_guard_op): + # there already has been a guard_nonnull or guard_class or + # guard_nonnull_class on this value, which is rather silly. + # This function replaces the original guard with a + # guard_value. Must be careful: doing so is unsafe if the + # original guard checks for something inconsistent, + # i.e. 
different than what it would give if the guard_value + # passed (this is a rare case, but possible). If we get + # inconsistent results in this way, then we must not do the + # replacement, otherwise we'd put guard_value up there but all + # intermediate ops might be executed by assuming something + # different, from the old guard that is now removed... + + c_value = op.getarg(1) + if not c_value.nonnull(): + raise InvalidLoop('A GUARD_VALUE(..., NULL) follows some other ' + 'guard that it is not NULL') + previous_classbox = info.get_known_class(self.optimizer.cpu) + if previous_classbox is not None: + expected_classbox = self.optimizer.cpu.ts.cls_of_box(c_value) assert expected_classbox is not None if not previous_classbox.same_constant( expected_classbox): diff --git a/rpython/jit/metainterp/optimizeopt/test/test_optimizeopt.py b/rpython/jit/metainterp/optimizeopt/test/test_optimizeopt.py --- a/rpython/jit/metainterp/optimizeopt/test/test_optimizeopt.py +++ b/rpython/jit/metainterp/optimizeopt/test/test_optimizeopt.py @@ -3063,6 +3063,16 @@ self.optimize_loop(ops, expected, preamble) #self.check_expanded_fail_descr("i0", rop.GUARD_VALUE) + def test_invalid_guard_value_after_guard_class(self): + ops = """ + [p1, i0, i1, i2, p2] + guard_class(p1, ConstClass(node_vtable)) [i0] + i3 = int_add(i1, i2) + guard_value(p1, NULL) [i1] + jump(p2, i0, i1, i3, p2) + """ + self.raises(InvalidLoop, self.optimize_loop, ops, ops) + def test_guard_class_oois(self): ops = """ [p1] From pypy.commits at gmail.com Mon Feb 15 11:31:58 2016 From: pypy.commits at gmail.com (arigo) Date: Mon, 15 Feb 2016 08:31:58 -0800 (PST) Subject: [pypy-commit] pypy cpyext-gc-support-2: Fix for test_typeobject.py:test_mp_ass_subscript Message-ID: <56c1fd7e.c8ac1c0a.f0c4b.ffffaa53@mx.google.com> Author: Armin Rigo Branch: cpyext-gc-support-2 Changeset: r82268:fbcb51a49880 Date: 2016-02-15 17:30 +0100 http://bitbucket.org/pypy/pypy/changeset/fbcb51a49880/ Log: Fix for 
test_typeobject.py:test_mp_ass_subscript diff --git a/pypy/module/cpyext/pyobject.py b/pypy/module/cpyext/pyobject.py --- a/pypy/module/cpyext/pyobject.py +++ b/pypy/module/cpyext/pyobject.py @@ -47,6 +47,7 @@ size = rffi.sizeof(self.basestruct) if itemcount: size += itemcount * pytype.c_tp_itemsize + assert size >= rffi.sizeof(PyObject.TO) buf = lltype.malloc(rffi.VOIDP.TO, size, flavor='raw', zero=True) pyobj = rffi.cast(PyObject, buf) diff --git a/pypy/module/cpyext/typeobject.py b/pypy/module/cpyext/typeobject.py --- a/pypy/module/cpyext/typeobject.py +++ b/pypy/module/cpyext/typeobject.py @@ -235,6 +235,9 @@ def inherit_special(space, pto, base_pto): # XXX missing: copy basicsize and flags in a magical way + # (minimally, if tp_basicsize is zero we copy it from the base) + if not pto.c_tp_basicsize: + pto.c_tp_basicsize = base_pto.c_tp_basicsize flags = rffi.cast(lltype.Signed, pto.c_tp_flags) base_object_pyo = make_ref(space, space.w_object) base_object_pto = rffi.cast(PyTypeObjectPtr, base_object_pyo) From pypy.commits at gmail.com Mon Feb 15 12:04:50 2016 From: pypy.commits at gmail.com (mjacob) Date: Mon, 15 Feb 2016 09:04:50 -0800 (PST) Subject: [pypy-commit] pypy py3.3: Fix test by inserting sys.meta_path hook at the beginning. Message-ID: <56c20532.890bc30a.7fea6.50a2@mx.google.com> Author: Manuel Jacob Branch: py3.3 Changeset: r82269:3495c617f6d0 Date: 2016-02-15 17:16 +0100 http://bitbucket.org/pypy/pypy/changeset/3495c617f6d0/ Log: Fix test by inserting sys.meta_path hook at the beginning. 
diff --git a/pypy/module/imp/test/test_import.py b/pypy/module/imp/test/test_import.py --- a/pypy/module/imp/test/test_import.py +++ b/pypy/module/imp/test/test_import.py @@ -1040,7 +1040,7 @@ import sys, math del sys.modules["math"] - sys.meta_path.append(Importer()) + sys.meta_path.insert(0, Importer()) try: import math assert len(tried_imports) == 1 @@ -1050,7 +1050,7 @@ else: assert tried_imports[0][0] == "math" finally: - sys.meta_path.pop() + sys.meta_path.pop(0) def test_meta_path_block(self): class ImportBlocker(object): @@ -1069,7 +1069,7 @@ if modname in sys.modules: mod = sys.modules del sys.modules[modname] - sys.meta_path.append(ImportBlocker(modname)) + sys.meta_path.insert(0, ImportBlocker(modname)) try: raises(ImportError, __import__, modname) # the imp module doesn't use meta_path, and is not blocked @@ -1077,7 +1077,7 @@ file, filename, stuff = imp.find_module(modname) imp.load_module(modname, file, filename, stuff) finally: - sys.meta_path.pop() + sys.meta_path.pop(0) if mod: sys.modules[modname] = mod From pypy.commits at gmail.com Mon Feb 15 12:04:52 2016 From: pypy.commits at gmail.com (mjacob) Date: Mon, 15 Feb 2016 09:04:52 -0800 (PST) Subject: [pypy-commit] pypy py3.3: Fix test by checking for correct behaviour. Message-ID: <56c20534.0ab81c0a.41694.ffffc1d7@mx.google.com> Author: Manuel Jacob Branch: py3.3 Changeset: r82270:0a7d0c775a46 Date: 2016-02-15 17:20 +0100 http://bitbucket.org/pypy/pypy/changeset/0a7d0c775a46/ Log: Fix test by checking for correct behaviour. 
diff --git a/pypy/module/imp/test/test_import.py b/pypy/module/imp/test/test_import.py --- a/pypy/module/imp/test/test_import.py +++ b/pypy/module/imp/test/test_import.py @@ -1105,8 +1105,7 @@ import b except ImportError: pass - assert isinstance(sys.path_importer_cache['yyy'], - imp.NullImporter) + assert sys.path_importer_cache['yyy'] is None finally: sys.path.pop(0) sys.path.pop(0) From pypy.commits at gmail.com Mon Feb 15 12:43:40 2016 From: pypy.commits at gmail.com (rlamy) Date: Mon, 15 Feb 2016 09:43:40 -0800 (PST) Subject: [pypy-commit] pypy default: Remove dead code: Bookkeeper._find_current_op() Message-ID: <56c20e4c.4c181c0a.e3696.ffffcb81@mx.google.com> Author: Ronan Lamy Branch: Changeset: r82271:4bec5c63bc9e Date: 2016-02-15 17:42 +0000 http://bitbucket.org/pypy/pypy/changeset/4bec5c63bc9e/ Log: Remove dead code: Bookkeeper._find_current_op() diff --git a/rpython/annotator/bookkeeper.py b/rpython/annotator/bookkeeper.py --- a/rpython/annotator/bookkeeper.py +++ b/rpython/annotator/bookkeeper.py @@ -551,20 +551,6 @@ emulated = callback return self.pbc_call(pbc, args, emulated=emulated) - def _find_current_op(self, opname=None, arity=None, pos=None, s_type=None): - """ Find operation that is currently being annotated. 
Do some - sanity checks to see whether the correct op was found.""" - # XXX XXX HACK HACK HACK - fn, block, i = self.position_key - op = block.operations[i] - if opname is not None: - assert op.opname == opname - if arity is not None: - assert len(op.args) == arity - if pos is not None: - assert self.annotator.binding(op.args[pos]) == s_type - return op - def whereami(self): return self.annotator.whereami(self.position_key) From pypy.commits at gmail.com Mon Feb 15 13:59:48 2016 From: pypy.commits at gmail.com (arigo) Date: Mon, 15 Feb 2016 10:59:48 -0800 (PST) Subject: [pypy-commit] pypy cpyext-gc-support-2: fix Message-ID: <56c22024.c9161c0a.10957.6a0b@mx.google.com> Author: Armin Rigo Branch: cpyext-gc-support-2 Changeset: r82272:bde14abe82c8 Date: 2016-02-15 19:58 +0100 http://bitbucket.org/pypy/pypy/changeset/bde14abe82c8/ Log: fix diff --git a/pypy/module/cpyext/api.py b/pypy/module/cpyext/api.py --- a/pypy/module/cpyext/api.py +++ b/pypy/module/cpyext/api.py @@ -635,7 +635,7 @@ arg = args[i] if is_PyObject(typ) and is_wrapped: assert is_pyobj(arg) - arg_conv = from_ref(space, arg) + arg_conv = from_ref(space, rffi.cast(PyObject, arg)) else: arg_conv = arg boxed_args += (arg_conv, ) From pypy.commits at gmail.com Mon Feb 15 14:43:52 2016 From: pypy.commits at gmail.com (mattip) Date: Mon, 15 Feb 2016 11:43:52 -0800 (PST) Subject: [pypy-commit] pypy cpyext-gc-support-2: fixes in tests Message-ID: <56c22a78.625dc20a.20f4e.ffff93fd@mx.google.com> Author: mattip Branch: cpyext-gc-support-2 Changeset: r82273:068b526e7520 Date: 2016-02-15 21:42 +0200 http://bitbucket.org/pypy/pypy/changeset/068b526e7520/ Log: fixes in tests diff --git a/pypy/module/cpyext/test/test_api.py b/pypy/module/cpyext/test/test_api.py --- a/pypy/module/cpyext/test/test_api.py +++ b/pypy/module/cpyext/test/test_api.py @@ -64,7 +64,7 @@ raise try: - del self.space.getexecutioncontext().cpyext_threadstate + self.space.getexecutioncontext().cleanup_cpyext_threadstate() except AttributeError: 
pass diff --git a/pypy/module/cpyext/test/test_stringobject.py b/pypy/module/cpyext/test/test_stringobject.py --- a/pypy/module/cpyext/test/test_stringobject.py +++ b/pypy/module/cpyext/test/test_stringobject.py @@ -28,7 +28,7 @@ if(PyString_Size(s) == 11) { result = 1; } - if(s->ob_type->tp_basicsize != sizeof(void*)*4) + if(s->ob_type->tp_basicsize != sizeof(void*)*5) result = 0; Py_DECREF(s); return PyBool_FromLong(result); diff --git a/rpython/rtyper/lltypesystem/ll2ctypes.py b/rpython/rtyper/lltypesystem/ll2ctypes.py --- a/rpython/rtyper/lltypesystem/ll2ctypes.py +++ b/rpython/rtyper/lltypesystem/ll2ctypes.py @@ -530,6 +530,7 @@ # Ctypes-aware subclasses of the _parentable classes ALLOCATED = {} # mapping {address: _container} +DEBUG_ALLOCATED = True def get_common_subclass(cls1, cls2, cache={}): """Return a unique subclass with (cls1, cls2) as bases.""" @@ -569,7 +570,8 @@ raise Exception("internal ll2ctypes error - " "double conversion from lltype to ctypes?") # XXX don't store here immortal structures - print "LL2CTYPES:", addr + if DEBUG_ALLOCATED: + print "LL2CTYPES:", hex(addr) ALLOCATED[addr] = self def _addressof_storage(self): @@ -582,7 +584,8 @@ self._check() # no double-frees # allow the ctypes object to go away now addr = ctypes.cast(self._storage, ctypes.c_void_p).value - print "LL2C FREE:", addr + if DEBUG_ALLOCATED: + print "LL2C FREE:", hex(addr) try: del ALLOCATED[addr] except KeyError: From pypy.commits at gmail.com Mon Feb 15 23:18:02 2016 From: pypy.commits at gmail.com (rlamy) Date: Mon, 15 Feb 2016 20:18:02 -0800 (PST) Subject: [pypy-commit] pypy llimpl: hg merge default Message-ID: <56c2a2fa.02931c0a.5f6f8.60fe@mx.google.com> Author: Ronan Lamy Branch: llimpl Changeset: r82274:e6a89c683651 Date: 2016-02-15 17:45 +0000 http://bitbucket.org/pypy/pypy/changeset/e6a89c683651/ Log: hg merge default diff too long, truncating to 2000 out of 3299 lines diff --git a/lib-python/2.7/distutils/command/build_ext.py 
b/lib-python/2.7/distutils/command/build_ext.py --- a/lib-python/2.7/distutils/command/build_ext.py +++ b/lib-python/2.7/distutils/command/build_ext.py @@ -188,7 +188,7 @@ # the 'libs' directory is for binary installs - we assume that # must be the *native* platform. But we don't really support # cross-compiling via a binary install anyway, so we let it go. - self.library_dirs.append(os.path.join(sys.exec_prefix, 'include')) + self.library_dirs.append(os.path.join(sys.exec_prefix, 'libs')) if self.debug: self.build_temp = os.path.join(self.build_temp, "Debug") else: diff --git a/lib_pypy/cffi.egg-info/PKG-INFO b/lib_pypy/cffi.egg-info/PKG-INFO --- a/lib_pypy/cffi.egg-info/PKG-INFO +++ b/lib_pypy/cffi.egg-info/PKG-INFO @@ -1,6 +1,6 @@ Metadata-Version: 1.1 Name: cffi -Version: 1.5.0 +Version: 1.5.2 Summary: Foreign Function Interface for Python calling C code. Home-page: http://cffi.readthedocs.org Author: Armin Rigo, Maciej Fijalkowski diff --git a/lib_pypy/cffi/__init__.py b/lib_pypy/cffi/__init__.py --- a/lib_pypy/cffi/__init__.py +++ b/lib_pypy/cffi/__init__.py @@ -4,8 +4,8 @@ from .api import FFI, CDefError, FFIError from .ffiplatform import VerificationError, VerificationMissing -__version__ = "1.5.0" -__version_info__ = (1, 5, 0) +__version__ = "1.5.2" +__version_info__ = (1, 5, 2) # The verifier module file names are based on the CRC32 of a string that # contains the following version number. 
It may be older than __version__ diff --git a/lib_pypy/cffi/_cffi_include.h b/lib_pypy/cffi/_cffi_include.h --- a/lib_pypy/cffi/_cffi_include.h +++ b/lib_pypy/cffi/_cffi_include.h @@ -231,6 +231,12 @@ ((got_nonpos) == (expected <= 0) && \ (got) == (unsigned long long)expected) +#ifdef MS_WIN32 +# define _cffi_stdcall __stdcall +#else +# define _cffi_stdcall /* nothing */ +#endif + #ifdef __cplusplus } #endif diff --git a/lib_pypy/cffi/_embedding.h b/lib_pypy/cffi/_embedding.h new file mode 100644 --- /dev/null +++ b/lib_pypy/cffi/_embedding.h @@ -0,0 +1,517 @@ + +/***** Support code for embedding *****/ + +#if defined(_MSC_VER) +# define CFFI_DLLEXPORT __declspec(dllexport) +#elif defined(__GNUC__) +# define CFFI_DLLEXPORT __attribute__((visibility("default"))) +#else +# define CFFI_DLLEXPORT /* nothing */ +#endif + + +/* There are two global variables of type _cffi_call_python_fnptr: + + * _cffi_call_python, which we declare just below, is the one called + by ``extern "Python"`` implementations. + + * _cffi_call_python_org, which on CPython is actually part of the + _cffi_exports[] array, is the function pointer copied from + _cffi_backend. + + After initialization is complete, both are equal. However, the + first one remains equal to &_cffi_start_and_call_python until the + very end of initialization, when we are (or should be) sure that + concurrent threads also see a completely initialized world, and + only then is it changed. 
+*/ +#undef _cffi_call_python +typedef void (*_cffi_call_python_fnptr)(struct _cffi_externpy_s *, char *); +static void _cffi_start_and_call_python(struct _cffi_externpy_s *, char *); +static _cffi_call_python_fnptr _cffi_call_python = &_cffi_start_and_call_python; + + +#ifndef _MSC_VER + /* --- Assuming a GCC not infinitely old --- */ +# define cffi_compare_and_swap(l,o,n) __sync_bool_compare_and_swap(l,o,n) +# define cffi_write_barrier() __sync_synchronize() +# if !defined(__amd64__) && !defined(__x86_64__) && \ + !defined(__i386__) && !defined(__i386) +# define cffi_read_barrier() __sync_synchronize() +# else +# define cffi_read_barrier() (void)0 +# endif +#else + /* --- Windows threads version --- */ +# include +# define cffi_compare_and_swap(l,o,n) \ + (InterlockedCompareExchangePointer(l,n,o) == (o)) +# define cffi_write_barrier() InterlockedCompareExchange(&_cffi_dummy,0,0) +# define cffi_read_barrier() (void)0 +static volatile LONG _cffi_dummy; +#endif + +#ifdef WITH_THREAD +# ifndef _MSC_VER +# include + static pthread_mutex_t _cffi_embed_startup_lock; +# else + static CRITICAL_SECTION _cffi_embed_startup_lock; +# endif + static char _cffi_embed_startup_lock_ready = 0; +#endif + +static void _cffi_acquire_reentrant_mutex(void) +{ + static void *volatile lock = NULL; + + while (!cffi_compare_and_swap(&lock, NULL, (void *)1)) { + /* should ideally do a spin loop instruction here, but + hard to do it portably and doesn't really matter I + think: pthread_mutex_init() should be very fast, and + this is only run at start-up anyway. 
*/ + } + +#ifdef WITH_THREAD + if (!_cffi_embed_startup_lock_ready) { +# ifndef _MSC_VER + pthread_mutexattr_t attr; + pthread_mutexattr_init(&attr); + pthread_mutexattr_settype(&attr, PTHREAD_MUTEX_RECURSIVE); + pthread_mutex_init(&_cffi_embed_startup_lock, &attr); +# else + InitializeCriticalSection(&_cffi_embed_startup_lock); +# endif + _cffi_embed_startup_lock_ready = 1; + } +#endif + + while (!cffi_compare_and_swap(&lock, (void *)1, NULL)) + ; + +#ifndef _MSC_VER + pthread_mutex_lock(&_cffi_embed_startup_lock); +#else + EnterCriticalSection(&_cffi_embed_startup_lock); +#endif +} + +static void _cffi_release_reentrant_mutex(void) +{ +#ifndef _MSC_VER + pthread_mutex_unlock(&_cffi_embed_startup_lock); +#else + LeaveCriticalSection(&_cffi_embed_startup_lock); +#endif +} + + +/********** CPython-specific section **********/ +#ifndef PYPY_VERSION + + +#define _cffi_call_python_org _cffi_exports[_CFFI_CPIDX] + +PyMODINIT_FUNC _CFFI_PYTHON_STARTUP_FUNC(void); /* forward */ + +static void _cffi_py_initialize(void) +{ + /* XXX use initsigs=0, which "skips initialization registration of + signal handlers, which might be useful when Python is + embedded" according to the Python docs. But review and think + if it should be a user-controllable setting. + + XXX we should also give a way to write errors to a buffer + instead of to stderr. + + XXX if importing 'site' fails, CPython (any version) calls + exit(). Should we try to work around this behavior here? + */ + Py_InitializeEx(0); +} + +static int _cffi_initialize_python(void) +{ + /* This initializes Python, imports _cffi_backend, and then the + present .dll/.so is set up as a CPython C extension module. + */ + int result; + PyGILState_STATE state; + PyObject *pycode=NULL, *global_dict=NULL, *x; + +#if PY_MAJOR_VERSION >= 3 + /* see comments in _cffi_carefully_make_gil() about the + Python2/Python3 difference + */ +#else + /* Acquire the GIL. We have no threadstate here. 
If Python is + already initialized, it is possible that there is already one + existing for this thread, but it is not made current now. + */ + PyEval_AcquireLock(); + + _cffi_py_initialize(); + + /* The Py_InitializeEx() sometimes made a threadstate for us, but + not always. Indeed Py_InitializeEx() could be called and do + nothing. So do we have a threadstate, or not? We don't know, + but we can replace it with NULL in all cases. + */ + (void)PyThreadState_Swap(NULL); + + /* Now we can release the GIL and re-acquire immediately using the + logic of PyGILState(), which handles making or installing the + correct threadstate. + */ + PyEval_ReleaseLock(); +#endif + state = PyGILState_Ensure(); + + /* Call the initxxx() function from the present module. It will + create and initialize us as a CPython extension module, instead + of letting the startup Python code do it---it might reimport + the same .dll/.so and get maybe confused on some platforms. + It might also have troubles locating the .dll/.so again for all + I know. + */ + (void)_CFFI_PYTHON_STARTUP_FUNC(); + if (PyErr_Occurred()) + goto error; + + /* Now run the Python code provided to ffi.embedding_init_code(). + */ + pycode = Py_CompileString(_CFFI_PYTHON_STARTUP_CODE, + "", + Py_file_input); + if (pycode == NULL) + goto error; + global_dict = PyDict_New(); + if (global_dict == NULL) + goto error; + if (PyDict_SetItemString(global_dict, "__builtins__", + PyThreadState_GET()->interp->builtins) < 0) + goto error; + x = PyEval_EvalCode( +#if PY_MAJOR_VERSION < 3 + (PyCodeObject *) +#endif + pycode, global_dict, global_dict); + if (x == NULL) + goto error; + Py_DECREF(x); + + /* Done! Now if we've been called from + _cffi_start_and_call_python() in an ``extern "Python"``, we can + only hope that the Python code did correctly set up the + corresponding @ffi.def_extern() function. 
Otherwise, the + general logic of ``extern "Python"`` functions (inside the + _cffi_backend module) will find that the reference is still + missing and print an error. + */ + result = 0; + done: + Py_XDECREF(pycode); + Py_XDECREF(global_dict); + PyGILState_Release(state); + return result; + + error:; + { + /* Print as much information as potentially useful. + Debugging load-time failures with embedding is not fun + */ + PyObject *exception, *v, *tb, *f, *modules, *mod; + PyErr_Fetch(&exception, &v, &tb); + if (exception != NULL) { + PyErr_NormalizeException(&exception, &v, &tb); + PyErr_Display(exception, v, tb); + } + Py_XDECREF(exception); + Py_XDECREF(v); + Py_XDECREF(tb); + + f = PySys_GetObject((char *)"stderr"); + if (f != NULL && f != Py_None) { + PyFile_WriteString("\nFrom: " _CFFI_MODULE_NAME + "\ncompiled with cffi version: 1.5.2" + "\n_cffi_backend module: ", f); + modules = PyImport_GetModuleDict(); + mod = PyDict_GetItemString(modules, "_cffi_backend"); + if (mod == NULL) { + PyFile_WriteString("not loaded", f); + } + else { + v = PyObject_GetAttrString(mod, "__file__"); + PyFile_WriteObject(v, f, 0); + Py_XDECREF(v); + } + PyFile_WriteString("\nsys.path: ", f); + PyFile_WriteObject(PySys_GetObject((char *)"path"), f, 0); + PyFile_WriteString("\n\n", f); + } + } + result = -1; + goto done; +} + +PyAPI_DATA(char *) _PyParser_TokenNames[]; /* from CPython */ + +static int _cffi_carefully_make_gil(void) +{ + /* This does the basic initialization of Python. It can be called + completely concurrently from unrelated threads. It assumes + that we don't hold the GIL before (if it exists), and we don't + hold it afterwards. + + What it really does is completely different in Python 2 and + Python 3. + + Python 2 + ======== + + Initialize the GIL, without initializing the rest of Python, + by calling PyEval_InitThreads(). + + PyEval_InitThreads() must not be called concurrently at all. + So we use a global variable as a simple spin lock. 
This global + variable must be from 'libpythonX.Y.so', not from this + cffi-based extension module, because it must be shared from + different cffi-based extension modules. We choose + _PyParser_TokenNames[0] as a completely arbitrary pointer value + that is never written to. The default is to point to the + string "ENDMARKER". We change it temporarily to point to the + next character in that string. (Yes, I know it's REALLY + obscure.) + + Python 3 + ======== + + In Python 3, PyEval_InitThreads() cannot be called before + Py_InitializeEx() any more. So this function calls + Py_InitializeEx() first. It uses the same obscure logic to + make sure we never call it concurrently. + + Arguably, this is less good on the spinlock, because + Py_InitializeEx() takes much longer to run than + PyEval_InitThreads(). But I didn't find a way around it. + */ + +#ifdef WITH_THREAD + char *volatile *lock = (char *volatile *)_PyParser_TokenNames; + char *old_value; + + while (1) { /* spin loop */ + old_value = *lock; + if (old_value[0] == 'E') { + assert(old_value[1] == 'N'); + if (cffi_compare_and_swap(lock, old_value, old_value + 1)) + break; + } + else { + assert(old_value[0] == 'N'); + /* should ideally do a spin loop instruction here, but + hard to do it portably and doesn't really matter I + think: PyEval_InitThreads() should be very fast, and + this is only run at start-up anyway. 
*/ + } + } +#endif + +#if PY_MAJOR_VERSION >= 3 + /* Python 3: call Py_InitializeEx() */ + { + PyGILState_STATE state = PyGILState_UNLOCKED; + if (!Py_IsInitialized()) + _cffi_py_initialize(); + else + state = PyGILState_Ensure(); + + PyEval_InitThreads(); + PyGILState_Release(state); + } +#else + /* Python 2: call PyEval_InitThreads() */ +# ifdef WITH_THREAD + if (!PyEval_ThreadsInitialized()) { + PyEval_InitThreads(); /* makes the GIL */ + PyEval_ReleaseLock(); /* then release it */ + } + /* else: there is already a GIL, but we still needed to do the + spinlock dance to make sure that we see it as fully ready */ +# endif +#endif + +#ifdef WITH_THREAD + /* release the lock */ + while (!cffi_compare_and_swap(lock, old_value + 1, old_value)) + ; +#endif + + return 0; +} + +/********** end CPython-specific section **********/ + + +#else + + +/********** PyPy-specific section **********/ + +PyMODINIT_FUNC _CFFI_PYTHON_STARTUP_FUNC(const void *[]); /* forward */ + +static struct _cffi_pypy_init_s { + const char *name; + void (*func)(const void *[]); + const char *code; +} _cffi_pypy_init = { + _CFFI_MODULE_NAME, + _CFFI_PYTHON_STARTUP_FUNC, + _CFFI_PYTHON_STARTUP_CODE, +}; + +extern int pypy_carefully_make_gil(const char *); +extern int pypy_init_embedded_cffi_module(int, struct _cffi_pypy_init_s *); + +static int _cffi_carefully_make_gil(void) +{ + return pypy_carefully_make_gil(_CFFI_MODULE_NAME); +} + +static int _cffi_initialize_python(void) +{ + return pypy_init_embedded_cffi_module(0xB011, &_cffi_pypy_init); +} + +/********** end PyPy-specific section **********/ + + +#endif + + +#ifdef __GNUC__ +__attribute__((noinline)) +#endif +static _cffi_call_python_fnptr _cffi_start_python(void) +{ + /* Delicate logic to initialize Python. This function can be + called multiple times concurrently, e.g. when the process calls + its first ``extern "Python"`` functions in multiple threads at + once. It can also be called recursively, in which case we must + ignore it. 
We also have to consider what occurs if several + different cffi-based extensions reach this code in parallel + threads---it is a different copy of the code, then, and we + can't have any shared global variable unless it comes from + 'libpythonX.Y.so'. + + Idea: + + * _cffi_carefully_make_gil(): "carefully" call + PyEval_InitThreads() (possibly with Py_InitializeEx() first). + + * then we use a (local) custom lock to make sure that a call to this + cffi-based extension will wait if another call to the *same* + extension is running the initialization in another thread. + It is reentrant, so that a recursive call will not block, but + only one from a different thread. + + * then we grab the GIL and (Python 2) we call Py_InitializeEx(). + At this point, concurrent calls to Py_InitializeEx() are not + possible: we have the GIL. + + * do the rest of the specific initialization, which may + temporarily release the GIL but not the custom lock. + Only release the custom lock when we are done. + */ + static char called = 0; + + if (_cffi_carefully_make_gil() != 0) + return NULL; + + _cffi_acquire_reentrant_mutex(); + + /* Here the GIL exists, but we don't have it. We're only protected + from concurrency by the reentrant mutex. */ + + /* This file only initializes the embedded module once, the first + time this is called, even if there are subinterpreters. */ + if (!called) { + called = 1; /* invoke _cffi_initialize_python() only once, + but don't set '_cffi_call_python' right now, + otherwise concurrent threads won't call + this function at all (we need them to wait) */ + if (_cffi_initialize_python() == 0) { + /* now initialization is finished. Switch to the fast-path. */ + + /* We would like nobody to see the new value of + '_cffi_call_python' without also seeing the rest of the + data initialized. However, this is not possible. But + the new value of '_cffi_call_python' is the function + 'cffi_call_python()' from _cffi_backend. 
So: */ + cffi_write_barrier(); + /* ^^^ we put a write barrier here, and a corresponding + read barrier at the start of cffi_call_python(). This + ensures that after that read barrier, we see everything + done here before the write barrier. + */ + + assert(_cffi_call_python_org != NULL); + _cffi_call_python = (_cffi_call_python_fnptr)_cffi_call_python_org; + } + else { + /* initialization failed. Reset this to NULL, even if it was + already set to some other value. Future calls to + _cffi_start_python() are still forced to occur, and will + always return NULL from now on. */ + _cffi_call_python_org = NULL; + } + } + + _cffi_release_reentrant_mutex(); + + return (_cffi_call_python_fnptr)_cffi_call_python_org; +} + +static +void _cffi_start_and_call_python(struct _cffi_externpy_s *externpy, char *args) +{ + _cffi_call_python_fnptr fnptr; + int current_err = errno; +#ifdef _MSC_VER + int current_lasterr = GetLastError(); +#endif + fnptr = _cffi_start_python(); + if (fnptr == NULL) { + fprintf(stderr, "function %s() called, but initialization code " + "failed. Returning 0.\n", externpy->name); + memset(args, 0, externpy->size_of_result); + } +#ifdef _MSC_VER + SetLastError(current_lasterr); +#endif + errno = current_err; + + if (fnptr != NULL) + fnptr(externpy, args); +} + + +/* The cffi_start_python() function makes sure Python is initialized + and our cffi module is set up. It can be called manually from the + user C code. The same effect is obtained automatically from any + dll-exported ``extern "Python"`` function. This function returns + -1 if initialization failed, 0 if all is OK. 
*/ +_CFFI_UNUSED_FN +static int cffi_start_python(void) +{ + if (_cffi_call_python == &_cffi_start_and_call_python) { + if (_cffi_start_python() == NULL) + return -1; + } + cffi_read_barrier(); + return 0; +} + +#undef cffi_compare_and_swap +#undef cffi_write_barrier +#undef cffi_read_barrier diff --git a/lib_pypy/cffi/api.py b/lib_pypy/cffi/api.py --- a/lib_pypy/cffi/api.py +++ b/lib_pypy/cffi/api.py @@ -544,28 +544,45 @@ def _apply_embedding_fix(self, kwds): # must include an argument like "-lpython2.7" for the compiler + def ensure(key, value): + lst = kwds.setdefault(key, []) + if value not in lst: + lst.append(value) + # if '__pypy__' in sys.builtin_module_names: - if hasattr(sys, 'prefix'): - import os - libdir = os.path.join(sys.prefix, 'bin') - dirs = kwds.setdefault('library_dirs', []) - if libdir not in dirs: - dirs.append(libdir) - pythonlib = "pypy-c" + if sys.platform == "win32": + # we need 'libpypy-c.lib'. Current distributions of + # pypy (>= 4.1) contain it as 'libs/python27.lib'. 
+ pythonlib = "python27" + if hasattr(sys, 'prefix'): + ensure('library_dirs', os.path.join(sys.prefix, 'libs')) + else: + # we need 'libpypy-c.{so,dylib}', which should be by + # default located in 'sys.prefix/bin' + pythonlib = "pypy-c" + if hasattr(sys, 'prefix'): + import os + ensure('library_dirs', os.path.join(sys.prefix, 'bin')) else: if sys.platform == "win32": template = "python%d%d" - if sys.flags.debug: - template = template + '_d' + if hasattr(sys, 'gettotalrefcount'): + template += '_d' else: + try: + import sysconfig + except ImportError: # 2.6 + from distutils import sysconfig template = "python%d.%d" + if sysconfig.get_config_var('DEBUG_EXT'): + template += sysconfig.get_config_var('DEBUG_EXT') pythonlib = (template % (sys.hexversion >> 24, (sys.hexversion >> 16) & 0xff)) if hasattr(sys, 'abiflags'): pythonlib += sys.abiflags - libraries = kwds.setdefault('libraries', []) - if pythonlib not in libraries: - libraries.append(pythonlib) + ensure('libraries', pythonlib) + if sys.platform == "win32": + ensure('extra_link_args', '/MANIFEST') def set_source(self, module_name, source, source_extension='.c', **kwds): if hasattr(self, '_assigned_source'): @@ -631,7 +648,7 @@ compiled DLL. Use '*' to force distutils' choice, suitable for regular CPython C API modules. Use a file name ending in '.*' to ask for the system's default extension for dynamic libraries - (.so/.dll). + (.so/.dll/.dylib). The default is '*' when building a non-embedded C API extension, and (module_name + '.*') when building an embedded library. 
@@ -695,6 +712,10 @@ # self._embedding = pysource + def def_extern(self, *args, **kwds): + raise ValueError("ffi.def_extern() is only available on API-mode FFI " + "objects") + def _load_backend_lib(backend, name, flags): if name is None: diff --git a/lib_pypy/cffi/cparser.py b/lib_pypy/cffi/cparser.py --- a/lib_pypy/cffi/cparser.py +++ b/lib_pypy/cffi/cparser.py @@ -220,7 +220,7 @@ self._included_declarations = set() self._anonymous_counter = 0 self._structnode2type = weakref.WeakKeyDictionary() - self._options = None + self._options = {} self._int_constants = {} self._recomplete = [] self._uses_new_feature = None @@ -374,7 +374,7 @@ def _declare_function(self, tp, quals, decl): tp = self._get_type_pointer(tp, quals) - if self._options['dllexport']: + if self._options.get('dllexport'): tag = 'dllexport_python ' elif self._inside_extern_python: tag = 'extern_python ' @@ -450,7 +450,7 @@ prevobj, prevquals = self._declarations[name] if prevobj is obj and prevquals == quals: return - if not self._options['override']: + if not self._options.get('override'): raise api.FFIError( "multiple declarations of %s (for interactive usage, " "try cdef(xx, override=True))" % (name,)) @@ -729,7 +729,7 @@ if isinstance(tp, model.StructType) and tp.partial: raise NotImplementedError("%s: using both bitfields and '...;'" % (tp,)) - tp.packed = self._options['packed'] + tp.packed = self._options.get('packed') if tp.completed: # must be re-completed: it is not opaque any more tp.completed = 0 self._recomplete.append(tp) diff --git a/lib_pypy/cffi/ffiplatform.py b/lib_pypy/cffi/ffiplatform.py --- a/lib_pypy/cffi/ffiplatform.py +++ b/lib_pypy/cffi/ffiplatform.py @@ -21,14 +21,12 @@ allsources.append(os.path.normpath(src)) return Extension(name=modname, sources=allsources, **kwds) -def compile(tmpdir, ext, compiler_verbose=0, target_extension=None, - embedding=False): +def compile(tmpdir, ext, compiler_verbose=0): """Compile a C extension module using distutils.""" saved_environ = 
os.environ.copy() try: - outputfilename = _build(tmpdir, ext, compiler_verbose, - target_extension, embedding) + outputfilename = _build(tmpdir, ext, compiler_verbose) outputfilename = os.path.abspath(outputfilename) finally: # workaround for a distutils bugs where some env vars can @@ -38,32 +36,7 @@ os.environ[key] = value return outputfilename -def _save_val(name): - import distutils.sysconfig - config_vars = distutils.sysconfig.get_config_vars() - return config_vars.get(name, Ellipsis) - -def _restore_val(name, value): - import distutils.sysconfig - config_vars = distutils.sysconfig.get_config_vars() - config_vars[name] = value - if value is Ellipsis: - del config_vars[name] - -def _win32_hack_for_embedding(): - from distutils.msvc9compiler import MSVCCompiler - if not hasattr(MSVCCompiler, '_remove_visual_c_ref_CFFI_BAK'): - MSVCCompiler._remove_visual_c_ref_CFFI_BAK = \ - MSVCCompiler._remove_visual_c_ref - MSVCCompiler._remove_visual_c_ref = lambda self,manifest_file: manifest_file - -def _win32_unhack_for_embedding(): - from distutils.msvc9compiler import MSVCCompiler - MSVCCompiler._remove_visual_c_ref = \ - MSVCCompiler._remove_visual_c_ref_CFFI_BAK - -def _build(tmpdir, ext, compiler_verbose=0, target_extension=None, - embedding=False): +def _build(tmpdir, ext, compiler_verbose=0): # XXX compact but horrible :-( from distutils.core import Distribution import distutils.errors, distutils.log @@ -76,25 +49,14 @@ options['build_temp'] = ('ffiplatform', tmpdir) # try: - if sys.platform == 'win32' and embedding: - _win32_hack_for_embedding() old_level = distutils.log.set_threshold(0) or 0 - old_SO = _save_val('SO') - old_EXT_SUFFIX = _save_val('EXT_SUFFIX') try: - if target_extension is not None: - _restore_val('SO', target_extension) - _restore_val('EXT_SUFFIX', target_extension) distutils.log.set_verbosity(compiler_verbose) dist.run_command('build_ext') cmd_obj = dist.get_command_obj('build_ext') [soname] = cmd_obj.get_outputs() finally: 
distutils.log.set_threshold(old_level) - _restore_val('SO', old_SO) - _restore_val('EXT_SUFFIX', old_EXT_SUFFIX) - if sys.platform == 'win32' and embedding: - _win32_unhack_for_embedding() except (distutils.errors.CompileError, distutils.errors.LinkError) as e: raise VerificationError('%s: %s' % (e.__class__.__name__, e)) diff --git a/lib_pypy/cffi/recompiler.py b/lib_pypy/cffi/recompiler.py --- a/lib_pypy/cffi/recompiler.py +++ b/lib_pypy/cffi/recompiler.py @@ -1170,6 +1170,8 @@ repr_arguments = ', '.join(arguments) repr_arguments = repr_arguments or 'void' name_and_arguments = '%s(%s)' % (name, repr_arguments) + if tp.abi == "__stdcall": + name_and_arguments = '_cffi_stdcall ' + name_and_arguments # def may_need_128_bits(tp): return (isinstance(tp, model.PrimitiveType) and @@ -1357,6 +1359,58 @@ parts[-1] += extension return os.path.join(outputdir, *parts), parts + +# Aaargh. Distutils is not tested at all for the purpose of compiling +# DLLs that are not extension modules. Here are some hacks to work +# around that, in the _patch_for_*() functions... + +def _patch_meth(patchlist, cls, name, new_meth): + old = getattr(cls, name) + patchlist.append((cls, name, old)) + setattr(cls, name, new_meth) + return old + +def _unpatch_meths(patchlist): + for cls, name, old_meth in reversed(patchlist): + setattr(cls, name, old_meth) + +def _patch_for_embedding(patchlist): + if sys.platform == 'win32': + # we must not remove the manifest when building for embedding! 
+ from distutils.msvc9compiler import MSVCCompiler + _patch_meth(patchlist, MSVCCompiler, '_remove_visual_c_ref', + lambda self, manifest_file: manifest_file) + + if sys.platform == 'darwin': + # we must not make a '-bundle', but a '-dynamiclib' instead + from distutils.ccompiler import CCompiler + def my_link_shared_object(self, *args, **kwds): + if '-bundle' in self.linker_so: + self.linker_so = list(self.linker_so) + i = self.linker_so.index('-bundle') + self.linker_so[i] = '-dynamiclib' + return old_link_shared_object(self, *args, **kwds) + old_link_shared_object = _patch_meth(patchlist, CCompiler, + 'link_shared_object', + my_link_shared_object) + +def _patch_for_target(patchlist, target): + from distutils.command.build_ext import build_ext + # if 'target' is different from '*', we need to patch some internal + # method to just return this 'target' value, instead of having it + # built from module_name + if target.endswith('.*'): + target = target[:-2] + if sys.platform == 'win32': + target += '.dll' + elif sys.platform == 'darwin': + target += '.dylib' + else: + target += '.so' + _patch_meth(patchlist, build_ext, 'get_ext_filename', + lambda self, ext_name: target) + + def recompile(ffi, module_name, preamble, tmpdir='.', call_c_compiler=True, c_file=None, source_extension='.c', extradir=None, compiler_verbose=1, target=None, **kwds): @@ -1382,36 +1436,22 @@ target = '%s.*' % module_name else: target = '*' - if target == '*': - target_module_name = module_name - target_extension = None # use default - else: - if target.endswith('.*'): - target = target[:-2] - if sys.platform == 'win32': - target += '.dll' - else: - target += '.so' - # split along the first '.' 
(not the last one, otherwise the - # preceeding dots are interpreted as splitting package names) - index = target.find('.') - if index < 0: - raise ValueError("target argument %r should be a file name " - "containing a '.'" % (target,)) - target_module_name = target[:index] - target_extension = target[index:] # - ext = ffiplatform.get_extension(ext_c_file, target_module_name, **kwds) + ext = ffiplatform.get_extension(ext_c_file, module_name, **kwds) updated = make_c_source(ffi, module_name, preamble, c_file) if call_c_compiler: + patchlist = [] cwd = os.getcwd() try: + if embedding: + _patch_for_embedding(patchlist) + if target != '*': + _patch_for_target(patchlist, target) os.chdir(tmpdir) - outputfilename = ffiplatform.compile('.', ext, compiler_verbose, - target_extension, - embedding=embedding) + outputfilename = ffiplatform.compile('.', ext, compiler_verbose) finally: os.chdir(cwd) + _unpatch_meths(patchlist) return outputfilename else: return ext, updated diff --git a/lib_pypy/cffi/vengine_cpy.py b/lib_pypy/cffi/vengine_cpy.py --- a/lib_pypy/cffi/vengine_cpy.py +++ b/lib_pypy/cffi/vengine_cpy.py @@ -1,3 +1,6 @@ +# +# DEPRECATED: implementation for ffi.verify() +# import sys, imp from . import model, ffiplatform diff --git a/lib_pypy/cffi/vengine_gen.py b/lib_pypy/cffi/vengine_gen.py --- a/lib_pypy/cffi/vengine_gen.py +++ b/lib_pypy/cffi/vengine_gen.py @@ -1,3 +1,6 @@ +# +# DEPRECATED: implementation for ffi.verify() +# import sys, os import types diff --git a/lib_pypy/cffi/verifier.py b/lib_pypy/cffi/verifier.py --- a/lib_pypy/cffi/verifier.py +++ b/lib_pypy/cffi/verifier.py @@ -1,3 +1,6 @@ +# +# DEPRECATED: implementation for ffi.verify() +# import sys, os, binascii, shutil, io from . import __version_verifier_modules__ from . 
import ffiplatform diff --git a/pypy/config/pypyoption.py b/pypy/config/pypyoption.py --- a/pypy/config/pypyoption.py +++ b/pypy/config/pypyoption.py @@ -36,13 +36,13 @@ "cStringIO", "thread", "itertools", "pyexpat", "_ssl", "cpyext", "array", "binascii", "_multiprocessing", '_warnings', "_collections", "_multibytecodec", "micronumpy", "_continuation", "_cffi_backend", - "_csv", "cppyy", "_pypyjson" + "_csv", "cppyy", "_pypyjson", "_vmprof", ]) -if ((sys.platform.startswith('linux') or sys.platform == 'darwin') - and os.uname()[4] == 'x86_64' and sys.maxint > 2**32): +#if ((sys.platform.startswith('linux') or sys.platform == 'darwin') +# and os.uname()[4] == 'x86_64' and sys.maxint > 2**32): # it's not enough that we get x86_64 - working_modules.add('_vmprof') +# working_modules.add('_vmprof') translation_modules = default_modules.copy() translation_modules.update([ diff --git a/pypy/doc/whatsnew-head.rst b/pypy/doc/whatsnew-head.rst --- a/pypy/doc/whatsnew-head.rst +++ b/pypy/doc/whatsnew-head.rst @@ -7,6 +7,9 @@ Fixed ``_PyLong_FromByteArray()``, which was buggy. +Fixed a crash with stacklets (or greenlets) on non-Linux machines +which showed up if you forget stacklets without resuming them. + .. branch: numpy-1.10 Fix tests to run cleanly with -A and start to fix micronumpy for upstream numpy @@ -38,7 +41,8 @@ .. branch: compress-numbering -Improve the memory signature of numbering instances in the JIT. +Improve the memory signature of numbering instances in the JIT. This should massively +decrease the amount of memory consumed by the JIT, which is significant for most programs. .. branch: fix-trace-too-long-heuristic @@ -142,4 +146,9 @@ .. branch: vmprof-newstack -Refactor vmprof to work cross-operating-system. \ No newline at end of file +Refactor vmprof to work cross-operating-system. + +.. 
branch: seperate-strucmember_h + +Seperate structmember.h from Python.h Also enhance creating api functions +to specify which header file they appear in (previously only pypy_decl.h) diff --git a/pypy/goal/targetpypystandalone.py b/pypy/goal/targetpypystandalone.py --- a/pypy/goal/targetpypystandalone.py +++ b/pypy/goal/targetpypystandalone.py @@ -239,6 +239,9 @@ raise Exception("Cannot use the --output option with PyPy " "when --shared is on (it is by default). " "See issue #1971.") + if sys.platform == 'win32': + config.translation.libname = '..\\..\\libs\\python27.lib' + thisdir.join('..', '..', 'libs').ensure(dir=1) if config.translation.thread: config.objspace.usemodules.thread = True diff --git a/pypy/interpreter/baseobjspace.py b/pypy/interpreter/baseobjspace.py --- a/pypy/interpreter/baseobjspace.py +++ b/pypy/interpreter/baseobjspace.py @@ -306,7 +306,7 @@ return None -class W_InterpIterable(W_Root): +class InterpIterable(object): def __init__(self, space, w_iterable): self.w_iter = space.iter(w_iterable) self.space = space @@ -745,9 +745,13 @@ return self.int_w(self.hash(w_obj)) def len_w(self, w_obj): - """shotcut for space.int_w(space.len(w_obj))""" + """shortcut for space.int_w(space.len(w_obj))""" return self.int_w(self.len(w_obj)) + def contains_w(self, w_container, w_item): + """shortcut for space.is_true(space.contains(w_container, w_item))""" + return self.is_true(self.contains(w_container, w_item)) + def setitem_str(self, w_obj, key, w_value): return self.setitem(w_obj, self.wrap(key), w_value) @@ -846,7 +850,7 @@ return lst_w[:] # make the resulting list resizable def iteriterable(self, w_iterable): - return W_InterpIterable(self, w_iterable) + return InterpIterable(self, w_iterable) def _unpackiterable_unknown_length(self, w_iterator, w_iterable): """Unpack an iterable of unknown length into an interp-level @@ -1237,7 +1241,7 @@ if not isinstance(statement, PyCode): raise TypeError('space.exec_(): expected a string, code or PyCode object') 
w_key = self.wrap('__builtins__') - if not self.is_true(self.contains(w_globals, w_key)): + if not self.contains_w(w_globals, w_key): self.setitem(w_globals, w_key, self.wrap(self.builtin)) return statement.exec_code(self, w_globals, w_locals) diff --git a/pypy/module/__builtin__/interp_classobj.py b/pypy/module/__builtin__/interp_classobj.py --- a/pypy/module/__builtin__/interp_classobj.py +++ b/pypy/module/__builtin__/interp_classobj.py @@ -20,7 +20,7 @@ if not space.isinstance_w(w_dict, space.w_dict): raise_type_err(space, 'bases', 'tuple', w_bases) - if not space.is_true(space.contains(w_dict, space.wrap("__doc__"))): + if not space.contains_w(w_dict, space.wrap("__doc__")): space.setitem(w_dict, space.wrap("__doc__"), space.w_None) # XXX missing: lengthy and obscure logic about "__module__" diff --git a/pypy/module/_cffi_backend/__init__.py b/pypy/module/_cffi_backend/__init__.py --- a/pypy/module/_cffi_backend/__init__.py +++ b/pypy/module/_cffi_backend/__init__.py @@ -3,7 +3,7 @@ from rpython.rlib import rdynload, clibffi, entrypoint from rpython.rtyper.lltypesystem import rffi -VERSION = "1.5.0" +VERSION = "1.5.2" FFI_DEFAULT_ABI = clibffi.FFI_DEFAULT_ABI try: @@ -69,6 +69,7 @@ def startup(self, space): from pypy.module._cffi_backend import embedding embedding.glob.space = space + embedding.glob.patched_sys = False def get_dict_rtld_constants(): diff --git a/pypy/module/_cffi_backend/embedding.py b/pypy/module/_cffi_backend/embedding.py --- a/pypy/module/_cffi_backend/embedding.py +++ b/pypy/module/_cffi_backend/embedding.py @@ -45,6 +45,26 @@ pass glob = Global() +def patch_sys(space): + # Annoying: CPython would just use the C-level std{in,out,err} as + # configured by the main application, for example in binary mode + # on Windows or with buffering turned off. We can't easily do the + # same. Instead, go for the safest bet (but possibly bad for + # performance) and open sys.std{in,out,err} unbuffered. 
On + # Windows I guess binary mode is a better default choice. + # + # XXX if needed, we could add support for a flag passed to + # pypy_init_embedded_cffi_module(). + if not glob.patched_sys: + space.appexec([], """(): + import os + sys.stdin = sys.__stdin__ = os.fdopen(0, 'rb', 0) + sys.stdout = sys.__stdout__ = os.fdopen(1, 'wb', 0) + sys.stderr = sys.__stderr__ = os.fdopen(2, 'wb', 0) + """) + glob.patched_sys = True + + def pypy_init_embedded_cffi_module(version, init_struct): # called from __init__.py name = "?" @@ -56,6 +76,7 @@ must_leave = False try: must_leave = space.threadlocals.try_enter_thread(space) + patch_sys(space) load_embedded_cffi_module(space, version, init_struct) res = 0 except OperationError, operr: @@ -84,72 +105,87 @@ return rffi.cast(rffi.INT, res) # ____________________________________________________________ + if os.name == 'nt': - do_startup = r''' -#include -#define WIN32_LEAN_AND_MEAN + + do_includes = r""" +#define _WIN32_WINNT 0x0501 #include -RPY_EXPORTED void rpython_startup_code(void); -RPY_EXPORTED int pypy_setup_home(char *, int); -static unsigned char _cffi_ready = 0; -static const char *volatile _cffi_module_name; +#define CFFI_INIT_HOME_PATH_MAX _MAX_PATH +static void _cffi_init(void); +static void _cffi_init_error(const char *msg, const char *extra); -static void _cffi_init_error(const char *msg, const char *extra) +static int _cffi_init_home(char *output_home_path) { - fprintf(stderr, - "\nPyPy initialization failure when loading module '%s':\n%s%s\n", - _cffi_module_name, msg, extra); -} - -BOOL CALLBACK _cffi_init(PINIT_ONCE InitOnce, PVOID Parameter, PVOID *lpContex) -{ - - HMODULE hModule; - TCHAR home[_MAX_PATH]; - rpython_startup_code(); - RPyGilAllocate(); + HMODULE hModule = 0; + DWORD res; GetModuleHandleEx(GET_MODULE_HANDLE_EX_FLAG_FROM_ADDRESS | GET_MODULE_HANDLE_EX_FLAG_UNCHANGED_REFCOUNT, (LPCTSTR)&_cffi_init, &hModule); + if (hModule == 0 ) { - /* TODO turn the int into a string with FormatMessage */ - - 
_cffi_init_error("dladdr() failed: ", ""); - return TRUE; + _cffi_init_error("GetModuleHandleEx() failed", ""); + return -1; } - GetModuleFileName(hModule, home, _MAX_PATH); - if (pypy_setup_home(home, 1) != 0) { - _cffi_init_error("pypy_setup_home() failed", ""); - return TRUE; + res = GetModuleFileName(hModule, output_home_path, CFFI_INIT_HOME_PATH_MAX); + if (res >= CFFI_INIT_HOME_PATH_MAX) { + return -1; } - _cffi_ready = 1; - fprintf(stderr, "startup succeeded, home %s\n", home); - return TRUE; + return 0; } -RPY_EXPORTED -int pypy_carefully_make_gil(const char *name) +static void _cffi_init_once(void) { - /* For CFFI: this initializes the GIL and loads the home path. - It can be called completely concurrently from unrelated threads. - It assumes that we don't hold the GIL before (if it exists), and we - don't hold it afterwards. - */ - static INIT_ONCE s_init_once; + static LONG volatile lock = 0; + static int _init_called = 0; - _cffi_module_name = name; /* not really thread-safe, but better than - nothing */ - InitOnceExecuteOnce(&s_init_once, _cffi_init, NULL, NULL); - return (int)_cffi_ready - 1; -}''' + while (InterlockedCompareExchange(&lock, 1, 0) != 0) { + SwitchToThread(); /* spin loop */ + } + if (!_init_called) { + _cffi_init(); + _init_called = 1; + } + InterlockedCompareExchange(&lock, 0, 1); +} +""" + else: - do_startup = r""" -#include + + do_includes = r""" #include #include +#define CFFI_INIT_HOME_PATH_MAX PATH_MAX +static void _cffi_init(void); +static void _cffi_init_error(const char *msg, const char *extra); + +static int _cffi_init_home(char *output_home_path) +{ + Dl_info info; + dlerror(); /* reset */ + if (dladdr(&_cffi_init, &info) == 0) { + _cffi_init_error("dladdr() failed: ", dlerror()); + return -1; + } + if (realpath(info.dli_fname, output_home_path) == NULL) { + perror("realpath() failed"); + _cffi_init_error("realpath() failed", ""); + return -1; + } + return 0; +} + +static void _cffi_init_once(void) +{ + static pthread_once_t 
once_control = PTHREAD_ONCE_INIT; + pthread_once(&once_control, _cffi_init); +} +""" + +do_startup = do_includes + r""" RPY_EXPORTED void rpython_startup_code(void); RPY_EXPORTED int pypy_setup_home(char *, int); @@ -165,17 +201,13 @@ static void _cffi_init(void) { - Dl_info info; - char *home; + char home[CFFI_INIT_HOME_PATH_MAX + 1]; rpython_startup_code(); RPyGilAllocate(); - if (dladdr(&_cffi_init, &info) == 0) { - _cffi_init_error("dladdr() failed: ", dlerror()); + if (_cffi_init_home(home) != 0) return; - } - home = realpath(info.dli_fname, NULL); if (pypy_setup_home(home, 1) != 0) { _cffi_init_error("pypy_setup_home() failed", ""); return; @@ -191,11 +223,9 @@ It assumes that we don't hold the GIL before (if it exists), and we don't hold it afterwards. */ - static pthread_once_t once_control = PTHREAD_ONCE_INIT; - _cffi_module_name = name; /* not really thread-safe, but better than nothing */ - pthread_once(&once_control, _cffi_init); + _cffi_init_once(); return (int)_cffi_ready - 1; } """ diff --git a/pypy/module/_cffi_backend/test/_backend_test_c.py b/pypy/module/_cffi_backend/test/_backend_test_c.py --- a/pypy/module/_cffi_backend/test/_backend_test_c.py +++ b/pypy/module/_cffi_backend/test/_backend_test_c.py @@ -1,7 +1,7 @@ # ____________________________________________________________ import sys -assert __version__ == "1.5.0", ("This test_c.py file is for testing a version" +assert __version__ == "1.5.2", ("This test_c.py file is for testing a version" " of cffi that differs from the one that we" " get from 'import _cffi_backend'") if sys.version_info < (3,): diff --git a/pypy/module/_cffi_backend/test/test_ztranslation.py b/pypy/module/_cffi_backend/test/test_ztranslation.py --- a/pypy/module/_cffi_backend/test/test_ztranslation.py +++ b/pypy/module/_cffi_backend/test/test_ztranslation.py @@ -4,15 +4,18 @@ # side-effect: FORMAT_LONGDOUBLE must be built before test_checkmodule() from pypy.module._cffi_backend import misc -from pypy.module._cffi_backend 
import cffi1_module +from pypy.module._cffi_backend import embedding def test_checkmodule(): # prepare_file_argument() is not working without translating the _file # module too def dummy_prepare_file_argument(space, fileobj): - # call load_cffi1_module() too, from a random place like here - cffi1_module.load_cffi1_module(space, "foo", "foo", 42) + # call pypy_init_embedded_cffi_module() from a random place like here + # --- this calls load_cffi1_module(), too + embedding.pypy_init_embedded_cffi_module( + rffi.cast(rffi.INT, embedding.EMBED_VERSION_MIN), + 42) return lltype.nullptr(rffi.CCHARP.TO) old = ctypeptr.prepare_file_argument try: diff --git a/pypy/module/_demo/test/test_import.py b/pypy/module/_demo/test/test_import.py --- a/pypy/module/_demo/test/test_import.py +++ b/pypy/module/_demo/test/test_import.py @@ -12,8 +12,7 @@ w_modules = space.sys.get('modules') assert _demo.Module.demo_events == ['setup'] - assert not space.is_true(space.contains(w_modules, - space.wrap('_demo'))) + assert not space.contains_w(w_modules, space.wrap('_demo')) # first import w_import = space.builtin.get('__import__') diff --git a/pypy/module/_vmprof/interp_vmprof.py b/pypy/module/_vmprof/interp_vmprof.py --- a/pypy/module/_vmprof/interp_vmprof.py +++ b/pypy/module/_vmprof/interp_vmprof.py @@ -60,7 +60,7 @@ Must be smaller than 1.0 """ w_modules = space.sys.get('modules') - if space.is_true(space.contains(w_modules, space.wrap('_continuation'))): + if space.contains_w(w_modules, space.wrap('_continuation')): space.warn(space.wrap("Using _continuation/greenlet/stacklet together " "with vmprof will crash"), space.w_RuntimeWarning) diff --git a/pypy/module/_vmprof/test/test_direct.py b/pypy/module/_vmprof/test/test_direct.py --- a/pypy/module/_vmprof/test/test_direct.py +++ b/pypy/module/_vmprof/test/test_direct.py @@ -69,4 +69,4 @@ result = ffi.cast("void**", buf) res = lib.vmprof_write_header_for_jit_addr(result, 0, ffi.NULL, 100) assert res == 10 - assert [x for x in buf] == [6, 
0, 3, 16, 3, 12, 3, 8, 3, 4 + assert [x for x in buf] == [6, 0, 3, 16, 3, 12, 3, 8, 3, 4] diff --git a/pypy/module/cpyext/api.py b/pypy/module/cpyext/api.py --- a/pypy/module/cpyext/api.py +++ b/pypy/module/cpyext/api.py @@ -59,7 +59,7 @@ class CConfig: _compilation_info_ = ExternalCompilationInfo( include_dirs=include_dirs, - includes=['Python.h', 'stdarg.h'], + includes=['Python.h', 'stdarg.h', 'structmember.h'], compile_extra=['-DPy_BUILD_CORE'], ) @@ -129,6 +129,7 @@ for name in constant_names: setattr(CConfig_constants, name, rffi_platform.ConstantInteger(name)) udir.join('pypy_decl.h').write("/* Will be filled later */\n") +udir.join('pypy_structmember_decl.h').write("/* Will be filled later */\n") udir.join('pypy_macros.h').write("/* Will be filled later */\n") globals().update(rffi_platform.configure(CConfig_constants)) @@ -147,7 +148,7 @@ # XXX: 20 lines of code to recursively copy a directory, really?? assert dstdir.check(dir=True) headers = include_dir.listdir('*.h') + include_dir.listdir('*.inl') - for name in ("pypy_decl.h", "pypy_macros.h"): + for name in ("pypy_decl.h", "pypy_macros.h", "pypy_structmember_decl.h"): headers.append(udir.join(name)) _copy_header_files(headers, dstdir) @@ -232,7 +233,7 @@ wrapper.c_name = cpyext_namespace.uniquename(self.c_name) return wrapper -def cpython_api(argtypes, restype, error=_NOT_SPECIFIED, external=True, +def cpython_api(argtypes, restype, error=_NOT_SPECIFIED, header='pypy_decl.h', gil=None): """ Declares a function to be exported. @@ -241,8 +242,8 @@ special value 'CANNOT_FAIL' (also when restype is Void) turns an eventual exception into a wrapped SystemError. Unwrapped exceptions also cause a SytemError. - - set `external` to False to get a C function pointer, but not exported by - the API headers. + - `header` is the header file to export the function in, Set to None to get + a C function pointer, but not exported by the API headers. 
- set `gil` to "acquire", "release" or "around" to acquire the GIL, release the GIL, or both """ @@ -263,7 +264,7 @@ def decorate(func): func_name = func.func_name - if external: + if header is not None: c_name = None else: c_name = func_name @@ -271,7 +272,7 @@ c_name=c_name, gil=gil) func.api_func = api_function - if external: + if header is not None: assert func_name not in FUNCTIONS, ( "%s already registered" % func_name) @@ -363,8 +364,9 @@ unwrapper_catch = make_unwrapper(True) unwrapper_raise = make_unwrapper(False) - if external: + if header is not None: FUNCTIONS[func_name] = api_function + FUNCTIONS_BY_HEADER.setdefault(header, {})[func_name] = api_function INTERPLEVEL_API[func_name] = unwrapper_catch # used in tests return unwrapper_raise # used in 'normal' RPython code. return decorate @@ -383,6 +385,7 @@ INTERPLEVEL_API = {} FUNCTIONS = {} +FUNCTIONS_BY_HEADER = {} # These are C symbols which cpyext will export, but which are defined in .c # files somewhere in the implementation of cpyext (rather than being defined in @@ -811,6 +814,7 @@ global_code = '\n'.join(global_objects) prologue = ("#include \n" + "#include \n" "#include \n") code = (prologue + struct_declaration_code + @@ -960,7 +964,8 @@ "NOT_RPYTHON" # implement function callbacks and generate function decls functions = [] - pypy_decls = [] + decls = {} + pypy_decls = decls['pypy_decl.h'] = [] pypy_decls.append("#ifndef _PYPY_PYPY_DECL_H\n") pypy_decls.append("#define _PYPY_PYPY_DECL_H\n") pypy_decls.append("#ifndef PYPY_STANDALONE\n") @@ -973,17 +978,23 @@ for decl in FORWARD_DECLS: pypy_decls.append("%s;" % (decl,)) - for name, func in sorted(FUNCTIONS.iteritems()): - restype, args = c_function_signature(db, func) - pypy_decls.append("PyAPI_FUNC(%s) %s(%s);" % (restype, name, args)) - if api_struct: - callargs = ', '.join('arg%d' % (i,) - for i in range(len(func.argtypes))) - if func.restype is lltype.Void: - body = "{ _pypyAPI.%s(%s); }" % (name, callargs) - else: - body = "{ return 
_pypyAPI.%s(%s); }" % (name, callargs) - functions.append('%s %s(%s)\n%s' % (restype, name, args, body)) + for header_name, header_functions in FUNCTIONS_BY_HEADER.iteritems(): + if header_name not in decls: + header = decls[header_name] = [] + else: + header = decls[header_name] + + for name, func in sorted(header_functions.iteritems()): + restype, args = c_function_signature(db, func) + header.append("PyAPI_FUNC(%s) %s(%s);" % (restype, name, args)) + if api_struct: + callargs = ', '.join('arg%d' % (i,) + for i in range(len(func.argtypes))) + if func.restype is lltype.Void: + body = "{ _pypyAPI.%s(%s); }" % (name, callargs) + else: + body = "{ return _pypyAPI.%s(%s); }" % (name, callargs) + functions.append('%s %s(%s)\n%s' % (restype, name, args, body)) for name in VA_TP_LIST: name_no_star = process_va_name(name) header = ('%s pypy_va_get_%s(va_list* vp)' % @@ -1007,8 +1018,9 @@ pypy_decls.append("#endif /*PYPY_STANDALONE*/\n") pypy_decls.append("#endif /*_PYPY_PYPY_DECL_H*/\n") - pypy_decl_h = udir.join('pypy_decl.h') - pypy_decl_h.write('\n'.join(pypy_decls)) + for header_name, header_decls in decls.iteritems(): + decl_h = udir.join(header_name) + decl_h.write('\n'.join(header_decls)) return functions separate_module_files = [source_dir / "varargwrapper.c", diff --git a/pypy/module/cpyext/bufferobject.py b/pypy/module/cpyext/bufferobject.py --- a/pypy/module/cpyext/bufferobject.py +++ b/pypy/module/cpyext/bufferobject.py @@ -73,7 +73,7 @@ "Don't know how to realize a buffer")) - at cpython_api([PyObject], lltype.Void, external=False) + at cpython_api([PyObject], lltype.Void, header=None) def buffer_dealloc(space, py_obj): py_buf = rffi.cast(PyBufferObject, py_obj) if py_buf.c_b_base: diff --git a/pypy/module/cpyext/frameobject.py b/pypy/module/cpyext/frameobject.py --- a/pypy/module/cpyext/frameobject.py +++ b/pypy/module/cpyext/frameobject.py @@ -39,7 +39,7 @@ py_frame.c_f_locals = make_ref(space, frame.get_w_locals()) rffi.setintfield(py_frame, 'c_f_lineno', 
frame.getorcreatedebug().f_lineno) - at cpython_api([PyObject], lltype.Void, external=False) + at cpython_api([PyObject], lltype.Void, header=None) def frame_dealloc(space, py_obj): py_frame = rffi.cast(PyFrameObject, py_obj) py_code = rffi.cast(PyObject, py_frame.c_f_code) diff --git a/pypy/module/cpyext/funcobject.py b/pypy/module/cpyext/funcobject.py --- a/pypy/module/cpyext/funcobject.py +++ b/pypy/module/cpyext/funcobject.py @@ -56,7 +56,7 @@ assert isinstance(w_obj, Function) py_func.c_func_name = make_ref(space, space.wrap(w_obj.name)) - at cpython_api([PyObject], lltype.Void, external=False) + at cpython_api([PyObject], lltype.Void, header=None) def function_dealloc(space, py_obj): py_func = rffi.cast(PyFunctionObject, py_obj) Py_DecRef(space, py_func.c_func_name) @@ -75,7 +75,7 @@ rffi.setintfield(py_code, 'c_co_flags', co_flags) rffi.setintfield(py_code, 'c_co_argcount', w_obj.co_argcount) - at cpython_api([PyObject], lltype.Void, external=False) + at cpython_api([PyObject], lltype.Void, header=None) def code_dealloc(space, py_obj): py_code = rffi.cast(PyCodeObject, py_obj) Py_DecRef(space, py_code.c_co_name) diff --git a/pypy/module/cpyext/include/Python.h b/pypy/module/cpyext/include/Python.h --- a/pypy/module/cpyext/include/Python.h +++ b/pypy/module/cpyext/include/Python.h @@ -84,6 +84,7 @@ #include "pyconfig.h" #include "object.h" +#include "pymath.h" #include "pyport.h" #include "warnings.h" @@ -115,7 +116,6 @@ #include "compile.h" #include "frameobject.h" #include "eval.h" -#include "pymath.h" #include "pymem.h" #include "pycobject.h" #include "pycapsule.h" @@ -132,9 +132,6 @@ /* Missing definitions */ #include "missing.h" -// XXX This shouldn't be included here -#include "structmember.h" - #include /* Define macros for inline documentation. 
*/ diff --git a/pypy/module/cpyext/include/floatobject.h b/pypy/module/cpyext/include/floatobject.h --- a/pypy/module/cpyext/include/floatobject.h +++ b/pypy/module/cpyext/include/floatobject.h @@ -7,6 +7,18 @@ extern "C" { #endif +#define PyFloat_STR_PRECISION 12 + +#ifdef Py_NAN +#define Py_RETURN_NAN return PyFloat_FromDouble(Py_NAN) +#endif + +#define Py_RETURN_INF(sign) do \ + if (copysign(1., sign) == 1.) { \ + return PyFloat_FromDouble(Py_HUGE_VAL); \ + } else { \ + return PyFloat_FromDouble(-Py_HUGE_VAL); \ + } while(0) #ifdef __cplusplus } diff --git a/pypy/module/cpyext/include/pymath.h b/pypy/module/cpyext/include/pymath.h --- a/pypy/module/cpyext/include/pymath.h +++ b/pypy/module/cpyext/include/pymath.h @@ -17,4 +17,35 @@ #define Py_HUGE_VAL HUGE_VAL #endif +/* Py_NAN + * A value that evaluates to a NaN. On IEEE 754 platforms INF*0 or + * INF/INF works. Define Py_NO_NAN in pyconfig.h if your platform + * doesn't support NaNs. + */ +#if !defined(Py_NAN) && !defined(Py_NO_NAN) +#if !defined(__INTEL_COMPILER) + #define Py_NAN (Py_HUGE_VAL * 0.) 
+#else /* __INTEL_COMPILER */ + #if defined(ICC_NAN_STRICT) + #pragma float_control(push) + #pragma float_control(precise, on) + #pragma float_control(except, on) + #if defined(_MSC_VER) + __declspec(noinline) + #else /* Linux */ + __attribute__((noinline)) + #endif /* _MSC_VER */ + static double __icc_nan() + { + return sqrt(-1.0); + } + #pragma float_control (pop) + #define Py_NAN __icc_nan() + #else /* ICC_NAN_RELAXED as default for Intel Compiler */ + static union { unsigned char buf[8]; double __icc_nan; } __nan_store = {0,0,0,0,0,0,0xf8,0x7f}; + #define Py_NAN (__nan_store.__icc_nan) + #endif /* ICC_NAN_STRICT */ +#endif /* __INTEL_COMPILER */ +#endif + #endif /* Py_PYMATH_H */ diff --git a/pypy/module/cpyext/include/structmember.h b/pypy/module/cpyext/include/structmember.h --- a/pypy/module/cpyext/include/structmember.h +++ b/pypy/module/cpyext/include/structmember.h @@ -4,54 +4,85 @@ extern "C" { #endif + +/* Interface to map C struct members to Python object attributes */ + #include /* For offsetof */ + +/* The offsetof() macro calculates the offset of a structure member + in its structure. Unfortunately this cannot be written down + portably, hence it is provided by a Standard C header file. + For pre-Standard C compilers, here is a version that usually works + (but watch out!): */ + #ifndef offsetof #define offsetof(type, member) ( (int) & ((type*)0) -> member ) #endif +/* An array of memberlist structures defines the name, type and offset + of selected members of a C structure. These can be read by + PyMember_Get() and set by PyMember_Set() (except if their READONLY flag + is set). The array must be terminated with an entry whose name + pointer is NULL. 
*/ + + typedef struct PyMemberDef { - /* Current version, use this */ - char *name; - int type; - Py_ssize_t offset; - int flags; - char *doc; + /* Current version, use this */ + char *name; + int type; + Py_ssize_t offset; + int flags; + char *doc; } PyMemberDef; +/* Types */ +#define T_SHORT 0 +#define T_INT 1 +#define T_LONG 2 +#define T_FLOAT 3 +#define T_DOUBLE 4 +#define T_STRING 5 +#define T_OBJECT 6 +/* XXX the ordering here is weird for binary compatibility */ +#define T_CHAR 7 /* 1-character string */ +#define T_BYTE 8 /* 8-bit signed int */ +/* unsigned variants: */ +#define T_UBYTE 9 +#define T_USHORT 10 +#define T_UINT 11 +#define T_ULONG 12 -/* Types. These constants are also in structmemberdefs.py. */ -#define T_SHORT 0 -#define T_INT 1 -#define T_LONG 2 -#define T_FLOAT 3 -#define T_DOUBLE 4 -#define T_STRING 5 -#define T_OBJECT 6 -#define T_CHAR 7 /* 1-character string */ -#define T_BYTE 8 /* 8-bit signed int */ -#define T_UBYTE 9 -#define T_USHORT 10 -#define T_UINT 11 -#define T_ULONG 12 -#define T_STRING_INPLACE 13 /* Strings contained in the structure */ -#define T_BOOL 14 -#define T_OBJECT_EX 16 /* Like T_OBJECT, but raises AttributeError - when the value is NULL, instead of - converting to None. */ -#define T_LONGLONG 17 -#define T_ULONGLONG 18 -#define T_PYSSIZET 19 +/* Added by Jack: strings contained in the structure */ +#define T_STRING_INPLACE 13 + +/* Added by Lillo: bools contained in the structure (assumed char) */ +#define T_BOOL 14 + +#define T_OBJECT_EX 16 /* Like T_OBJECT, but raises AttributeError + when the value is NULL, instead of + converting to None. */ +#ifdef HAVE_LONG_LONG +#define T_LONGLONG 17 +#define T_ULONGLONG 18 +#endif /* HAVE_LONG_LONG */ + +#define T_PYSSIZET 19 /* Py_ssize_t */ /* Flags. These constants are also in structmemberdefs.py. 
*/ -#define READONLY 1 -#define RO READONLY /* Shorthand */ +#define READONLY 1 +#define RO READONLY /* Shorthand */ #define READ_RESTRICTED 2 #define PY_WRITE_RESTRICTED 4 -#define RESTRICTED (READ_RESTRICTED | PY_WRITE_RESTRICTED) +#define RESTRICTED (READ_RESTRICTED | PY_WRITE_RESTRICTED) + + +/* API functions. */ +#include "pypy_structmember_decl.h" #ifdef __cplusplus } #endif #endif /* !Py_STRUCTMEMBER_H */ + diff --git a/pypy/module/cpyext/methodobject.py b/pypy/module/cpyext/methodobject.py --- a/pypy/module/cpyext/methodobject.py +++ b/pypy/module/cpyext/methodobject.py @@ -50,7 +50,7 @@ py_func.c_m_self = make_ref(space, w_obj.w_self) py_func.c_m_module = make_ref(space, w_obj.w_module) - at cpython_api([PyObject], lltype.Void, external=False) + at cpython_api([PyObject], lltype.Void, header=None) def cfunction_dealloc(space, py_obj): py_func = rffi.cast(PyCFunctionObject, py_obj) Py_DecRef(space, py_func.c_m_self) diff --git a/pypy/module/cpyext/pyobject.py b/pypy/module/cpyext/pyobject.py --- a/pypy/module/cpyext/pyobject.py +++ b/pypy/module/cpyext/pyobject.py @@ -70,7 +70,7 @@ alloc : allocate and basic initialization of a raw PyObject attach : Function called to tie a raw structure to a pypy object realize : Function called to create a pypy object from a raw struct - dealloc : a cpython_api(external=False), similar to PyObject_dealloc + dealloc : a cpython_api(header=None), similar to PyObject_dealloc """ tp_basestruct = kw.pop('basestruct', PyObject.TO) diff --git a/pypy/module/cpyext/pytraceback.py b/pypy/module/cpyext/pytraceback.py --- a/pypy/module/cpyext/pytraceback.py +++ b/pypy/module/cpyext/pytraceback.py @@ -41,7 +41,7 @@ rffi.setintfield(py_traceback, 'c_tb_lasti', traceback.lasti) rffi.setintfield(py_traceback, 'c_tb_lineno',traceback.get_lineno()) - at cpython_api([PyObject], lltype.Void, external=False) + at cpython_api([PyObject], lltype.Void, header=None) def traceback_dealloc(space, py_obj): py_traceback = rffi.cast(PyTracebackObject, 
py_obj) Py_DecRef(space, rffi.cast(PyObject, py_traceback.c_tb_next)) diff --git a/pypy/module/cpyext/sliceobject.py b/pypy/module/cpyext/sliceobject.py --- a/pypy/module/cpyext/sliceobject.py +++ b/pypy/module/cpyext/sliceobject.py @@ -36,7 +36,7 @@ py_slice.c_stop = make_ref(space, w_obj.w_stop) py_slice.c_step = make_ref(space, w_obj.w_step) - at cpython_api([PyObject], lltype.Void, external=False) + at cpython_api([PyObject], lltype.Void, header=None) def slice_dealloc(space, py_obj): """Frees allocated PyStringObject resources. """ diff --git a/pypy/module/cpyext/slotdefs.py b/pypy/module/cpyext/slotdefs.py --- a/pypy/module/cpyext/slotdefs.py +++ b/pypy/module/cpyext/slotdefs.py @@ -309,7 +309,7 @@ return space.wrap(generic_cpy_call(space, func_target, w_self, w_other)) - at cpython_api([PyTypeObjectPtr, PyObject, PyObject], PyObject, external=False) + at cpython_api([PyTypeObjectPtr, PyObject, PyObject], PyObject, header=None) def slot_tp_new(space, type, w_args, w_kwds): from pypy.module.cpyext.tupleobject import PyTuple_Check pyo = rffi.cast(PyObject, type) @@ -320,30 +320,30 @@ w_args_new = space.newtuple(args_w) return space.call(w_func, w_args_new, w_kwds) - at cpython_api([PyObject, PyObject, PyObject], rffi.INT_real, error=-1, external=False) + at cpython_api([PyObject, PyObject, PyObject], rffi.INT_real, error=-1, header=None) def slot_tp_init(space, w_self, w_args, w_kwds): w_descr = space.lookup(w_self, '__init__') args = Arguments.frompacked(space, w_args, w_kwds) space.get_and_call_args(w_descr, w_self, args) return 0 - at cpython_api([PyObject, PyObject, PyObject], PyObject, external=False) + at cpython_api([PyObject, PyObject, PyObject], PyObject, header=None) def slot_tp_call(space, w_self, w_args, w_kwds): return space.call(w_self, w_args, w_kwds) - at cpython_api([PyObject], PyObject, external=False) + at cpython_api([PyObject], PyObject, header=None) def slot_tp_str(space, w_self): return space.str(w_self) - at cpython_api([PyObject], 
PyObject, external=False) + at cpython_api([PyObject], PyObject, header=None) def slot_nb_int(space, w_self): return space.int(w_self) - at cpython_api([PyObject], PyObject, external=False) + at cpython_api([PyObject], PyObject, header=None) def slot_tp_iter(space, w_self): return space.iter(w_self) - at cpython_api([PyObject], PyObject, external=False) + at cpython_api([PyObject], PyObject, header=None) def slot_tp_iternext(space, w_self): return space.next(w_self) @@ -371,7 +371,7 @@ return @cpython_api([PyObject, PyObject, PyObject], rffi.INT_real, - error=-1, external=True) # XXX should not be exported + error=-1) # XXX should be header=None @func_renamer("cpyext_tp_setattro_%s" % (typedef.name,)) def slot_tp_setattro(space, w_self, w_name, w_value): if w_value is not None: @@ -385,8 +385,7 @@ if getattr_fn is None: return - @cpython_api([PyObject, PyObject], PyObject, - external=True) + @cpython_api([PyObject, PyObject], PyObject) @func_renamer("cpyext_tp_getattro_%s" % (typedef.name,)) def slot_tp_getattro(space, w_self, w_name): return space.call_function(getattr_fn, w_self, w_name) diff --git a/pypy/module/cpyext/stringobject.py b/pypy/module/cpyext/stringobject.py --- a/pypy/module/cpyext/stringobject.py +++ b/pypy/module/cpyext/stringobject.py @@ -103,7 +103,7 @@ track_reference(space, py_obj, w_obj) return w_obj - at cpython_api([PyObject], lltype.Void, external=False) + at cpython_api([PyObject], lltype.Void, header=None) def string_dealloc(space, py_obj): """Frees allocated PyStringObject resources. 
""" diff --git a/pypy/module/cpyext/structmember.py b/pypy/module/cpyext/structmember.py --- a/pypy/module/cpyext/structmember.py +++ b/pypy/module/cpyext/structmember.py @@ -31,8 +31,10 @@ (T_PYSSIZET, rffi.SSIZE_T, PyLong_AsSsize_t), ]) +_HEADER = 'pypy_structmember_decl.h' - at cpython_api([PyObject, lltype.Ptr(PyMemberDef)], PyObject) + + at cpython_api([PyObject, lltype.Ptr(PyMemberDef)], PyObject, header=_HEADER) def PyMember_GetOne(space, obj, w_member): addr = rffi.cast(ADDR, obj) addr += w_member.c_offset @@ -83,7 +85,8 @@ return w_result - at cpython_api([PyObject, lltype.Ptr(PyMemberDef), PyObject], rffi.INT_real, error=-1) + at cpython_api([PyObject, lltype.Ptr(PyMemberDef), PyObject], rffi.INT_real, + error=-1, header=_HEADER) def PyMember_SetOne(space, obj, w_member, w_value): addr = rffi.cast(ADDR, obj) addr += w_member.c_offset diff --git a/pypy/module/cpyext/test/test_cpyext.py b/pypy/module/cpyext/test/test_cpyext.py --- a/pypy/module/cpyext/test/test_cpyext.py +++ b/pypy/module/cpyext/test/test_cpyext.py @@ -863,3 +863,15 @@ os.unlink('_imported_already') except OSError: pass + + def test_no_structmember(self): + """structmember.h should not be included by default.""" + mod = self.import_extension('foo', [ + ('bar', 'METH_NOARGS', + ''' + /* reuse a name that is #defined in structmember.h */ + int RO; + Py_RETURN_NONE; + ''' + ), + ]) diff --git a/pypy/module/cpyext/test/test_dictobject.py b/pypy/module/cpyext/test/test_dictobject.py --- a/pypy/module/cpyext/test/test_dictobject.py +++ b/pypy/module/cpyext/test/test_dictobject.py @@ -146,7 +146,7 @@ def test_dictproxy(self, space, api): w_dict = space.sys.get('modules') w_proxy = api.PyDictProxy_New(w_dict) - assert space.is_true(space.contains(w_proxy, space.wrap('sys'))) + assert space.contains_w(w_proxy, space.wrap('sys')) raises(OperationError, space.setitem, w_proxy, space.wrap('sys'), space.w_None) raises(OperationError, space.delitem, diff --git 
a/pypy/module/cpyext/test/test_floatobject.py b/pypy/module/cpyext/test/test_floatobject.py --- a/pypy/module/cpyext/test/test_floatobject.py +++ b/pypy/module/cpyext/test/test_floatobject.py @@ -45,3 +45,35 @@ ]) assert module.from_string() == 1234.56 assert type(module.from_string()) is float + +class AppTestFloatMacros(AppTestCpythonExtensionBase): + def test_return_nan(self): + import math + + module = self.import_extension('foo', [ + ("return_nan", "METH_NOARGS", + "Py_RETURN_NAN;"), + ]) + assert math.isnan(module.return_nan()) + + def test_return_inf(self): + import math + + module = self.import_extension('foo', [ + ("return_inf", "METH_NOARGS", + "Py_RETURN_INF(10);"), + ]) + inf = module.return_inf() + assert inf > 0 + assert math.isinf(inf) + + def test_return_inf_negative(self): + import math + + module = self.import_extension('foo', [ + ("return_neginf", "METH_NOARGS", + "Py_RETURN_INF(-10);"), + ]) + neginf = module.return_neginf() + assert neginf < 0 + assert math.isinf(neginf) diff --git a/pypy/module/cpyext/test/test_import.py b/pypy/module/cpyext/test/test_import.py --- a/pypy/module/cpyext/test/test_import.py +++ b/pypy/module/cpyext/test/test_import.py @@ -21,7 +21,7 @@ def test_getmoduledict(self, space, api): testmod = "_functools" w_pre_dict = api.PyImport_GetModuleDict() - assert not space.is_true(space.contains(w_pre_dict, space.wrap(testmod))) + assert not space.contains_w(w_pre_dict, space.wrap(testmod)) with rffi.scoped_str2charp(testmod) as modname: w_module = api.PyImport_ImportModule(modname) @@ -29,7 +29,7 @@ assert w_module w_dict = api.PyImport_GetModuleDict() - assert space.is_true(space.contains(w_dict, space.wrap(testmod))) + assert space.contains_w(w_dict, space.wrap(testmod)) def test_reload(self, space, api): stat = api.PyImport_Import(space.wrap("stat")) diff --git a/pypy/module/cpyext/test/test_intobject.py b/pypy/module/cpyext/test/test_intobject.py --- a/pypy/module/cpyext/test/test_intobject.py +++ 
b/pypy/module/cpyext/test/test_intobject.py @@ -99,6 +99,7 @@ """), ], prologue=""" From pypy.commits at gmail.com Mon Feb 15 23:18:04 2016 From: pypy.commits at gmail.com (rlamy) Date: Mon, 15 Feb 2016 20:18:04 -0800 (PST) Subject: [pypy-commit] pypy llimpl: Create SomeExternalFunction, clean up signature checking and sandboxing of externals Message-ID: <56c2a2fc.8abb1c0a.6e7d9.5913@mx.google.com> Author: Ronan Lamy Branch: llimpl Changeset: r82275:7b8f4a3f1bd2 Date: 2016-02-16 04:17 +0000 http://bitbucket.org/pypy/pypy/changeset/7b8f4a3f1bd2/ Log: Create SomeExternalFunction, clean up signature checking and sandboxing of externals diff --git a/rpython/annotator/policy.py b/rpython/annotator/policy.py --- a/rpython/annotator/policy.py +++ b/rpython/annotator/policy.py @@ -86,12 +86,11 @@ continue key = ('sandboxing', s_func.const) if key not in bk.emulated_pbc_calls: - entry = s_func.entry - params_s = entry.signature_args - s_result = entry.signature_result + params_s = s_func.args_s + s_result = s_func.s_result from rpython.translator.sandbox.rsandbox import make_sandbox_trampoline sandbox_trampoline = make_sandbox_trampoline( - entry.name, params_s, s_result) + s_func.name, params_s, s_result) sandbox_trampoline._signature_ = [SomeTuple(items=params_s)], s_result bk.emulate_pbc_call(key, bk.immutablevalue(sandbox_trampoline), params_s) else: diff --git a/rpython/rtyper/extfunc.py b/rpython/rtyper/extfunc.py --- a/rpython/rtyper/extfunc.py +++ b/rpython/rtyper/extfunc.py @@ -1,16 +1,22 @@ from rpython.rtyper.extregistry import ExtRegistryEntry from rpython.rtyper.lltypesystem.lltype import typeOf, FuncType, functionptr, _ptr -from rpython.annotator.model import unionof +from rpython.annotator.model import unionof, SomeBuiltin from rpython.annotator.signature import annotation, SignatureError -class ExtFuncEntry(ExtRegistryEntry): - safe_not_sandboxed = False +class SomeExternalFunction(SomeBuiltin): + def __init__(self, name, args_s, s_result): + self.name = 
name + self.args_s = args_s + self.s_result = s_result - def check_args(self, *args_s): - params_s = self.signature_args - assert len(args_s) == len(params_s),\ - "Argument number mismatch" - + def check_args(self, callspec): + params_s = self.args_s + args_s, kwargs = callspec.unpack() + if kwargs: + raise SignatureError( + "External functions cannot be called with keyword arguments") + if len(args_s) != len(params_s): + raise SignatureError("Argument number mismatch") for i, s_param in enumerate(params_s): arg = unionof(args_s[i], s_param) if not s_param.contains(arg): @@ -18,18 +24,20 @@ "arg %d must be %s,\n" " got %s" % ( self.name, i+1, s_param, args_s[i])) - return params_s - def compute_result_annotation(self, *args_s): - self.check_args(*args_s) - return self.signature_result + def call(self, callspec): + self.check_args(callspec) + return self.s_result + +class ExtFuncEntry(ExtRegistryEntry): + safe_not_sandboxed = False def compute_annotation(self): - s_result = super(ExtFuncEntry, self).compute_annotation() + s_result = SomeExternalFunction( + self.name, self.signature_args, self.signature_result) if (self.bookkeeper.annotator.translator.config.translation.sandbox and not self.safe_not_sandboxed): s_result.needs_sandboxing = True - s_result.entry = self return s_result def specialize_call(self, hop): From pypy.commits at gmail.com Tue Feb 16 03:51:51 2016 From: pypy.commits at gmail.com (plan_rich) Date: Tue, 16 Feb 2016 00:51:51 -0800 (PST) Subject: [pypy-commit] pypy s390x-backend: catchup with default Message-ID: <56c2e327.41dfc20a.cfe0f.4c78@mx.google.com> Author: Richard Plangger Branch: s390x-backend Changeset: r82276:c9f5118bcf08 Date: 2016-02-16 09:51 +0100 http://bitbucket.org/pypy/pypy/changeset/c9f5118bcf08/ Log: catchup with default diff --git a/rpython/annotator/bookkeeper.py b/rpython/annotator/bookkeeper.py --- a/rpython/annotator/bookkeeper.py +++ b/rpython/annotator/bookkeeper.py @@ -551,20 +551,6 @@ emulated = callback return 
self.pbc_call(pbc, args, emulated=emulated) - def _find_current_op(self, opname=None, arity=None, pos=None, s_type=None): - """ Find operation that is currently being annotated. Do some - sanity checks to see whether the correct op was found.""" - # XXX XXX HACK HACK HACK - fn, block, i = self.position_key - op = block.operations[i] - if opname is not None: - assert op.opname == opname - if arity is not None: - assert len(op.args) == arity - if pos is not None: - assert self.annotator.binding(op.args[pos]) == s_type - return op - def whereami(self): return self.annotator.whereami(self.position_key) diff --git a/rpython/jit/metainterp/optimizeopt/rewrite.py b/rpython/jit/metainterp/optimizeopt/rewrite.py --- a/rpython/jit/metainterp/optimizeopt/rewrite.py +++ b/rpython/jit/metainterp/optimizeopt/rewrite.py @@ -380,7 +380,7 @@ raise InvalidLoop("promote of a virtual") old_guard_op = info.get_last_guard(self.optimizer) if old_guard_op is not None: - op = self.replace_guard_class_with_guard_value(op, info, + op = self.replace_old_guard_with_guard_value(op, info, old_guard_op) elif arg0.type == 'f': arg0 = self.get_box_replacement(arg0) @@ -390,11 +390,26 @@ assert isinstance(constbox, Const) self.optimize_guard(op, constbox) - def replace_guard_class_with_guard_value(self, op, info, old_guard_op): - if old_guard_op.opnum != rop.GUARD_NONNULL: - previous_classbox = info.get_known_class(self.optimizer.cpu) - expected_classbox = self.optimizer.cpu.ts.cls_of_box(op.getarg(1)) - assert previous_classbox is not None + def replace_old_guard_with_guard_value(self, op, info, old_guard_op): + # there already has been a guard_nonnull or guard_class or + # guard_nonnull_class on this value, which is rather silly. + # This function replaces the original guard with a + # guard_value. Must be careful: doing so is unsafe if the + # original guard checks for something inconsistent, + # i.e. 
different than what it would give if the guard_value + # passed (this is a rare case, but possible). If we get + # inconsistent results in this way, then we must not do the + # replacement, otherwise we'd put guard_value up there but all + # intermediate ops might be executed by assuming something + # different, from the old guard that is now removed... + + c_value = op.getarg(1) + if not c_value.nonnull(): + raise InvalidLoop('A GUARD_VALUE(..., NULL) follows some other ' + 'guard that it is not NULL') + previous_classbox = info.get_known_class(self.optimizer.cpu) + if previous_classbox is not None: + expected_classbox = self.optimizer.cpu.ts.cls_of_box(c_value) assert expected_classbox is not None if not previous_classbox.same_constant( expected_classbox): diff --git a/rpython/jit/metainterp/optimizeopt/test/test_optimizeopt.py b/rpython/jit/metainterp/optimizeopt/test/test_optimizeopt.py --- a/rpython/jit/metainterp/optimizeopt/test/test_optimizeopt.py +++ b/rpython/jit/metainterp/optimizeopt/test/test_optimizeopt.py @@ -3063,6 +3063,16 @@ self.optimize_loop(ops, expected, preamble) #self.check_expanded_fail_descr("i0", rop.GUARD_VALUE) + def test_invalid_guard_value_after_guard_class(self): + ops = """ + [p1, i0, i1, i2, p2] + guard_class(p1, ConstClass(node_vtable)) [i0] + i3 = int_add(i1, i2) + guard_value(p1, NULL) [i1] + jump(p2, i0, i1, i3, p2) + """ + self.raises(InvalidLoop, self.optimize_loop, ops, ops) + def test_guard_class_oois(self): ops = """ [p1] diff --git a/rpython/rlib/rvmprof/test/test_rvmprof.py b/rpython/rlib/rvmprof/test/test_rvmprof.py --- a/rpython/rlib/rvmprof/test/test_rvmprof.py +++ b/rpython/rlib/rvmprof/test/test_rvmprof.py @@ -101,7 +101,7 @@ s = 0 for i in range(num): s += (i << 1) - if s % 32423423423 == 0: + if s % 2123423423 == 0: print s return s From pypy.commits at gmail.com Tue Feb 16 05:39:52 2016 From: pypy.commits at gmail.com (plan_rich) Date: Tue, 16 Feb 2016 02:39:52 -0800 (PST) Subject: [pypy-commit] pypy 
s390x-backend: stacklet switch command s390x was broken due to saving of f8-f15, storing them into the standard frame in unused slots instead Message-ID: <56c2fc78.016b1c0a.9fef2.1aa6@mx.google.com> Author: Richard Plangger Branch: s390x-backend Changeset: r82277:b41c97d15afd Date: 2016-02-16 11:38 +0100 http://bitbucket.org/pypy/pypy/changeset/b41c97d15afd/ Log: stacklet switch command s390x was broken due to saving of f8-f15, storing them into the standard frame in unused slots instead diff --git a/rpython/translator/c/src/stacklet/switch_s390x_gcc.h b/rpython/translator/c/src/stacklet/switch_s390x_gcc.h --- a/rpython/translator/c/src/stacklet/switch_s390x_gcc.h +++ b/rpython/translator/c/src/stacklet/switch_s390x_gcc.h @@ -10,22 +10,18 @@ /* The Stackless version by Kristjan Valur Jonsson, ported to s390x by Richard Plangger */ - "lay 15,-64(15)\n" /* additional stack space to store f8-f15 */ "stmg 6,15,48(15)\n" - "std 0,128(15)\n" - "std 2,136(15)\n" - "std 4,144(15)\n" - "std 6,152(15)\n" + // store f8 - f15 into the stack frame that is not used! 
+ "std 8,128(15)\n" + "std 9,136(15)\n" + "std 10,144(15)\n" + "std 11,152(15)\n" - "std 8, 160(15)\n" - "std 9, 168(15)\n" - "std 10,176(15)\n" - "std 11,184(15)\n" - "std 12,192(15)\n" - "std 13,200(15)\n" - "std 14,208(15)\n" - "std 15,216(15)\n" + "std 12,16(15)\n" + "std 13,24(15)\n" + "std 14,32(15)\n" + "std 15,40(15)\n" "lgr 10, %[restore_state]\n" /* save 'restore_state' for later */ "lgr 11, %[extra]\n" /* save 'extra' for later */ @@ -35,7 +31,7 @@ "lay 15,-160(15)\n" /* create stack frame */ "basr 14, 14\n" /* call save_state() */ - "lay 15, 160(15)\n" /* destroy stack frame */ + "lay 15,160(15)\n" "cgij 2, 0, 8, zero\n" /* skip the rest if the return value is null */ @@ -47,31 +43,24 @@ is already in r2 */ "lgr 3, 11\n" /* arg 2: extra */ - - "lay 15, -160(15)\n" /* create temp stack space for callee to use */ - "basr 14, 10\n" /* call restore_state() */ - "lay 15, 160(15)\n" /* destroy temp stack space */ + "lay 15,-160(15)\n" /* create stack frame */ + "basr 14, 10\n" /* call restore_state() */ + "lay 15,160(15)\n" /* The stack's content is now restored. 
*/ "zero:\n" /* Epilogue */ - /* no need */ /* restore stack pointer */ + "ld 8,128(15)\n" + "ld 9,136(15)\n" + "ld 10,144(15)\n" + "ld 11,152(15)\n" - "ld 0,128(15)\n" - "ld 2,136(15)\n" - "ld 4,144(15)\n" - "ld 6,152(15)\n" - - "ld 8, 160(15)\n" - "ld 9, 168(15)\n" - "ld 10,176(15)\n" - "ld 11,184(15)\n" - "ld 12,192(15)\n" - "ld 13,200(15)\n" - "ld 14,208(15)\n" - "ld 15,216(15)\n" + "ld 12,16(15)\n" + "ld 13,24(15)\n" + "ld 14,32(15)\n" + "ld 15,40(15)\n" "lmg 6,15,48(15)\n" From pypy.commits at gmail.com Tue Feb 16 05:55:15 2016 From: pypy.commits at gmail.com (plan_rich) Date: Tue, 16 Feb 2016 02:55:15 -0800 (PST) Subject: [pypy-commit] pypy s390x-backend: added skip for s390x on _vmprof tests in module Message-ID: <56c30013.512f1c0a.9fdf3.ffffd32a@mx.google.com> Author: Richard Plangger Branch: s390x-backend Changeset: r82278:024d0fe16089 Date: 2016-02-16 11:53 +0100 http://bitbucket.org/pypy/pypy/changeset/024d0fe16089/ Log: added skip for s390x on _vmprof tests in module diff --git a/pypy/module/_vmprof/conftest.py b/pypy/module/_vmprof/conftest.py new file mode 100644 --- /dev/null +++ b/pypy/module/_vmprof/conftest.py @@ -0,0 +1,6 @@ +import py, os + +def pytest_collect_directory(path, parent): + if os.uname()[4] == 's390x': + py.test.skip("zarch tests skipped") +pytest_collect_file = pytest_collect_directory From pypy.commits at gmail.com Tue Feb 16 08:27:01 2016 From: pypy.commits at gmail.com (plan_rich) Date: Tue, 16 Feb 2016 05:27:01 -0800 (PST) Subject: [pypy-commit] pypy s390x-backend: regalloc has now one more pair, SPP is now r12 (was r11) Message-ID: <56c323a5.0c2d1c0a.3495f.16a2@mx.google.com> Author: Richard Plangger Branch: s390x-backend Changeset: r82279:9a63f13fcdbd Date: 2016-02-16 13:01 +0100 http://bitbucket.org/pypy/pypy/changeset/9a63f13fcdbd/ Log: regalloc has now one more pair, SPP is now r12 (was r11) rewritten regalloc pairs. it is now simpler and easier to understand. 
diff --git a/rpython/jit/backend/zarch/assembler.py b/rpython/jit/backend/zarch/assembler.py --- a/rpython/jit/backend/zarch/assembler.py +++ b/rpython/jit/backend/zarch/assembler.py @@ -180,7 +180,7 @@ mc.push_std_frame() RCS2 = r.r10 - RCS3 = r.r12 + RCS3 = r.r11 # r10,r11,r12,r2,f0 -> makes exactly 4 words + 8 byte extra_stack_size = 4 * WORD + 8 @@ -330,7 +330,7 @@ mc.LGR(r.r3, r.SCRATCH2) RCS2 = r.r10 - RCS3 = r.r12 + RCS3 = r.r11 self._store_and_reset_exception(mc, RCS2, RCS3) @@ -387,7 +387,7 @@ come. """ # signature of these cond_call_slowpath functions: - # * on entry, r12 contains the function to call + # * on entry, r11 contains the function to call # * r2, r3, r4, r5 contain arguments for the call # * r0 is the gcmap # * the old value of these regs must already be stored in the jitframe @@ -400,7 +400,7 @@ mc.store_link() mc.push_std_frame() - # copy registers to the frame, with the exception of r2 to r5 and r12, + # copy registers to the frame, with the exception of r2 to r5 and r11, # because these have already been saved by the caller. Note that # this is not symmetrical: these 5 registers are saved by the caller # but restored here at the end of this function. @@ -413,13 +413,13 @@ reg is not r.r3 and reg is not r.r4 and reg is not r.r5 and - reg is not r.r12] + reg is not r.r11] self._push_core_regs_to_jitframe(mc, regs) if supports_floats: self._push_fp_regs_to_jitframe(mc) # allocate a stack frame! - mc.raw_call(r.r12) + mc.raw_call(r.r11) # Finish self._reload_frame_if_necessary(mc) diff --git a/rpython/jit/backend/zarch/codebuilder.py b/rpython/jit/backend/zarch/codebuilder.py --- a/rpython/jit/backend/zarch/codebuilder.py +++ b/rpython/jit/backend/zarch/codebuilder.py @@ -195,9 +195,9 @@ def sync(self): self.BCR_rr(0xf,0) - def raw_call(self, call_reg=r.RETURN): + def raw_call(self, call_reg=r.r14): """Emit a call to the address stored in the register 'call_reg', - which must be either RAW_CALL_REG or r12. 
This is a regular C + which must be either RAW_CALL_REG or r11. This is a regular C function pointer, which means on big-endian that it is actually the address of a three-words descriptor. """ diff --git a/rpython/jit/backend/zarch/opassembler.py b/rpython/jit/backend/zarch/opassembler.py --- a/rpython/jit/backend/zarch/opassembler.py +++ b/rpython/jit/backend/zarch/opassembler.py @@ -367,7 +367,7 @@ def _find_nearby_operation(self, regalloc, delta): return regalloc.operations[regalloc.rm.position + delta] - _COND_CALL_SAVE_REGS = [r.r12, r.r2, r.r3, r.r4, r.r5] + _COND_CALL_SAVE_REGS = [r.r11, r.r2, r.r3, r.r4, r.r5] def emit_cond_call(self, op, arglocs, regalloc): fcond = self.guard_success_cc @@ -378,7 +378,7 @@ jmp_adr = self.mc.get_relative_pos() self.mc.reserve_cond_jump() # patched later to a relative branch - # save away r2, r3, r4, r5, r12 into the jitframe + # save away r2, r3, r4, r5, r11 into the jitframe should_be_saved = [ reg for reg in self._regalloc.rm.reg_bindings.itervalues() if reg in self._COND_CALL_SAVE_REGS] @@ -388,9 +388,9 @@ self.load_gcmap(self.mc, r.SCRATCH2, regalloc.get_gcmap()) # # load the 0-to-4 arguments into these registers, with the address of - # the function to call into r12 + # the function to call into r11 remap_frame_layout(self, arglocs, - [r.r12, r.r2, r.r3, r.r4, r.r5][:len(arglocs)], + [r.r11, r.r2, r.r3, r.r4, r.r5][:len(arglocs)], r.SCRATCH) # # figure out which variant of cond_call_slowpath to call, and call it diff --git a/rpython/jit/backend/zarch/regalloc.py b/rpython/jit/backend/zarch/regalloc.py --- a/rpython/jit/backend/zarch/regalloc.py +++ b/rpython/jit/backend/zarch/regalloc.py @@ -171,24 +171,29 @@ self.temp_boxes.append(box) return reg - def ensure_even_odd_pair(self, var, bindvar, bind_first=True, + def ensure_even_odd_pair(self, origvar, bindvar, bind_first=True, must_exist=True, load_loc_odd=True, move_regs=True): """ Allocates two registers that can be used by the instruction. 
- var: is the original register holding the value + origvar: is the original register holding the value bindvar: is the variable that will be bound (= self.reg_bindings[bindvar] = new register) bind_first: the even register will be bound to bindvar, if bind_first == False: the odd register will be bound """ - self._check_type(var) - prev_loc = self.loc(var, must_exist=must_exist) + self._check_type(origvar) + prev_loc = self.loc(origvar, must_exist=must_exist) var2 = TempVar() + if bindvar is None: + bindvar = TempVar() if bind_first: loc, loc2 = self.force_allocate_reg_pair(bindvar, var2, self.temp_boxes) else: loc, loc2 = self.force_allocate_reg_pair(var2, bindvar, self.temp_boxes) + if isinstance(bindvar, TempVar): + self.temp_boxes.append(bindvar) + self.temp_boxes.append(var2) assert loc.is_even() and loc2.is_odd() if move_regs and prev_loc is not loc2: @@ -198,148 +203,115 @@ self.assembler.regalloc_mov(prev_loc, loc) return loc, loc2 - def force_allocate_reg_pair(self, var, var2, forbidden_vars=[], selected_reg=None): - """ Forcibly allocate a register for the new variable var. - var will have an even register (var2 will have an odd register). + def force_allocate_reg_pair(self, even_var, odd_var, forbidden_vars): + """ Forcibly allocate a register for the new variable even_var. + even_var will have an even register (odd_var, you guessed it, + will have an odd register). 
""" - self._check_type(var) - self._check_type(var2) - if isinstance(var, TempVar): - self.longevity[var] = (self.position, self.position) - if isinstance(var2, TempVar): - self.longevity[var2] = (self.position, self.position) + self._check_type(even_var) + self._check_type(odd_var) + if isinstance(even_var, TempVar): + self.longevity[even_var] = (self.position, self.position) + if isinstance(odd_var, TempVar): + self.longevity[odd_var] = (self.position, self.position) + + # this function steps through the following: + # 1) maybe there is an even/odd pair that is always + # free, then allocate them! + # 2) try to just spill one variable in either the even + # or the odd reg + # 3) spill two variables + + # start in 1) + SPILL_EVEN = 0 + SPILL_ODD = 1 even, odd = None, None - REGS = r.registers + candidates = [] i = len(self.free_regs)-1 - candidates = {} while i >= 0: even = self.free_regs[i] if even.is_even(): # found an even registers that is actually free - odd = REGS[even.value+1] - if odd not in r.MANAGED_REGS: - # makes no sense to use this register! - i -= 1 - continue + odd = r.registers[even.value+1] if odd not in self.free_regs: # sadly odd is not free, but for spilling # we found a candidate - candidates[odd] = True + candidates.append((even, odd, SPILL_ODD)) i -= 1 continue - assert var not in self.reg_bindings - assert var2 not in self.reg_bindings - self.reg_bindings[var] = even - self.reg_bindings[var2] = odd - del self.free_regs[i] - i = self.free_regs.index(odd) - del self.free_regs[i] - assert even.is_even() and odd.is_odd() + # even is free and so is odd! allocate these + # two registers + assert even_var not in self.reg_bindings + assert odd_var not in self.reg_bindings + self.reg_bindings[even_var] = even + self.reg_bindings[odd_var] = odd + self.free_regs = [fr for fr in self.free_regs \ + if fr is not even and \ + fr is not odd] return even, odd else: # an odd free register, maybe the even one is # a candidate? 
odd = even - even = REGS[odd.value-1] - if even not in r.MANAGED_REGS: - # makes no sense to use this register! - i -= 1 - continue + even = r.registers[odd.value-1] if even not in self.free_regs: # yes even might be a candidate # this means that odd is free, but not even - candidates[even] = True + candidates.append((even, odd, SPILL_EVEN)) i -= 1 - if len(candidates) != 0: - cur_max_age = -1 - candidate = None - # pseudo step to find best spilling candidate - # similar to _pick_variable_to_spill, but tailored - # to take the even/odd register allocation in consideration - for next in self.reg_bindings: - if next in forbidden_vars: - continue - reg = self.reg_bindings[next] - if reg in candidates: - reg2 = None - if reg.is_even(): - reg2 = REGS[reg.value+1] - else: - reg2 = REGS[reg.value-1] - if reg2 not in r.MANAGED_REGS: - continue - max_age = self.longevity[next][1] - if cur_max_age < max_age: - cur_max_age = max_age - candidate = next - if candidate is not None: - # well, we got away with a single spill :) - reg = self.reg_bindings[candidate] - self._sync_var(candidate) - del self.reg_bindings[candidate] - if reg.is_even(): - assert var is not candidate - self.reg_bindings[var] = reg - rmfree = REGS[reg.value+1] - self.reg_bindings[var2] = rmfree - self.free_regs = [fr for fr in self.free_regs if fr is not rmfree] - return reg, rmfree - else: - assert var2 is not candidate - self.reg_bindings[var2] = reg - rmfree = REGS[reg.value-1] - self.reg_bindings[var] = rmfree - self.free_regs = [fr for fr in self.free_regs if fr is not rmfree] - return rmfree, reg + reverse_mapping = {} + for v, reg in self.reg_bindings.items(): + reverse_mapping[reg] = v + + # needs to spill one variable + for even, odd, which_to_spill in candidates: + # no heuristic, pick the first + if which_to_spill == SPILL_EVEN: + orig_var_even = reverse_mapping[even] + if orig_var_even in forbidden_vars: + continue # duh! 
+ self._sync_var(orig_var_even) + del self.reg_bindings[orig_var_even] + elif which_to_spill == SPILL_ODD: + orig_var_odd = reverse_mapping[odd] + if orig_var_odd in forbidden_vars: + continue # duh! + self._sync_var(orig_var_odd) + del self.reg_bindings[orig_var_odd] + + # well, we got away with a single spill :) + self.free_regs = [fr for fr in self.free_regs \ + if fr is not even and \ + fr is not odd] + self.reg_bindings[even_var] = even + self.reg_bindings[odd_var] = odd + return even, odd # there is no candidate pair that only would # require one spill, thus we need to spill two! # this is a rare case! - reverse_mapping = {} - for v, reg in self.reg_bindings.items(): - reverse_mapping[reg] = v - # always take the first - for i, reg in enumerate(r.MANAGED_REGS): - if i % 2 == 1: + for even, odd in r.MANAGED_REG_PAIRS: + orig_var_even = reverse_mapping[even] + orig_var_odd = reverse_mapping[odd] + if orig_var_even in forbidden_vars or \ + orig_var_odd in forbidden_vars: continue - if i+1 < len(r.MANAGED_REGS): - reg2 = r.MANAGED_REGS[i+1] - assert reg.is_even() and reg2.is_odd() - ovar = reverse_mapping.get(reg,None) - if ovar is None: - continue - if ovar in forbidden_vars: - continue - ovar2 = reverse_mapping.get(reg2, None) - if ovar2 is not None and ovar2 in forbidden_vars: - # blocked, try other register pair - continue - even = reg - odd = reg2 - self._sync_var(ovar) - self._sync_var(ovar2) - del self.reg_bindings[ovar] - if ovar2 is not None: - del self.reg_bindings[ovar2] - # both are not added to free_regs! no need to do so - self.reg_bindings[var] = even - self.reg_bindings[var2] = odd - break + + self._sync_var(orig_var_even) + del self.reg_bindings[orig_var_even] + self._sync_var(orig_var_odd) + del self.reg_bindings[orig_var_odd] + + self.reg_bindings[even_var] = even + self.reg_bindings[odd_var] = odd + break else: # no break! this is bad. 
really bad raise NoVariableToSpill() - reverse_mapping = None - return even, odd - def force_result_in_even_reg(self, result_v, loc, forbidden_vars=[]): - pass - - def force_result_in_odd_reg(self, result_v, loc, forbidden_vars=[]): - pass - class ZARCHFrameManager(FrameManager): def __init__(self, base_ofs): FrameManager.__init__(self) @@ -990,11 +962,9 @@ # args: base, start, len, scale_start, scale_len itemsize, ofs, _ = unpack_arraydescr(op.getdescr()) startindex_loc = self.ensure_reg_or_16bit_imm(op.getarg(1)) - tempvar = TempInt() ofs_loc = self.ensure_reg_or_16bit_imm(ConstInt(ofs)) - base_loc, length_loc = self.rm.ensure_even_odd_pair(op.getarg(0), tempvar, + base_loc, length_loc = self.rm.ensure_even_odd_pair(op.getarg(0), None, bind_first=True, must_exist=False, load_loc_odd=False) - self.rm.temp_boxes.append(tempvar) length_box = op.getarg(2) ll = self.rm.loc(length_box) @@ -1145,13 +1115,11 @@ src_len: when entering the assembler, src_ofs_loc's value is contained in src_len register. """ - src_tmp = TempVar() src_ptr_loc, _ = \ self.rm.ensure_even_odd_pair(op.getarg(0), - src_tmp, bind_first=True, + None, bind_first=True, must_exist=False, load_loc_odd=False) src_ofs_loc = self.ensure_reg_or_any_imm(op.getarg(2)) - self.rm.temp_boxes.append(src_tmp) dst_ptr_loc = self.ensure_reg(op.getarg(1)) dst_ofs_loc = self.ensure_reg_or_any_imm(op.getarg(3)) length_loc = self.ensure_reg_or_any_imm(op.getarg(4)) diff --git a/rpython/jit/backend/zarch/registers.py b/rpython/jit/backend/zarch/registers.py --- a/rpython/jit/backend/zarch/registers.py +++ b/rpython/jit/backend/zarch/registers.py @@ -7,17 +7,18 @@ [r0,r1,r2,r3,r4,r5,r6,r7,r8, r9,r10,r11,r12,r13,r14,r15] = registers -MANAGED_REGS = [r2,r3,r4,r5,r6,r7,r8,r9,r10,r12] # keep this list sorted (asc)! +MANAGED_REGS = [r2,r3,r4,r5,r6,r7,r8,r9,r10,r11] # keep this list sorted (asc)! 
+MANAGED_REG_PAIRS = [(r2,r3), (r4,r5), (r6,r7), (r8,r9), (r10,r11)] VOLATILES = [r2,r3,r4,r5,r6] SP = r15 RETURN = r14 POOL = r13 -SPP = r11 +SPP = r12 SCRATCH = r1 SCRATCH2 = r0 GPR_RETURN = r2 RES = r2 -RSZ = r12 # do not use a volatile register +RSZ = r11 # do not use a volatile register [f0,f1,f2,f3,f4,f5,f6,f7,f8, f9,f10,f11,f12,f13,f14,f15] = fpregisters From pypy.commits at gmail.com Tue Feb 16 08:27:03 2016 From: pypy.commits at gmail.com (plan_rich) Date: Tue, 16 Feb 2016 05:27:03 -0800 (PST) Subject: [pypy-commit] pypy s390x-backend: some more tests stressing the reg allocation of register pairs Message-ID: <56c323a7.512f1c0a.9fdf3.0d96@mx.google.com> Author: Richard Plangger Branch: s390x-backend Changeset: r82280:713aaa7859d5 Date: 2016-02-16 14:26 +0100 http://bitbucket.org/pypy/pypy/changeset/713aaa7859d5/ Log: some more tests stressing the reg allocation of register pairs diff --git a/rpython/jit/backend/zarch/regalloc.py b/rpython/jit/backend/zarch/regalloc.py --- a/rpython/jit/backend/zarch/regalloc.py +++ b/rpython/jit/backend/zarch/regalloc.py @@ -292,8 +292,8 @@ # require one spill, thus we need to spill two! # this is a rare case! 
for even, odd in r.MANAGED_REG_PAIRS: - orig_var_even = reverse_mapping[even] - orig_var_odd = reverse_mapping[odd] + orig_var_even = reverse_mapping.get(even,None) + orig_var_odd = reverse_mapping.get(odd,None) if orig_var_even in forbidden_vars or \ orig_var_odd in forbidden_vars: continue diff --git a/rpython/jit/backend/zarch/test/test_regalloc.py b/rpython/jit/backend/zarch/test/test_regalloc.py new file mode 100644 --- /dev/null +++ b/rpython/jit/backend/zarch/test/test_regalloc.py @@ -0,0 +1,246 @@ +import py +from rpython.jit.metainterp.history import JitCellToken +from rpython.jit.backend.detect_cpu import getcpuclass +from rpython.jit.backend.zarch.arch import WORD +from rpython.jit.backend.zarch.regalloc import (ZARCHRegisterManager, + ZARCHFrameManager) +import rpython.jit.backend.zarch.registers as r +from rpython.jit.backend.llsupport.regalloc import TempVar, NoVariableToSpill +from rpython.jit.tool.oparser import parse + +CPU = getcpuclass() + +class FakeAssembler(object): + def __init__(self): + self.move_count = 0 + def regalloc_mov(self, f, t): + self.move_count += 1 + +class FakeRegalloc(ZARCHRegisterManager): + def __init__(self): + ZARCHRegisterManager.__init__(self, {}, ZARCHFrameManager(0), FakeAssembler()) + + def allocate(self, *regs): + for reg,var in regs: + register = r.registers[reg] + self.reg_bindings[var] = register + self.free_regs = [fr for fr in self.free_regs if fr is not register] + +class TempInt(TempVar): + type = 'i' + def __repr__(self): + return "" % (id(self),) + +def temp_vars(count): + return [TempInt() for _ in range(count)] + +class TestRegalloc(object): + def setup_method(self, name): + self.rm = FakeRegalloc() + + def test_all_free(self): + a,b = temp_vars(2) + self.rm.force_allocate_reg_pair(a, b, []) + assert self.rm.reg_bindings[a] == r.r2 + assert self.rm.reg_bindings[b] == r.r3 + + def test_all_but_one_forbidden(self): + a,b,f1,f2,f3,f4,o = temp_vars(7) + self.rm.allocate((2,f1),(4,f2),(6,f3),(8,f4),(10,o)) + 
self.rm.force_allocate_reg_pair(a, b, [f1,f2,f3,f4]) + assert self.rm.reg_bindings[a] == r.r10 + assert self.rm.reg_bindings[b] == r.r11 + + def test_cannot_spill(self): + a,b,f1,f2,f3,f4,f5 = temp_vars(7) + self.rm.allocate((2,f1),(4,f2),(6,f3),(8,f4),(10,f5)) + with py.test.raises(NoVariableToSpill): + self.rm.force_allocate_reg_pair(a, b, [f1,f2,f3,f4,f5]) + + def test_all_but_one_forbidden_odd(self): + a,b,f1,f2,f3,f4,f5 = temp_vars(7) + self.rm.allocate((3,f1),(5,f2),(7,f3),(9,f4),(11,f5)) + self.rm.force_allocate_reg_pair(a, b, [f1,f3,f4,f5]) + assert self.rm.reg_bindings[a] == r.r4 + assert self.rm.reg_bindings[b] == r.r5 + + def test_ensure_reg_pair(self): + a,b,f1 = temp_vars(3) + self.rm.allocate((4,f1),(2,a)) + self.rm.temp_boxes = [f1] + re, ro = self.rm.ensure_even_odd_pair(a, b) + assert re == r.r6 + assert ro == r.r7 + assert re != self.rm.reg_bindings[a] + assert ro != self.rm.reg_bindings[a] + assert self.rm.assembler.move_count == 1 + + def test_ensure_reg_pair_bind_second(self): + a,b,f1,f2,f3,f4 = temp_vars(6) + self.rm.allocate((4,f1),(2,a),(6,f2),(8,f3),(10,f4)) + self.rm.temp_boxes = [f1,f2,f3,f4] + re, ro = self.rm.ensure_even_odd_pair(a, b, bind_first=False) + assert re == r.r2 + assert ro == r.r3 + assert ro == self.rm.reg_bindings[b] + assert a not in self.rm.reg_bindings + assert self.rm.assembler.move_count == 2 + +def run(inputargs, ops): + cpu = CPU(None, None) + cpu.setup_once() + loop = parse(ops, cpu, namespace=locals()) + looptoken = JitCellToken() + cpu.compile_loop(loop.inputargs, loop.operations, looptoken) + deadframe = cpu.execute_token(looptoken, *inputargs) + return cpu, deadframe + +def test_bug_rshift(): + cpu, deadframe = run([9], ''' + [i1] + i2 = int_add(i1, i1) + i3 = int_invert(i2) + i4 = uint_rshift(i1, 3) + i5 = int_add(i4, i3) + finish(i5) + ''') + assert cpu.get_int_value(deadframe, 0) == (9 >> 3) + (~18) + +def test_bug_int_is_true_1(): + cpu, deadframe = run([-10], ''' + [i1] + i2 = int_mul(i1, i1) + i3 = 
int_mul(i2, i1) + i5 = int_is_true(i2) + i4 = int_is_zero(i5) + guard_false(i5) [i4, i3] + finish(42) + ''') + assert cpu.get_int_value(deadframe, 0) == 0 + assert cpu.get_int_value(deadframe, 1) == -1000 + +def test_bug_0(): + cpu, deadframe = run([-13, 10, 10, 8, -8, -16, -18, 46, -12, 26], ''' + [i1, i2, i3, i4, i5, i6, i7, i8, i9, i10] + i11 = uint_gt(i3, -48) + i12 = int_xor(i8, i1) + i13 = int_gt(i6, -9) + i14 = int_le(i13, i2) + i15 = int_le(i11, i5) + i16 = uint_ge(i13, i13) + i17 = int_or(i9, -23) + i18 = int_lt(i10, i13) + i19 = int_or(i15, i5) + i20 = int_xor(i17, 54) + i21 = int_mul(i8, i10) + i22 = int_or(i3, i9) + i41 = int_and(i11, -4) + i42 = int_or(i41, 1) + i23 = int_mod(i12, i42) + i24 = int_is_true(i6) + i25 = uint_rshift(i15, 6) + i26 = int_or(-4, i25) + i27 = int_invert(i8) + i28 = int_sub(-113, i11) + i29 = int_neg(i7) + i30 = int_neg(i24) + i31 = int_floordiv(i3, 53) + i32 = int_mul(i28, i27) + i43 = int_and(i18, -4) + i44 = int_or(i43, 1) + i33 = int_mod(i26, i44) + i34 = int_or(i27, i19) + i35 = uint_lt(i13, 1) + i45 = int_and(i21, 31) + i36 = int_rshift(i21, i45) + i46 = int_and(i20, 31) + i37 = uint_rshift(i4, i46) + i38 = uint_gt(i33, -11) + i39 = int_neg(i7) + i40 = int_gt(i24, i32) + i99 = same_as_i(0) + guard_true(i99) [i40, i36, i37, i31, i16, i34, i35, i23, i22, i29, i14, i39, i30, i38] + finish(42) + ''') + assert cpu.get_int_value(deadframe, 0) == 0 + assert cpu.get_int_value(deadframe, 1) == 0 + assert cpu.get_int_value(deadframe, 2) == 0 + assert cpu.get_int_value(deadframe, 3) == 0 + assert cpu.get_int_value(deadframe, 4) == 1 + assert cpu.get_int_value(deadframe, 5) == -7 + assert cpu.get_int_value(deadframe, 6) == 1 + assert cpu.get_int_value(deadframe, 7) == 0 + assert cpu.get_int_value(deadframe, 8) == -2 + assert cpu.get_int_value(deadframe, 9) == 18 + assert cpu.get_int_value(deadframe, 10) == 1 + assert cpu.get_int_value(deadframe, 11) == 18 + assert cpu.get_int_value(deadframe, 12) == -1 + assert 
cpu.get_int_value(deadframe, 13) == 0 + +def test_bug_1(): + cpu, deadframe = run([17, -20, -6, 6, 1, 13, 13, 9, 49, 8], ''' + [i1, i2, i3, i4, i5, i6, i7, i8, i9, i10] + i11 = uint_lt(i6, 0) + i41 = int_and(i3, 31) + i12 = int_rshift(i3, i41) + i13 = int_neg(i2) + i14 = int_add(i11, i7) + i15 = int_or(i3, i2) + i16 = int_or(i12, i12) + i17 = int_ne(i2, i5) + i42 = int_and(i5, 31) + i18 = uint_rshift(i14, i42) + i43 = int_and(i14, 31) + i19 = int_lshift(7, i43) + i20 = int_neg(i19) + i21 = int_mod(i3, 1) + i22 = uint_ge(i15, i1) + i44 = int_and(i16, 31) + i23 = int_lshift(i8, i44) + i24 = int_is_true(i17) + i45 = int_and(i5, 31) + i25 = int_lshift(i14, i45) + i26 = int_lshift(i5, 17) + i27 = int_eq(i9, i15) + i28 = int_ge(0, i6) + i29 = int_neg(i15) + i30 = int_neg(i22) + i31 = int_add(i7, i16) + i32 = uint_lt(i19, i19) + i33 = int_add(i2, 1) + i34 = int_neg(i5) + i35 = int_add(i17, i24) + i36 = uint_lt(2, i16) + i37 = int_neg(i9) + i38 = int_gt(i4, i11) + i39 = int_lt(i27, i22) + i40 = int_neg(i27) + i99 = same_as_i(0) + guard_true(i99) [i40, i10, i36, i26, i13, i30, i21, i33, i18, i25, i31, i32, i28, i29, i35, i38, i20, i39, i34, i23, i37] + finish(-42) + ''') + assert cpu.get_int_value(deadframe, 0) == 0 + assert cpu.get_int_value(deadframe, 1) == 8 + assert cpu.get_int_value(deadframe, 2) == 1 + assert cpu.get_int_value(deadframe, 3) == 131072 + assert cpu.get_int_value(deadframe, 4) == 20 + assert cpu.get_int_value(deadframe, 5) == -1 + assert cpu.get_int_value(deadframe, 6) == 0 + assert cpu.get_int_value(deadframe, 7) == -19 + assert cpu.get_int_value(deadframe, 8) == 6 + assert cpu.get_int_value(deadframe, 9) == 26 + assert cpu.get_int_value(deadframe, 10) == 12 + assert cpu.get_int_value(deadframe, 11) == 0 + assert cpu.get_int_value(deadframe, 12) == 0 + assert cpu.get_int_value(deadframe, 13) == 2 + assert cpu.get_int_value(deadframe, 14) == 2 + assert cpu.get_int_value(deadframe, 15) == 1 + assert cpu.get_int_value(deadframe, 16) == -57344 + assert 
cpu.get_int_value(deadframe, 17) == 1 + assert cpu.get_int_value(deadframe, 18) == -1 + if WORD == 4: + assert cpu.get_int_value(deadframe, 19) == -2147483648 + elif WORD == 8: + assert cpu.get_int_value(deadframe, 19) == 19327352832 + assert cpu.get_int_value(deadframe, 20) == -49 + From pypy.commits at gmail.com Tue Feb 16 09:03:56 2016 From: pypy.commits at gmail.com (cfbolz) Date: Tue, 16 Feb 2016 06:03:56 -0800 (PST) Subject: [pypy-commit] pypy reorder-map-attributes: refactor: actually introduce a really elidable function (as opposed to a fake Message-ID: <56c32c4c.e906c20a.10a96.ffffbe24@mx.google.com> Author: Carl Friedrich Bolz Branch: reorder-map-attributes Changeset: r82281:b2cf73983594 Date: 2016-02-16 10:01 +0000 http://bitbucket.org/pypy/pypy/changeset/b2cf73983594/ Log: refactor: actually introduce a really elidable function (as opposed to a fake one) diff --git a/pypy/objspace/std/mapdict.py b/pypy/objspace/std/mapdict.py --- a/pypy/objspace/std/mapdict.py +++ b/pypy/objspace/std/mapdict.py @@ -188,14 +188,41 @@ obj._set_mapdict_map(self) obj._mapdict_write_storage(self.storageindex, w_value) + + @jit.elidable + def _find_branch_to_move_into(self, name, index): + # walk up the map chain to find an ancestor with lower order that + # already has the current name as a child inserted + current_order = sys.maxint + number_to_readd = 0 + current = self + key = (name, index) + while True: + attr = None + if current.cache_attrs is not None: + attr = current.cache_attrs.get(key, None) + if attr is None or attr.order > current_order: + # we reached the top, so we didn't find it anywhere, + # just add it to the top attribute + if not isinstance(current, PlainAttribute): + return 0, self._get_new_attr(name, index) + + else: + return number_to_readd, attr + # if not found try parent + number_to_readd += 1 + current_order = current.order + current = current.back + @jit.look_inside_iff(lambda self, obj, name, index, w_value: jit.isconstant(self) and 
jit.isconstant(name) and jit.isconstant(index)) def _reorder_and_add(self, obj, name, index, w_value): - # the idea is as follows: the subtrees of any map are ordered by insertion. - # the invariant is that subtrees that are inserted later must not contain - # the name of the attribute of any earlier inserted attribute anywhere + # the idea is as follows: the subtrees of any map are ordered by + # insertion. the invariant is that subtrees that are inserted later + # must not contain the name of the attribute of any earlier inserted + # attribute anywhere # m______ # inserted first / \ ... \ further attributes # attrname a 0/ 1\ n\ @@ -217,41 +244,23 @@ while True: current = self number_to_readd = 0 - current_order = sys.maxint - # walk up the map chain to find an ancestor with lower order that - # already has the current name as a child inserted - while True: - attr = current._get_cache_attr(name, index) - if attr is None or attr.order > current_order: - # we reached the top, so we didn't find it anywhere, - # just add it - if not isinstance(current, PlainAttribute): - self._add_attr_without_reordering(obj, name, index, w_value) - break - - # if not found try parent - else: - number_to_readd += 1 - current_order = current.order - current = current.back - else: - # we found the attributes further up, need to save the - # previous values of the attributes we passed - if number_to_readd: - if stack_maps is None: - stack_maps = [None] * self.length() - stack_values = [None] * self.length() - current = self - for i in range(number_to_readd): - assert isinstance(current, PlainAttribute) - w_self_value = obj._mapdict_read_storage( - current.storageindex) - stack_maps[stack_index] = current - stack_values[stack_index] = w_self_value - stack_index += 1 - current = current.back - attr._switch_map_and_write_storage(obj, w_value) - break + number_to_readd, attr = self._find_branch_to_move_into(name, index) + # we found the attributes further up, need to save the + # previous 
values of the attributes we passed + if number_to_readd: + if stack_maps is None: + stack_maps = [None] * self.length() + stack_values = [None] * self.length() + current = self + for i in range(number_to_readd): + assert isinstance(current, PlainAttribute) + w_self_value = obj._mapdict_read_storage( + current.storageindex) + stack_maps[stack_index] = current + stack_values[stack_index] = w_self_value + stack_index += 1 + current = current.back + attr._switch_map_and_write_storage(obj, w_value) if not stack_index: return From pypy.commits at gmail.com Tue Feb 16 09:03:58 2016 From: pypy.commits at gmail.com (cfbolz) Date: Tue, 16 Feb 2016 06:03:58 -0800 (PST) Subject: [pypy-commit] pypy reorder-map-attributes: only use one stack Message-ID: <56c32c4e.96941c0a.178ff.2447@mx.google.com> Author: Carl Friedrich Bolz Branch: reorder-map-attributes Changeset: r82282:b4c08e1a3819 Date: 2016-02-16 14:08 +0000 http://bitbucket.org/pypy/pypy/changeset/b4c08e1a3819/ Log: only use one stack diff --git a/pypy/objspace/std/mapdict.py b/pypy/objspace/std/mapdict.py --- a/pypy/objspace/std/mapdict.py +++ b/pypy/objspace/std/mapdict.py @@ -12,6 +12,11 @@ from pypy.objspace.std.typeobject import MutableCell +erase_item, unerase_item = rerased.new_erasing_pair("mapdict storage item") +erase_map, unerase_map = rerased.new_erasing_pair("map") +erase_list, unerase_list = rerased.new_erasing_pair("mapdict storage list") + + # ____________________________________________________________ # attribute shapes @@ -219,10 +224,10 @@ jit.isconstant(name) and jit.isconstant(index)) def _reorder_and_add(self, obj, name, index, w_value): - # the idea is as follows: the subtrees of any map are ordered by - # insertion. the invariant is that subtrees that are inserted later - # must not contain the name of the attribute of any earlier inserted - # attribute anywhere + # the idea is as follows: the subtrees of any map are ordered by + # insertion. 
the invariant is that subtrees that are inserted later + # must not contain the name of the attribute of any earlier inserted + # attribute anywhere # m______ # inserted first / \ ... \ further attributes # attrname a 0/ 1\ n\ @@ -235,11 +240,10 @@ # able to do that. They need to be re-added, which has to follow the # reordering procedure recusively. - # we store the to-be-readded attribute in stack_maps and stack_values - # those are lazily initialized to two lists large enough to store all - # current attributes - stack_maps = None - stack_values = None + # we store the to-be-readded attribute in the stack, with the map and + # the value paired up those are lazily initialized to a list large + # enough to store all current attributes + stack = None stack_index = 0 while True: current = self @@ -248,17 +252,16 @@ # we found the attributes further up, need to save the # previous values of the attributes we passed if number_to_readd: - if stack_maps is None: - stack_maps = [None] * self.length() - stack_values = [None] * self.length() + if stack is None: + stack = [erase_map(None)] * (self.length() * 2) current = self for i in range(number_to_readd): assert isinstance(current, PlainAttribute) w_self_value = obj._mapdict_read_storage( current.storageindex) - stack_maps[stack_index] = current - stack_values[stack_index] = w_self_value - stack_index += 1 + stack[stack_index] = erase_map(current) + stack[stack_index + 1] = erase_item(w_self_value) + stack_index += 2 current = current.back attr._switch_map_and_write_storage(obj, w_value) @@ -266,9 +269,9 @@ return # readd the current top of the stack - stack_index -= 1 - next_map = stack_maps[stack_index] - w_value = stack_values[stack_index] + stack_index -= 2 + next_map = unerase_map(stack[stack_index]) + w_value = unerase_item(stack[stack_index + 1]) name = next_map.name index = next_map.index self = obj._get_mapdict_map() @@ -641,9 +644,6 @@ memo_get_subclass_of_correct_size._annspecialcase_ = "specialize:memo" 
_subclass_cache = {} -erase_item, unerase_item = rerased.new_erasing_pair("mapdict storage item") -erase_list, unerase_list = rerased.new_erasing_pair("mapdict storage list") - def _make_subclass_size_n(supercls, n): from rpython.rlib import unroll rangen = unroll.unrolling_iterable(range(n)) From pypy.commits at gmail.com Tue Feb 16 10:59:40 2016 From: pypy.commits at gmail.com (arigo) Date: Tue, 16 Feb 2016 07:59:40 -0800 (PST) Subject: [pypy-commit] pypy cpyext-gc-support-2: fix Message-ID: <56c3476c.890bc30a.7fea6.ffffe2c7@mx.google.com> Author: Armin Rigo Branch: cpyext-gc-support-2 Changeset: r82283:fee405ccf63f Date: 2016-02-16 16:58 +0100 http://bitbucket.org/pypy/pypy/changeset/fee405ccf63f/ Log: fix diff --git a/pypy/module/cpyext/test/test_unicodeobject.py b/pypy/module/cpyext/test/test_unicodeobject.py --- a/pypy/module/cpyext/test/test_unicodeobject.py +++ b/pypy/module/cpyext/test/test_unicodeobject.py @@ -24,7 +24,7 @@ if(PyUnicode_GetSize(s) == 11) { result = 1; } - if(s->ob_type->tp_basicsize != sizeof(void*)*4) + if(s->ob_type->tp_basicsize != sizeof(void*)*5) result = 0; Py_DECREF(s); return PyBool_FromLong(result); From pypy.commits at gmail.com Tue Feb 16 11:05:15 2016 From: pypy.commits at gmail.com (arigo) Date: Tue, 16 Feb 2016 08:05:15 -0800 (PST) Subject: [pypy-commit] pypy cpyext-gc-support-2: fix tests Message-ID: <56c348bb.8205c20a.fec93.ffffe768@mx.google.com> Author: Armin Rigo Branch: cpyext-gc-support-2 Changeset: r82284:f2e9762cb0d3 Date: 2016-02-16 17:04 +0100 http://bitbucket.org/pypy/pypy/changeset/f2e9762cb0d3/ Log: fix tests diff --git a/pypy/module/cpyext/test/test_stringobject.py b/pypy/module/cpyext/test/test_stringobject.py --- a/pypy/module/cpyext/test/test_stringobject.py +++ b/pypy/module/cpyext/test/test_stringobject.py @@ -231,7 +231,9 @@ ref = make_ref(space, space.wrap('abc')) ptr = lltype.malloc(PyObjectP.TO, 1, flavor='raw') ptr[0] = ref + prev_refcnt = ref.c_ob_refcnt api.PyString_Concat(ptr, space.wrap('def')) + 
assert ref.c_ob_refcnt == prev_refcnt - 1 assert space.str_w(from_ref(space, ptr[0])) == 'abcdef' api.PyString_Concat(ptr, space.w_None) assert not ptr[0] @@ -244,14 +246,16 @@ ref2 = make_ref(space, space.wrap('def')) ptr = lltype.malloc(PyObjectP.TO, 1, flavor='raw') ptr[0] = ref1 + prev_refcnf = ref2.c_ob_refcnt api.PyString_ConcatAndDel(ptr, ref2) assert space.str_w(from_ref(space, ptr[0])) == 'abcdef' - assert ref2.c_ob_refcnt == 0 + assert ref2.c_ob_refcnt == prev_refcnf - 1 Py_DecRef(space, ptr[0]) ptr[0] = lltype.nullptr(PyObject.TO) ref2 = make_ref(space, space.wrap('foo')) + prev_refcnf = ref2.c_ob_refcnt api.PyString_ConcatAndDel(ptr, ref2) # should not crash - assert ref2.c_ob_refcnt == 0 + assert ref2.c_ob_refcnt == prev_refcnf - 1 lltype.free(ptr, flavor='raw') def test_format(self, space, api): From pypy.commits at gmail.com Tue Feb 16 11:33:26 2016 From: pypy.commits at gmail.com (arigo) Date: Tue, 16 Feb 2016 08:33:26 -0800 (PST) Subject: [pypy-commit] pypy cpyext-gc-support-2: Next fix (rawrefcount debugging code takes weakrefs to the W_Root objects) Message-ID: <56c34f56.657bc20a.338d4.595c@mx.google.com> Author: Armin Rigo Branch: cpyext-gc-support-2 Changeset: r82285:860e2a46b04c Date: 2016-02-16 17:32 +0100 http://bitbucket.org/pypy/pypy/changeset/860e2a46b04c/ Log: Next fix (rawrefcount debugging code takes weakrefs to the W_Root objects) diff --git a/pypy/interpreter/baseobjspace.py b/pypy/interpreter/baseobjspace.py --- a/pypy/interpreter/baseobjspace.py +++ b/pypy/interpreter/baseobjspace.py @@ -27,7 +27,7 @@ class W_Root(object): """This is the abstract root class of all wrapped objects that live in a 'normal' object space like StdObjSpace.""" - __slots__ = () + __slots__ = ('__weakref__',) user_overridden_class = False def getdict(self, space): diff --git a/pypy/module/cpyext/api.py b/pypy/module/cpyext/api.py --- a/pypy/module/cpyext/api.py +++ b/pypy/module/cpyext/api.py @@ -911,7 +911,7 @@ def prepare(self, py_obj, w_obj): from 
pypy.module.cpyext.pyobject import track_reference - py_obj.c_ob_refcnt = 1 + py_obj.c_ob_refcnt = 1 # 1 for kept immortal track_reference(self.space, py_obj, w_obj) self.to_attach.append((py_obj, w_obj)) diff --git a/pypy/module/cpyext/object.py b/pypy/module/cpyext/object.py --- a/pypy/module/cpyext/object.py +++ b/pypy/module/cpyext/object.py @@ -6,7 +6,7 @@ Py_GE, CONST_STRING, FILEP, fwrite) from pypy.module.cpyext.pyobject import ( PyObject, PyObjectP, create_ref, from_ref, Py_IncRef, Py_DecRef, - track_reference, get_typedescr, _Py_NewReference, RefcountState) + get_typedescr, _Py_NewReference, RefcountState) from pypy.module.cpyext.typeobject import PyTypeObjectPtr from pypy.module.cpyext.pyerrors import PyErr_NoMemory, PyErr_BadInternalCall from pypy.objspace.std.typeobject import W_TypeObject @@ -33,7 +33,7 @@ assert isinstance(w_type, W_TypeObject) typedescr = get_typedescr(w_type.instancetypedef) py_obj = typedescr.allocate(space, w_type, itemcount=itemcount) - py_obj.c_ob_refcnt = 0 + #py_obj.c_ob_refcnt = 0 --- will be set to 1 again by PyObject_Init{Var} if type.c_tp_itemsize == 0: w_obj = PyObject_Init(space, py_obj, type) else: diff --git a/pypy/module/cpyext/pyobject.py b/pypy/module/cpyext/pyobject.py --- a/pypy/module/cpyext/pyobject.py +++ b/pypy/module/cpyext/pyobject.py @@ -32,14 +32,15 @@ def allocate(self, space, w_type, itemcount=0): # similar to PyType_GenericAlloc? # except that it's not related to any pypy object. + # this returns a PyObject with ob_refcnt == 1. 
- pytype = make_ref(space, w_type) + pytype = as_pyobj(space, w_type) pytype = rffi.cast(PyTypeObjectPtr, pytype) assert pytype # Don't increase refcount for non-heaptypes flags = rffi.cast(lltype.Signed, pytype.c_tp_flags) - if not flags & Py_TPFLAGS_HEAPTYPE: - Py_DecRef(space, w_type) + if flags & Py_TPFLAGS_HEAPTYPE: + Py_IncRef(space, w_type) if pytype: size = pytype.c_tp_basicsize @@ -170,12 +171,22 @@ typedescr = get_typedescr(w_obj.typedef) py_obj = typedescr.allocate(space, w_type, itemcount=itemcount) track_reference(space, py_obj, w_obj) + # + # py_obj.c_ob_refcnt should be exactly REFCNT_FROM_PYPY + 1 here, + # and we want only REFCNT_FROM_PYPY, i.e. only count as attached + # to the W_Root but not with any reference from the py_obj side. + assert py_obj.c_ob_refcnt > rawrefcount.REFCNT_FROM_PYPY + py_obj.c_ob_refcnt -= 1 + # typedescr.attach(space, py_obj, w_obj) return py_obj def track_reference(space, py_obj, w_obj): """ Ties together a PyObject and an interpreter object. + The PyObject's refcnt is increased by REFCNT_FROM_PYPY. + The reference in 'py_obj' is not stolen! Remember to Py_DecRef() + it is you need to. """ # XXX looks like a PyObject_GC_TRACK assert py_obj.c_ob_refcnt < rawrefcount.REFCNT_FROM_PYPY @@ -226,7 +237,6 @@ py_obj = rawrefcount.from_obj(PyObject, w_obj) if not py_obj: py_obj = create_ref(space, w_obj) - #track_reference(space, py_obj, w_obj) -- included with create_ref() return py_obj else: return lltype.nullptr(PyObject.TO) diff --git a/pypy/module/cpyext/stringobject.py b/pypy/module/cpyext/stringobject.py --- a/pypy/module/cpyext/stringobject.py +++ b/pypy/module/cpyext/stringobject.py @@ -69,9 +69,9 @@ def new_empty_str(space, length): """ - Allocatse a PyStringObject and its buffer, but without a corresponding + Allocate a PyStringObject and its buffer, but without a corresponding interpreter object. The buffer may be mutated, until string_realize() is - called. + called. Refcount of the result is 1. 
""" typedescr = get_typedescr(space.w_str.instancetypedef) py_obj = typedescr.allocate(space, space.w_str) diff --git a/pypy/module/cpyext/tupleobject.py b/pypy/module/cpyext/tupleobject.py --- a/pypy/module/cpyext/tupleobject.py +++ b/pypy/module/cpyext/tupleobject.py @@ -52,7 +52,7 @@ """ Allocate a PyTupleObject and its array of PyObject *, but without a corresponding interpreter object. The array may be mutated, until - tuple_realize() is called. + tuple_realize() is called. Refcount of the result is 1. """ typedescr = get_typedescr(space.w_tuple.instancetypedef) py_obj = typedescr.allocate(space, space.w_tuple) diff --git a/pypy/module/cpyext/unicodeobject.py b/pypy/module/cpyext/unicodeobject.py --- a/pypy/module/cpyext/unicodeobject.py +++ b/pypy/module/cpyext/unicodeobject.py @@ -44,9 +44,9 @@ def new_empty_unicode(space, length): """ - Allocatse a PyUnicodeObject and its buffer, but without a corresponding + Allocate a PyUnicodeObject and its buffer, but without a corresponding interpreter object. The buffer may be mutated, until unicode_realize() is - called. + called. Refcount of the result is 1. 
""" typedescr = get_typedescr(space.w_unicode.instancetypedef) py_obj = typedescr.allocate(space, space.w_unicode) From pypy.commits at gmail.com Tue Feb 16 11:52:06 2016 From: pypy.commits at gmail.com (arigo) Date: Tue, 16 Feb 2016 08:52:06 -0800 (PST) Subject: [pypy-commit] pypy cpyext-gc-support-2: fix test Message-ID: <56c353b6.046f1c0a.665af.6df4@mx.google.com> Author: Armin Rigo Branch: cpyext-gc-support-2 Changeset: r82286:6e6a31a1290d Date: 2016-02-16 17:50 +0100 http://bitbucket.org/pypy/pypy/changeset/6e6a31a1290d/ Log: fix test diff --git a/pypy/module/cpyext/test/test_getargs.py b/pypy/module/cpyext/test/test_getargs.py --- a/pypy/module/cpyext/test/test_getargs.py +++ b/pypy/module/cpyext/test/test_getargs.py @@ -161,7 +161,9 @@ freed.append('x') raises(TypeError, pybuffer, freestring("string"), freestring("other string"), 42) - import gc; gc.collect() + self.debug_collect() # gc.collect() is not enough in this test: + # we need to check and free the PyObject + # linked to the freestring object as well assert freed == ['x', 'x'] From pypy.commits at gmail.com Tue Feb 16 11:52:55 2016 From: pypy.commits at gmail.com (arigo) Date: Tue, 16 Feb 2016 08:52:55 -0800 (PST) Subject: [pypy-commit] pypy cpyext-gc-support-2: same as 6e6a31a1290d Message-ID: <56c353e7.512f1c0a.9fdf3.5b49@mx.google.com> Author: Armin Rigo Branch: cpyext-gc-support-2 Changeset: r82287:b84df0662b97 Date: 2016-02-16 17:52 +0100 http://bitbucket.org/pypy/pypy/changeset/b84df0662b97/ Log: same as 6e6a31a1290d diff --git a/pypy/module/cpyext/test/test_borrow.py b/pypy/module/cpyext/test/test_borrow.py --- a/pypy/module/cpyext/test/test_borrow.py +++ b/pypy/module/cpyext/test/test_borrow.py @@ -65,4 +65,5 @@ ]) wr = module.run() # check that the set() object was deallocated + self.debug_collect() assert wr() is None From pypy.commits at gmail.com Tue Feb 16 11:54:39 2016 From: pypy.commits at gmail.com (arigo) Date: Tue, 16 Feb 2016 08:54:39 -0800 (PST) Subject: [pypy-commit] pypy 
cpyext-gc-support-2: Less confusing for some tests to have the debug prints go to stderr Message-ID: <56c3544f.8e301c0a.b437a.6bdd@mx.google.com> Author: Armin Rigo Branch: cpyext-gc-support-2 Changeset: r82288:002c2fe86f8c Date: 2016-02-16 17:53 +0100 http://bitbucket.org/pypy/pypy/changeset/002c2fe86f8c/ Log: Less confusing for some tests to have the debug prints go to stderr diff --git a/rpython/rtyper/lltypesystem/ll2ctypes.py b/rpython/rtyper/lltypesystem/ll2ctypes.py --- a/rpython/rtyper/lltypesystem/ll2ctypes.py +++ b/rpython/rtyper/lltypesystem/ll2ctypes.py @@ -571,7 +571,7 @@ "double conversion from lltype to ctypes?") # XXX don't store here immortal structures if DEBUG_ALLOCATED: - print "LL2CTYPES:", hex(addr) + print >> sys.stderr, "LL2CTYPES:", hex(addr) ALLOCATED[addr] = self def _addressof_storage(self): @@ -585,7 +585,7 @@ # allow the ctypes object to go away now addr = ctypes.cast(self._storage, ctypes.c_void_p).value if DEBUG_ALLOCATED: - print "LL2C FREE:", hex(addr) + print >> sys.stderr, "LL2C FREE:", hex(addr) try: del ALLOCATED[addr] except KeyError: From pypy.commits at gmail.com Tue Feb 16 11:58:55 2016 From: pypy.commits at gmail.com (arigo) Date: Tue, 16 Feb 2016 08:58:55 -0800 (PST) Subject: [pypy-commit] pypy cpyext-gc-support-2: From RPython code, don't accept NULL (an lltype ptr typed "void *") in Message-ID: <56c3554f.48dcc20a.ffa8a.fffff39a@mx.google.com> Author: Armin Rigo Branch: cpyext-gc-support-2 Changeset: r82289:f782eb8ae5dd Date: 2016-02-16 17:58 +0100 http://bitbucket.org/pypy/pypy/changeset/f782eb8ae5dd/ Log: From RPython code, don't accept NULL (an lltype ptr typed "void *") in the "PyObject *" arguments. 
Use either a correctly-typed NULL, or simply say None diff --git a/pypy/module/cpyext/test/test_ndarrayobject.py b/pypy/module/cpyext/test/test_ndarrayobject.py --- a/pypy/module/cpyext/test/test_ndarrayobject.py +++ b/pypy/module/cpyext/test/test_ndarrayobject.py @@ -80,7 +80,7 @@ a0 = scalar(space) assert a0.get_scalar_value().value == 10. - a = api._PyArray_FromAny(a0, NULL, 0, 0, 0, NULL) + a = api._PyArray_FromAny(a0, None, 0, 0, 0, NULL) assert api._PyArray_NDIM(a) == 0 ptr = rffi.cast(rffi.DOUBLEP, api._PyArray_DATA(a)) @@ -88,10 +88,10 @@ def test_FromAny(self, space, api): a = array(space, [10, 5, 3]) - assert api._PyArray_FromAny(a, NULL, 0, 0, 0, NULL) is a - assert api._PyArray_FromAny(a, NULL, 1, 4, 0, NULL) is a + assert api._PyArray_FromAny(a, None, 0, 0, 0, NULL) is a + assert api._PyArray_FromAny(a, None, 1, 4, 0, NULL) is a self.raises(space, api, ValueError, api._PyArray_FromAny, - a, NULL, 4, 5, 0, NULL) + a, None, 4, 5, 0, NULL) def test_FromObject(self, space, api): a = array(space, [10, 5, 3]) From pypy.commits at gmail.com Tue Feb 16 12:19:10 2016 From: pypy.commits at gmail.com (arigo) Date: Tue, 16 Feb 2016 09:19:10 -0800 (PST) Subject: [pypy-commit] pypy cpyext-gc-support-2: fix test Message-ID: <56c35a0e.05e41c0a.48d42.72bd@mx.google.com> Author: Armin Rigo Branch: cpyext-gc-support-2 Changeset: r82290:5db4332316d0 Date: 2016-02-16 18:18 +0100 http://bitbucket.org/pypy/pypy/changeset/5db4332316d0/ Log: fix test diff --git a/pypy/module/cpyext/test/test_tupleobject.py b/pypy/module/cpyext/test/test_tupleobject.py --- a/pypy/module/cpyext/test/test_tupleobject.py +++ b/pypy/module/cpyext/test/test_tupleobject.py @@ -1,7 +1,6 @@ import py from pypy.module.cpyext.pyobject import PyObject, PyObjectP, make_ref, from_ref -from pypy.module.cpyext.pyobject import as_pyobj from pypy.module.cpyext.tupleobject import PyTupleObject from pypy.module.cpyext.test.test_api import BaseApiTest from pypy.module.cpyext.test.test_cpyext import 
AppTestCpythonExtensionBase @@ -25,7 +24,9 @@ ar = lltype.malloc(PyObjectP.TO, 1, flavor='raw') py_tuple = api.PyTuple_New(3) - rffi.cast(PyTupleObject, py_tuple).c_ob_item[0] = as_pyobj(space, w_42) + # inside py_tuple is an array of "PyObject *" items which each hold + # a reference + rffi.cast(PyTupleObject, py_tuple).c_ob_item[0] = make_ref(space, w_42) ar[0] = py_tuple api._PyTuple_Resize(ar, 2) w_tuple = from_ref(space, ar[0]) @@ -34,7 +35,7 @@ api.Py_DecRef(ar[0]) py_tuple = api.PyTuple_New(3) - rffi.cast(PyTupleObject, py_tuple).c_ob_item[0] = as_pyobj(space, w_42) + rffi.cast(PyTupleObject, py_tuple).c_ob_item[0] = make_ref(space, w_42) ar[0] = py_tuple api._PyTuple_Resize(ar, 10) w_tuple = from_ref(space, ar[0]) From pypy.commits at gmail.com Tue Feb 16 12:26:02 2016 From: pypy.commits at gmail.com (arigo) Date: Tue, 16 Feb 2016 09:26:02 -0800 (PST) Subject: [pypy-commit] pypy cpyext-gc-support-2: Finally, remove the GOES_AWAY stubs left behind Message-ID: <56c35baa.41df1c0a.d1f8b.76cf@mx.google.com> Author: Armin Rigo Branch: cpyext-gc-support-2 Changeset: r82291:dfaf329934e4 Date: 2016-02-16 18:25 +0100 http://bitbucket.org/pypy/pypy/changeset/dfaf329934e4/ Log: Finally, remove the GOES_AWAY stubs left behind diff --git a/pypy/module/cpyext/dictobject.py b/pypy/module/cpyext/dictobject.py --- a/pypy/module/cpyext/dictobject.py +++ b/pypy/module/cpyext/dictobject.py @@ -3,7 +3,6 @@ cpython_api, CANNOT_FAIL, build_type_checkers, Py_ssize_t, Py_ssize_tP, CONST_STRING) from pypy.module.cpyext.pyobject import PyObject, PyObjectP, as_pyobj -from pypy.module.cpyext.pyobject import RefcountState from pypy.module.cpyext.pyerrors import PyErr_BadInternalCall from pypy.interpreter.error import OperationError from rpython.rlib.objectmodel import specialize diff --git a/pypy/module/cpyext/intobject.py b/pypy/module/cpyext/intobject.py --- a/pypy/module/cpyext/intobject.py +++ b/pypy/module/cpyext/intobject.py @@ -5,7 +5,7 @@ cpython_api, cpython_struct, 
build_type_checkers, bootstrap_function, PyObject, PyObjectFields, CONST_STRING, CANNOT_FAIL, Py_ssize_t) from pypy.module.cpyext.pyobject import ( - make_typedescr, track_reference, RefcountState, from_ref) + make_typedescr, track_reference, from_ref) from rpython.rlib.rarithmetic import r_uint, intmask, LONG_TEST, r_ulonglong from pypy.objspace.std.intobject import W_IntObject import sys @@ -38,8 +38,6 @@ w_obj = space.allocate_instance(W_IntObject, w_type) w_obj.__init__(intval) track_reference(space, obj, w_obj) - #state = space.fromcache(RefcountState) - #state.set_lifeline(w_obj, obj) return w_obj PyInt_Check, PyInt_CheckExact = build_type_checkers("Int") diff --git a/pypy/module/cpyext/object.py b/pypy/module/cpyext/object.py --- a/pypy/module/cpyext/object.py +++ b/pypy/module/cpyext/object.py @@ -6,7 +6,7 @@ Py_GE, CONST_STRING, FILEP, fwrite) from pypy.module.cpyext.pyobject import ( PyObject, PyObjectP, create_ref, from_ref, Py_IncRef, Py_DecRef, - get_typedescr, _Py_NewReference, RefcountState) + get_typedescr, _Py_NewReference) from pypy.module.cpyext.typeobject import PyTypeObjectPtr from pypy.module.cpyext.pyerrors import PyErr_NoMemory, PyErr_BadInternalCall from pypy.objspace.std.typeobject import W_TypeObject diff --git a/pypy/module/cpyext/pyobject.py b/pypy/module/cpyext/pyobject.py --- a/pypy/module/cpyext/pyobject.py +++ b/pypy/module/cpyext/pyobject.py @@ -63,9 +63,6 @@ w_type = from_ref(space, rffi.cast(PyObject, obj.c_ob_type)) w_obj = space.allocate_instance(self.W_BaseObject, w_type) track_reference(space, obj, w_obj) - #if w_type is not space.gettypefor(self.W_BaseObject): - # state = space.fromcache(RefcountState) - # state.set_lifeline(w_obj, obj) return w_obj typedescr_cache = {} @@ -160,14 +157,7 @@ Allocates a PyObject, and fills its fields with info from the given interpreter object. 
""" - #state = space.fromcache(RefcountState) w_type = space.type(w_obj) - #if w_type.is_cpytype(): - # py_obj = state.get_from_lifeline(w_obj) - # if py_obj: - # Py_IncRef(space, py_obj) - # return py_obj - typedescr = get_typedescr(w_obj.typedef) py_obj = typedescr.allocate(space, w_type, itemcount=itemcount) track_reference(space, py_obj, w_obj) @@ -350,11 +340,3 @@ @cpython_api([rffi.VOIDP], lltype.Signed, error=CANNOT_FAIL) def _Py_HashPointer(space, ptr): return rffi.cast(lltype.Signed, ptr) - - -class RefcountState: - def __init__(self, *args): - GOES_AWAY - -def borrow_from(container, borrowed): - GOES_AWAY diff --git a/pypy/module/cpyext/typeobject.py b/pypy/module/cpyext/typeobject.py --- a/pypy/module/cpyext/typeobject.py +++ b/pypy/module/cpyext/typeobject.py @@ -21,7 +21,7 @@ from pypy.module.cpyext.modsupport import convert_method_defs from pypy.module.cpyext.pyobject import ( PyObject, make_ref, create_ref, from_ref, get_typedescr, make_typedescr, - track_reference, RefcountState, Py_DecRef, as_pyobj) + track_reference, Py_DecRef, as_pyobj) from pypy.module.cpyext.slotdefs import ( slotdefs_for_tp_slots, slotdefs_for_wrappers, get_slot_tp_function) from pypy.module.cpyext.state import State From pypy.commits at gmail.com Tue Feb 16 12:27:34 2016 From: pypy.commits at gmail.com (arigo) Date: Tue, 16 Feb 2016 09:27:34 -0800 (PST) Subject: [pypy-commit] pypy cpyext-gc-support-2: ups Message-ID: <56c35c06.cf0b1c0a.8b231.7dc1@mx.google.com> Author: Armin Rigo Branch: cpyext-gc-support-2 Changeset: r82292:6846c7d63a8a Date: 2016-02-16 18:26 +0100 http://bitbucket.org/pypy/pypy/changeset/6846c7d63a8a/ Log: ups diff --git a/pypy/module/cpyext/test/test_borrow.py b/pypy/module/cpyext/test/test_borrow.py --- a/pypy/module/cpyext/test/test_borrow.py +++ b/pypy/module/cpyext/test/test_borrow.py @@ -1,7 +1,7 @@ import py from pypy.module.cpyext.test.test_cpyext import AppTestCpythonExtensionBase from pypy.module.cpyext.test.test_api import BaseApiTest -from 
pypy.module.cpyext.pyobject import make_ref, borrow_from, RefcountState +from pypy.module.cpyext.pyobject import make_ref class AppTestBorrow(AppTestCpythonExtensionBase): From pypy.commits at gmail.com Tue Feb 16 12:45:23 2016 From: pypy.commits at gmail.com (cfbolz) Date: Tue, 16 Feb 2016 09:45:23 -0800 (PST) Subject: [pypy-commit] pypy statistics-maps: log the module Message-ID: <56c36033.8ab71c0a.aacf4.7d74@mx.google.com> Author: Carl Friedrich Bolz Branch: statistics-maps Changeset: r82293:5d1a96a22de4 Date: 2016-02-13 21:40 +0100 http://bitbucket.org/pypy/pypy/changeset/5d1a96a22de4/ Log: log the module diff --git a/pypy/objspace/std/mapdict.py b/pypy/objspace/std/mapdict.py --- a/pypy/objspace/std/mapdict.py +++ b/pypy/objspace/std/mapdict.py @@ -108,6 +108,7 @@ elif isinstance(self, Terminator): if self.w_cls is not None: lines.append(_print_line('w_cls', self.w_cls.name, 1)) + lines.append(_print_line('w_cls_module', self.space.bytes_w(self.w_cls.get_module()), 1)) if self._number_reads: lines.append(' "reads": {') for key, value in self._number_reads.items(): From pypy.commits at gmail.com Tue Feb 16 12:45:25 2016 From: pypy.commits at gmail.com (cfbolz) Date: Tue, 16 Feb 2016 09:45:25 -0800 (PST) Subject: [pypy-commit] pypy statistics-maps: add some jit_debug ops to try to understand more closely what is happening Message-ID: <56c36035.a8c0c20a.f2b04.5ac1@mx.google.com> Author: Carl Friedrich Bolz Branch: statistics-maps Changeset: r82294:cf743d3a034e Date: 2016-02-16 18:44 +0100 http://bitbucket.org/pypy/pypy/changeset/cf743d3a034e/ Log: add some jit_debug ops to try to understand more closely what is happening diff --git a/pypy/objspace/std/mapdict.py b/pypy/objspace/std/mapdict.py --- a/pypy/objspace/std/mapdict.py +++ b/pypy/objspace/std/mapdict.py @@ -139,10 +139,12 @@ # XXX move to PlainAttribute? 
if jit.we_are_jitted(): if attr.can_fold_read_int(): + jit.jit_debug("map: folded read int", objectmodel.compute_unique_id(attr), attr.read_constant_int()) return W_IntObject(attr.read_constant_int()) elif attr.can_fold_read_obj(): w_res = attr.try_read_constant_obj() if w_res is not None: + jit.jit_debug("map: folded read int", objectmodel.compute_unique_id(attr)) return w_res if ( jit.isconstant(attr) and @@ -157,12 +159,15 @@ if cls is W_IntObject: # this means that the class stored in the storage is an # IntMutableCell + jit.jit_debug("map: known class int", objectmodel.compute_unique_id(attr)) assert isinstance(result, IntMutableCell) return W_IntObject(result.intvalue) if cls is W_FloatObject: # ditto + jit.jit_debug("map: known class float", objectmodel.compute_unique_id(attr)) assert isinstance(result, FloatMutableCell) return W_FloatObject(result.floatvalue) + jit.jit_debug("map: recorded exact class", objectmodel.compute_unique_id(attr)) jit.record_exact_class(result, cls) return attr._read_cell(result) @@ -601,7 +606,10 @@ self._set_mapdict_storage_and_map(new_obj.storage, new_obj.map) def _get_mapdict_map(self): - return jit.promote(self.map) + map = jit.promote(self.map) + jit.jit_debug("map: promoted map", objectmodel.compute_unique_id(self)) + return map + def _set_mapdict_map(self, map): old = self.map # don't count Object, it's just an intermediate From pypy.commits at gmail.com Tue Feb 16 12:49:58 2016 From: pypy.commits at gmail.com (fijal) Date: Tue, 16 Feb 2016 09:49:58 -0800 (PST) Subject: [pypy-commit] pypy jit-leaner-frontend: remove unused detect_cpu Message-ID: <56c36146.0cb81c0a.17fa0.7d49@mx.google.com> Author: fijal Branch: jit-leaner-frontend Changeset: r82295:5652c389a739 Date: 2016-02-16 18:49 +0100 http://bitbucket.org/pypy/pypy/changeset/5652c389a739/ Log: remove unused detect_cpu diff --git a/pypy/config/pypyoption.py b/pypy/config/pypyoption.py --- a/pypy/config/pypyoption.py +++ b/pypy/config/pypyoption.py @@ -39,10 +39,6 @@ 
"_csv", "cppyy", "_pypyjson", "_vmprof", ]) -#if ((sys.platform.startswith('linux') or sys.platform == 'darwin') -# and os.uname()[4] == 'x86_64' and sys.maxint > 2**32): - # it's not enough that we get x86_64 -# working_modules.add('_vmprof') translation_modules = default_modules.copy() translation_modules.update([ diff --git a/rpython/jit/metainterp/opencoder.py b/rpython/jit/metainterp/opencoder.py --- a/rpython/jit/metainterp/opencoder.py +++ b/rpython/jit/metainterp/opencoder.py @@ -13,6 +13,8 @@ class TraceIterator(object): def __init__(self, trace, end): self.trace = trace + self.inputargs = [rop.inputarg_from_tp(arg.type) for + arg in self.trace.inputargs] self.pos = 0 self._count = 0 self.end = end @@ -20,7 +22,7 @@ def _get(self, i): if i < 0: - return self.trace.inputargs[-i - 1] + return self.inputargs[-i - 1] res = self._cache[i] assert res is not None return res diff --git a/rpython/jit/metainterp/optimizeopt/heap.py b/rpython/jit/metainterp/optimizeopt/heap.py --- a/rpython/jit/metainterp/optimizeopt/heap.py +++ b/rpython/jit/metainterp/optimizeopt/heap.py @@ -323,11 +323,11 @@ Optimization.emit_operation(self, op) def emitting_operation(self, op): - if OpHelpers.has_no_side_effect(op.opnum): + if rop.has_no_side_effect(op.opnum): return - if op.is_ovf(): + if rop.is_ovf(op.opnum): return - if op.is_guard(): + if rop.is_guard(op.opnum): self.optimizer.pendingfields = ( self.force_lazy_sets_for_guard()) return diff --git a/rpython/jit/metainterp/optimizeopt/optimizer.py b/rpython/jit/metainterp/optimizeopt/optimizer.py --- a/rpython/jit/metainterp/optimizeopt/optimizer.py +++ b/rpython/jit/metainterp/optimizeopt/optimizer.py @@ -10,8 +10,7 @@ from rpython.jit.metainterp.typesystem import llhelper from rpython.rlib.objectmodel import specialize, we_are_translated from rpython.rlib.debug import debug_print -from rpython.rlib.rarithmetic import r_uint -from rpython.jit.metainterp import opencoder +from rpython.jit.metainterp.optimize import 
SpeculativeError @@ -56,27 +55,23 @@ self.last_emitted_operation = op self.next_optimization.propagate_forward(op) - def getintbound(self, tagged): - tagged = self.get_box_replacement(tagged) - tag, val = opencoder.untag(tagged) - if tag == opencoder.TAGINT: - return ConstIntBound(val) - elif tag == opencoder.TAGCONST: - yyy - else: - assert tag == opencoder.TAGBOX - fw = self.optimizer.trace.get_info(self.optimizer.infos, val) + def getintbound(self, op): + assert op.type == 'i' + op = self.get_box_replacement(op) + if isinstance(op, ConstInt): + return ConstIntBound(op.getint()) + fw = op.get_forwarded() if fw is not None: if isinstance(fw, IntBound): return fw # rare case: fw might be a RawBufferPtrInfo return IntUnbounded() + assert op.type == 'i' intbound = IntBound(MININT, MAXINT) - self.optimizer.trace.set_info(self.optimizer.infos, val, intbound) + op.set_forwarded(intbound) return intbound def setintbound(self, op, bound): - xxx assert op.type == 'i' op = self.get_box_replacement(op) if op.is_constant(): @@ -344,20 +339,12 @@ if self.get_box_replacement(op).is_constant(): return info.FloatConstInfo(self.get_box_replacement(op)) - def get_box_replacement(self, tagged): - # tagged -> tagged - while True: - tag, v = opencoder.untag(tagged) - if tag != opencoder.TAGBOX: - return tagged - opnum = self.trace._ops[v] - if opnum >= 0: - return tagged - tagged = -opnum - 1 + def get_box_replacement(self, op): + if op is None: + return op + return op.get_box_replacement() def force_box(self, op, optforce=None): - # XXX - return op op = self.get_box_replacement(op) if optforce is None: optforce = self @@ -381,13 +368,6 @@ return True return op in self.inparg_dict - def is_constant(self, tagged): - tagged = self.get_box_replacement(tagged) - tag, value = opencoder.untag(tagged) - if tag == opencoder.TAGINT or tag == opencoder.TAGCONST: - return True - return False - def get_constant_box(self, box): box = self.get_box_replacement(box) if isinstance(box, Const): @@ 
-418,16 +398,7 @@ else: op.set_forwarded(newop) - def replace_op_with(self, op, newopnum, args=None, descr=None, output=False): - # recorded_op -> tagged - if not output: - newtag = self.trace.record_op_tag(newopnum, args, descr) - else: - newtag = self.output.record_op_output_tag(newopnum, args, descr) - self.trace.record_forwarding(op, newtag) - # XXX info forwarding - return newtag - + def replace_op_with(self, op, newopnum, args=None, descr=None): newop = op.copy_and_change(newopnum, args, descr) if newop.type != 'v': op = self.get_box_replacement(op) @@ -534,32 +505,25 @@ else: return CONST_0 - def propagate_all_forward(self, trace, call_pure_results=None, - rename_inputargs=True, flush=True): - self.output = opencoder.Trace([]) # <- XXXX, put inputargs - self.infos = [None] * trace._count - self.trace = trace - #if rename_inputargs: - # newargs = [] - # for inparg in inputargs: - # new_arg = OpHelpers.inputarg_from_tp(inparg.type) - # inparg.set_forwarded(new_arg) - # newargs.append(new_arg) - # self.init_inparg_dict_from(newargs) - #else: - # newargs = inputargs + def propagate_all_forward(self, trace, call_pure_results=None, flush=True): + trace = trace.get_iter() self.call_pure_results = call_pure_results - #if ops[-1].getopnum() in (rop.FINISH, rop.JUMP): - # last = len(ops) - 1 - # extra_jump = True - #else: - # extra_jump = False - # last = len(ops) - trace_iter = trace.get_iter() - while not trace_iter.done(): - op = trace_iter.next() + while not trace.done(): self._really_emitted_operation = None - self.first_optimization.propagate_forward(op) + op = trace.next() + if op.getopnum() in (rop.FINISH, rop.JUMP): + xxx + self.first_optimization.propagate_forward(trace.next()) + xxxx + if ops[-1].getopnum() in (rop.FINISH, rop.JUMP): + last = len(ops) - 1 + extra_jump = True + else: + extra_jump = False + last = len(ops) + for i in range(last): + self._really_emitted_operation = None + self.first_optimization.propagate_forward(ops[i]) # accumulate counters 
if flush: self.flush() @@ -582,34 +546,31 @@ dispatch_opt(self, op) def emit_operation(self, op): - if rop.returns_bool_result(op.opnum): + if op.returns_bool_result(): self.getintbound(op).make_bool() - tagged_op = self._emit_operation(op) - # XXX what is this about? looks pretty optional - #if op.type == 'i': - # opinfo = op.get_forwarded() - # if opinfo is not None: - # assert isinstance(opinfo, IntBound) - # if opinfo.is_constant(): - # op.set_forwarded(ConstInt(opinfo.getint())) + self._emit_operation(op) + op = self.get_box_replacement(op) + if op.type == 'i': + opinfo = op.get_forwarded() + if opinfo is not None: + assert isinstance(opinfo, IntBound) + if opinfo.is_constant(): + op.set_forwarded(ConstInt(opinfo.getint())) @specialize.argtype(0) def _emit_operation(self, op): - assert not rop.is_call_pure(op.opnum) + assert not op.is_call_pure() orig_op = op - tagged = self.get_box_replacement(op.get_tag()) - if self.is_constant(tagged): + op = self.get_box_replacement(op) + if op.is_constant(): return # can happen e.g. 
if we postpone the operation that becomes # constant - arglist = op.getarglist() - for i in range(len(arglist)): - arglist[i] = self.force_box(arglist[i]) - opnum = op.opnum - tagged_op = self.replace_op_with(op, opnum, arglist, op.getdescr(), - output=True) + op = self.replace_op_with(op, op.getopnum()) + for i in range(op.numargs()): + arg = self.force_box(op.getarg(i)) + op.setarg(i, arg) self.metainterp_sd.profiler.count(jitprof.Counters.OPT_OPS) - if rop.is_guard(opnum): - xxx + if rop.is_guard(op.opnum): assert isinstance(op, GuardResOp) self.metainterp_sd.profiler.count(jitprof.Counters.OPT_GUARDS) pendingfields = self.pendingfields @@ -620,16 +581,15 @@ return else: op = self.emit_guard_operation(op, pendingfields) - elif rop.can_raise(opnum): + elif op.can_raise(): self.exception_might_have_happened = True - #if ((op.has_no_side_effect() or op.is_guard() or op.is_jit_debug() or - # op.is_ovf()) and not self.is_call_pure_pure_canraise(op)): - # pass - #else: - # self._last_guard_op = None + if ((op.has_no_side_effect() or op.is_guard() or op.is_jit_debug() or + op.is_ovf()) and not self.is_call_pure_pure_canraise(op)): + pass + else: + self._last_guard_op = None self._really_emitted_operation = op - #self._newoperations.append(op) - return tagged_op + self._newoperations.append(op) def emit_guard_operation(self, op, pendingfields): guard_op = self.replace_op_with(op, op.getopnum()) diff --git a/rpython/jit/metainterp/resoperation.py b/rpython/jit/metainterp/resoperation.py --- a/rpython/jit/metainterp/resoperation.py +++ b/rpython/jit/metainterp/resoperation.py @@ -332,7 +332,7 @@ descr = self.getdescr() if descr is DONT_CHANGE: descr = None - newop = ResOperation(opnum, args, descr) + newop = ResOperation(opnum, args, -1, descr) if self.type != 'v': newop.copy_value_from(self) return newop @@ -1404,19 +1404,23 @@ def is_comparison(opnum): return rop.is_always_pure(opnum) and rop.returns_bool_result(opnum) - def is_foldable_guard(self): - return 
rop._GUARD_FOLDABLE_FIRST <= self.getopnum() <= rop._GUARD_FOLDABLE_LAST + @staticmethod + def is_foldable_guard(opnum): + return rop._GUARD_FOLDABLE_FIRST <= opnum <= rop._GUARD_FOLDABLE_LAST - def is_guard_exception(self): - return (self.getopnum() == rop.GUARD_EXCEPTION or - self.getopnum() == rop.GUARD_NO_EXCEPTION) + @staticmethod + def is_guard_exception(opnum): + return (opnum == rop.GUARD_EXCEPTION or + opnum == rop.GUARD_NO_EXCEPTION) - def is_guard_overflow(self): - return (self.getopnum() == rop.GUARD_OVERFLOW or - self.getopnum() == rop.GUARD_NO_OVERFLOW) + @staticmethod + def is_guard_overflow(opnum): + return (opnum == rop.GUARD_OVERFLOW or + opnum == rop.GUARD_NO_OVERFLOW) - def is_jit_debug(self): - return rop._JIT_DEBUG_FIRST <= self.getopnum() <= rop._JIT_DEBUG_LAST + @staticmethod + def is_jit_debug(opnum): + return rop._JIT_DEBUG_FIRST <= opnum <= rop._JIT_DEBUG_LAST @staticmethod def is_always_pure(opnum): @@ -1465,8 +1469,8 @@ opnum == rop.CALL_ASSEMBLER_N or opnum == rop.CALL_ASSEMBLER_F) - def is_call_may_force(self): - opnum = self.opnum + @staticmethod + def is_call_may_force(opnum): return (opnum == rop.CALL_MAY_FORCE_I or opnum == rop.CALL_MAY_FORCE_R or opnum == rop.CALL_MAY_FORCE_N or diff --git a/rpython/jit/metainterp/test/test_opencoder.py b/rpython/jit/metainterp/test/test_opencoder.py --- a/rpython/jit/metainterp/test/test_opencoder.py +++ b/rpython/jit/metainterp/test/test_opencoder.py @@ -4,33 +4,20 @@ from rpython.jit.metainterp.history import ConstInt from rpython.jit.metainterp.optimizeopt.optimizer import Optimizer -class SimpleOptimizer(Optimizer): - class metainterp_sd: - class profiler: - @staticmethod - def count(*args): - pass - - def __init__(self, trace): - self.trace = trace - self.optimizer = self # uh? 
- self.infos = [None] * trace._count - self.output = Trace([]) - class TestOpencoder(object): def unpack(self, t): iter = t.get_iter() l = [] while not iter.done(): l.append(iter.next()) - return l + return iter.inputargs, l def test_simple_iterator(self): i0, i1 = InputArgInt(), InputArgInt() t = Trace([i0, i1]) add = t.record_op(rop.INT_ADD, [i0, i1]) t.record_op(rop.INT_ADD, [add, ConstInt(1)]) - l = self.unpack(t) + (i0, i1), l = self.unpack(t) assert len(l) == 2 assert l[0].opnum == rop.INT_ADD assert l[1].opnum == rop.INT_ADD @@ -38,3 +25,9 @@ assert l[1].getarg(0) is l[0] assert l[0].getarg(0) is i0 assert l[0].getarg(1) is i1 + + def test_rd_snapshot(self): + i0, i1 = InputArgInt(), InputArgInt() + t = Trace([i0, i1]) + add = t.record_op(rop.INT_ADD, [i0, i1]) + guard_op = t.record_op(rop.GUARD_FALSE, [add]) \ No newline at end of file diff --git a/rpython/rlib/rvmprof/cintf.py b/rpython/rlib/rvmprof/cintf.py --- a/rpython/rlib/rvmprof/cintf.py +++ b/rpython/rlib/rvmprof/cintf.py @@ -7,8 +7,6 @@ from rpython.rtyper.tool import rffi_platform as platform from rpython.rlib import rthread -from rpython.jit.backend import detect_cpu - class VMProfPlatformUnsupported(Exception): pass From pypy.commits at gmail.com Tue Feb 16 12:53:54 2016 From: pypy.commits at gmail.com (fijal) Date: Tue, 16 Feb 2016 09:53:54 -0800 (PST) Subject: [pypy-commit] pypy default: skip _vmprof on non-x86 platforms Message-ID: <56c36232.a185c20a.c9326.ffff9e15@mx.google.com> Author: fijal Branch: Changeset: r82296:a97d68a0f18d Date: 2016-02-16 18:53 +0100 http://bitbucket.org/pypy/pypy/changeset/a97d68a0f18d/ Log: skip _vmprof on non-x86 platforms diff --git a/pypy/config/pypyoption.py b/pypy/config/pypyoption.py --- a/pypy/config/pypyoption.py +++ b/pypy/config/pypyoption.py @@ -36,13 +36,13 @@ "cStringIO", "thread", "itertools", "pyexpat", "_ssl", "cpyext", "array", "binascii", "_multiprocessing", '_warnings', "_collections", "_multibytecodec", "micronumpy", "_continuation", 
"_cffi_backend", - "_csv", "cppyy", "_pypyjson", "_vmprof", + "_csv", "cppyy", "_pypyjson", ]) -#if ((sys.platform.startswith('linux') or sys.platform == 'darwin') -# and os.uname()[4] == 'x86_64' and sys.maxint > 2**32): - # it's not enough that we get x86_64 -# working_modules.add('_vmprof') +from rpython.jit.backend import detect_cpu +if detect_cpu.startswith('x86'): + working_modules.add('_vmprof') + translation_modules = default_modules.copy() translation_modules.update([ diff --git a/rpython/rlib/rvmprof/cintf.py b/rpython/rlib/rvmprof/cintf.py --- a/rpython/rlib/rvmprof/cintf.py +++ b/rpython/rlib/rvmprof/cintf.py @@ -7,8 +7,6 @@ from rpython.rtyper.tool import rffi_platform as platform from rpython.rlib import rthread -from rpython.jit.backend import detect_cpu - class VMProfPlatformUnsupported(Exception): pass From pypy.commits at gmail.com Tue Feb 16 12:57:03 2016 From: pypy.commits at gmail.com (arigo) Date: Tue, 16 Feb 2016 09:57:03 -0800 (PST) Subject: [pypy-commit] pypy default: fix Message-ID: <56c362ef.2aacc20a.40ae0.22b6@mx.google.com> Author: Armin Rigo Branch: Changeset: r82297:da5cd52da2f8 Date: 2016-02-16 18:56 +0100 http://bitbucket.org/pypy/pypy/changeset/da5cd52da2f8/ Log: fix diff --git a/pypy/config/pypyoption.py b/pypy/config/pypyoption.py --- a/pypy/config/pypyoption.py +++ b/pypy/config/pypyoption.py @@ -40,8 +40,11 @@ ]) from rpython.jit.backend import detect_cpu -if detect_cpu.startswith('x86'): - working_modules.add('_vmprof') +try: + if detect_cpu.autodetect().startswith('x86'): + working_modules.add('_vmprof') +except detect_cpu.ProcessorAutodetectError: + pass translation_modules = default_modules.copy() From pypy.commits at gmail.com Tue Feb 16 13:14:03 2016 From: pypy.commits at gmail.com (rlamy) Date: Tue, 16 Feb 2016 10:14:03 -0800 (PST) Subject: [pypy-commit] pypy llimpl: SomeExternalFunction does not subclass SomeBuiltin any more Message-ID: <56c366eb.01adc20a.cbec7.1df7@mx.google.com> Author: Ronan Lamy Branch: llimpl 
Changeset: r82298:25d639caec04 Date: 2016-02-16 05:02 +0000 http://bitbucket.org/pypy/pypy/changeset/25d639caec04/ Log: SomeExternalFunction does not subclass SomeBuiltin any more diff --git a/rpython/rtyper/extfunc.py b/rpython/rtyper/extfunc.py --- a/rpython/rtyper/extfunc.py +++ b/rpython/rtyper/extfunc.py @@ -1,9 +1,12 @@ +from rpython.annotator.model import unionof, SomeObject +from rpython.annotator.signature import annotation, SignatureError from rpython.rtyper.extregistry import ExtRegistryEntry -from rpython.rtyper.lltypesystem.lltype import typeOf, FuncType, functionptr, _ptr -from rpython.annotator.model import unionof, SomeBuiltin -from rpython.annotator.signature import annotation, SignatureError +from rpython.rtyper.lltypesystem.lltype import ( + typeOf, FuncType, functionptr, _ptr) +from rpython.rtyper.error import TyperError +from rpython.rtyper.rbuiltin import BuiltinFunctionRepr -class SomeExternalFunction(SomeBuiltin): +class SomeExternalFunction(SomeObject): def __init__(self, name, args_s, s_result): self.name = name self.args_s = args_s @@ -20,21 +23,31 @@ for i, s_param in enumerate(params_s): arg = unionof(args_s[i], s_param) if not s_param.contains(arg): - raise SignatureError("In call to external function %r:\n" - "arg %d must be %s,\n" - " got %s" % ( - self.name, i+1, s_param, args_s[i])) + raise SignatureError( + "In call to external function %r:\n" + "arg %d must be %s,\n" + " got %s" % ( + self.name, i + 1, s_param, args_s[i])) def call(self, callspec): self.check_args(callspec) return self.s_result + def rtyper_makerepr(self, rtyper): + if not self.is_constant(): + raise TyperError("Non-constant external function!") + return BuiltinFunctionRepr(self.const) + + def rtyper_makekey(self): + return self.__class__, self.const + + class ExtFuncEntry(ExtRegistryEntry): safe_not_sandboxed = False def compute_annotation(self): s_result = SomeExternalFunction( - self.name, self.signature_args, self.signature_result) + self.name, 
self.signature_args, self.signature_result) if (self.bookkeeper.annotator.translator.config.translation.sandbox and not self.safe_not_sandboxed): s_result.needs_sandboxing = True From pypy.commits at gmail.com Tue Feb 16 13:14:05 2016 From: pypy.commits at gmail.com (rlamy) Date: Tue, 16 Feb 2016 10:14:05 -0800 (PST) Subject: [pypy-commit] pypy llimpl: Create ExternalFunctionRepr Message-ID: <56c366ed.878e1c0a.b486f.ffff9335@mx.google.com> Author: Ronan Lamy Branch: llimpl Changeset: r82299:24d569eefdf9 Date: 2016-02-16 18:13 +0000 http://bitbucket.org/pypy/pypy/changeset/24d569eefdf9/ Log: Create ExternalFunctionRepr diff --git a/rpython/rtyper/extfunc.py b/rpython/rtyper/extfunc.py --- a/rpython/rtyper/extfunc.py +++ b/rpython/rtyper/extfunc.py @@ -1,10 +1,10 @@ from rpython.annotator.model import unionof, SomeObject from rpython.annotator.signature import annotation, SignatureError -from rpython.rtyper.extregistry import ExtRegistryEntry +from rpython.rtyper.extregistry import ExtRegistryEntry, lookup from rpython.rtyper.lltypesystem.lltype import ( - typeOf, FuncType, functionptr, _ptr) + typeOf, FuncType, functionptr, _ptr, Void) from rpython.rtyper.error import TyperError -from rpython.rtyper.rbuiltin import BuiltinFunctionRepr +from rpython.rtyper.rmodel import Repr class SomeExternalFunction(SomeObject): def __init__(self, name, args_s, s_result): @@ -36,10 +36,58 @@ def rtyper_makerepr(self, rtyper): if not self.is_constant(): raise TyperError("Non-constant external function!") - return BuiltinFunctionRepr(self.const) + entry = lookup(self.const) + impl = getattr(entry, 'lltypeimpl', None) + fakeimpl = getattr(entry, 'lltypefakeimpl', None) + return ExternalFunctionRepr(self, impl, fakeimpl) def rtyper_makekey(self): - return self.__class__, self.const + return self.__class__, self + +class ExternalFunctionRepr(Repr): + lowleveltype = Void + + def __init__(self, s_func, impl, fakeimpl): + self.s_func = s_func + self.impl = impl + self.fakeimpl = fakeimpl + 
+ def rtype_simple_call(self, hop): + rtyper = hop.rtyper + args_r = [rtyper.getrepr(s_arg) for s_arg in self.s_func.args_s] + r_result = rtyper.getrepr(self.s_func.s_result) + obj = self.get_funcptr(rtyper, args_r, r_result) + hop2 = hop.copy() + hop2.r_s_popfirstarg() + vlist = [hop2.inputconst(typeOf(obj), obj)] + hop2.inputargs(*args_r) + hop2.exception_is_here() + return hop2.genop('direct_call', vlist, r_result) + + def get_funcptr(self, rtyper, args_r, r_result): + from rpython.rtyper.rtyper import llinterp_backend + args_ll = [r_arg.lowleveltype for r_arg in args_r] + ll_result = r_result.lowleveltype + name = self.s_func.name + fakeimpl = getattr(self, 'lltypefakeimpl', self.s_func.const) + if self.impl: + if self.fakeimpl and rtyper.backend is llinterp_backend: + FT = FuncType(args_ll, ll_result) + return functionptr( + FT, name, _external_name=name, _callable=fakeimpl) + elif isinstance(self.impl, _ptr): + return self.impl + else: + # store some attributes to the 'impl' function, where + # the eventual call to rtyper.getcallable() will find them + # and transfer them to the final lltype.functionptr(). 
+ self.impl._llfnobjattrs_ = {'_name': name} + return rtyper.getannmixlevel().delayedfunction( + self.impl, self.s_func.args_s, self.s_func.s_result) + else: + fakeimpl = self.fakeimpl or self.s_func.const + FT = FuncType(args_ll, ll_result) + return functionptr( + FT, name, _external_name=name, _callable=fakeimpl) class ExtFuncEntry(ExtRegistryEntry): @@ -53,42 +101,6 @@ s_result.needs_sandboxing = True return s_result - def specialize_call(self, hop): - rtyper = hop.rtyper - args_r = [rtyper.getrepr(s_arg) for s_arg in self.signature_args] - r_result = rtyper.getrepr(self.signature_result) - obj = self.get_funcptr(rtyper, args_r, r_result) - vlist = [hop.inputconst(typeOf(obj), obj)] + hop.inputargs(*args_r) - hop.exception_is_here() - return hop.genop('direct_call', vlist, r_result) - - def get_funcptr(self, rtyper, args_r, r_result): - from rpython.rtyper.rtyper import llinterp_backend - args_ll = [r_arg.lowleveltype for r_arg in args_r] - ll_result = r_result.lowleveltype - impl = getattr(self, 'lltypeimpl', None) - fakeimpl = getattr(self, 'lltypefakeimpl', self.instance) - if impl: - if hasattr(self, 'lltypefakeimpl') and rtyper.backend is llinterp_backend: - FT = FuncType(args_ll, ll_result) - return functionptr( - FT, self.name, _external_name=self.name, - _callable=fakeimpl) - elif isinstance(impl, _ptr): - return impl - else: - # store some attributes to the 'impl' function, where - # the eventual call to rtyper.getcallable() will find them - # and transfer them to the final lltype.functionptr(). 
- impl._llfnobjattrs_ = {'_name': self.name} - return rtyper.getannmixlevel().delayedfunction( - impl, self.signature_args, self.signature_result) - else: - FT = FuncType(args_ll, ll_result) - return functionptr( - FT, self.name, _external_name=self.name, _callable=fakeimpl, - _safe_not_sandboxed=self.safe_not_sandboxed) - def register_external(function, args, result=None, export_name=None, llimpl=None, llfakeimpl=None, sandboxsafe=False): From pypy.commits at gmail.com Wed Feb 17 15:14:02 2016 From: pypy.commits at gmail.com (arigo) Date: Wed, 17 Feb 2016 12:14:02 -0800 (PST) Subject: [pypy-commit] pypy cpyext-gc-support-2: Add PYPY_VERSION_NUM Message-ID: <56c4d48a.c8ac1c0a.62b31.ffff9428@mx.google.com> Author: Armin Rigo Branch: cpyext-gc-support-2 Changeset: r82313:6fb202f87513 Date: 2016-02-17 21:13 +0100 http://bitbucket.org/pypy/pypy/changeset/6fb202f87513/ Log: Add PYPY_VERSION_NUM diff --git a/pypy/module/cpyext/include/patchlevel.h b/pypy/module/cpyext/include/patchlevel.h --- a/pypy/module/cpyext/include/patchlevel.h +++ b/pypy/module/cpyext/include/patchlevel.h @@ -30,6 +30,7 @@ /* PyPy version as a string */ #define PYPY_VERSION "4.1.0-alpha0" +#define PYPY_VERSION_NUM 0x04010000 /* Defined to mean a PyPy where cpyext holds more regular references to PyObjects, e.g. 
staying alive as long as the internal PyPy object diff --git a/pypy/module/cpyext/test/test_version.py b/pypy/module/cpyext/test/test_version.py --- a/pypy/module/cpyext/test/test_version.py +++ b/pypy/module/cpyext/test/test_version.py @@ -23,6 +23,7 @@ PyModule_AddIntConstant(m, "py_minor_version", PY_MINOR_VERSION); PyModule_AddIntConstant(m, "py_micro_version", PY_MICRO_VERSION); PyModule_AddStringConstant(m, "pypy_version", PYPY_VERSION); + PyModule_AddIntConstant(m, "pypy_version_num", PYPY_VERSION_NUM); } """ module = self.import_module(name='foo', init=init) @@ -35,3 +36,6 @@ if v.releaselevel != 'final': s += '-%s%d' % (v[3], v[4]) assert module.pypy_version == s + assert module.pypy_version_num == ((v[0] << 24) | + (v[1] << 16) | + (v[2] << 8)) From pypy.commits at gmail.com Wed Feb 17 15:18:23 2016 From: pypy.commits at gmail.com (arigo) Date: Wed, 17 Feb 2016 12:18:23 -0800 (PST) Subject: [pypy-commit] pypy cpyext-gc-support-2: Change the ".pypy-26.so" to ".pypy-41.so". This will require recompilation Message-ID: <56c4d58f.0772c20a.d5b22.2135@mx.google.com> Author: Armin Rigo Branch: cpyext-gc-support-2 Changeset: r82314:3613837f04a5 Date: 2016-02-17 21:17 +0100 http://bitbucket.org/pypy/pypy/changeset/3613837f04a5/ Log: Change the ".pypy-26.so" to ".pypy-41.so". This will require recompilation also of cffi modules, but unless someone has got a good idea, I think it's the least damaging solution. diff --git a/pypy/module/imp/importing.py b/pypy/module/imp/importing.py --- a/pypy/module/imp/importing.py +++ b/pypy/module/imp/importing.py @@ -38,7 +38,7 @@ # and cffi so's. If we do have to update it, we'd likely need a way to # split the two usages again. 
#DEFAULT_SOABI = 'pypy-%d%d' % PYPY_VERSION[:2] -DEFAULT_SOABI = 'pypy-26' +DEFAULT_SOABI = 'pypy-41' @specialize.memo() def get_so_extension(space): From pypy.commits at gmail.com Wed Feb 17 17:59:56 2016 From: pypy.commits at gmail.com (rlamy) Date: Wed, 17 Feb 2016 14:59:56 -0800 (PST) Subject: [pypy-commit] pypy llimpl: Close branch before merging Message-ID: <56c4fb6c.41dfc20a.937e1.4d02@mx.google.com> Author: Ronan Lamy Branch: llimpl Changeset: r82315:691635e5fdfb Date: 2016-02-17 22:56 +0000 http://bitbucket.org/pypy/pypy/changeset/691635e5fdfb/ Log: Close branch before merging From pypy.commits at gmail.com Wed Feb 17 17:59:58 2016 From: pypy.commits at gmail.com (rlamy) Date: Wed, 17 Feb 2016 14:59:58 -0800 (PST) Subject: [pypy-commit] pypy default: Merge branch 'llimpl' Message-ID: <56c4fb6e.82561c0a.32111.05bc@mx.google.com> Author: Ronan Lamy Branch: Changeset: r82316:4c30448f0457 Date: 2016-02-17 22:59 +0000 http://bitbucket.org/pypy/pypy/changeset/4c30448f0457/ Log: Merge branch 'llimpl' Refactor register_external(), remove running_on_llinterp mechanism and apply sandbox transform on externals at the end of annotation. 
diff --git a/rpython/annotator/policy.py b/rpython/annotator/policy.py --- a/rpython/annotator/policy.py +++ b/rpython/annotator/policy.py @@ -3,6 +3,9 @@ from rpython.annotator.specialize import ( specialize_argvalue, specialize_argtype, specialize_arglistitemtype, specialize_arg_or_var, memo, specialize_call_location) +from rpython.flowspace.operation import op +from rpython.flowspace.model import Constant +from rpython.annotator.model import SomeTuple class AnnotatorPolicy(object): @@ -64,7 +67,34 @@ return LowLevelAnnotatorPolicy.specialize__ll_and_arg(*args) def no_more_blocks_to_annotate(pol, annotator): + bk = annotator.bookkeeper # hint to all pending specializers that we are done - for callback in annotator.bookkeeper.pending_specializations: + for callback in bk.pending_specializations: callback() - del annotator.bookkeeper.pending_specializations[:] + del bk.pending_specializations[:] + if annotator.added_blocks is not None: + all_blocks = annotator.added_blocks + else: + all_blocks = annotator.annotated + for block in list(all_blocks): + for i, instr in enumerate(block.operations): + if not isinstance(instr, (op.simple_call, op.call_args)): + continue + v_func = instr.args[0] + s_func = annotator.annotation(v_func) + if not hasattr(s_func, 'needs_sandboxing'): + continue + key = ('sandboxing', s_func.const) + if key not in bk.emulated_pbc_calls: + params_s = s_func.args_s + s_result = s_func.s_result + from rpython.translator.sandbox.rsandbox import make_sandbox_trampoline + sandbox_trampoline = make_sandbox_trampoline( + s_func.name, params_s, s_result) + sandbox_trampoline._signature_ = [SomeTuple(items=params_s)], s_result + bk.emulate_pbc_call(key, bk.immutablevalue(sandbox_trampoline), params_s) + else: + s_trampoline = bk.emulated_pbc_calls[key][0] + sandbox_trampoline = s_trampoline.const + new = instr.replace({instr.args[0]: Constant(sandbox_trampoline)}) + block.operations[i] = new diff --git a/rpython/annotator/unaryop.py 
b/rpython/annotator/unaryop.py --- a/rpython/annotator/unaryop.py +++ b/rpython/annotator/unaryop.py @@ -113,8 +113,9 @@ @op.simple_call.register(SomeObject) def simple_call_SomeObject(annotator, func, *args): - return annotator.annotation(func).call( - simple_args([annotator.annotation(arg) for arg in args])) + s_func = annotator.annotation(func) + argspec = simple_args([annotator.annotation(arg) for arg in args]) + return s_func.call(argspec) @op.call_args.register_transform(SomeObject) def transform_varargs(annotator, v_func, v_shape, *data_v): diff --git a/rpython/memory/gctransform/test/test_transform.py b/rpython/memory/gctransform/test/test_transform.py --- a/rpython/memory/gctransform/test/test_transform.py +++ b/rpython/memory/gctransform/test/test_transform.py @@ -5,6 +5,7 @@ from rpython.translator.exceptiontransform import ExceptionTransformer from rpython.rtyper.lltypesystem import lltype from rpython.conftest import option +from rpython.rtyper.rtyper import llinterp_backend class LLInterpedTranformerTests: @@ -131,8 +132,10 @@ def rtype(func, inputtypes, specialize=True): t = TranslationContext() t.buildannotator().build_types(func, inputtypes) + rtyper = t.buildrtyper() + rtyper.backend = llinterp_backend if specialize: - t.buildrtyper().specialize() + rtyper.specialize() if option.view: t.view() return t diff --git a/rpython/memory/test/test_transformed_gc.py b/rpython/memory/test/test_transformed_gc.py --- a/rpython/memory/test/test_transformed_gc.py +++ b/rpython/memory/test/test_transformed_gc.py @@ -14,6 +14,7 @@ from rpython.conftest import option from rpython.rlib.rstring import StringBuilder from rpython.rlib.rarithmetic import LONG_BIT +from rpython.rtyper.rtyper import llinterp_backend WORD = LONG_BIT // 8 @@ -29,9 +30,11 @@ t.config.set(**extraconfigopts) ann = t.buildannotator() ann.build_types(func, inputtypes) + rtyper = t.buildrtyper() + rtyper.backend = llinterp_backend if specialize: - t.buildrtyper().specialize() + 
rtyper.specialize() if backendopt: from rpython.translator.backendopt.all import backend_optimizations backend_optimizations(t) diff --git a/rpython/rlib/objectmodel.py b/rpython/rlib/objectmodel.py --- a/rpython/rlib/objectmodel.py +++ b/rpython/rlib/objectmodel.py @@ -275,8 +275,6 @@ return lltype.Signed malloc_zero_filled = CDefinedIntSymbolic('MALLOC_ZERO_FILLED', default=0) -running_on_llinterp = CDefinedIntSymbolic('RUNNING_ON_LLINTERP', default=1) -# running_on_llinterp is meant to have the value 0 in all backends # ____________________________________________________________ diff --git a/rpython/rtyper/extfunc.py b/rpython/rtyper/extfunc.py --- a/rpython/rtyper/extfunc.py +++ b/rpython/rtyper/extfunc.py @@ -1,99 +1,105 @@ -from rpython.rtyper.extregistry import ExtRegistryEntry -from rpython.rtyper.lltypesystem.lltype import typeOf, FuncType, functionptr -from rpython.annotator.model import unionof +from rpython.annotator.model import unionof, SomeObject from rpython.annotator.signature import annotation, SignatureError +from rpython.rtyper.extregistry import ExtRegistryEntry, lookup +from rpython.rtyper.lltypesystem.lltype import ( + typeOf, FuncType, functionptr, _ptr, Void) +from rpython.rtyper.error import TyperError +from rpython.rtyper.rmodel import Repr -import py +class SomeExternalFunction(SomeObject): + def __init__(self, name, args_s, s_result): + self.name = name + self.args_s = args_s + self.s_result = s_result + + def check_args(self, callspec): + params_s = self.args_s + args_s, kwargs = callspec.unpack() + if kwargs: + raise SignatureError( + "External functions cannot be called with keyword arguments") + if len(args_s) != len(params_s): + raise SignatureError("Argument number mismatch") + for i, s_param in enumerate(params_s): + arg = unionof(args_s[i], s_param) + if not s_param.contains(arg): + raise SignatureError( + "In call to external function %r:\n" + "arg %d must be %s,\n" + " got %s" % ( + self.name, i + 1, s_param, args_s[i])) + + 
def call(self, callspec): + self.check_args(callspec) + return self.s_result + + def rtyper_makerepr(self, rtyper): + if not self.is_constant(): + raise TyperError("Non-constant external function!") + entry = lookup(self.const) + impl = getattr(entry, 'lltypeimpl', None) + fakeimpl = getattr(entry, 'lltypefakeimpl', None) + return ExternalFunctionRepr(self, impl, fakeimpl) + + def rtyper_makekey(self): + return self.__class__, self + +class ExternalFunctionRepr(Repr): + lowleveltype = Void + + def __init__(self, s_func, impl, fakeimpl): + self.s_func = s_func + self.impl = impl + self.fakeimpl = fakeimpl + + def rtype_simple_call(self, hop): + rtyper = hop.rtyper + args_r = [rtyper.getrepr(s_arg) for s_arg in self.s_func.args_s] + r_result = rtyper.getrepr(self.s_func.s_result) + obj = self.get_funcptr(rtyper, args_r, r_result) + hop2 = hop.copy() + hop2.r_s_popfirstarg() + vlist = [hop2.inputconst(typeOf(obj), obj)] + hop2.inputargs(*args_r) + hop2.exception_is_here() + return hop2.genop('direct_call', vlist, r_result) + + def get_funcptr(self, rtyper, args_r, r_result): + from rpython.rtyper.rtyper import llinterp_backend + args_ll = [r_arg.lowleveltype for r_arg in args_r] + ll_result = r_result.lowleveltype + name = self.s_func.name + if self.fakeimpl and rtyper.backend is llinterp_backend: + FT = FuncType(args_ll, ll_result) + return functionptr( + FT, name, _external_name=name, _callable=self.fakeimpl) + elif self.impl: + if isinstance(self.impl, _ptr): + return self.impl + else: + # store some attributes to the 'impl' function, where + # the eventual call to rtyper.getcallable() will find them + # and transfer them to the final lltype.functionptr(). 
+ self.impl._llfnobjattrs_ = {'_name': name} + return rtyper.getannmixlevel().delayedfunction( + self.impl, self.s_func.args_s, self.s_func.s_result) + else: + fakeimpl = self.fakeimpl or self.s_func.const + FT = FuncType(args_ll, ll_result) + return functionptr( + FT, name, _external_name=name, _callable=fakeimpl) + class ExtFuncEntry(ExtRegistryEntry): safe_not_sandboxed = False - # common case: args is a list of annotation or types - def normalize_args(self, *args_s): - args = self.signature_args - signature_args = [annotation(arg, None) for arg in args] - assert len(args_s) == len(signature_args),\ - "Argument number mismatch" + def compute_annotation(self): + s_result = SomeExternalFunction( + self.name, self.signature_args, self.signature_result) + if (self.bookkeeper.annotator.translator.config.translation.sandbox + and not self.safe_not_sandboxed): + s_result.needs_sandboxing = True + return s_result - for i, expected in enumerate(signature_args): - arg = unionof(args_s[i], expected) - if not expected.contains(arg): - name = getattr(self, 'name', None) - if not name: - try: - name = self.instance.__name__ - except AttributeError: - name = '?' 
- raise SignatureError("In call to external function %r:\n" - "arg %d must be %s,\n" - " got %s" % ( - name, i+1, expected, args_s[i])) - return signature_args - - def compute_result_annotation(self, *args_s): - self.normalize_args(*args_s) # check arguments - return self.signature_result - - def specialize_call(self, hop): - rtyper = hop.rtyper - signature_args = self.normalize_args(*hop.args_s) - args_r = [rtyper.getrepr(s_arg) for s_arg in signature_args] - args_ll = [r_arg.lowleveltype for r_arg in args_r] - s_result = hop.s_result - r_result = rtyper.getrepr(s_result) - ll_result = r_result.lowleveltype - name = getattr(self, 'name', None) or self.instance.__name__ - impl = getattr(self, 'lltypeimpl', None) - fakeimpl = getattr(self, 'lltypefakeimpl', self.instance) - if impl: - if (rtyper.annotator.translator.config.translation.sandbox - and not self.safe_not_sandboxed): - from rpython.translator.sandbox.rsandbox import ( - make_sandbox_trampoline) - impl = make_sandbox_trampoline( - self.name, signature_args, s_result) - if hasattr(self, 'lltypefakeimpl'): - # If we have both an llimpl and an llfakeimpl, - # we need a wrapper that selects the proper one and calls it - from rpython.tool.sourcetools import func_with_new_name - # Using '*args' is delicate because this wrapper is also - # created for init-time functions like llarena.arena_malloc - # which are called before the GC is fully initialized - args = ', '.join(['arg%d' % i for i in range(len(args_ll))]) - d = {'original_impl': impl, - 's_result': s_result, - 'fakeimpl': fakeimpl, - '__name__': __name__, - } - exec py.code.compile(""" - from rpython.rlib.objectmodel import running_on_llinterp - from rpython.rlib.debug import llinterpcall - from rpython.rlib.jit import dont_look_inside - # note: we say 'dont_look_inside' mostly because the - # JIT does not support 'running_on_llinterp', but in - # theory it is probably right to stop jitting anyway. 
- @dont_look_inside - def ll_wrapper(%s): - if running_on_llinterp: - return llinterpcall(s_result, fakeimpl, %s) - else: - return original_impl(%s) - """ % (args, args, args)) in d - impl = func_with_new_name(d['ll_wrapper'], name + '_wrapper') - # store some attributes to the 'impl' function, where - # the eventual call to rtyper.getcallable() will find them - # and transfer them to the final lltype.functionptr(). - impl._llfnobjattrs_ = {'_name': self.name} - obj = rtyper.getannmixlevel().delayedfunction( - impl, signature_args, hop.s_result) - else: - FT = FuncType(args_ll, ll_result) - obj = functionptr(FT, name, _external_name=self.name, - _callable=fakeimpl, - _safe_not_sandboxed=self.safe_not_sandboxed) - vlist = [hop.inputconst(typeOf(obj), obj)] + hop.inputargs(*args_r) - hop.exception_is_here() - return hop.genop('direct_call', vlist, r_result) def register_external(function, args, result=None, export_name=None, llimpl=None, llfakeimpl=None, sandboxsafe=False): @@ -109,32 +115,20 @@ if export_name is None: export_name = function.__name__ + params_s = [annotation(arg) for arg in args] + s_result = annotation(result) class FunEntry(ExtFuncEntry): _about_ = function safe_not_sandboxed = sandboxsafe - - if args is None: - def normalize_args(self, *args_s): - return args_s # accept any argument unmodified - elif callable(args): - # custom annotation normalizer (see e.g. 
os.utime()) - normalize_args = staticmethod(args) - else: # use common case behavior - signature_args = args - - signature_result = annotation(result, None) + signature_args = params_s + signature_result = s_result name = export_name if llimpl: lltypeimpl = staticmethod(llimpl) if llfakeimpl: lltypefakeimpl = staticmethod(llfakeimpl) - if export_name: - FunEntry.__name__ = export_name - else: - FunEntry.__name__ = function.func_name - def is_external(func): if hasattr(func, 'value'): func = func.value diff --git a/rpython/rtyper/rtyper.py b/rpython/rtyper/rtyper.py --- a/rpython/rtyper/rtyper.py +++ b/rpython/rtyper/rtyper.py @@ -32,11 +32,24 @@ from rpython.translator.sandbox.rsandbox import make_sandbox_trampoline +class RTyperBackend(object): + pass + +class GenCBackend(RTyperBackend): + pass +genc_backend = GenCBackend() + +class LLInterpBackend(RTyperBackend): + pass +llinterp_backend = LLInterpBackend() + + class RPythonTyper(object): from rpython.rtyper.rmodel import log - def __init__(self, annotator): + def __init__(self, annotator, backend=genc_backend): self.annotator = annotator + self.backend = backend self.lowlevel_ann_policy = LowLevelAnnotatorPolicy(self) self.reprs = {} self._reprs_must_call_setup = [] diff --git a/rpython/rtyper/test/test_extfunc.py b/rpython/rtyper/test/test_extfunc.py --- a/rpython/rtyper/test/test_extfunc.py +++ b/rpython/rtyper/test/test_extfunc.py @@ -1,7 +1,6 @@ import py -from rpython.rtyper.extfunc import ExtFuncEntry, register_external,\ - is_external +from rpython.rtyper.extfunc import register_external from rpython.annotator.model import SomeInteger, SomeString, AnnotatorError from rpython.annotator.annrpython import RPythonAnnotator from rpython.annotator.policy import AnnotatorPolicy @@ -19,11 +18,7 @@ "NOT_RPYTHON" return eval("x+40") - class BTestFuncEntry(ExtFuncEntry): - _about_ = b - name = 'b' - signature_args = [SomeInteger()] - signature_result = SomeInteger() + register_external(b, [int], result=int) def f(): 
return b(2) @@ -43,15 +38,11 @@ def c(y, x): yyy - class CTestFuncEntry(ExtFuncEntry): - _about_ = c - name = 'ccc' - signature_args = [SomeInteger()] * 2 - signature_result = SomeInteger() + def llimpl(y, x): + return y + x - def lltypeimpl(y, x): - return y + x - lltypeimpl = staticmethod(lltypeimpl) + register_external(c, [int, int], result=int, llimpl=llimpl, + export_name='ccc') def f(): return c(3, 4) @@ -59,22 +50,6 @@ res = interpret(f, []) assert res == 7 - def test_register_external_signature(self): - """ - Test the standard interface for external functions. - """ - def dd(): - pass - register_external(dd, [int], int) - - def f(): - return dd(3) - - policy = AnnotatorPolicy() - a = RPythonAnnotator(policy=policy) - s = a.build_types(f, []) - assert isinstance(s, SomeInteger) - def test_register_external_tuple_args(self): """ Verify the annotation of a registered external function which takes a @@ -121,23 +96,6 @@ s = a.build_types(f, []) assert isinstance(s, SomeInteger) - def test_register_external_specialcase(self): - """ - When args=None, the external function accepts any arguments unmodified. 
- """ - def function_withspecialcase(arg): - return repr(arg) - register_external(function_withspecialcase, args=None, result=str) - - def f(): - x = function_withspecialcase - return x(33) + x("aaa") + x([]) + "\n" - - policy = AnnotatorPolicy() - a = RPythonAnnotator(policy=policy) - s = a.build_types(f, []) - assert isinstance(s, SomeString) - def test_str0(self): str0 = SomeString(no_nul=True) def os_open(s): @@ -182,3 +140,22 @@ # fails with TooLateForChange a.build_types(g, [[str]]) a.build_types(g, [[str0]]) # Does not raise + + def test_register_external_llfakeimpl(self): + def a(i): + return i + def a_llimpl(i): + return i * 2 + def a_llfakeimpl(i): + return i * 3 + register_external(a, [int], int, llimpl=a_llimpl, + llfakeimpl=a_llfakeimpl) + def f(i): + return a(i) + + res = interpret(f, [7]) + assert res == 21 + + from rpython.translator.c.test.test_genc import compile + fc = compile(f, [int]) + assert fc(7) == 14 diff --git a/rpython/rtyper/test/test_llinterp.py b/rpython/rtyper/test/test_llinterp.py --- a/rpython/rtyper/test/test_llinterp.py +++ b/rpython/rtyper/test/test_llinterp.py @@ -13,7 +13,7 @@ from rpython.rlib.rarithmetic import r_uint, ovfcheck from rpython.tool import leakfinder from rpython.conftest import option - +from rpython.rtyper.rtyper import llinterp_backend # switch on logging of interp to show more info on failing tests @@ -39,6 +39,7 @@ t.view() global typer # we need it for find_exception typer = t.buildrtyper() + typer.backend = llinterp_backend typer.specialize() #t.view() t.checkgraphs() diff --git a/rpython/rtyper/test/test_rbuiltin.py b/rpython/rtyper/test/test_rbuiltin.py --- a/rpython/rtyper/test/test_rbuiltin.py +++ b/rpython/rtyper/test/test_rbuiltin.py @@ -3,8 +3,7 @@ import py -from rpython.rlib.debug import llinterpcall -from rpython.rlib.objectmodel import instantiate, running_on_llinterp, compute_unique_id, current_object_addr_as_int +from rpython.rlib.objectmodel import instantiate, compute_unique_id, 
current_object_addr_as_int from rpython.rlib.rarithmetic import (intmask, longlongmask, r_int64, is_valid_int, r_int, r_uint, r_longlong, r_ulonglong) from rpython.rlib.rstring import StringBuilder, UnicodeBuilder @@ -456,26 +455,6 @@ res = self.interpret(fn, [3.25]) assert res == 7.25 - def test_debug_llinterpcall(self): - S = lltype.Struct('S', ('m', lltype.Signed)) - SPTR = lltype.Ptr(S) - def foo(n): - "NOT_RPYTHON" - s = lltype.malloc(S, immortal=True) - s.m = eval("n*6", locals()) - return s - def fn(n): - if running_on_llinterp: - return llinterpcall(SPTR, foo, n).m - else: - return 321 - res = self.interpret(fn, [7]) - assert res == 42 - from rpython.translator.c.test.test_genc import compile - f = compile(fn, [int]) - res = f(7) - assert res == 321 - def test_id(self): class A: pass diff --git a/rpython/translator/c/node.py b/rpython/translator/c/node.py --- a/rpython/translator/c/node.py +++ b/rpython/translator/c/node.py @@ -913,6 +913,7 @@ return [] def new_funcnode(db, T, obj, forcename=None): + from rpython.rtyper.rtyper import llinterp_backend if db.sandbox: if (getattr(obj, 'external', None) is not None and not obj._safe_not_sandboxed): @@ -934,6 +935,9 @@ return ExternalFuncNode(db, T, obj, name) elif hasattr(obj._callable, "c_name"): return ExternalFuncNode(db, T, obj, name) # this case should only be used for entrypoints + elif db.translator.rtyper.backend is llinterp_backend: + # on llinterp, anything goes + return ExternalFuncNode(db, T, obj, name) else: raise ValueError("don't know how to generate code for %r" % (obj,)) diff --git a/rpython/translator/sandbox/test/test_sandbox.py b/rpython/translator/sandbox/test/test_sandbox.py --- a/rpython/translator/sandbox/test/test_sandbox.py +++ b/rpython/translator/sandbox/test/test_sandbox.py @@ -292,6 +292,21 @@ rescode = pipe.wait() assert rescode == 0 +def test_environ_items(): + def entry_point(argv): + print os.environ.items() + return 0 + + exe = compile(entry_point) + g, f = 
run_in_subprocess(exe) + expect(f, g, "ll_os.ll_os_envitems", (), []) + expect(f, g, "ll_os.ll_os_write", (1, "[]\n"), 3) + g.close() + tail = f.read() + f.close() + assert tail == "" + + class TestPrintedResults: def run(self, entry_point, args, expected): From pypy.commits at gmail.com Wed Feb 17 18:00:53 2016 From: pypy.commits at gmail.com (rlamy) Date: Wed, 17 Feb 2016 15:00:53 -0800 (PST) Subject: [pypy-commit] pypy default: update whatsnew Message-ID: <56c4fba5.89bd1c0a.72559.05f1@mx.google.com> Author: Ronan Lamy Branch: Changeset: r82317:1b11375c5116 Date: 2016-02-17 23:00 +0000 http://bitbucket.org/pypy/pypy/changeset/1b11375c5116/ Log: update whatsnew diff --git a/pypy/doc/whatsnew-head.rst b/pypy/doc/whatsnew-head.rst --- a/pypy/doc/whatsnew-head.rst +++ b/pypy/doc/whatsnew-head.rst @@ -152,3 +152,9 @@ Seperate structmember.h from Python.h Also enhance creating api functions to specify which header file they appear in (previously only pypy_decl.h) + +.. branch: llimpl + +Refactor register_external(), remove running_on_llinterp mechanism and +apply sandbox transform on externals at the end of annotation. 
+ From pypy.commits at gmail.com Wed Feb 17 18:19:25 2016 From: pypy.commits at gmail.com (rlamy) Date: Wed, 17 Feb 2016 15:19:25 -0800 (PST) Subject: [pypy-commit] pypy default: kill unused make_constgraphbuilder() Message-ID: <56c4fffd.4c181c0a.d330b.07b7@mx.google.com> Author: Ronan Lamy Branch: Changeset: r82318:6f907027fbb2 Date: 2016-02-17 23:18 +0000 http://bitbucket.org/pypy/pypy/changeset/6f907027fbb2/ Log: kill unused make_constgraphbuilder() diff --git a/rpython/annotator/specialize.py b/rpython/annotator/specialize.py --- a/rpython/annotator/specialize.py +++ b/rpython/annotator/specialize.py @@ -317,18 +317,6 @@ yield (value,) + tuple_tail -def make_constgraphbuilder(n, v=None, factory=None, srcmodule=None): - def constgraphbuilder(translator, ignore): - args = ','.join(["arg%d" % i for i in range(n)]) - if factory is not None: - computed_v = factory() - else: - computed_v = v - miniglobals = {'v': computed_v, '__name__': srcmodule} - exec py.code.Source("constf = lambda %s: v" % args).compile() in miniglobals - return translator.buildflowgraph(miniglobals['constf']) - return constgraphbuilder - def maybe_star_args(funcdesc, key, args_s): args_s, key1, builder = flatten_star_args(funcdesc, args_s) if key1 is not None: diff --git a/rpython/rtyper/test/test_rpbc.py b/rpython/rtyper/test/test_rpbc.py --- a/rpython/rtyper/test/test_rpbc.py +++ b/rpython/rtyper/test/test_rpbc.py @@ -1,7 +1,7 @@ import py from rpython.annotator import model as annmodel -from rpython.annotator import policy, specialize +from rpython.annotator import specialize from rpython.rtyper.lltypesystem.lltype import typeOf from rpython.rtyper.test.tool import BaseRtypingTest from rpython.rtyper.llannotation import SomePtr, lltype_to_annotation @@ -1690,59 +1690,6 @@ # ____________________________________________________________ -class TestRPBCExtra(BaseRtypingTest): - - def test_folding_specialize_support(self): - - class S(object): - - def w(s, x): - if isinstance(x, int): - return x 
- if isinstance(x, str): - return len(x) - return -1 - w._annspecialcase_ = "specialize:w" - - def _freeze_(self): - return True - - s = S() - - def f(i, n): - w = s.w - if i == 0: - return w(0) - elif i == 1: - return w("abc") - elif i == 2: - return w(3*n) - elif i == 3: - return w(str(n)) - return -1 - - class P(policy.AnnotatorPolicy): - def specialize__w(pol, funcdesc, args_s): - typ = args_s[1].knowntype - if args_s[0].is_constant() and args_s[1].is_constant(): - x = args_s[1].const - v = s.w(x) - builder = specialize.make_constgraphbuilder(2, v) - return funcdesc.cachedgraph(x, builder=builder) - return funcdesc.cachedgraph(typ) - - p = P() - - res = self.interpret(f, [0, 66], policy=p) - assert res == 0 - res = self.interpret(f, [1, 66], policy=p) - assert res == 3 - res = self.interpret(f, [2, 4], policy=p) - assert res == 12 - res = self.interpret(f, [3, 5555], policy=p) - assert res == 4 - - def test_hlinvoke_simple(): def f(a,b): return a + b From pypy.commits at gmail.com Wed Feb 17 19:33:17 2016 From: pypy.commits at gmail.com (stefanor) Date: Wed, 17 Feb 2016 16:33:17 -0800 (PST) Subject: [pypy-commit] cffi default: distribute folded back into setuptools a while ago. This option has no effect any more Message-ID: <56c5114d.e6bbc20a.d2a3a.159e@mx.google.com> Author: Stefano Rivera Branch: Changeset: r2639:d7cbf9223795 Date: 2016-02-17 16:28 -0800 http://bitbucket.org/cffi/cffi/changeset/d7cbf9223795/ Log: distribute folded back into setuptools a while ago. 
This option has no effect any more diff --git a/testing/cffi0/test_zintegration.py b/testing/cffi0/test_zintegration.py --- a/testing/cffi0/test_zintegration.py +++ b/testing/cffi0/test_zintegration.py @@ -11,7 +11,7 @@ def create_venv(name): tmpdir = udir.join(name) try: - subprocess.check_call(['virtualenv', '--distribute', + subprocess.check_call(['virtualenv', '-p', os.path.abspath(sys.executable), str(tmpdir)]) except OSError as e: From pypy.commits at gmail.com Wed Feb 17 19:33:19 2016 From: pypy.commits at gmail.com (stefanor) Date: Wed, 17 Feb 2016 16:33:19 -0800 (PST) Subject: [pypy-commit] cffi default: Recent releases of virtualenv default to downloading new pip, etc. We don't need to do that Message-ID: <56c5114f.657bc20a.4c046.5bbd@mx.google.com> Author: Stefano Rivera Branch: Changeset: r2640:d4815257b3cb Date: 2016-02-17 16:33 -0800 http://bitbucket.org/cffi/cffi/changeset/d4815257b3cb/ Log: Recent releases of virtualenv default to downloading new pip, etc. We don't need to do that diff --git a/testing/cffi0/test_zintegration.py b/testing/cffi0/test_zintegration.py --- a/testing/cffi0/test_zintegration.py +++ b/testing/cffi0/test_zintegration.py @@ -11,7 +11,7 @@ def create_venv(name): tmpdir = udir.join(name) try: - subprocess.check_call(['virtualenv', + subprocess.check_call(['virtualenv', '--never-download', '-p', os.path.abspath(sys.executable), str(tmpdir)]) except OSError as e: From pypy.commits at gmail.com Thu Feb 18 09:17:52 2016 From: pypy.commits at gmail.com (plan_rich) Date: Thu, 18 Feb 2016 06:17:52 -0800 (PST) Subject: [pypy-commit] pypy s390x-backend: some more fixes, wrong offset was calculated by push/pop to jit frame Message-ID: <56c5d290.8ee61c0a.6c72b.fffffd21@mx.google.com> Author: Richard Plangger Branch: s390x-backend Changeset: r82319:705395ae74a3 Date: 2016-02-18 15:16 +0100 http://bitbucket.org/pypy/pypy/changeset/705395ae74a3/ Log: some more fixes, wrong offset was calculated by push/pop to jit frame diff --git 
a/rpython/jit/backend/zarch/assembler.py b/rpython/jit/backend/zarch/assembler.py --- a/rpython/jit/backend/zarch/assembler.py +++ b/rpython/jit/backend/zarch/assembler.py @@ -182,7 +182,7 @@ RCS2 = r.r10 RCS3 = r.r11 - # r10,r11,r12,r2,f0 -> makes exactly 4 words + 8 byte + # r9,r10,r11,r2,f0 -> makes exactly 4 words + 8 byte extra_stack_size = 4 * WORD + 8 if for_frame: # NOTE: don't save registers on the jitframe here! It might @@ -201,7 +201,7 @@ mc.LG(r.SCRATCH, l.addr(0, r.SP)) mc.STG(r.SCRATCH, l.addr(-extra_stack_size, r.SP)) mc.LAY(r.SP, l.addr(-extra_stack_size, r.SP)) - mc.STMG(r.r10, r.r12, l.addr(off, r.SP)) + mc.STMG(r.r9, r.r11, l.addr(off, r.SP)) mc.STG(r.r2, l.addr(off+3*WORD, r.SP)) # OK to use STD, because offset is not negative mc.STD(r.f0, l.addr(off+4*WORD, r.SP)) @@ -253,7 +253,7 @@ if for_frame: off = STD_FRAME_SIZE_IN_BYTES - mc.LMG(r.r10, r.r12, l.addr(off, r.SP)) + mc.LMG(r.r9, r.r11, l.addr(off, r.SP)) mc.LG(r.r2, l.addr(off+3*WORD, r.SP)) mc.LD(r.f0, l.addr(off+4*WORD, r.SP)) mc.LAY(r.SP, l.addr(extra_stack_size, r.SP)) @@ -418,7 +418,6 @@ if supports_floats: self._push_fp_regs_to_jitframe(mc) - # allocate a stack frame! 
mc.raw_call(r.r11) # Finish @@ -1092,7 +1091,8 @@ base_ofs = self.cpu.get_baseofs_of_frame_field() if len(includes) == 1: iv = includes[0] - addr = l.addr(base_ofs + iv.value * WORD, r.SPP) + v = r.ALL_REG_INDEXES[iv] + addr = l.addr(base_ofs + v * WORD, r.SPP) if store: mc.STG(iv, addr) else: diff --git a/rpython/jit/backend/zarch/locations.py b/rpython/jit/backend/zarch/locations.py --- a/rpython/jit/backend/zarch/locations.py +++ b/rpython/jit/backend/zarch/locations.py @@ -75,8 +75,8 @@ def is_fp_reg(self): return True - def as_key(self): # 20 <= as_key <= 35 - return self.value + 20 + def as_key(self): # 16 <= as_key <= 32 + return self.value + 16 def is_float(self): return True @@ -125,34 +125,11 @@ return True def as_key(self): # an aligned word + 10000 - return self.position + 10000 + return -self.position + 10000 def is_float(self): return self.type == FLOAT -class RawSPStackLocation(AssemblerLocation): - _immutable_ = True - - def __init__(self, sp_offset, type=INT): - if type == FLOAT: - self.width = DOUBLE_WORD - else: - self.width = WORD - self.value = sp_offset - self.type = type - - def __repr__(self): - return 'SP(%s)+%d' % (self.type, self.value,) - - def is_raw_sp(self): - return True - - def is_float(self): - return self.type == FLOAT - - def as_key(self): # a word >= 1000, and < 1000 + size of SP frame - return self.value + 1000 - class AddressLocation(AssemblerLocation): _immutable_ = True @@ -173,9 +150,6 @@ if length: self.length = length.value - def as_key(self): - return self.displace + 100000 - class PoolLoc(AddressLocation): _immutable_ = True width = WORD @@ -205,6 +179,9 @@ def __repr__(self): return "pool(i,%d)" % self.displace + def as_key(self): + return -self.displace // 8 + 20000 + def addr(displace, basereg=None, indexreg=None, length=None): return AddressLocation(basereg, indexreg, displace, length) diff --git a/rpython/jit/backend/zarch/opassembler.py b/rpython/jit/backend/zarch/opassembler.py --- 
a/rpython/jit/backend/zarch/opassembler.py +++ b/rpython/jit/backend/zarch/opassembler.py @@ -404,8 +404,8 @@ if regalloc.fprm.reg_bindings: floats = True cond_call_adr = self.cond_call_slowpath[floats * 2 + callee_only] - self.mc.load_imm(r.SCRATCH, cond_call_adr) - self.mc.BASR(r.RETURN, r.SCRATCH) + self.mc.load_imm(r.r14, cond_call_adr) + self.mc.BASR(r.r14, r.r14) # restoring the registers saved above, and doing pop_gcmap(), is left # to the cond_call_slowpath helper. We never have any result value. relative_target = self.mc.currpos() - jmp_adr diff --git a/rpython/jit/backend/zarch/registers.py b/rpython/jit/backend/zarch/registers.py --- a/rpython/jit/backend/zarch/registers.py +++ b/rpython/jit/backend/zarch/registers.py @@ -38,7 +38,9 @@ ALL_REG_INDEXES[_r] = len(ALL_REG_INDEXES) for _r in MANAGED_FP_REGS: ALL_REG_INDEXES[_r] = len(ALL_REG_INDEXES) -JITFRAME_FIXED_SIZE = len(ALL_REG_INDEXES) + 1 # plus one word to have an even number +# NOT used, but keeps JITFRAME_FIXED_SIZE even +ALL_REG_INDEXES[f15] = len(ALL_REG_INDEXES) +JITFRAME_FIXED_SIZE = len(ALL_REG_INDEXES) def odd_reg(r): assert r.value % 2 == 0 From pypy.commits at gmail.com Thu Feb 18 16:40:35 2016 From: pypy.commits at gmail.com (arigo) Date: Thu, 18 Feb 2016 13:40:35 -0800 (PST) Subject: [pypy-commit] pypy.org extradoc: update the values Message-ID: <56c63a53.05e41c0a.86363.ffff98a0@mx.google.com> Author: Armin Rigo Branch: extradoc Changeset: r702:9104b91ed33d Date: 2016-02-18 22:40 +0100 http://bitbucket.org/pypy/pypy.org/changeset/9104b91ed33d/ Log: update the values diff --git a/don1.html b/don1.html --- a/don1.html +++ b/don1.html @@ -15,7 +15,7 @@ - $62850 of $105000 (59.9%) + $62855 of $105000 (59.9%)
    @@ -23,7 +23,7 @@
  • From pypy.commits at gmail.com Thu Feb 18 17:33:35 2016 From: pypy.commits at gmail.com (mjacob) Date: Thu, 18 Feb 2016 14:33:35 -0800 (PST) Subject: [pypy-commit] pypy py3.3: Add target_is_directory parameter to os.symlink, which is ignored on non-Windows platforms. Message-ID: <56c646bf.02931c0a.1a676.ffffa279@mx.google.com> Author: Manuel Jacob Branch: py3.3 Changeset: r82320:ed8bc259dde5 Date: 2016-02-18 23:23 +0100 http://bitbucket.org/pypy/pypy/changeset/ed8bc259dde5/ Log: Add target_is_directory parameter to os.symlink, which is ignored on non-Windows platforms. diff --git a/pypy/module/posix/interp_posix.py b/pypy/module/posix/interp_posix.py --- a/pypy/module/posix/interp_posix.py +++ b/pypy/module/posix/interp_posix.py @@ -731,8 +731,9 @@ except OSError, e: raise wrap_oserror(space, e) -def symlink(space, w_src, w_dst): +def symlink(space, w_src, w_dst, w_target_is_directory=None): "Create a symbolic link pointing to src named dst." + # TODO: target_is_directory has a meaning on Windows try: dispatch_filename_2(rposix.symlink)(space, w_src, w_dst) except OSError, e: From pypy.commits at gmail.com Thu Feb 18 20:54:52 2016 From: pypy.commits at gmail.com (mjacob) Date: Thu, 18 Feb 2016 17:54:52 -0800 (PST) Subject: [pypy-commit] pypy py3.3: Fix test_structseq.py by not checking _structseq's __file__. Message-ID: <56c675ec.42711c0a.a3701.ffffcc92@mx.google.com> Author: Manuel Jacob Branch: py3.3 Changeset: r82321:040589830b1d Date: 2016-02-19 02:54 +0100 http://bitbucket.org/pypy/pypy/changeset/040589830b1d/ Log: Fix test_structseq.py by not checking _structseq's __file__. The __file__ attribute is not present on the _structseq module because of import bootstrap issues. 
diff --git a/pypy/module/test_lib_pypy/test_structseq.py b/pypy/module/test_lib_pypy/test_structseq.py --- a/pypy/module/test_lib_pypy/test_structseq.py +++ b/pypy/module/test_lib_pypy/test_structseq.py @@ -6,7 +6,8 @@ spaceconfig = dict(usemodules=('binascii', 'struct',)) def setup_class(cls): - cls.w__structseq = import_lib_pypy(cls.space, '_structseq') + cls.w__structseq = cls.space.appexec( + [], "(): import _structseq; return _structseq") def w_get_mydata(self): _structseq = self._structseq From pypy.commits at gmail.com Fri Feb 19 03:39:48 2016 From: pypy.commits at gmail.com (arigo) Date: Fri, 19 Feb 2016 00:39:48 -0800 (PST) Subject: [pypy-commit] pypy default: issue #2241: oops Message-ID: <56c6d4d4.41df1c0a.d319e.1c4b@mx.google.com> Author: Armin Rigo Branch: Changeset: r82322:eb4764c87513 Date: 2016-02-19 09:39 +0100 http://bitbucket.org/pypy/pypy/changeset/eb4764c87513/ Log: issue #2241: oops diff --git a/pypy/module/_cffi_backend/embedding.py b/pypy/module/_cffi_backend/embedding.py --- a/pypy/module/_cffi_backend/embedding.py +++ b/pypy/module/_cffi_backend/embedding.py @@ -57,7 +57,7 @@ # pypy_init_embedded_cffi_module(). 
if not glob.patched_sys: space.appexec([], """(): - import os + import os, sys sys.stdin = sys.__stdin__ = os.fdopen(0, 'rb', 0) sys.stdout = sys.__stdout__ = os.fdopen(1, 'wb', 0) sys.stderr = sys.__stderr__ = os.fdopen(2, 'wb', 0) From pypy.commits at gmail.com Fri Feb 19 03:56:59 2016 From: pypy.commits at gmail.com (arigo) Date: Fri, 19 Feb 2016 00:56:59 -0800 (PST) Subject: [pypy-commit] pypy default: Document branches Message-ID: <56c6d8db.8916c20a.1c6c7.6c44@mx.google.com> Author: Armin Rigo Branch: Changeset: r82323:fa47de37ef9b Date: 2016-02-19 09:56 +0100 http://bitbucket.org/pypy/pypy/changeset/fa47de37ef9b/ Log: Document branches diff --git a/pypy/doc/whatsnew-head.rst b/pypy/doc/whatsnew-head.rst --- a/pypy/doc/whatsnew-head.rst +++ b/pypy/doc/whatsnew-head.rst @@ -158,3 +158,8 @@ Refactor register_external(), remove running_on_llinterp mechanism and apply sandbox transform on externals at the end of annotation. +.. branch: cffi-embedding-win32 + +.. branch: windows-vmprof-support + +vmprof should work on Windows. From pypy.commits at gmail.com Fri Feb 19 04:38:32 2016 From: pypy.commits at gmail.com (arigo) Date: Fri, 19 Feb 2016 01:38:32 -0800 (PST) Subject: [pypy-commit] pypy default: import cffi/b4991ae7ce3a Message-ID: <56c6e298.6bb8c20a.27bc4.7b32@mx.google.com> Author: Armin Rigo Branch: Changeset: r82324:e79f457ab18f Date: 2016-02-19 10:37 +0100 http://bitbucket.org/pypy/pypy/changeset/e79f457ab18f/ Log: import cffi/b4991ae7ce3a diff --git a/lib_pypy/cffi/api.py b/lib_pypy/cffi/api.py --- a/lib_pypy/cffi/api.py +++ b/lib_pypy/cffi/api.py @@ -550,6 +550,7 @@ lst.append(value) # if '__pypy__' in sys.builtin_module_names: + import os if sys.platform == "win32": # we need 'libpypy-c.lib'. Current distributions of # pypy (>= 4.1) contain it as 'libs/python27.lib'. 
@@ -558,11 +559,15 @@ ensure('library_dirs', os.path.join(sys.prefix, 'libs')) else: # we need 'libpypy-c.{so,dylib}', which should be by - # default located in 'sys.prefix/bin' + # default located in 'sys.prefix/bin' for installed + # systems. pythonlib = "pypy-c" if hasattr(sys, 'prefix'): - import os ensure('library_dirs', os.path.join(sys.prefix, 'bin')) + # On uninstalled pypy's, the libpypy-c is typically found in + # .../pypy/goal/. + if hasattr(sys, 'prefix'): + ensure('library_dirs', os.path.join(sys.prefix, 'pypy', 'goal')) else: if sys.platform == "win32": template = "python%d%d" diff --git a/pypy/module/test_lib_pypy/cffi_tests/embedding/test_basic.py b/pypy/module/test_lib_pypy/cffi_tests/embedding/test_basic.py --- a/pypy/module/test_lib_pypy/cffi_tests/embedding/test_basic.py +++ b/pypy/module/test_lib_pypy/cffi_tests/embedding/test_basic.py @@ -28,11 +28,14 @@ def prefix_pythonpath(): cffi_base = os.path.dirname(os.path.dirname(local_dir)) - pythonpath = os.environ.get('PYTHONPATH', '').split(os.pathsep) + pythonpath = org_env.get('PYTHONPATH', '').split(os.pathsep) if cffi_base not in pythonpath: pythonpath.insert(0, cffi_base) return os.pathsep.join(pythonpath) +def setup_module(mod): + mod.org_env = os.environ.copy() + class EmbeddingTests: _compiled_modules = {} @@ -46,14 +49,12 @@ def get_path(self): return str(self._path.ensure(dir=1)) - def _run_base(self, args, env_extra={}, **kwds): - print('RUNNING:', args, env_extra, kwds) - env = os.environ.copy() - env.update(env_extra) - return subprocess.Popen(args, env=env, **kwds) + def _run_base(self, args, **kwds): + print('RUNNING:', args, kwds) + return subprocess.Popen(args, **kwds) - def _run(self, args, env_extra={}): - popen = self._run_base(args, env_extra, cwd=self.get_path(), + def _run(self, args): + popen = self._run_base(args, cwd=self.get_path(), stdout=subprocess.PIPE, universal_newlines=True) output = popen.stdout.read() @@ -65,6 +66,7 @@ return output def prepare_module(self, 
name): + self.patch_environment() if name not in self._compiled_modules: path = self.get_path() filename = '%s.py' % name @@ -74,9 +76,8 @@ # find a solution to that: we could hack sys.path inside the # script run here, but we can't hack it in the same way in # execute(). - env_extra = {'PYTHONPATH': prefix_pythonpath()} - output = self._run([sys.executable, os.path.join(local_dir, filename)], - env_extra=env_extra) + output = self._run([sys.executable, + os.path.join(local_dir, filename)]) match = re.compile(r"\bFILENAME: (.+)").search(output) assert match dynamic_lib_name = match.group(1) @@ -120,28 +121,35 @@ finally: os.chdir(curdir) + def patch_environment(self): + path = self.get_path() + # for libpypy-c.dll or Python27.dll + path = os.path.split(sys.executable)[0] + os.path.pathsep + path + env_extra = {'PYTHONPATH': prefix_pythonpath()} + if sys.platform == 'win32': + envname = 'PATH' + else: + envname = 'LD_LIBRARY_PATH' + libpath = org_env.get(envname) + if libpath: + libpath = path + os.path.pathsep + libpath + else: + libpath = path + env_extra[envname] = libpath + for key, value in sorted(env_extra.items()): + if os.environ.get(key) != value: + print '* setting env var %r to %r' % (key, value) + os.environ[key] = value + def execute(self, name): path = self.get_path() - env_extra = {'PYTHONPATH': prefix_pythonpath()} - if sys.platform == 'win32': - _path = os.environ.get('PATH') - # for libpypy-c.dll or Python27.dll - _path = os.path.split(sys.executable)[0] + ';' + _path - env_extra['PATH'] = _path - else: - libpath = os.environ.get('LD_LIBRARY_PATH') - if libpath: - libpath = path + ':' + libpath - else: - libpath = path - env_extra['LD_LIBRARY_PATH'] = libpath print('running %r in %r' % (name, path)) executable_name = name if sys.platform == 'win32': executable_name = os.path.join(path, executable_name + '.exe') else: executable_name = os.path.join('.', executable_name) - popen = self._run_base([executable_name], env_extra, cwd=path, + popen = 
self._run_base([executable_name], cwd=path, stdout=subprocess.PIPE, universal_newlines=True) result = popen.stdout.read() From pypy.commits at gmail.com Fri Feb 19 04:38:53 2016 From: pypy.commits at gmail.com (arigo) Date: Fri, 19 Feb 2016 01:38:53 -0800 (PST) Subject: [pypy-commit] cffi default: more pypy tweaks Message-ID: <56c6e2ad.05e41c0a.86363.3a11@mx.google.com> Author: Armin Rigo Branch: Changeset: r2641:b4991ae7ce3a Date: 2016-02-19 10:37 +0100 http://bitbucket.org/cffi/cffi/changeset/b4991ae7ce3a/ Log: more pypy tweaks diff --git a/cffi/api.py b/cffi/api.py --- a/cffi/api.py +++ b/cffi/api.py @@ -550,6 +550,7 @@ lst.append(value) # if '__pypy__' in sys.builtin_module_names: + import os if sys.platform == "win32": # we need 'libpypy-c.lib'. Current distributions of # pypy (>= 4.1) contain it as 'libs/python27.lib'. @@ -558,11 +559,15 @@ ensure('library_dirs', os.path.join(sys.prefix, 'libs')) else: # we need 'libpypy-c.{so,dylib}', which should be by - # default located in 'sys.prefix/bin' + # default located in 'sys.prefix/bin' for installed + # systems. pythonlib = "pypy-c" if hasattr(sys, 'prefix'): - import os ensure('library_dirs', os.path.join(sys.prefix, 'bin')) + # On uninstalled pypy's, the libpypy-c is typically found in + # .../pypy/goal/. 
+ if hasattr(sys, 'prefix'): + ensure('library_dirs', os.path.join(sys.prefix, 'pypy', 'goal')) else: if sys.platform == "win32": template = "python%d%d" diff --git a/testing/embedding/test_basic.py b/testing/embedding/test_basic.py --- a/testing/embedding/test_basic.py +++ b/testing/embedding/test_basic.py @@ -27,11 +27,14 @@ def prefix_pythonpath(): cffi_base = os.path.dirname(os.path.dirname(local_dir)) - pythonpath = os.environ.get('PYTHONPATH', '').split(os.pathsep) + pythonpath = org_env.get('PYTHONPATH', '').split(os.pathsep) if cffi_base not in pythonpath: pythonpath.insert(0, cffi_base) return os.pathsep.join(pythonpath) +def setup_module(mod): + mod.org_env = os.environ.copy() + class EmbeddingTests: _compiled_modules = {} @@ -45,14 +48,12 @@ def get_path(self): return str(self._path.ensure(dir=1)) - def _run_base(self, args, env_extra={}, **kwds): - print('RUNNING:', args, env_extra, kwds) - env = os.environ.copy() - env.update(env_extra) - return subprocess.Popen(args, env=env, **kwds) + def _run_base(self, args, **kwds): + print('RUNNING:', args, kwds) + return subprocess.Popen(args, **kwds) - def _run(self, args, env_extra={}): - popen = self._run_base(args, env_extra, cwd=self.get_path(), + def _run(self, args): + popen = self._run_base(args, cwd=self.get_path(), stdout=subprocess.PIPE, universal_newlines=True) output = popen.stdout.read() @@ -64,6 +65,7 @@ return output def prepare_module(self, name): + self.patch_environment() if name not in self._compiled_modules: path = self.get_path() filename = '%s.py' % name @@ -73,9 +75,8 @@ # find a solution to that: we could hack sys.path inside the # script run here, but we can't hack it in the same way in # execute(). 
- env_extra = {'PYTHONPATH': prefix_pythonpath()} - output = self._run([sys.executable, os.path.join(local_dir, filename)], - env_extra=env_extra) + output = self._run([sys.executable, + os.path.join(local_dir, filename)]) match = re.compile(r"\bFILENAME: (.+)").search(output) assert match dynamic_lib_name = match.group(1) @@ -119,28 +120,35 @@ finally: os.chdir(curdir) + def patch_environment(self): + path = self.get_path() + # for libpypy-c.dll or Python27.dll + path = os.path.split(sys.executable)[0] + os.path.pathsep + path + env_extra = {'PYTHONPATH': prefix_pythonpath()} + if sys.platform == 'win32': + envname = 'PATH' + else: + envname = 'LD_LIBRARY_PATH' + libpath = org_env.get(envname) + if libpath: + libpath = path + os.path.pathsep + libpath + else: + libpath = path + env_extra[envname] = libpath + for key, value in sorted(env_extra.items()): + if os.environ.get(key) != value: + print '* setting env var %r to %r' % (key, value) + os.environ[key] = value + def execute(self, name): path = self.get_path() - env_extra = {'PYTHONPATH': prefix_pythonpath()} - if sys.platform == 'win32': - _path = os.environ.get('PATH') - # for libpypy-c.dll or Python27.dll - _path = os.path.split(sys.executable)[0] + ';' + _path - env_extra['PATH'] = _path - else: - libpath = os.environ.get('LD_LIBRARY_PATH') - if libpath: - libpath = path + ':' + libpath - else: - libpath = path - env_extra['LD_LIBRARY_PATH'] = libpath print('running %r in %r' % (name, path)) executable_name = name if sys.platform == 'win32': executable_name = os.path.join(path, executable_name + '.exe') else: executable_name = os.path.join('.', executable_name) - popen = self._run_base([executable_name], env_extra, cwd=path, + popen = self._run_base([executable_name], cwd=path, stdout=subprocess.PIPE, universal_newlines=True) result = popen.stdout.read() From pypy.commits at gmail.com Fri Feb 19 04:38:54 2016 From: pypy.commits at gmail.com (arigo) Date: Fri, 19 Feb 2016 01:38:54 -0800 (PST) Subject: 
[pypy-commit] cffi default: merge heads Message-ID: <56c6e2ae.890bc30a.2fecc.7be7@mx.google.com> Author: Armin Rigo Branch: Changeset: r2642:d2a90d323791 Date: 2016-02-19 10:38 +0100 http://bitbucket.org/cffi/cffi/changeset/d2a90d323791/ Log: merge heads diff --git a/testing/cffi0/test_zintegration.py b/testing/cffi0/test_zintegration.py --- a/testing/cffi0/test_zintegration.py +++ b/testing/cffi0/test_zintegration.py @@ -11,7 +11,7 @@ def create_venv(name): tmpdir = udir.join(name) try: - subprocess.check_call(['virtualenv', '--distribute', + subprocess.check_call(['virtualenv', '--never-download', '-p', os.path.abspath(sys.executable), str(tmpdir)]) except OSError as e: From pypy.commits at gmail.com Fri Feb 19 10:23:04 2016 From: pypy.commits at gmail.com (arigo) Date: Fri, 19 Feb 2016 07:23:04 -0800 (PST) Subject: [pypy-commit] cffi default: A dump of my experiences with rpaths and $ORIGIN messes Message-ID: <56c73358.8ee61c0a.6c72b.ffffba8a@mx.google.com> Author: Armin Rigo Branch: Changeset: r2643:96df07b3e9ba Date: 2016-02-19 16:22 +0100 http://bitbucket.org/cffi/cffi/changeset/96df07b3e9ba/ Log: A dump of my experiences with rpaths and $ORIGIN messes diff --git a/doc/source/embedding.rst b/doc/source/embedding.rst --- a/doc/source/embedding.rst +++ b/doc/source/embedding.rst @@ -76,7 +76,10 @@ library. It is a file with the extension ``.dll`` on Windows, ``.dylib`` on Mac OS/X, or ``.so`` on other platforms. As usual, it is produced by generating some intermediate ``.c`` code and then -calling the regular platform-specific C compiler. +calling the regular platform-specific C compiler. See below__ for +some pointers to C-level issues with using the probuced library. + +.. __: `Issues about using the .so`_ Here are some details about the methods used above: @@ -231,6 +234,79 @@ than 1.5. CFFI 1.5 or newer must be installed in the running Python. 
+Issues about using the .so +-------------------------- + +This paragraph describes issues that are not necessarily specific to +CFFI. It assumes that you have obtained the ``.so/.dylib/.dll`` file as +described above, but that you have troubles using it. (In summary: it +is a mess. This is my own experience, slowly built by using Google and +by listening to reports from various platforms. Please report any +inaccuracies in this paragraph or better ways to do things.) + +* The file produced by CFFI should follow this naming pattern: + ``libmy_plugin.so`` on Linux, ``libmy_plugin.dylib`` on Mac, or + ``my_plugin.dll`` on Windows (no ``lib`` prefix on Windows). + +* First note that this file does not contain the Python interpreter + nor the standard library of Python. You still need it to be + somewhere. There are ways to compact it to a smaller number of files, + but this is outside the scope of CFFI (please report if you used some + of these ways successfully so that I can add some links here). + +* In what we'll call the "main program", the ``.so`` can be either + used dynamically (e.g. by calling ``dlopen()`` or ``LoadLibrary()`` + inside the main program), or at compile-time (e.g. by compiling it + with ``gcc -lmy_plugin``). The former case is always used if you're + building a plugin for a program, and the program itself doesn't need + to be recompiled. The latter case is for making a CFFI library that + is more tightly integrated inside the main program. + +* In the case of compile-time usage: you can add the gcc + option ``-Lsome/path/`` before ``-lmy_plugin`` to describe where the + ``libmy_plugin.so`` is. On some platforms, notably Linux, ``gcc`` + will complain if it can find ``libmy_plugin.so`` but not + ``libpython27.so`` or ``libpypy-c.so``. To fix it, you need to call + ``LD_LIBRARY_PATH=/some/path/to/libpypy gcc``. + +* When actually executing the main program, it needs to find the + ``libmy_plugin.so`` but also ``libpython27.so`` or ``libpypy-c.so``. 
+ For PyPy, unpack a PyPy distribution and you get a full directory + structure with ``libpypy-c.so`` inside a ``bin`` subdirectory, or on + Windows ``pypy-c.dll`` inside the top directory; you must not move + this file around, but just point to it. One way to point to it is by + running the main program with some environment variable: + ``LD_LIBRARY_PATH=/some/path/to/libpypy`` on Linux, + ``DYLD_LIBRARY_PATH=/some/path/to/libpypy`` on OS/X. + +* You can avoid the ``LD_LIBRARY_PATH`` issue if you compile + ``libmy_plugin.so`` with the path hard-coded inside in the first + place. On Linux, this is done by ``gcc -Wl,-rpath=/some/path``. You + would put this option in ``ffi.set_source("my_plugin", ..., + extra_link_args=['-Wl,-rpath=/some/path/to/libpypy'])``. The path can + start with ``$ORIGIN`` to mean "the directory where + ``libmy_plugin.so`` is". You can then specify a path relative to that + place, like ``extra_link_args=['-Wl,-rpath=$ORIGIN/../venv/bin']``. + Use ``ldd libmy_plugin.so`` to look at what path is currently compiled + in after the expansion of ``$ORIGIN``.) + + After this, you don't need ``LD_LIBRARY_PATH`` any more to locate + ``libpython27.so`` or ``libpypy-c.so`` at runtime. In theory it + should also cover the call to ``gcc`` for the main program. I wasn't + able to make ``gcc`` happy without ``LD_LIBRARY_PATH`` on Linux if + the rpath starts with ``$ORIGIN``, though. + +* The same rpath trick might be used to let the main program find + ``libmy_plugin.so`` in the first place without ``LD_LIBRARY_PATH``. + (This doesn't apply if the main program uses ``dlopen()`` to load it + as a dynamic plugin.) You'd make the main program with ``gcc + -Wl,-rpath=/path/to/libmyplugin``, possibly with ``$ORIGIN``. The + ``$`` in ``$ORIGIN`` causes various shell problems on its own: if + using a common shell you need to say ``gcc + -Wl,-rpath=\$ORIGIN/../venv/bin``. From a Makefile, you need to say + something like ``gcc -Wl,-rpath=\$$ORIGIN/../venv/bin``. 
+ + Using multiple CFFI-made DLLs ----------------------------- From pypy.commits at gmail.com Fri Feb 19 10:27:51 2016 From: pypy.commits at gmail.com (arigo) Date: Fri, 19 Feb 2016 07:27:51 -0800 (PST) Subject: [pypy-commit] cffi default: update Message-ID: <56c73477.080a1c0a.f73ae.ffffbb23@mx.google.com> Author: Armin Rigo Branch: Changeset: r2644:5d4960993342 Date: 2016-02-19 16:27 +0100 http://bitbucket.org/cffi/cffi/changeset/5d4960993342/ Log: update diff --git a/doc/source/embedding.rst b/doc/source/embedding.rst --- a/doc/source/embedding.rst +++ b/doc/source/embedding.rst @@ -303,8 +303,8 @@ -Wl,-rpath=/path/to/libmyplugin``, possibly with ``$ORIGIN``. The ``$`` in ``$ORIGIN`` causes various shell problems on its own: if using a common shell you need to say ``gcc - -Wl,-rpath=\$ORIGIN/../venv/bin``. From a Makefile, you need to say - something like ``gcc -Wl,-rpath=\$$ORIGIN/../venv/bin``. + -Wl,-rpath=\$ORIGIN``. From a Makefile, you need to say + something like ``gcc -Wl,-rpath=\$$ORIGIN``. Using multiple CFFI-made DLLs From pypy.commits at gmail.com Fri Feb 19 12:09:23 2016 From: pypy.commits at gmail.com (arigo) Date: Fri, 19 Feb 2016 09:09:23 -0800 (PST) Subject: [pypy-commit] pypy cpyext-gc-support-2: Test and fix for immortal objects on which we attach a pyobj Message-ID: <56c74c43.e83cc20a.b6d39.47bc@mx.google.com> Author: Armin Rigo Branch: cpyext-gc-support-2 Changeset: r82325:1f5a3c24e736 Date: 2016-02-19 18:08 +0100 http://bitbucket.org/pypy/pypy/changeset/1f5a3c24e736/ Log: Test and fix for immortal objects on which we attach a pyobj diff --git a/rpython/memory/gc/incminimark.py b/rpython/memory/gc/incminimark.py --- a/rpython/memory/gc/incminimark.py +++ b/rpython/memory/gc/incminimark.py @@ -2973,9 +2973,13 @@ self.rrc_o_list_old = new_o_list def _rrc_major_free(self, pyobject, surviving_list, surviving_dict): + # The pyobject survives if the corresponding obj survives. 
+ # This is true if the obj has one of the following two flags: + # * GCFLAG_VISITED: was seen during tracing + # * GCFLAG_NO_HEAP_PTRS: immortal object never traced (so far) intobj = self._pyobj(pyobject).ob_pypy_link obj = llmemory.cast_int_to_adr(intobj) - if self.header(obj).tid & GCFLAG_VISITED: + if self.header(obj).tid & (GCFLAG_VISITED | GCFLAG_NO_HEAP_PTRS): surviving_list.append(pyobject) if surviving_dict: surviving_dict.insertclean(obj, pyobject) diff --git a/rpython/memory/gc/test/test_rawrefcount.py b/rpython/memory/gc/test/test_rawrefcount.py --- a/rpython/memory/gc/test/test_rawrefcount.py +++ b/rpython/memory/gc/test/test_rawrefcount.py @@ -29,7 +29,7 @@ assert count2 - count1 == expected_trigger def _rawrefcount_pair(self, intval, is_light=False, is_pyobj=False, - create_old=False): + create_old=False, create_immortal=False): if is_light: rc = REFCNT_FROM_PYPY_LIGHT else: @@ -37,14 +37,19 @@ self.trigger = [] self.gc.rawrefcount_init(lambda: self.trigger.append(1)) # - p1 = self.malloc(S) + if create_immortal: + p1 = lltype.malloc(S, immortal=True) + else: + p1 = self.malloc(S) p1.x = intval - if create_old: + if create_immortal: + self.consider_constant(p1) + elif create_old: self.stackroots.append(p1) self._collect(major=False) p1 = self.stackroots.pop() p1ref = lltype.cast_opaque_ptr(llmemory.GCREF, p1) - r1 = lltype.malloc(PYOBJ_HDR, flavor='raw') + r1 = lltype.malloc(PYOBJ_HDR, flavor='raw', immortal=create_immortal) r1.ob_refcnt = rc r1.ob_pypy_link = 0 r1addr = llmemory.cast_ptr_to_adr(r1) @@ -268,3 +273,10 @@ self.test_pyobject_dies(old=True) def test_pyobject_survives_from_obj_old(self): self.test_pyobject_survives_from_obj(old=True) + + def test_pyobject_attached_to_prebuilt_obj(self): + p1, p1ref, r1, r1addr, check_alive = ( + self._rawrefcount_pair(42, create_immortal=True)) + check_alive(0) + self._collect(major=True) + check_alive(0) From pypy.commits at gmail.com Fri Feb 19 12:12:05 2016 From: pypy.commits at gmail.com (arigo) Date: 
Fri, 19 Feb 2016 09:12:05 -0800 (PST) Subject: [pypy-commit] pypy.org extradoc: update the values Message-ID: <56c74ce5.8916c20a.1c6c7.2481@mx.google.com> Author: Armin Rigo Branch: extradoc Changeset: r703:61c91eaec11c Date: 2016-02-19 18:11 +0100 http://bitbucket.org/pypy/pypy.org/changeset/61c91eaec11c/ Log: update the values diff --git a/don1.html b/don1.html --- a/don1.html +++ b/don1.html @@ -15,7 +15,7 @@ - $62855 of $105000 (59.9%) + $62898 of $105000 (59.9%)
    @@ -23,7 +23,7 @@
  • From pypy.commits at gmail.com Fri Feb 19 12:18:42 2016 From: pypy.commits at gmail.com (rlamy) Date: Fri, 19 Feb 2016 09:18:42 -0800 (PST) Subject: [pypy-commit] pypy desc-specialize: Extract method init_specializer() Message-ID: <56c74e72.8e811c0a.91fcf.ffffdf64@mx.google.com> Author: Ronan Lamy Branch: desc-specialize Changeset: r82326:0b89e9760cd8 Date: 2016-02-18 05:23 +0000 http://bitbucket.org/pypy/pypy/changeset/0b89e9760cd8/ Log: Extract method init_specializer() diff --git a/rpython/annotator/description.py b/rpython/annotator/description.py --- a/rpython/annotator/description.py +++ b/rpython/annotator/description.py @@ -283,17 +283,20 @@ (self.name, e.getmsg())) return inputcells - def specialize(self, inputcells, op=None): - if (op is None and - getattr(self.bookkeeper, "position_key", None) is not None): - _, block, i = self.bookkeeper.position_key - op = block.operations[i] + def init_specializer(self): if self.specializer is None: # get the specializer based on the tag of the 'pyobj' # (if any), according to the current policy tag = getattr(self.pyobj, '_annspecialcase_', None) policy = self.bookkeeper.annotator.policy self.specializer = policy.get_specializer(tag) + + def specialize(self, inputcells, op=None): + if (op is None and + getattr(self.bookkeeper, "position_key", None) is not None): + _, block, i = self.bookkeeper.position_key + op = block.operations[i] + self.init_specializer() enforceargs = getattr(self.pyobj, '_annenforceargs_', None) signature = getattr(self.pyobj, '_signature_', None) if enforceargs and signature: From pypy.commits at gmail.com Fri Feb 19 12:18:44 2016 From: pypy.commits at gmail.com (rlamy) Date: Fri, 19 Feb 2016 09:18:44 -0800 (PST) Subject: [pypy-commit] pypy desc-specialize: Create annotator.using_policy() context manager Message-ID: <56c74e74.88c8c20a.c0cce.27e9@mx.google.com> Author: Ronan Lamy Branch: desc-specialize Changeset: r82327:f979a9068595 Date: 2016-02-19 17:16 +0000 
http://bitbucket.org/pypy/pypy/changeset/f979a9068595/ Log: Create annotator.using_policy() context manager diff --git a/rpython/annotator/annrpython.py b/rpython/annotator/annrpython.py --- a/rpython/annotator/annrpython.py +++ b/rpython/annotator/annrpython.py @@ -2,6 +2,7 @@ import types from collections import defaultdict +from contextlib import contextmanager from rpython.tool.ansi_print import ansi_log from rpython.tool.pairtype import pair @@ -89,14 +90,9 @@ def get_call_parameters(self, function, args_s, policy): desc = self.bookkeeper.getdesc(function) - prevpolicy = self.policy - self.policy = policy - self.bookkeeper.enter(None) - try: - return desc.get_call_parameters(args_s) - finally: - self.bookkeeper.leave() - self.policy = prevpolicy + with self.using_policy(policy): + with self.bookkeeper.at_position(None): + return desc.get_call_parameters(args_s) def annotate_helper(self, function, args_s, policy=None): if policy is None: @@ -111,15 +107,23 @@ return graph def complete_helpers(self, policy): - saved = self.policy, self.added_blocks + saved = self.added_blocks + self.added_blocks = {} + with self.using_policy(policy): + try: + self.complete() + # invoke annotation simplifications for the new blocks + self.simplify(block_subset=self.added_blocks) + finally: + self.added_blocks = saved + + @contextmanager + def using_policy(self, policy): + """A context manager that temporarily replaces the annotator policy""" + old_policy = self.policy self.policy = policy - try: - self.added_blocks = {} - self.complete() - # invoke annotation simplifications for the new blocks - self.simplify(block_subset=self.added_blocks) - finally: - self.policy, self.added_blocks = saved + yield + self.policy = old_policy def build_graph_types(self, flowgraph, inputcells, complete_now=True): checkgraph(flowgraph) From pypy.commits at gmail.com Fri Feb 19 12:47:49 2016 From: pypy.commits at gmail.com (mjacob) Date: Fri, 19 Feb 2016 09:47:49 -0800 (PST) Subject: [pypy-commit] 
pypy llvm-translation-backend: Use GCTransformer's get_prebuilt_hash() method. Message-ID: <56c75545.8abb1c0a.92707.ffffecac@mx.google.com> Author: Manuel Jacob Branch: llvm-translation-backend Changeset: r82328:ee28a2e5145b Date: 2016-02-17 00:56 +0100 http://bitbucket.org/pypy/pypy/changeset/ee28a2e5145b/ Log: Use GCTransformer's get_prebuilt_hash() method. diff --git a/rpython/translator/llvm/genllvm.py b/rpython/translator/llvm/genllvm.py --- a/rpython/translator/llvm/genllvm.py +++ b/rpython/translator/llvm/genllvm.py @@ -98,7 +98,7 @@ else: global_attrs += 'global' - hash_ = database.genllvm.gcpolicy.get_prebuilt_hash(obj) + hash_ = database.genllvm.gcpolicy.gctransformer.get_prebuilt_hash(obj) if hash_ is None: if self.varsize: extra_len = self.get_extra_len(obj) @@ -1653,9 +1653,6 @@ def get_gc_fields(self): return [(database.get_type(self.gctransformer.HDR), '_gc_header')] - def get_prebuilt_hash(self, obj): - pass - def finish(self): genllvm = self.genllvm while self.delayed_ptrs: @@ -1696,23 +1693,10 @@ def get_gc_field_values(self, obj): obj = lltype.top_container(obj) - needs_hash = self.get_prebuilt_hash(obj) is not None + needs_hash = self.gctransformer.get_prebuilt_hash(obj) is not None hdr = self.gctransformer.gc_header_for(obj, needs_hash) return [hdr._obj] - # from c backend - def get_prebuilt_hash(self, obj): - # for prebuilt objects that need to have their hash stored and - # restored. Note that only structures that are StructNodes all - # the way have their hash stored (and not e.g. structs with var- - # sized arrays at the end). 'obj' must be the top_container. 
- TYPE = lltype.typeOf(obj) - if not isinstance(TYPE, lltype.GcStruct): - return None - if TYPE._is_varsize(): - return None - return getattr(obj, '_hash_cache_', None) - class RefcountGCPolicy(GCPolicy): class RttiType(FuncType): From pypy.commits at gmail.com Fri Feb 19 13:05:11 2016 From: pypy.commits at gmail.com (mjacob) Date: Fri, 19 Feb 2016 10:05:11 -0800 (PST) Subject: [pypy-commit] pypy py3k: hg merge default Message-ID: <56c75957.e6bbc20a.d2a3a.fffff4c2@mx.google.com> Author: Manuel Jacob Branch: py3k Changeset: r82329:4634d8e8bf6e Date: 2016-02-19 19:04 +0100 http://bitbucket.org/pypy/pypy/changeset/4634d8e8bf6e/ Log: hg merge default diff --git a/lib-python/2.7/distutils/command/build_ext.py b/lib-python/2.7/distutils/command/build_ext.py --- a/lib-python/2.7/distutils/command/build_ext.py +++ b/lib-python/2.7/distutils/command/build_ext.py @@ -188,7 +188,7 @@ # the 'libs' directory is for binary installs - we assume that # must be the *native* platform. But we don't really support # cross-compiling via a binary install anyway, so we let it go. 
- self.library_dirs.append(os.path.join(sys.exec_prefix, 'include')) + self.library_dirs.append(os.path.join(sys.exec_prefix, 'libs')) if self.debug: self.build_temp = os.path.join(self.build_temp, "Debug") else: diff --git a/lib_pypy/_pypy_testcapi.py b/lib_pypy/_pypy_testcapi.py --- a/lib_pypy/_pypy_testcapi.py +++ b/lib_pypy/_pypy_testcapi.py @@ -62,7 +62,7 @@ if sys.platform == 'win32': # XXX pyconfig.h uses a pragma to link to the import library, # which is currently python3.lib - library = os.path.join(thisdir, '..', 'include', 'python32') + library = os.path.join(thisdir, '..', 'libs', 'python32') if not os.path.exists(library + '.lib'): # For a local translation or nightly build library = os.path.join(thisdir, '..', 'pypy', 'goal', 'python32') diff --git a/lib_pypy/cffi.egg-info/PKG-INFO b/lib_pypy/cffi.egg-info/PKG-INFO --- a/lib_pypy/cffi.egg-info/PKG-INFO +++ b/lib_pypy/cffi.egg-info/PKG-INFO @@ -1,6 +1,6 @@ Metadata-Version: 1.1 Name: cffi -Version: 1.5.1 +Version: 1.5.2 Summary: Foreign Function Interface for Python calling C code. Home-page: http://cffi.readthedocs.org Author: Armin Rigo, Maciej Fijalkowski diff --git a/lib_pypy/cffi/__init__.py b/lib_pypy/cffi/__init__.py --- a/lib_pypy/cffi/__init__.py +++ b/lib_pypy/cffi/__init__.py @@ -4,8 +4,8 @@ from .api import FFI, CDefError, FFIError from .ffiplatform import VerificationError, VerificationMissing -__version__ = "1.5.1" -__version_info__ = (1, 5, 1) +__version__ = "1.5.2" +__version_info__ = (1, 5, 2) # The verifier module file names are based on the CRC32 of a string that # contains the following version number. 
It may be older than __version__ diff --git a/lib_pypy/cffi/_embedding.h b/lib_pypy/cffi/_embedding.h --- a/lib_pypy/cffi/_embedding.h +++ b/lib_pypy/cffi/_embedding.h @@ -233,7 +233,7 @@ f = PySys_GetObject((char *)"stderr"); if (f != NULL && f != Py_None) { PyFile_WriteString("\nFrom: " _CFFI_MODULE_NAME - "\ncompiled with cffi version: 1.5.1" + "\ncompiled with cffi version: 1.5.2" "\n_cffi_backend module: ", f); modules = PyImport_GetModuleDict(); mod = PyDict_GetItemString(modules, "_cffi_backend"); diff --git a/lib_pypy/cffi/api.py b/lib_pypy/cffi/api.py --- a/lib_pypy/cffi/api.py +++ b/lib_pypy/cffi/api.py @@ -1,4 +1,4 @@ -import sys, sysconfig, types +import sys, types from .lock import allocate_lock try: @@ -550,16 +550,34 @@ lst.append(value) # if '__pypy__' in sys.builtin_module_names: + import os + if sys.platform == "win32": + # we need 'libpypy-c.lib'. Current distributions of + # pypy (>= 4.1) contain it as 'libs/python27.lib'. + pythonlib = "python27" + if hasattr(sys, 'prefix'): + ensure('library_dirs', os.path.join(sys.prefix, 'libs')) + else: + # we need 'libpypy-c.{so,dylib}', which should be by + # default located in 'sys.prefix/bin' for installed + # systems. + pythonlib = "pypy-c" + if hasattr(sys, 'prefix'): + ensure('library_dirs', os.path.join(sys.prefix, 'bin')) + # On uninstalled pypy's, the libpypy-c is typically found in + # .../pypy/goal/. 
if hasattr(sys, 'prefix'): - import os - ensure('library_dirs', os.path.join(sys.prefix, 'bin')) - pythonlib = "pypy-c" + ensure('library_dirs', os.path.join(sys.prefix, 'pypy', 'goal')) else: if sys.platform == "win32": template = "python%d%d" if hasattr(sys, 'gettotalrefcount'): template += '_d' else: + try: + import sysconfig + except ImportError: # 2.6 + from distutils import sysconfig template = "python%d.%d" if sysconfig.get_config_var('DEBUG_EXT'): template += sysconfig.get_config_var('DEBUG_EXT') diff --git a/lib_pypy/cffi/vengine_cpy.py b/lib_pypy/cffi/vengine_cpy.py --- a/lib_pypy/cffi/vengine_cpy.py +++ b/lib_pypy/cffi/vengine_cpy.py @@ -1,3 +1,6 @@ +# +# DEPRECATED: implementation for ffi.verify() +# import sys, imp from . import model, ffiplatform diff --git a/lib_pypy/cffi/vengine_gen.py b/lib_pypy/cffi/vengine_gen.py --- a/lib_pypy/cffi/vengine_gen.py +++ b/lib_pypy/cffi/vengine_gen.py @@ -1,3 +1,6 @@ +# +# DEPRECATED: implementation for ffi.verify() +# import sys, os import types diff --git a/lib_pypy/cffi/verifier.py b/lib_pypy/cffi/verifier.py --- a/lib_pypy/cffi/verifier.py +++ b/lib_pypy/cffi/verifier.py @@ -1,3 +1,6 @@ +# +# DEPRECATED: implementation for ffi.verify() +# import sys, os, binascii, shutil, io from . import __version_verifier_modules__ from . 
import ffiplatform diff --git a/pypy/config/pypyoption.py b/pypy/config/pypyoption.py --- a/pypy/config/pypyoption.py +++ b/pypy/config/pypyoption.py @@ -37,13 +37,16 @@ "thread", "itertools", "pyexpat", "_ssl", "cpyext", "array", "binascii", "_multiprocessing", '_warnings', "_collections", "_multibytecodec", "_continuation", "_cffi_backend", - "_csv", "_pypyjson", "_vmprof", "_posixsubprocess", # "cppyy", "micronumpy" + "_csv", "_pypyjson", "_posixsubprocess", # "cppyy", "micronumpy" ]) -#if ((sys.platform.startswith('linux') or sys.platform == 'darwin') -# and os.uname()[4] == 'x86_64' and sys.maxint > 2**32): - # it's not enough that we get x86_64 -# working_modules.add('_vmprof') +from rpython.jit.backend import detect_cpu +try: + if detect_cpu.autodetect().startswith('x86'): + working_modules.add('_vmprof') +except detect_cpu.ProcessorAutodetectError: + pass + translation_modules = default_modules.copy() translation_modules.update([ diff --git a/pypy/doc/whatsnew-head.rst b/pypy/doc/whatsnew-head.rst --- a/pypy/doc/whatsnew-head.rst +++ b/pypy/doc/whatsnew-head.rst @@ -7,6 +7,9 @@ Fixed ``_PyLong_FromByteArray()``, which was buggy. +Fixed a crash with stacklets (or greenlets) on non-Linux machines +which showed up if you forget stacklets without resuming them. + .. branch: numpy-1.10 Fix tests to run cleanly with -A and start to fix micronumpy for upstream numpy @@ -38,7 +41,8 @@ .. branch: compress-numbering -Improve the memory signature of numbering instances in the JIT. +Improve the memory signature of numbering instances in the JIT. This should massively +decrease the amount of memory consumed by the JIT, which is significant for most programs. .. branch: fix-trace-too-long-heuristic @@ -148,3 +152,14 @@ Seperate structmember.h from Python.h Also enhance creating api functions to specify which header file they appear in (previously only pypy_decl.h) + +.. 
branch: llimpl + +Refactor register_external(), remove running_on_llinterp mechanism and +apply sandbox transform on externals at the end of annotation. + +.. branch: cffi-embedding-win32 + +.. branch: windows-vmprof-support + +vmprof should work on Windows. diff --git a/pypy/goal/targetpypystandalone.py b/pypy/goal/targetpypystandalone.py --- a/pypy/goal/targetpypystandalone.py +++ b/pypy/goal/targetpypystandalone.py @@ -246,6 +246,9 @@ raise Exception("Cannot use the --output option with PyPy " "when --shared is on (it is by default). " "See issue #1971.") + if sys.platform == 'win32': + config.translation.libname = '..\\..\\libs\\python27.lib' + thisdir.join('..', '..', 'libs').ensure(dir=1) if config.translation.thread: config.objspace.usemodules.thread = True diff --git a/pypy/module/_cffi_backend/__init__.py b/pypy/module/_cffi_backend/__init__.py --- a/pypy/module/_cffi_backend/__init__.py +++ b/pypy/module/_cffi_backend/__init__.py @@ -3,7 +3,7 @@ from rpython.rlib import rdynload, clibffi, entrypoint from rpython.rtyper.lltypesystem import rffi -VERSION = "1.5.1" +VERSION = "1.5.2" FFI_DEFAULT_ABI = clibffi.FFI_DEFAULT_ABI try: diff --git a/pypy/module/_cffi_backend/embedding.py b/pypy/module/_cffi_backend/embedding.py --- a/pypy/module/_cffi_backend/embedding.py +++ b/pypy/module/_cffi_backend/embedding.py @@ -57,7 +57,7 @@ # pypy_init_embedded_cffi_module(). 
if not glob.patched_sys: space.appexec([], """(): - import os + import os, sys sys.stdin = sys.__stdin__ = os.fdopen(0, 'rb', 0) sys.stdout = sys.__stdout__ = os.fdopen(1, 'wb', 0) sys.stderr = sys.__stderr__ = os.fdopen(2, 'wb', 0) diff --git a/pypy/module/_cffi_backend/test/_backend_test_c.py b/pypy/module/_cffi_backend/test/_backend_test_c.py --- a/pypy/module/_cffi_backend/test/_backend_test_c.py +++ b/pypy/module/_cffi_backend/test/_backend_test_c.py @@ -1,7 +1,7 @@ # ____________________________________________________________ import sys -assert __version__ == "1.5.1", ("This test_c.py file is for testing a version" +assert __version__ == "1.5.2", ("This test_c.py file is for testing a version" " of cffi that differs from the one that we" " get from 'import _cffi_backend'") if sys.version_info < (3,): diff --git a/pypy/module/sys/interp_encoding.py b/pypy/module/sys/interp_encoding.py --- a/pypy/module/sys/interp_encoding.py +++ b/pypy/module/sys/interp_encoding.py @@ -33,7 +33,7 @@ def _getfilesystemencoding(space): encoding = base_encoding - if rlocale.HAVE_LANGINFO and rlocale.CODESET: + if rlocale.HAVE_LANGINFO: try: oldlocale = rlocale.setlocale(rlocale.LC_CTYPE, None) rlocale.setlocale(rlocale.LC_CTYPE, "") diff --git a/pypy/module/test_lib_pypy/cffi_tests/cffi0/test_zintegration.py b/pypy/module/test_lib_pypy/cffi_tests/cffi0/test_zintegration.py --- a/pypy/module/test_lib_pypy/cffi_tests/cffi0/test_zintegration.py +++ b/pypy/module/test_lib_pypy/cffi_tests/cffi0/test_zintegration.py @@ -5,6 +5,9 @@ if sys.platform == 'win32': py.test.skip('snippets do not run on win32') +if sys.version_info < (2, 7): + py.test.skip('fails e.g. 
on a Debian/Ubuntu which patches virtualenv' + ' in a non-2.6-friendly way') def create_venv(name): tmpdir = udir.join(name) diff --git a/pypy/module/test_lib_pypy/cffi_tests/embedding/test_basic.py b/pypy/module/test_lib_pypy/cffi_tests/embedding/test_basic.py --- a/pypy/module/test_lib_pypy/cffi_tests/embedding/test_basic.py +++ b/pypy/module/test_lib_pypy/cffi_tests/embedding/test_basic.py @@ -28,11 +28,14 @@ def prefix_pythonpath(): cffi_base = os.path.dirname(os.path.dirname(local_dir)) - pythonpath = os.environ.get('PYTHONPATH', '').split(os.pathsep) + pythonpath = org_env.get('PYTHONPATH', '').split(os.pathsep) if cffi_base not in pythonpath: pythonpath.insert(0, cffi_base) return os.pathsep.join(pythonpath) +def setup_module(mod): + mod.org_env = os.environ.copy() + class EmbeddingTests: _compiled_modules = {} @@ -46,14 +49,12 @@ def get_path(self): return str(self._path.ensure(dir=1)) - def _run_base(self, args, env_extra={}, **kwds): - print('RUNNING:', args, env_extra, kwds) - env = os.environ.copy() - env.update(env_extra) - return subprocess.Popen(args, env=env, **kwds) + def _run_base(self, args, **kwds): + print('RUNNING:', args, kwds) + return subprocess.Popen(args, **kwds) - def _run(self, args, env_extra={}): - popen = self._run_base(args, env_extra, cwd=self.get_path(), + def _run(self, args): + popen = self._run_base(args, cwd=self.get_path(), stdout=subprocess.PIPE, universal_newlines=True) output = popen.stdout.read() @@ -65,6 +66,7 @@ return output def prepare_module(self, name): + self.patch_environment() if name not in self._compiled_modules: path = self.get_path() filename = '%s.py' % name @@ -74,9 +76,8 @@ # find a solution to that: we could hack sys.path inside the # script run here, but we can't hack it in the same way in # execute(). 
- env_extra = {'PYTHONPATH': prefix_pythonpath()} - output = self._run([sys.executable, os.path.join(local_dir, filename)], - env_extra=env_extra) + output = self._run([sys.executable, + os.path.join(local_dir, filename)]) match = re.compile(r"\bFILENAME: (.+)").search(output) assert match dynamic_lib_name = match.group(1) @@ -101,6 +102,7 @@ c = distutils.ccompiler.new_compiler() print('compiling %s with %r' % (name, modules)) extra_preargs = [] + debug = True if sys.platform == 'win32': libfiles = [] for m in modules: @@ -109,29 +111,45 @@ libfiles.append('Release\\%s.lib' % m[:-4]) modules = libfiles extra_preargs.append('/MANIFEST') + debug = False # you need to install extra stuff + # for this to work elif threads: extra_preargs.append('-pthread') - objects = c.compile([filename], macros=sorted(defines.items()), debug=True) + objects = c.compile([filename], macros=sorted(defines.items()), + debug=debug) c.link_executable(objects + modules, name, extra_preargs=extra_preargs) finally: os.chdir(curdir) + def patch_environment(self): + path = self.get_path() + # for libpypy-c.dll or Python27.dll + path = os.path.split(sys.executable)[0] + os.path.pathsep + path + env_extra = {'PYTHONPATH': prefix_pythonpath()} + if sys.platform == 'win32': + envname = 'PATH' + else: + envname = 'LD_LIBRARY_PATH' + libpath = org_env.get(envname) + if libpath: + libpath = path + os.path.pathsep + libpath + else: + libpath = path + env_extra[envname] = libpath + for key, value in sorted(env_extra.items()): + if os.environ.get(key) != value: + print '* setting env var %r to %r' % (key, value) + os.environ[key] = value + def execute(self, name): path = self.get_path() - env_extra = {'PYTHONPATH': prefix_pythonpath()} - libpath = os.environ.get('LD_LIBRARY_PATH') - if libpath: - libpath = path + ':' + libpath - else: - libpath = path - env_extra['LD_LIBRARY_PATH'] = libpath print('running %r in %r' % (name, path)) executable_name = name if sys.platform == 'win32': executable_name = 
os.path.join(path, executable_name + '.exe') else: executable_name = os.path.join('.', executable_name) - popen = self._run_base([executable_name], env_extra, cwd=path, + popen = self._run_base([executable_name], cwd=path, stdout=subprocess.PIPE, universal_newlines=True) result = popen.stdout.read() diff --git a/pypy/tool/release/package.py b/pypy/tool/release/package.py --- a/pypy/tool/release/package.py +++ b/pypy/tool/release/package.py @@ -111,13 +111,8 @@ # builddir = py.path.local(options.builddir) pypydir = builddir.ensure(name, dir=True) + includedir = basedir.join('include') - # Recursively copy all headers, shutil has only ignore - # so we do a double-negative to include what we want - def copyonly(dirpath, contents): - return set(contents) - set( # XXX function not used? - shutil.ignore_patterns('*.h', '*.incl')(dirpath, contents), - ) shutil.copytree(str(includedir), str(pypydir.join('include'))) pypydir.ensure('include', dir=True) @@ -132,9 +127,6 @@ win_extras = ['libpypy-c.dll', 'sqlite3.dll'] if not options.no_tk: win_extras += ['tcl85.dll', 'tk85.dll'] - # add the .lib too, which is convenient to compile other programs - # that use the .dll (and for cffi's embedding mode) - win_extras.append('libpypy-c.lib') for extra in win_extras: p = pypy_c.dirpath().join(extra) @@ -145,32 +137,27 @@ continue print "Picking %s" % p binaries.append((p, p.basename)) - importlib_name = 'libpypy-c.lib' - if pypy_c.dirpath().join(importlib_name).check(): - try: - ver = subprocess.check_output([r'pypy\goal\pypy-c','-c', - "import sys;print(sys.version)"]) - importlib_target = 'python%s%s.lib' % (ver[0], ver[2]) - shutil.copyfile(str(pypy_c.dirpath().join(importlib_name)), - str(pypydir.join(importlib_target))) - # XXX fix this, either an additional build step or rename - # both DLL and LIB to versioned names, like cpython - shutil.copyfile(str(pypy_c.dirpath().join(importlib_name)), - str(pypy_c.dirpath().join(importlib_target))) - print "Picking %s as %s" % 
(pypy_c.dirpath().join(importlib_name), - pypydir.join('include', importlib_target)) - except: - pass + libsdir = basedir.join('libs') + if libsdir.exists(): + print 'Picking %s (and contents)' % libsdir + shutil.copytree(str(libsdir), str(pypydir.join('libs'))) else: - pass - # XXX users will complain that they cannot compile cpyext - # modules for windows, has the lib moved or are there no - # exported functions in the dll so no import library is created? + print '"libs" dir with import library not found.' + print 'You have to create %r' % (str(libsdir),) + print 'and copy libpypy-c.lib in there, renamed to python32.lib' + # XXX users will complain that they cannot compile capi (cpyext) + # modules for windows, also embedding pypy (i.e. in cffi) + # will fail. + # Has the lib moved, was translation not 'shared', or are + # there no exported functions in the dll so no import + # library was created? if not options.no_tk: try: p = pypy_c.dirpath().join('tcl85.dll') if not p.check(): p = py.path.local.sysfind('tcl85.dll') + if p is None: + raise WindowsError("tcl85.dll not found") tktcldir = p.dirpath().join('..').join('lib') shutil.copytree(str(tktcldir), str(pypydir.join('tcl'))) except WindowsError: diff --git a/rpython/annotator/bookkeeper.py b/rpython/annotator/bookkeeper.py --- a/rpython/annotator/bookkeeper.py +++ b/rpython/annotator/bookkeeper.py @@ -551,20 +551,6 @@ emulated = callback return self.pbc_call(pbc, args, emulated=emulated) - def _find_current_op(self, opname=None, arity=None, pos=None, s_type=None): - """ Find operation that is currently being annotated. 
Do some - sanity checks to see whether the correct op was found.""" - # XXX XXX HACK HACK HACK - fn, block, i = self.position_key - op = block.operations[i] - if opname is not None: - assert op.opname == opname - if arity is not None: - assert len(op.args) == arity - if pos is not None: - assert self.annotator.binding(op.args[pos]) == s_type - return op - def whereami(self): return self.annotator.whereami(self.position_key) diff --git a/rpython/annotator/policy.py b/rpython/annotator/policy.py --- a/rpython/annotator/policy.py +++ b/rpython/annotator/policy.py @@ -3,6 +3,9 @@ from rpython.annotator.specialize import ( specialize_argvalue, specialize_argtype, specialize_arglistitemtype, specialize_arg_or_var, memo, specialize_call_location) +from rpython.flowspace.operation import op +from rpython.flowspace.model import Constant +from rpython.annotator.model import SomeTuple class AnnotatorPolicy(object): @@ -64,7 +67,34 @@ return LowLevelAnnotatorPolicy.specialize__ll_and_arg(*args) def no_more_blocks_to_annotate(pol, annotator): + bk = annotator.bookkeeper # hint to all pending specializers that we are done - for callback in annotator.bookkeeper.pending_specializations: + for callback in bk.pending_specializations: callback() - del annotator.bookkeeper.pending_specializations[:] + del bk.pending_specializations[:] + if annotator.added_blocks is not None: + all_blocks = annotator.added_blocks + else: + all_blocks = annotator.annotated + for block in list(all_blocks): + for i, instr in enumerate(block.operations): + if not isinstance(instr, (op.simple_call, op.call_args)): + continue + v_func = instr.args[0] + s_func = annotator.annotation(v_func) + if not hasattr(s_func, 'needs_sandboxing'): + continue + key = ('sandboxing', s_func.const) + if key not in bk.emulated_pbc_calls: + params_s = s_func.args_s + s_result = s_func.s_result + from rpython.translator.sandbox.rsandbox import make_sandbox_trampoline + sandbox_trampoline = make_sandbox_trampoline( + 
s_func.name, params_s, s_result) + sandbox_trampoline._signature_ = [SomeTuple(items=params_s)], s_result + bk.emulate_pbc_call(key, bk.immutablevalue(sandbox_trampoline), params_s) + else: + s_trampoline = bk.emulated_pbc_calls[key][0] + sandbox_trampoline = s_trampoline.const + new = instr.replace({instr.args[0]: Constant(sandbox_trampoline)}) + block.operations[i] = new diff --git a/rpython/annotator/specialize.py b/rpython/annotator/specialize.py --- a/rpython/annotator/specialize.py +++ b/rpython/annotator/specialize.py @@ -317,18 +317,6 @@ yield (value,) + tuple_tail -def make_constgraphbuilder(n, v=None, factory=None, srcmodule=None): - def constgraphbuilder(translator, ignore): - args = ','.join(["arg%d" % i for i in range(n)]) - if factory is not None: - computed_v = factory() - else: - computed_v = v - miniglobals = {'v': computed_v, '__name__': srcmodule} - exec py.code.Source("constf = lambda %s: v" % args).compile() in miniglobals - return translator.buildflowgraph(miniglobals['constf']) - return constgraphbuilder - def maybe_star_args(funcdesc, key, args_s): args_s, key1, builder = flatten_star_args(funcdesc, args_s) if key1 is not None: diff --git a/rpython/annotator/unaryop.py b/rpython/annotator/unaryop.py --- a/rpython/annotator/unaryop.py +++ b/rpython/annotator/unaryop.py @@ -113,8 +113,9 @@ @op.simple_call.register(SomeObject) def simple_call_SomeObject(annotator, func, *args): - return annotator.annotation(func).call( - simple_args([annotator.annotation(arg) for arg in args])) + s_func = annotator.annotation(func) + argspec = simple_args([annotator.annotation(arg) for arg in args]) + return s_func.call(argspec) @op.call_args.register_transform(SomeObject) def transform_varargs(annotator, v_func, v_shape, *data_v): diff --git a/rpython/config/translationoption.py b/rpython/config/translationoption.py --- a/rpython/config/translationoption.py +++ b/rpython/config/translationoption.py @@ -192,6 +192,8 @@ "If true, makes an lldebug0 build", 
default=False, cmdline="--lldebug0"), StrOption("icon", "Path to the (Windows) icon to use for the executable"), + StrOption("libname", + "Windows: name and possibly location of the lib file to create"), OptionDescription("backendopt", "Backend Optimization Options", [ # control inlining diff --git a/rpython/jit/metainterp/optimizeopt/rewrite.py b/rpython/jit/metainterp/optimizeopt/rewrite.py --- a/rpython/jit/metainterp/optimizeopt/rewrite.py +++ b/rpython/jit/metainterp/optimizeopt/rewrite.py @@ -380,7 +380,7 @@ raise InvalidLoop("promote of a virtual") old_guard_op = info.get_last_guard(self.optimizer) if old_guard_op is not None: - op = self.replace_guard_class_with_guard_value(op, info, + op = self.replace_old_guard_with_guard_value(op, info, old_guard_op) elif arg0.type == 'f': arg0 = self.get_box_replacement(arg0) @@ -390,11 +390,26 @@ assert isinstance(constbox, Const) self.optimize_guard(op, constbox) - def replace_guard_class_with_guard_value(self, op, info, old_guard_op): - if old_guard_op.opnum != rop.GUARD_NONNULL: - previous_classbox = info.get_known_class(self.optimizer.cpu) - expected_classbox = self.optimizer.cpu.ts.cls_of_box(op.getarg(1)) - assert previous_classbox is not None + def replace_old_guard_with_guard_value(self, op, info, old_guard_op): + # there already has been a guard_nonnull or guard_class or + # guard_nonnull_class on this value, which is rather silly. + # This function replaces the original guard with a + # guard_value. Must be careful: doing so is unsafe if the + # original guard checks for something inconsistent, + # i.e. different than what it would give if the guard_value + # passed (this is a rare case, but possible). If we get + # inconsistent results in this way, then we must not do the + # replacement, otherwise we'd put guard_value up there but all + # intermediate ops might be executed by assuming something + # different, from the old guard that is now removed... 
+ + c_value = op.getarg(1) + if not c_value.nonnull(): + raise InvalidLoop('A GUARD_VALUE(..., NULL) follows some other ' + 'guard that it is not NULL') + previous_classbox = info.get_known_class(self.optimizer.cpu) + if previous_classbox is not None: + expected_classbox = self.optimizer.cpu.ts.cls_of_box(c_value) assert expected_classbox is not None if not previous_classbox.same_constant( expected_classbox): diff --git a/rpython/jit/metainterp/optimizeopt/test/test_optimizeopt.py b/rpython/jit/metainterp/optimizeopt/test/test_optimizeopt.py --- a/rpython/jit/metainterp/optimizeopt/test/test_optimizeopt.py +++ b/rpython/jit/metainterp/optimizeopt/test/test_optimizeopt.py @@ -3063,6 +3063,16 @@ self.optimize_loop(ops, expected, preamble) #self.check_expanded_fail_descr("i0", rop.GUARD_VALUE) + def test_invalid_guard_value_after_guard_class(self): + ops = """ + [p1, i0, i1, i2, p2] + guard_class(p1, ConstClass(node_vtable)) [i0] + i3 = int_add(i1, i2) + guard_value(p1, NULL) [i1] + jump(p2, i0, i1, i3, p2) + """ + self.raises(InvalidLoop, self.optimize_loop, ops, ops) + def test_guard_class_oois(self): ops = """ [p1] diff --git a/rpython/memory/gctransform/test/test_transform.py b/rpython/memory/gctransform/test/test_transform.py --- a/rpython/memory/gctransform/test/test_transform.py +++ b/rpython/memory/gctransform/test/test_transform.py @@ -5,6 +5,7 @@ from rpython.translator.exceptiontransform import ExceptionTransformer from rpython.rtyper.lltypesystem import lltype from rpython.conftest import option +from rpython.rtyper.rtyper import llinterp_backend class LLInterpedTranformerTests: @@ -131,8 +132,10 @@ def rtype(func, inputtypes, specialize=True): t = TranslationContext() t.buildannotator().build_types(func, inputtypes) + rtyper = t.buildrtyper() + rtyper.backend = llinterp_backend if specialize: - t.buildrtyper().specialize() + rtyper.specialize() if option.view: t.view() return t diff --git a/rpython/memory/test/test_transformed_gc.py 
b/rpython/memory/test/test_transformed_gc.py --- a/rpython/memory/test/test_transformed_gc.py +++ b/rpython/memory/test/test_transformed_gc.py @@ -14,6 +14,7 @@ from rpython.conftest import option from rpython.rlib.rstring import StringBuilder from rpython.rlib.rarithmetic import LONG_BIT +from rpython.rtyper.rtyper import llinterp_backend WORD = LONG_BIT // 8 @@ -29,9 +30,11 @@ t.config.set(**extraconfigopts) ann = t.buildannotator() ann.build_types(func, inputtypes) + rtyper = t.buildrtyper() + rtyper.backend = llinterp_backend if specialize: - t.buildrtyper().specialize() + rtyper.specialize() if backendopt: from rpython.translator.backendopt.all import backend_optimizations backend_optimizations(t) diff --git a/rpython/rlib/objectmodel.py b/rpython/rlib/objectmodel.py --- a/rpython/rlib/objectmodel.py +++ b/rpython/rlib/objectmodel.py @@ -275,8 +275,6 @@ return lltype.Signed malloc_zero_filled = CDefinedIntSymbolic('MALLOC_ZERO_FILLED', default=0) -running_on_llinterp = CDefinedIntSymbolic('RUNNING_ON_LLINTERP', default=1) -# running_on_llinterp is meant to have the value 0 in all backends # ____________________________________________________________ diff --git a/rpython/rlib/rvmprof/cintf.py b/rpython/rlib/rvmprof/cintf.py --- a/rpython/rlib/rvmprof/cintf.py +++ b/rpython/rlib/rvmprof/cintf.py @@ -7,8 +7,6 @@ from rpython.rtyper.tool import rffi_platform as platform from rpython.rlib import rthread -from rpython.jit.backend import detect_cpu - class VMProfPlatformUnsupported(Exception): pass diff --git a/rpython/rlib/rvmprof/test/test_rvmprof.py b/rpython/rlib/rvmprof/test/test_rvmprof.py --- a/rpython/rlib/rvmprof/test/test_rvmprof.py +++ b/rpython/rlib/rvmprof/test/test_rvmprof.py @@ -101,7 +101,7 @@ s = 0 for i in range(num): s += (i << 1) - if s % 32423423423 == 0: + if s % 2123423423 == 0: print s return s diff --git a/rpython/rtyper/extfunc.py b/rpython/rtyper/extfunc.py --- a/rpython/rtyper/extfunc.py +++ b/rpython/rtyper/extfunc.py @@ -1,99 +1,105 @@ 
-from rpython.rtyper.extregistry import ExtRegistryEntry -from rpython.rtyper.lltypesystem.lltype import typeOf, FuncType, functionptr -from rpython.annotator.model import unionof +from rpython.annotator.model import unionof, SomeObject from rpython.annotator.signature import annotation, SignatureError +from rpython.rtyper.extregistry import ExtRegistryEntry, lookup +from rpython.rtyper.lltypesystem.lltype import ( + typeOf, FuncType, functionptr, _ptr, Void) +from rpython.rtyper.error import TyperError +from rpython.rtyper.rmodel import Repr -import py +class SomeExternalFunction(SomeObject): + def __init__(self, name, args_s, s_result): + self.name = name + self.args_s = args_s + self.s_result = s_result + + def check_args(self, callspec): + params_s = self.args_s + args_s, kwargs = callspec.unpack() + if kwargs: + raise SignatureError( + "External functions cannot be called with keyword arguments") + if len(args_s) != len(params_s): + raise SignatureError("Argument number mismatch") + for i, s_param in enumerate(params_s): + arg = unionof(args_s[i], s_param) + if not s_param.contains(arg): + raise SignatureError( + "In call to external function %r:\n" + "arg %d must be %s,\n" + " got %s" % ( + self.name, i + 1, s_param, args_s[i])) + + def call(self, callspec): + self.check_args(callspec) + return self.s_result + + def rtyper_makerepr(self, rtyper): + if not self.is_constant(): + raise TyperError("Non-constant external function!") + entry = lookup(self.const) + impl = getattr(entry, 'lltypeimpl', None) + fakeimpl = getattr(entry, 'lltypefakeimpl', None) + return ExternalFunctionRepr(self, impl, fakeimpl) + + def rtyper_makekey(self): + return self.__class__, self + +class ExternalFunctionRepr(Repr): + lowleveltype = Void + + def __init__(self, s_func, impl, fakeimpl): + self.s_func = s_func + self.impl = impl + self.fakeimpl = fakeimpl + + def rtype_simple_call(self, hop): + rtyper = hop.rtyper + args_r = [rtyper.getrepr(s_arg) for s_arg in self.s_func.args_s] + 
r_result = rtyper.getrepr(self.s_func.s_result) + obj = self.get_funcptr(rtyper, args_r, r_result) + hop2 = hop.copy() + hop2.r_s_popfirstarg() + vlist = [hop2.inputconst(typeOf(obj), obj)] + hop2.inputargs(*args_r) + hop2.exception_is_here() + return hop2.genop('direct_call', vlist, r_result) + + def get_funcptr(self, rtyper, args_r, r_result): + from rpython.rtyper.rtyper import llinterp_backend + args_ll = [r_arg.lowleveltype for r_arg in args_r] + ll_result = r_result.lowleveltype + name = self.s_func.name + if self.fakeimpl and rtyper.backend is llinterp_backend: + FT = FuncType(args_ll, ll_result) + return functionptr( + FT, name, _external_name=name, _callable=self.fakeimpl) + elif self.impl: + if isinstance(self.impl, _ptr): + return self.impl + else: + # store some attributes to the 'impl' function, where + # the eventual call to rtyper.getcallable() will find them + # and transfer them to the final lltype.functionptr(). + self.impl._llfnobjattrs_ = {'_name': name} + return rtyper.getannmixlevel().delayedfunction( + self.impl, self.s_func.args_s, self.s_func.s_result) + else: + fakeimpl = self.fakeimpl or self.s_func.const + FT = FuncType(args_ll, ll_result) + return functionptr( + FT, name, _external_name=name, _callable=fakeimpl) + class ExtFuncEntry(ExtRegistryEntry): safe_not_sandboxed = False - # common case: args is a list of annotation or types - def normalize_args(self, *args_s): - args = self.signature_args - signature_args = [annotation(arg, None) for arg in args] - assert len(args_s) == len(signature_args),\ - "Argument number mismatch" + def compute_annotation(self): + s_result = SomeExternalFunction( + self.name, self.signature_args, self.signature_result) + if (self.bookkeeper.annotator.translator.config.translation.sandbox + and not self.safe_not_sandboxed): + s_result.needs_sandboxing = True + return s_result - for i, expected in enumerate(signature_args): - arg = unionof(args_s[i], expected) - if not expected.contains(arg): - name = 
getattr(self, 'name', None) - if not name: - try: - name = self.instance.__name__ - except AttributeError: - name = '?' - raise SignatureError("In call to external function %r:\n" - "arg %d must be %s,\n" - " got %s" % ( - name, i+1, expected, args_s[i])) - return signature_args - - def compute_result_annotation(self, *args_s): - self.normalize_args(*args_s) # check arguments - return self.signature_result - - def specialize_call(self, hop): - rtyper = hop.rtyper - signature_args = self.normalize_args(*hop.args_s) - args_r = [rtyper.getrepr(s_arg) for s_arg in signature_args] - args_ll = [r_arg.lowleveltype for r_arg in args_r] - s_result = hop.s_result - r_result = rtyper.getrepr(s_result) - ll_result = r_result.lowleveltype - name = getattr(self, 'name', None) or self.instance.__name__ - impl = getattr(self, 'lltypeimpl', None) - fakeimpl = getattr(self, 'lltypefakeimpl', self.instance) - if impl: - if (rtyper.annotator.translator.config.translation.sandbox - and not self.safe_not_sandboxed): - from rpython.translator.sandbox.rsandbox import ( - make_sandbox_trampoline) - impl = make_sandbox_trampoline( - self.name, signature_args, s_result) - if hasattr(self, 'lltypefakeimpl'): - # If we have both an llimpl and an llfakeimpl, - # we need a wrapper that selects the proper one and calls it - from rpython.tool.sourcetools import func_with_new_name - # Using '*args' is delicate because this wrapper is also - # created for init-time functions like llarena.arena_malloc - # which are called before the GC is fully initialized - args = ', '.join(['arg%d' % i for i in range(len(args_ll))]) - d = {'original_impl': impl, - 's_result': s_result, - 'fakeimpl': fakeimpl, - '__name__': __name__, - } - exec py.code.compile(""" - from rpython.rlib.objectmodel import running_on_llinterp - from rpython.rlib.debug import llinterpcall - from rpython.rlib.jit import dont_look_inside - # note: we say 'dont_look_inside' mostly because the - # JIT does not support 'running_on_llinterp', 
but in - # theory it is probably right to stop jitting anyway. - @dont_look_inside - def ll_wrapper(%s): - if running_on_llinterp: - return llinterpcall(s_result, fakeimpl, %s) - else: - return original_impl(%s) - """ % (args, args, args)) in d - impl = func_with_new_name(d['ll_wrapper'], name + '_wrapper') - # store some attributes to the 'impl' function, where - # the eventual call to rtyper.getcallable() will find them - # and transfer them to the final lltype.functionptr(). - impl._llfnobjattrs_ = {'_name': self.name} - obj = rtyper.getannmixlevel().delayedfunction( - impl, signature_args, hop.s_result) - else: - FT = FuncType(args_ll, ll_result) - obj = functionptr(FT, name, _external_name=self.name, - _callable=fakeimpl, - _safe_not_sandboxed=self.safe_not_sandboxed) - vlist = [hop.inputconst(typeOf(obj), obj)] + hop.inputargs(*args_r) - hop.exception_is_here() - return hop.genop('direct_call', vlist, r_result) def register_external(function, args, result=None, export_name=None, llimpl=None, llfakeimpl=None, sandboxsafe=False): @@ -109,32 +115,20 @@ if export_name is None: export_name = function.__name__ + params_s = [annotation(arg) for arg in args] + s_result = annotation(result) class FunEntry(ExtFuncEntry): _about_ = function safe_not_sandboxed = sandboxsafe - - if args is None: - def normalize_args(self, *args_s): - return args_s # accept any argument unmodified - elif callable(args): - # custom annotation normalizer (see e.g. 
os.utime()) - normalize_args = staticmethod(args) - else: # use common case behavior - signature_args = args - - signature_result = annotation(result, None) + signature_args = params_s + signature_result = s_result name = export_name if llimpl: lltypeimpl = staticmethod(llimpl) if llfakeimpl: lltypefakeimpl = staticmethod(llfakeimpl) - if export_name: - FunEntry.__name__ = export_name - else: - FunEntry.__name__ = function.func_name - def is_external(func): if hasattr(func, 'value'): func = func.value diff --git a/rpython/rtyper/rtyper.py b/rpython/rtyper/rtyper.py --- a/rpython/rtyper/rtyper.py +++ b/rpython/rtyper/rtyper.py @@ -32,11 +32,24 @@ from rpython.translator.sandbox.rsandbox import make_sandbox_trampoline +class RTyperBackend(object): + pass + +class GenCBackend(RTyperBackend): + pass +genc_backend = GenCBackend() + +class LLInterpBackend(RTyperBackend): + pass +llinterp_backend = LLInterpBackend() + + class RPythonTyper(object): from rpython.rtyper.rmodel import log - def __init__(self, annotator): + def __init__(self, annotator, backend=genc_backend): self.annotator = annotator + self.backend = backend self.lowlevel_ann_policy = LowLevelAnnotatorPolicy(self) self.reprs = {} self._reprs_must_call_setup = [] diff --git a/rpython/rtyper/test/test_extfunc.py b/rpython/rtyper/test/test_extfunc.py --- a/rpython/rtyper/test/test_extfunc.py +++ b/rpython/rtyper/test/test_extfunc.py @@ -1,7 +1,6 @@ import py -from rpython.rtyper.extfunc import ExtFuncEntry, register_external,\ - is_external +from rpython.rtyper.extfunc import register_external from rpython.annotator.model import SomeInteger, SomeString, AnnotatorError from rpython.annotator.annrpython import RPythonAnnotator from rpython.annotator.policy import AnnotatorPolicy @@ -19,11 +18,7 @@ "NOT_RPYTHON" return eval("x+40") - class BTestFuncEntry(ExtFuncEntry): - _about_ = b - name = 'b' - signature_args = [SomeInteger()] - signature_result = SomeInteger() + register_external(b, [int], result=int) def f(): 
return b(2) @@ -43,15 +38,11 @@ def c(y, x): yyy - class CTestFuncEntry(ExtFuncEntry): - _about_ = c - name = 'ccc' - signature_args = [SomeInteger()] * 2 - signature_result = SomeInteger() + def llimpl(y, x): + return y + x - def lltypeimpl(y, x): - return y + x - lltypeimpl = staticmethod(lltypeimpl) + register_external(c, [int, int], result=int, llimpl=llimpl, + export_name='ccc') def f(): return c(3, 4) @@ -59,22 +50,6 @@ res = interpret(f, []) assert res == 7 - def test_register_external_signature(self): - """ - Test the standard interface for external functions. - """ - def dd(): - pass - register_external(dd, [int], int) - - def f(): - return dd(3) - - policy = AnnotatorPolicy() - a = RPythonAnnotator(policy=policy) - s = a.build_types(f, []) - assert isinstance(s, SomeInteger) - def test_register_external_tuple_args(self): """ Verify the annotation of a registered external function which takes a @@ -121,23 +96,6 @@ s = a.build_types(f, []) assert isinstance(s, SomeInteger) - def test_register_external_specialcase(self): - """ - When args=None, the external function accepts any arguments unmodified. 
- """ - def function_withspecialcase(arg): - return repr(arg) - register_external(function_withspecialcase, args=None, result=str) - - def f(): - x = function_withspecialcase - return x(33) + x("aaa") + x([]) + "\n" - - policy = AnnotatorPolicy() - a = RPythonAnnotator(policy=policy) - s = a.build_types(f, []) - assert isinstance(s, SomeString) - def test_str0(self): str0 = SomeString(no_nul=True) def os_open(s): @@ -182,3 +140,22 @@ # fails with TooLateForChange a.build_types(g, [[str]]) a.build_types(g, [[str0]]) # Does not raise + + def test_register_external_llfakeimpl(self): + def a(i): + return i + def a_llimpl(i): + return i * 2 + def a_llfakeimpl(i): + return i * 3 + register_external(a, [int], int, llimpl=a_llimpl, + llfakeimpl=a_llfakeimpl) + def f(i): + return a(i) + + res = interpret(f, [7]) + assert res == 21 + + from rpython.translator.c.test.test_genc import compile + fc = compile(f, [int]) + assert fc(7) == 14 diff --git a/rpython/rtyper/test/test_llinterp.py b/rpython/rtyper/test/test_llinterp.py --- a/rpython/rtyper/test/test_llinterp.py +++ b/rpython/rtyper/test/test_llinterp.py @@ -13,7 +13,7 @@ from rpython.rlib.rarithmetic import r_uint, ovfcheck from rpython.tool import leakfinder from rpython.conftest import option - +from rpython.rtyper.rtyper import llinterp_backend # switch on logging of interp to show more info on failing tests @@ -39,6 +39,7 @@ t.view() global typer # we need it for find_exception typer = t.buildrtyper() + typer.backend = llinterp_backend typer.specialize() #t.view() t.checkgraphs() diff --git a/rpython/rtyper/test/test_rbuiltin.py b/rpython/rtyper/test/test_rbuiltin.py --- a/rpython/rtyper/test/test_rbuiltin.py +++ b/rpython/rtyper/test/test_rbuiltin.py @@ -3,8 +3,7 @@ import py -from rpython.rlib.debug import llinterpcall -from rpython.rlib.objectmodel import instantiate, running_on_llinterp, compute_unique_id, current_object_addr_as_int +from rpython.rlib.objectmodel import instantiate, compute_unique_id, 
current_object_addr_as_int from rpython.rlib.rarithmetic import (intmask, longlongmask, r_int64, is_valid_int, r_int, r_uint, r_longlong, r_ulonglong) from rpython.rlib.rstring import StringBuilder, UnicodeBuilder @@ -456,26 +455,6 @@ res = self.interpret(fn, [3.25]) assert res == 7.25 - def test_debug_llinterpcall(self): - S = lltype.Struct('S', ('m', lltype.Signed)) - SPTR = lltype.Ptr(S) - def foo(n): - "NOT_RPYTHON" - s = lltype.malloc(S, immortal=True) - s.m = eval("n*6", locals()) - return s - def fn(n): - if running_on_llinterp: - return llinterpcall(SPTR, foo, n).m - else: - return 321 - res = self.interpret(fn, [7]) - assert res == 42 - from rpython.translator.c.test.test_genc import compile - f = compile(fn, [int]) - res = f(7) - assert res == 321 - def test_id(self): class A: pass diff --git a/rpython/rtyper/test/test_rpbc.py b/rpython/rtyper/test/test_rpbc.py --- a/rpython/rtyper/test/test_rpbc.py +++ b/rpython/rtyper/test/test_rpbc.py @@ -1,7 +1,7 @@ import py from rpython.annotator import model as annmodel -from rpython.annotator import policy, specialize +from rpython.annotator import specialize from rpython.rtyper.lltypesystem.lltype import typeOf from rpython.rtyper.test.tool import BaseRtypingTest from rpython.rtyper.llannotation import SomePtr, lltype_to_annotation @@ -1690,59 +1690,6 @@ # ____________________________________________________________ -class TestRPBCExtra(BaseRtypingTest): - - def test_folding_specialize_support(self): - - class S(object): - - def w(s, x): - if isinstance(x, int): - return x - if isinstance(x, str): - return len(x) - return -1 - w._annspecialcase_ = "specialize:w" - - def _freeze_(self): - return True - - s = S() - - def f(i, n): - w = s.w - if i == 0: - return w(0) - elif i == 1: - return w("abc") - elif i == 2: - return w(3*n) - elif i == 3: - return w(str(n)) - return -1 - - class P(policy.AnnotatorPolicy): - def specialize__w(pol, funcdesc, args_s): - typ = args_s[1].knowntype - if args_s[0].is_constant() and 
args_s[1].is_constant(): - x = args_s[1].const - v = s.w(x) - builder = specialize.make_constgraphbuilder(2, v) - return funcdesc.cachedgraph(x, builder=builder) - return funcdesc.cachedgraph(typ) - - p = P() - - res = self.interpret(f, [0, 66], policy=p) - assert res == 0 - res = self.interpret(f, [1, 66], policy=p) - assert res == 3 - res = self.interpret(f, [2, 4], policy=p) - assert res == 12 - res = self.interpret(f, [3, 5555], policy=p) - assert res == 4 - - def test_hlinvoke_simple(): def f(a,b): return a + b diff --git a/rpython/translator/c/node.py b/rpython/translator/c/node.py --- a/rpython/translator/c/node.py +++ b/rpython/translator/c/node.py @@ -913,6 +913,7 @@ return [] def new_funcnode(db, T, obj, forcename=None): + from rpython.rtyper.rtyper import llinterp_backend if db.sandbox: if (getattr(obj, 'external', None) is not None and not obj._safe_not_sandboxed): @@ -934,6 +935,9 @@ return ExternalFuncNode(db, T, obj, name) elif hasattr(obj._callable, "c_name"): return ExternalFuncNode(db, T, obj, name) # this case should only be used for entrypoints + elif db.translator.rtyper.backend is llinterp_backend: + # on llinterp, anything goes + return ExternalFuncNode(db, T, obj, name) else: raise ValueError("don't know how to generate code for %r" % (obj,)) diff --git a/rpython/translator/driver.py b/rpython/translator/driver.py --- a/rpython/translator/driver.py +++ b/rpython/translator/driver.py @@ -487,7 +487,15 @@ exe = py.path.local(exename) exename = exe.new(purebasename=exe.purebasename + 'w') shutil_copy(str(exename), str(newexename)) - ext_to_copy = ['lib', 'pdb'] + # for pypy, the import library is renamed and moved to + # libs/python32.lib, according to the pragma in pyconfig.h + libname = self.config.translation.libname + libname = libname or soname.new(ext='lib').basename + libname = str(newsoname.dirpath().join(libname)) + shutil.copyfile(str(soname.new(ext='lib')), libname) + self.log.info("copied: %s" % (libname,)) + # the pdb file goes in 
the same place as pypy(w).exe + ext_to_copy = ['pdb',] for ext in ext_to_copy: name = soname.new(ext=ext) newname = newexename.new(basename=soname.basename) diff --git a/rpython/translator/sandbox/test/test_sandbox.py b/rpython/translator/sandbox/test/test_sandbox.py --- a/rpython/translator/sandbox/test/test_sandbox.py +++ b/rpython/translator/sandbox/test/test_sandbox.py @@ -292,6 +292,21 @@ rescode = pipe.wait() assert rescode == 0 +def test_environ_items(): + def entry_point(argv): + print os.environ.items() + return 0 + + exe = compile(entry_point) + g, f = run_in_subprocess(exe) + expect(f, g, "ll_os.ll_os_envitems", (), []) + expect(f, g, "ll_os.ll_os_write", (1, "[]\n"), 3) + g.close() + tail = f.read() + f.close() + assert tail == "" + + class TestPrintedResults: def run(self, entry_point, args, expected): From pypy.commits at gmail.com Fri Feb 19 16:23:37 2016 From: pypy.commits at gmail.com (mjacob) Date: Fri, 19 Feb 2016 13:23:37 -0800 (PST) Subject: [pypy-commit] pypy py3.3: Fix _imp module direct app tests. Message-ID: <56c787d9.6614c20a.28564.0dd7@mx.google.com> Author: Manuel Jacob Branch: py3.3 Changeset: r82331:7aac73fa16ed Date: 2016-02-19 21:52 +0100 http://bitbucket.org/pypy/pypy/changeset/7aac73fa16ed/ Log: Fix _imp module direct app tests. 
diff --git a/pypy/module/imp/test/test_app.py b/pypy/module/imp/test/test_app.py --- a/pypy/module/imp/test/test_app.py +++ b/pypy/module/imp/test/test_app.py @@ -8,7 +8,6 @@ } def setup_class(cls): - cls.w_imp = cls.space.getbuiltinmodule('_imp') cls.w_file_module = cls.space.wrap(__file__) latin1 = udir.join('latin1.py') latin1.write("# -*- coding: iso-8859-1 -*\n") @@ -75,7 +74,8 @@ assert type == 'rb' def test_ext_suffixes(self): - for suffix in self.imp.extension_suffixes(): + import _imp + for suffix in _imp.extension_suffixes(): assert suffix.endswith(('.pyd', '.so')) def test_obscure_functions(self): From pypy.commits at gmail.com Fri Feb 19 16:23:35 2016 From: pypy.commits at gmail.com (mjacob) Date: Fri, 19 Feb 2016 13:23:35 -0800 (PST) Subject: [pypy-commit] pypy py3k: Add exec() workaround for running on top of old CPython 2.7 versions. Message-ID: <56c787d7.a118c20a.2a649.7a2f@mx.google.com> Author: Manuel Jacob Branch: py3k Changeset: r82330:1bf2ad223b9c Date: 2016-02-19 20:58 +0100 http://bitbucket.org/pypy/pypy/changeset/1bf2ad223b9c/ Log: Add exec() workaround for running on top of old CPython 2.7 versions. 
diff --git a/pypy/module/__pypy__/test/test_magic.py b/pypy/module/__pypy__/test/test_magic.py --- a/pypy/module/__pypy__/test/test_magic.py +++ b/pypy/module/__pypy__/test/test_magic.py @@ -15,6 +15,10 @@ __pypy__.save_module_content_for_future_reload(sys) def test_new_code_hook(self): + # workaround for running on top of old CPython 2.7 versions + def exec_(code, d): + exec(code, d) + l = [] def callable(code): @@ -24,7 +28,7 @@ __pypy__.set_code_callback(callable) d = {} try: - exec(""" + exec_(""" def f(): pass """, d) From pypy.commits at gmail.com Fri Feb 19 17:29:09 2016 From: pypy.commits at gmail.com (mjacob) Date: Fri, 19 Feb 2016 14:29:09 -0800 (PST) Subject: [pypy-commit] pypy py3.3: hg merge py3k Message-ID: <56c79735.654fc20a.4e7ec.ffff9335@mx.google.com> Author: Manuel Jacob Branch: py3.3 Changeset: r82332:58f595f70a58 Date: 2016-02-19 22:26 +0100 http://bitbucket.org/pypy/pypy/changeset/58f595f70a58/ Log: hg merge py3k diff --git a/lib-python/2.7/distutils/command/build_ext.py b/lib-python/2.7/distutils/command/build_ext.py --- a/lib-python/2.7/distutils/command/build_ext.py +++ b/lib-python/2.7/distutils/command/build_ext.py @@ -188,7 +188,7 @@ # the 'libs' directory is for binary installs - we assume that # must be the *native* platform. But we don't really support # cross-compiling via a binary install anyway, so we let it go. 
- self.library_dirs.append(os.path.join(sys.exec_prefix, 'include')) + self.library_dirs.append(os.path.join(sys.exec_prefix, 'libs')) if self.debug: self.build_temp = os.path.join(self.build_temp, "Debug") else: diff --git a/lib_pypy/_pypy_testcapi.py b/lib_pypy/_pypy_testcapi.py --- a/lib_pypy/_pypy_testcapi.py +++ b/lib_pypy/_pypy_testcapi.py @@ -62,7 +62,7 @@ if sys.platform == 'win32': # XXX pyconfig.h uses a pragma to link to the import library, # which is currently python3.lib - library = os.path.join(thisdir, '..', 'include', 'python32') + library = os.path.join(thisdir, '..', 'libs', 'python32') if not os.path.exists(library + '.lib'): # For a local translation or nightly build library = os.path.join(thisdir, '..', 'pypy', 'goal', 'python32') diff --git a/lib_pypy/cffi.egg-info/PKG-INFO b/lib_pypy/cffi.egg-info/PKG-INFO --- a/lib_pypy/cffi.egg-info/PKG-INFO +++ b/lib_pypy/cffi.egg-info/PKG-INFO @@ -1,6 +1,6 @@ Metadata-Version: 1.1 Name: cffi -Version: 1.5.1 +Version: 1.5.2 Summary: Foreign Function Interface for Python calling C code. Home-page: http://cffi.readthedocs.org Author: Armin Rigo, Maciej Fijalkowski diff --git a/lib_pypy/cffi/__init__.py b/lib_pypy/cffi/__init__.py --- a/lib_pypy/cffi/__init__.py +++ b/lib_pypy/cffi/__init__.py @@ -4,8 +4,8 @@ from .api import FFI, CDefError, FFIError from .ffiplatform import VerificationError, VerificationMissing -__version__ = "1.5.1" -__version_info__ = (1, 5, 1) +__version__ = "1.5.2" +__version_info__ = (1, 5, 2) # The verifier module file names are based on the CRC32 of a string that # contains the following version number. 
It may be older than __version__ diff --git a/lib_pypy/cffi/_embedding.h b/lib_pypy/cffi/_embedding.h --- a/lib_pypy/cffi/_embedding.h +++ b/lib_pypy/cffi/_embedding.h @@ -233,7 +233,7 @@ f = PySys_GetObject((char *)"stderr"); if (f != NULL && f != Py_None) { PyFile_WriteString("\nFrom: " _CFFI_MODULE_NAME - "\ncompiled with cffi version: 1.5.1" + "\ncompiled with cffi version: 1.5.2" "\n_cffi_backend module: ", f); modules = PyImport_GetModuleDict(); mod = PyDict_GetItemString(modules, "_cffi_backend"); diff --git a/lib_pypy/cffi/api.py b/lib_pypy/cffi/api.py --- a/lib_pypy/cffi/api.py +++ b/lib_pypy/cffi/api.py @@ -1,4 +1,4 @@ -import sys, sysconfig, types +import sys, types from .lock import allocate_lock try: @@ -550,16 +550,34 @@ lst.append(value) # if '__pypy__' in sys.builtin_module_names: + import os + if sys.platform == "win32": + # we need 'libpypy-c.lib'. Current distributions of + # pypy (>= 4.1) contain it as 'libs/python27.lib'. + pythonlib = "python27" + if hasattr(sys, 'prefix'): + ensure('library_dirs', os.path.join(sys.prefix, 'libs')) + else: + # we need 'libpypy-c.{so,dylib}', which should be by + # default located in 'sys.prefix/bin' for installed + # systems. + pythonlib = "pypy-c" + if hasattr(sys, 'prefix'): + ensure('library_dirs', os.path.join(sys.prefix, 'bin')) + # On uninstalled pypy's, the libpypy-c is typically found in + # .../pypy/goal/. 
if hasattr(sys, 'prefix'): - import os - ensure('library_dirs', os.path.join(sys.prefix, 'bin')) - pythonlib = "pypy-c" + ensure('library_dirs', os.path.join(sys.prefix, 'pypy', 'goal')) else: if sys.platform == "win32": template = "python%d%d" if hasattr(sys, 'gettotalrefcount'): template += '_d' else: + try: + import sysconfig + except ImportError: # 2.6 + from distutils import sysconfig template = "python%d.%d" if sysconfig.get_config_var('DEBUG_EXT'): template += sysconfig.get_config_var('DEBUG_EXT') diff --git a/lib_pypy/cffi/vengine_cpy.py b/lib_pypy/cffi/vengine_cpy.py --- a/lib_pypy/cffi/vengine_cpy.py +++ b/lib_pypy/cffi/vengine_cpy.py @@ -1,3 +1,6 @@ +# +# DEPRECATED: implementation for ffi.verify() +# import sys, imp from . import model, ffiplatform diff --git a/lib_pypy/cffi/vengine_gen.py b/lib_pypy/cffi/vengine_gen.py --- a/lib_pypy/cffi/vengine_gen.py +++ b/lib_pypy/cffi/vengine_gen.py @@ -1,3 +1,6 @@ +# +# DEPRECATED: implementation for ffi.verify() +# import sys, os import types diff --git a/lib_pypy/cffi/verifier.py b/lib_pypy/cffi/verifier.py --- a/lib_pypy/cffi/verifier.py +++ b/lib_pypy/cffi/verifier.py @@ -1,3 +1,6 @@ +# +# DEPRECATED: implementation for ffi.verify() +# import sys, os, binascii, shutil, io from . import __version_verifier_modules__ from . 
import ffiplatform diff --git a/pypy/config/pypyoption.py b/pypy/config/pypyoption.py --- a/pypy/config/pypyoption.py +++ b/pypy/config/pypyoption.py @@ -37,14 +37,17 @@ "thread", "itertools", "pyexpat", "_ssl", "cpyext", "array", "binascii", "_multiprocessing", '_warnings', "_collections", "_multibytecodec", "_continuation", "_cffi_backend", - "_csv", "_pypyjson", "_vmprof", "_posixsubprocess", # "cppyy", "micronumpy" + "_csv", "_pypyjson", "_posixsubprocess", # "cppyy", "micronumpy" "faulthandler", ]) -#if ((sys.platform.startswith('linux') or sys.platform == 'darwin') -# and os.uname()[4] == 'x86_64' and sys.maxint > 2**32): - # it's not enough that we get x86_64 -# working_modules.add('_vmprof') +from rpython.jit.backend import detect_cpu +try: + if detect_cpu.autodetect().startswith('x86'): + working_modules.add('_vmprof') +except detect_cpu.ProcessorAutodetectError: + pass + translation_modules = default_modules.copy() translation_modules.update([ diff --git a/pypy/doc/whatsnew-head.rst b/pypy/doc/whatsnew-head.rst --- a/pypy/doc/whatsnew-head.rst +++ b/pypy/doc/whatsnew-head.rst @@ -7,6 +7,9 @@ Fixed ``_PyLong_FromByteArray()``, which was buggy. +Fixed a crash with stacklets (or greenlets) on non-Linux machines +which showed up if you forget stacklets without resuming them. + .. branch: numpy-1.10 Fix tests to run cleanly with -A and start to fix micronumpy for upstream numpy @@ -38,7 +41,8 @@ .. branch: compress-numbering -Improve the memory signature of numbering instances in the JIT. +Improve the memory signature of numbering instances in the JIT. This should massively +decrease the amount of memory consumed by the JIT, which is significant for most programs. .. branch: fix-trace-too-long-heuristic @@ -148,3 +152,14 @@ Seperate structmember.h from Python.h Also enhance creating api functions to specify which header file they appear in (previously only pypy_decl.h) + +.. 
branch: llimpl + +Refactor register_external(), remove running_on_llinterp mechanism and +apply sandbox transform on externals at the end of annotation. + +.. branch: cffi-embedding-win32 + +.. branch: windows-vmprof-support + +vmprof should work on Windows. diff --git a/pypy/goal/targetpypystandalone.py b/pypy/goal/targetpypystandalone.py --- a/pypy/goal/targetpypystandalone.py +++ b/pypy/goal/targetpypystandalone.py @@ -246,6 +246,9 @@ raise Exception("Cannot use the --output option with PyPy " "when --shared is on (it is by default). " "See issue #1971.") + if sys.platform == 'win32': + config.translation.libname = '..\\..\\libs\\python27.lib' + thisdir.join('..', '..', 'libs').ensure(dir=1) if config.translation.thread: config.objspace.usemodules.thread = True diff --git a/pypy/module/__pypy__/test/test_magic.py b/pypy/module/__pypy__/test/test_magic.py --- a/pypy/module/__pypy__/test/test_magic.py +++ b/pypy/module/__pypy__/test/test_magic.py @@ -15,6 +15,10 @@ __pypy__.save_module_content_for_future_reload(sys) def test_new_code_hook(self): + # workaround for running on top of old CPython 2.7 versions + def exec_(code, d): + exec(code, d) + l = [] def callable(code): @@ -24,7 +28,7 @@ __pypy__.set_code_callback(callable) d = {} try: - exec(""" + exec_(""" def f(): pass """, d) diff --git a/pypy/module/_cffi_backend/__init__.py b/pypy/module/_cffi_backend/__init__.py --- a/pypy/module/_cffi_backend/__init__.py +++ b/pypy/module/_cffi_backend/__init__.py @@ -3,7 +3,7 @@ from rpython.rlib import rdynload, clibffi, entrypoint from rpython.rtyper.lltypesystem import rffi -VERSION = "1.5.1" +VERSION = "1.5.2" FFI_DEFAULT_ABI = clibffi.FFI_DEFAULT_ABI try: diff --git a/pypy/module/_cffi_backend/embedding.py b/pypy/module/_cffi_backend/embedding.py --- a/pypy/module/_cffi_backend/embedding.py +++ b/pypy/module/_cffi_backend/embedding.py @@ -57,7 +57,7 @@ # pypy_init_embedded_cffi_module(). 
if not glob.patched_sys: space.appexec([], """(): - import os + import os, sys sys.stdin = sys.__stdin__ = os.fdopen(0, 'rb', 0) sys.stdout = sys.__stdout__ = os.fdopen(1, 'wb', 0) sys.stderr = sys.__stderr__ = os.fdopen(2, 'wb', 0) diff --git a/pypy/module/_cffi_backend/test/_backend_test_c.py b/pypy/module/_cffi_backend/test/_backend_test_c.py --- a/pypy/module/_cffi_backend/test/_backend_test_c.py +++ b/pypy/module/_cffi_backend/test/_backend_test_c.py @@ -1,7 +1,7 @@ # ____________________________________________________________ import sys -assert __version__ == "1.5.1", ("This test_c.py file is for testing a version" +assert __version__ == "1.5.2", ("This test_c.py file is for testing a version" " of cffi that differs from the one that we" " get from 'import _cffi_backend'") if sys.version_info < (3,): diff --git a/pypy/module/sys/interp_encoding.py b/pypy/module/sys/interp_encoding.py --- a/pypy/module/sys/interp_encoding.py +++ b/pypy/module/sys/interp_encoding.py @@ -22,7 +22,7 @@ def _getfilesystemencoding(space): encoding = base_encoding - if rlocale.HAVE_LANGINFO and rlocale.CODESET: + if rlocale.HAVE_LANGINFO: try: oldlocale = rlocale.setlocale(rlocale.LC_CTYPE, None) rlocale.setlocale(rlocale.LC_CTYPE, "") diff --git a/pypy/module/test_lib_pypy/cffi_tests/cffi0/test_zintegration.py b/pypy/module/test_lib_pypy/cffi_tests/cffi0/test_zintegration.py --- a/pypy/module/test_lib_pypy/cffi_tests/cffi0/test_zintegration.py +++ b/pypy/module/test_lib_pypy/cffi_tests/cffi0/test_zintegration.py @@ -5,6 +5,9 @@ if sys.platform == 'win32': py.test.skip('snippets do not run on win32') +if sys.version_info < (2, 7): + py.test.skip('fails e.g. 
on a Debian/Ubuntu which patches virtualenv' + ' in a non-2.6-friendly way') def create_venv(name): tmpdir = udir.join(name) diff --git a/pypy/module/test_lib_pypy/cffi_tests/embedding/test_basic.py b/pypy/module/test_lib_pypy/cffi_tests/embedding/test_basic.py --- a/pypy/module/test_lib_pypy/cffi_tests/embedding/test_basic.py +++ b/pypy/module/test_lib_pypy/cffi_tests/embedding/test_basic.py @@ -28,11 +28,14 @@ def prefix_pythonpath(): cffi_base = os.path.dirname(os.path.dirname(local_dir)) - pythonpath = os.environ.get('PYTHONPATH', '').split(os.pathsep) + pythonpath = org_env.get('PYTHONPATH', '').split(os.pathsep) if cffi_base not in pythonpath: pythonpath.insert(0, cffi_base) return os.pathsep.join(pythonpath) +def setup_module(mod): + mod.org_env = os.environ.copy() + class EmbeddingTests: _compiled_modules = {} @@ -46,14 +49,12 @@ def get_path(self): return str(self._path.ensure(dir=1)) - def _run_base(self, args, env_extra={}, **kwds): - print('RUNNING:', args, env_extra, kwds) - env = os.environ.copy() - env.update(env_extra) - return subprocess.Popen(args, env=env, **kwds) + def _run_base(self, args, **kwds): + print('RUNNING:', args, kwds) + return subprocess.Popen(args, **kwds) - def _run(self, args, env_extra={}): - popen = self._run_base(args, env_extra, cwd=self.get_path(), + def _run(self, args): + popen = self._run_base(args, cwd=self.get_path(), stdout=subprocess.PIPE, universal_newlines=True) output = popen.stdout.read() @@ -65,6 +66,7 @@ return output def prepare_module(self, name): + self.patch_environment() if name not in self._compiled_modules: path = self.get_path() filename = '%s.py' % name @@ -74,9 +76,8 @@ # find a solution to that: we could hack sys.path inside the # script run here, but we can't hack it in the same way in # execute(). 
- env_extra = {'PYTHONPATH': prefix_pythonpath()} - output = self._run([sys.executable, os.path.join(local_dir, filename)], - env_extra=env_extra) + output = self._run([sys.executable, + os.path.join(local_dir, filename)]) match = re.compile(r"\bFILENAME: (.+)").search(output) assert match dynamic_lib_name = match.group(1) @@ -101,6 +102,7 @@ c = distutils.ccompiler.new_compiler() print('compiling %s with %r' % (name, modules)) extra_preargs = [] + debug = True if sys.platform == 'win32': libfiles = [] for m in modules: @@ -109,29 +111,45 @@ libfiles.append('Release\\%s.lib' % m[:-4]) modules = libfiles extra_preargs.append('/MANIFEST') + debug = False # you need to install extra stuff + # for this to work elif threads: extra_preargs.append('-pthread') - objects = c.compile([filename], macros=sorted(defines.items()), debug=True) + objects = c.compile([filename], macros=sorted(defines.items()), + debug=debug) c.link_executable(objects + modules, name, extra_preargs=extra_preargs) finally: os.chdir(curdir) + def patch_environment(self): + path = self.get_path() + # for libpypy-c.dll or Python27.dll + path = os.path.split(sys.executable)[0] + os.path.pathsep + path + env_extra = {'PYTHONPATH': prefix_pythonpath()} + if sys.platform == 'win32': + envname = 'PATH' + else: + envname = 'LD_LIBRARY_PATH' + libpath = org_env.get(envname) + if libpath: + libpath = path + os.path.pathsep + libpath + else: + libpath = path + env_extra[envname] = libpath + for key, value in sorted(env_extra.items()): + if os.environ.get(key) != value: + print '* setting env var %r to %r' % (key, value) + os.environ[key] = value + def execute(self, name): path = self.get_path() - env_extra = {'PYTHONPATH': prefix_pythonpath()} - libpath = os.environ.get('LD_LIBRARY_PATH') - if libpath: - libpath = path + ':' + libpath - else: - libpath = path - env_extra['LD_LIBRARY_PATH'] = libpath print('running %r in %r' % (name, path)) executable_name = name if sys.platform == 'win32': executable_name = 
os.path.join(path, executable_name + '.exe') else: executable_name = os.path.join('.', executable_name) - popen = self._run_base([executable_name], env_extra, cwd=path, + popen = self._run_base([executable_name], cwd=path, stdout=subprocess.PIPE, universal_newlines=True) result = popen.stdout.read() diff --git a/pypy/tool/release/package.py b/pypy/tool/release/package.py --- a/pypy/tool/release/package.py +++ b/pypy/tool/release/package.py @@ -111,13 +111,8 @@ # builddir = py.path.local(options.builddir) pypydir = builddir.ensure(name, dir=True) + includedir = basedir.join('include') - # Recursively copy all headers, shutil has only ignore - # so we do a double-negative to include what we want - def copyonly(dirpath, contents): - return set(contents) - set( # XXX function not used? - shutil.ignore_patterns('*.h', '*.incl')(dirpath, contents), - ) shutil.copytree(str(includedir), str(pypydir.join('include'))) pypydir.ensure('include', dir=True) @@ -132,9 +127,6 @@ win_extras = ['libpypy-c.dll', 'sqlite3.dll'] if not options.no_tk: win_extras += ['tcl85.dll', 'tk85.dll'] - # add the .lib too, which is convenient to compile other programs - # that use the .dll (and for cffi's embedding mode) - win_extras.append('libpypy-c.lib') for extra in win_extras: p = pypy_c.dirpath().join(extra) @@ -145,32 +137,27 @@ continue print "Picking %s" % p binaries.append((p, p.basename)) - importlib_name = 'libpypy-c.lib' - if pypy_c.dirpath().join(importlib_name).check(): - try: - ver = subprocess.check_output([r'pypy\goal\pypy-c','-c', - "import sys;print(sys.version)"]) - importlib_target = 'python%s%s.lib' % (ver[0], ver[2]) - shutil.copyfile(str(pypy_c.dirpath().join(importlib_name)), - str(pypydir.join(importlib_target))) - # XXX fix this, either an additional build step or rename - # both DLL and LIB to versioned names, like cpython - shutil.copyfile(str(pypy_c.dirpath().join(importlib_name)), - str(pypy_c.dirpath().join(importlib_target))) - print "Picking %s as %s" % 
(pypy_c.dirpath().join(importlib_name), - pypydir.join('include', importlib_target)) - except: - pass + libsdir = basedir.join('libs') + if libsdir.exists(): + print 'Picking %s (and contents)' % libsdir + shutil.copytree(str(libsdir), str(pypydir.join('libs'))) else: - pass - # XXX users will complain that they cannot compile cpyext - # modules for windows, has the lib moved or are there no - # exported functions in the dll so no import library is created? + print '"libs" dir with import library not found.' + print 'You have to create %r' % (str(libsdir),) + print 'and copy libpypy-c.lib in there, renamed to python32.lib' + # XXX users will complain that they cannot compile capi (cpyext) + # modules for windows, also embedding pypy (i.e. in cffi) + # will fail. + # Has the lib moved, was translation not 'shared', or are + # there no exported functions in the dll so no import + # library was created? if not options.no_tk: try: p = pypy_c.dirpath().join('tcl85.dll') if not p.check(): p = py.path.local.sysfind('tcl85.dll') + if p is None: + raise WindowsError("tcl85.dll not found") tktcldir = p.dirpath().join('..').join('lib') shutil.copytree(str(tktcldir), str(pypydir.join('tcl'))) except WindowsError: diff --git a/rpython/annotator/bookkeeper.py b/rpython/annotator/bookkeeper.py --- a/rpython/annotator/bookkeeper.py +++ b/rpython/annotator/bookkeeper.py @@ -551,20 +551,6 @@ emulated = callback return self.pbc_call(pbc, args, emulated=emulated) - def _find_current_op(self, opname=None, arity=None, pos=None, s_type=None): - """ Find operation that is currently being annotated. 
Do some - sanity checks to see whether the correct op was found.""" - # XXX XXX HACK HACK HACK - fn, block, i = self.position_key - op = block.operations[i] - if opname is not None: - assert op.opname == opname - if arity is not None: - assert len(op.args) == arity - if pos is not None: - assert self.annotator.binding(op.args[pos]) == s_type - return op - def whereami(self): return self.annotator.whereami(self.position_key) diff --git a/rpython/annotator/policy.py b/rpython/annotator/policy.py --- a/rpython/annotator/policy.py +++ b/rpython/annotator/policy.py @@ -3,6 +3,9 @@ from rpython.annotator.specialize import ( specialize_argvalue, specialize_argtype, specialize_arglistitemtype, specialize_arg_or_var, memo, specialize_call_location) +from rpython.flowspace.operation import op +from rpython.flowspace.model import Constant +from rpython.annotator.model import SomeTuple class AnnotatorPolicy(object): @@ -64,7 +67,34 @@ return LowLevelAnnotatorPolicy.specialize__ll_and_arg(*args) def no_more_blocks_to_annotate(pol, annotator): + bk = annotator.bookkeeper # hint to all pending specializers that we are done - for callback in annotator.bookkeeper.pending_specializations: + for callback in bk.pending_specializations: callback() - del annotator.bookkeeper.pending_specializations[:] + del bk.pending_specializations[:] + if annotator.added_blocks is not None: + all_blocks = annotator.added_blocks + else: + all_blocks = annotator.annotated + for block in list(all_blocks): + for i, instr in enumerate(block.operations): + if not isinstance(instr, (op.simple_call, op.call_args)): + continue + v_func = instr.args[0] + s_func = annotator.annotation(v_func) + if not hasattr(s_func, 'needs_sandboxing'): + continue + key = ('sandboxing', s_func.const) + if key not in bk.emulated_pbc_calls: + params_s = s_func.args_s + s_result = s_func.s_result + from rpython.translator.sandbox.rsandbox import make_sandbox_trampoline + sandbox_trampoline = make_sandbox_trampoline( + 
s_func.name, params_s, s_result) + sandbox_trampoline._signature_ = [SomeTuple(items=params_s)], s_result + bk.emulate_pbc_call(key, bk.immutablevalue(sandbox_trampoline), params_s) + else: + s_trampoline = bk.emulated_pbc_calls[key][0] + sandbox_trampoline = s_trampoline.const + new = instr.replace({instr.args[0]: Constant(sandbox_trampoline)}) + block.operations[i] = new diff --git a/rpython/annotator/specialize.py b/rpython/annotator/specialize.py --- a/rpython/annotator/specialize.py +++ b/rpython/annotator/specialize.py @@ -317,18 +317,6 @@ yield (value,) + tuple_tail -def make_constgraphbuilder(n, v=None, factory=None, srcmodule=None): - def constgraphbuilder(translator, ignore): - args = ','.join(["arg%d" % i for i in range(n)]) - if factory is not None: - computed_v = factory() - else: - computed_v = v - miniglobals = {'v': computed_v, '__name__': srcmodule} - exec py.code.Source("constf = lambda %s: v" % args).compile() in miniglobals - return translator.buildflowgraph(miniglobals['constf']) - return constgraphbuilder - def maybe_star_args(funcdesc, key, args_s): args_s, key1, builder = flatten_star_args(funcdesc, args_s) if key1 is not None: diff --git a/rpython/annotator/unaryop.py b/rpython/annotator/unaryop.py --- a/rpython/annotator/unaryop.py +++ b/rpython/annotator/unaryop.py @@ -113,8 +113,9 @@ @op.simple_call.register(SomeObject) def simple_call_SomeObject(annotator, func, *args): - return annotator.annotation(func).call( - simple_args([annotator.annotation(arg) for arg in args])) + s_func = annotator.annotation(func) + argspec = simple_args([annotator.annotation(arg) for arg in args]) + return s_func.call(argspec) @op.call_args.register_transform(SomeObject) def transform_varargs(annotator, v_func, v_shape, *data_v): diff --git a/rpython/config/translationoption.py b/rpython/config/translationoption.py --- a/rpython/config/translationoption.py +++ b/rpython/config/translationoption.py @@ -192,6 +192,8 @@ "If true, makes an lldebug0 build", 
default=False, cmdline="--lldebug0"), StrOption("icon", "Path to the (Windows) icon to use for the executable"), + StrOption("libname", + "Windows: name and possibly location of the lib file to create"), OptionDescription("backendopt", "Backend Optimization Options", [ # control inlining diff --git a/rpython/jit/metainterp/optimizeopt/rewrite.py b/rpython/jit/metainterp/optimizeopt/rewrite.py --- a/rpython/jit/metainterp/optimizeopt/rewrite.py +++ b/rpython/jit/metainterp/optimizeopt/rewrite.py @@ -380,7 +380,7 @@ raise InvalidLoop("promote of a virtual") old_guard_op = info.get_last_guard(self.optimizer) if old_guard_op is not None: - op = self.replace_guard_class_with_guard_value(op, info, + op = self.replace_old_guard_with_guard_value(op, info, old_guard_op) elif arg0.type == 'f': arg0 = self.get_box_replacement(arg0) @@ -390,11 +390,26 @@ assert isinstance(constbox, Const) self.optimize_guard(op, constbox) - def replace_guard_class_with_guard_value(self, op, info, old_guard_op): - if old_guard_op.opnum != rop.GUARD_NONNULL: - previous_classbox = info.get_known_class(self.optimizer.cpu) - expected_classbox = self.optimizer.cpu.ts.cls_of_box(op.getarg(1)) - assert previous_classbox is not None + def replace_old_guard_with_guard_value(self, op, info, old_guard_op): + # there already has been a guard_nonnull or guard_class or + # guard_nonnull_class on this value, which is rather silly. + # This function replaces the original guard with a + # guard_value. Must be careful: doing so is unsafe if the + # original guard checks for something inconsistent, + # i.e. different than what it would give if the guard_value + # passed (this is a rare case, but possible). If we get + # inconsistent results in this way, then we must not do the + # replacement, otherwise we'd put guard_value up there but all + # intermediate ops might be executed by assuming something + # different, from the old guard that is now removed... 
+ + c_value = op.getarg(1) + if not c_value.nonnull(): + raise InvalidLoop('A GUARD_VALUE(..., NULL) follows some other ' + 'guard that it is not NULL') + previous_classbox = info.get_known_class(self.optimizer.cpu) + if previous_classbox is not None: + expected_classbox = self.optimizer.cpu.ts.cls_of_box(c_value) assert expected_classbox is not None if not previous_classbox.same_constant( expected_classbox): diff --git a/rpython/jit/metainterp/optimizeopt/test/test_optimizeopt.py b/rpython/jit/metainterp/optimizeopt/test/test_optimizeopt.py --- a/rpython/jit/metainterp/optimizeopt/test/test_optimizeopt.py +++ b/rpython/jit/metainterp/optimizeopt/test/test_optimizeopt.py @@ -3063,6 +3063,16 @@ self.optimize_loop(ops, expected, preamble) #self.check_expanded_fail_descr("i0", rop.GUARD_VALUE) + def test_invalid_guard_value_after_guard_class(self): + ops = """ + [p1, i0, i1, i2, p2] + guard_class(p1, ConstClass(node_vtable)) [i0] + i3 = int_add(i1, i2) + guard_value(p1, NULL) [i1] + jump(p2, i0, i1, i3, p2) + """ + self.raises(InvalidLoop, self.optimize_loop, ops, ops) + def test_guard_class_oois(self): ops = """ [p1] diff --git a/rpython/memory/gctransform/test/test_transform.py b/rpython/memory/gctransform/test/test_transform.py --- a/rpython/memory/gctransform/test/test_transform.py +++ b/rpython/memory/gctransform/test/test_transform.py @@ -5,6 +5,7 @@ from rpython.translator.exceptiontransform import ExceptionTransformer from rpython.rtyper.lltypesystem import lltype from rpython.conftest import option +from rpython.rtyper.rtyper import llinterp_backend class LLInterpedTranformerTests: @@ -131,8 +132,10 @@ def rtype(func, inputtypes, specialize=True): t = TranslationContext() t.buildannotator().build_types(func, inputtypes) + rtyper = t.buildrtyper() + rtyper.backend = llinterp_backend if specialize: - t.buildrtyper().specialize() + rtyper.specialize() if option.view: t.view() return t diff --git a/rpython/memory/test/test_transformed_gc.py 
b/rpython/memory/test/test_transformed_gc.py --- a/rpython/memory/test/test_transformed_gc.py +++ b/rpython/memory/test/test_transformed_gc.py @@ -14,6 +14,7 @@ from rpython.conftest import option from rpython.rlib.rstring import StringBuilder from rpython.rlib.rarithmetic import LONG_BIT +from rpython.rtyper.rtyper import llinterp_backend WORD = LONG_BIT // 8 @@ -29,9 +30,11 @@ t.config.set(**extraconfigopts) ann = t.buildannotator() ann.build_types(func, inputtypes) + rtyper = t.buildrtyper() + rtyper.backend = llinterp_backend if specialize: - t.buildrtyper().specialize() + rtyper.specialize() if backendopt: from rpython.translator.backendopt.all import backend_optimizations backend_optimizations(t) diff --git a/rpython/rlib/objectmodel.py b/rpython/rlib/objectmodel.py --- a/rpython/rlib/objectmodel.py +++ b/rpython/rlib/objectmodel.py @@ -275,8 +275,6 @@ return lltype.Signed malloc_zero_filled = CDefinedIntSymbolic('MALLOC_ZERO_FILLED', default=0) -running_on_llinterp = CDefinedIntSymbolic('RUNNING_ON_LLINTERP', default=1) -# running_on_llinterp is meant to have the value 0 in all backends # ____________________________________________________________ diff --git a/rpython/rlib/rvmprof/cintf.py b/rpython/rlib/rvmprof/cintf.py --- a/rpython/rlib/rvmprof/cintf.py +++ b/rpython/rlib/rvmprof/cintf.py @@ -7,8 +7,6 @@ from rpython.rtyper.tool import rffi_platform as platform from rpython.rlib import rthread -from rpython.jit.backend import detect_cpu - class VMProfPlatformUnsupported(Exception): pass diff --git a/rpython/rlib/rvmprof/test/test_rvmprof.py b/rpython/rlib/rvmprof/test/test_rvmprof.py --- a/rpython/rlib/rvmprof/test/test_rvmprof.py +++ b/rpython/rlib/rvmprof/test/test_rvmprof.py @@ -101,7 +101,7 @@ s = 0 for i in range(num): s += (i << 1) - if s % 32423423423 == 0: + if s % 2123423423 == 0: print s return s diff --git a/rpython/rtyper/extfunc.py b/rpython/rtyper/extfunc.py --- a/rpython/rtyper/extfunc.py +++ b/rpython/rtyper/extfunc.py @@ -1,99 +1,105 @@ 
-from rpython.rtyper.extregistry import ExtRegistryEntry -from rpython.rtyper.lltypesystem.lltype import typeOf, FuncType, functionptr -from rpython.annotator.model import unionof +from rpython.annotator.model import unionof, SomeObject from rpython.annotator.signature import annotation, SignatureError +from rpython.rtyper.extregistry import ExtRegistryEntry, lookup +from rpython.rtyper.lltypesystem.lltype import ( + typeOf, FuncType, functionptr, _ptr, Void) +from rpython.rtyper.error import TyperError +from rpython.rtyper.rmodel import Repr -import py +class SomeExternalFunction(SomeObject): + def __init__(self, name, args_s, s_result): + self.name = name + self.args_s = args_s + self.s_result = s_result + + def check_args(self, callspec): + params_s = self.args_s + args_s, kwargs = callspec.unpack() + if kwargs: + raise SignatureError( + "External functions cannot be called with keyword arguments") + if len(args_s) != len(params_s): + raise SignatureError("Argument number mismatch") + for i, s_param in enumerate(params_s): + arg = unionof(args_s[i], s_param) + if not s_param.contains(arg): + raise SignatureError( + "In call to external function %r:\n" + "arg %d must be %s,\n" + " got %s" % ( + self.name, i + 1, s_param, args_s[i])) + + def call(self, callspec): + self.check_args(callspec) + return self.s_result + + def rtyper_makerepr(self, rtyper): + if not self.is_constant(): + raise TyperError("Non-constant external function!") + entry = lookup(self.const) + impl = getattr(entry, 'lltypeimpl', None) + fakeimpl = getattr(entry, 'lltypefakeimpl', None) + return ExternalFunctionRepr(self, impl, fakeimpl) + + def rtyper_makekey(self): + return self.__class__, self + +class ExternalFunctionRepr(Repr): + lowleveltype = Void + + def __init__(self, s_func, impl, fakeimpl): + self.s_func = s_func + self.impl = impl + self.fakeimpl = fakeimpl + + def rtype_simple_call(self, hop): + rtyper = hop.rtyper + args_r = [rtyper.getrepr(s_arg) for s_arg in self.s_func.args_s] + 
r_result = rtyper.getrepr(self.s_func.s_result) + obj = self.get_funcptr(rtyper, args_r, r_result) + hop2 = hop.copy() + hop2.r_s_popfirstarg() + vlist = [hop2.inputconst(typeOf(obj), obj)] + hop2.inputargs(*args_r) + hop2.exception_is_here() + return hop2.genop('direct_call', vlist, r_result) + + def get_funcptr(self, rtyper, args_r, r_result): + from rpython.rtyper.rtyper import llinterp_backend + args_ll = [r_arg.lowleveltype for r_arg in args_r] + ll_result = r_result.lowleveltype + name = self.s_func.name + if self.fakeimpl and rtyper.backend is llinterp_backend: + FT = FuncType(args_ll, ll_result) + return functionptr( + FT, name, _external_name=name, _callable=self.fakeimpl) + elif self.impl: + if isinstance(self.impl, _ptr): + return self.impl + else: + # store some attributes to the 'impl' function, where + # the eventual call to rtyper.getcallable() will find them + # and transfer them to the final lltype.functionptr(). + self.impl._llfnobjattrs_ = {'_name': name} + return rtyper.getannmixlevel().delayedfunction( + self.impl, self.s_func.args_s, self.s_func.s_result) + else: + fakeimpl = self.fakeimpl or self.s_func.const + FT = FuncType(args_ll, ll_result) + return functionptr( + FT, name, _external_name=name, _callable=fakeimpl) + class ExtFuncEntry(ExtRegistryEntry): safe_not_sandboxed = False - # common case: args is a list of annotation or types - def normalize_args(self, *args_s): - args = self.signature_args - signature_args = [annotation(arg, None) for arg in args] - assert len(args_s) == len(signature_args),\ - "Argument number mismatch" + def compute_annotation(self): + s_result = SomeExternalFunction( + self.name, self.signature_args, self.signature_result) + if (self.bookkeeper.annotator.translator.config.translation.sandbox + and not self.safe_not_sandboxed): + s_result.needs_sandboxing = True + return s_result - for i, expected in enumerate(signature_args): - arg = unionof(args_s[i], expected) - if not expected.contains(arg): - name = 
getattr(self, 'name', None) - if not name: - try: - name = self.instance.__name__ - except AttributeError: - name = '?' - raise SignatureError("In call to external function %r:\n" - "arg %d must be %s,\n" - " got %s" % ( - name, i+1, expected, args_s[i])) - return signature_args - - def compute_result_annotation(self, *args_s): - self.normalize_args(*args_s) # check arguments - return self.signature_result - - def specialize_call(self, hop): - rtyper = hop.rtyper - signature_args = self.normalize_args(*hop.args_s) - args_r = [rtyper.getrepr(s_arg) for s_arg in signature_args] - args_ll = [r_arg.lowleveltype for r_arg in args_r] - s_result = hop.s_result - r_result = rtyper.getrepr(s_result) - ll_result = r_result.lowleveltype - name = getattr(self, 'name', None) or self.instance.__name__ - impl = getattr(self, 'lltypeimpl', None) - fakeimpl = getattr(self, 'lltypefakeimpl', self.instance) - if impl: - if (rtyper.annotator.translator.config.translation.sandbox - and not self.safe_not_sandboxed): - from rpython.translator.sandbox.rsandbox import ( - make_sandbox_trampoline) - impl = make_sandbox_trampoline( - self.name, signature_args, s_result) - if hasattr(self, 'lltypefakeimpl'): - # If we have both an llimpl and an llfakeimpl, - # we need a wrapper that selects the proper one and calls it - from rpython.tool.sourcetools import func_with_new_name - # Using '*args' is delicate because this wrapper is also - # created for init-time functions like llarena.arena_malloc - # which are called before the GC is fully initialized - args = ', '.join(['arg%d' % i for i in range(len(args_ll))]) - d = {'original_impl': impl, - 's_result': s_result, - 'fakeimpl': fakeimpl, - '__name__': __name__, - } - exec py.code.compile(""" - from rpython.rlib.objectmodel import running_on_llinterp - from rpython.rlib.debug import llinterpcall - from rpython.rlib.jit import dont_look_inside - # note: we say 'dont_look_inside' mostly because the - # JIT does not support 'running_on_llinterp', 
but in - # theory it is probably right to stop jitting anyway. - @dont_look_inside - def ll_wrapper(%s): - if running_on_llinterp: - return llinterpcall(s_result, fakeimpl, %s) - else: - return original_impl(%s) - """ % (args, args, args)) in d - impl = func_with_new_name(d['ll_wrapper'], name + '_wrapper') - # store some attributes to the 'impl' function, where - # the eventual call to rtyper.getcallable() will find them - # and transfer them to the final lltype.functionptr(). - impl._llfnobjattrs_ = {'_name': self.name} - obj = rtyper.getannmixlevel().delayedfunction( - impl, signature_args, hop.s_result) - else: - FT = FuncType(args_ll, ll_result) - obj = functionptr(FT, name, _external_name=self.name, - _callable=fakeimpl, - _safe_not_sandboxed=self.safe_not_sandboxed) - vlist = [hop.inputconst(typeOf(obj), obj)] + hop.inputargs(*args_r) - hop.exception_is_here() - return hop.genop('direct_call', vlist, r_result) def register_external(function, args, result=None, export_name=None, llimpl=None, llfakeimpl=None, sandboxsafe=False): @@ -109,32 +115,20 @@ if export_name is None: export_name = function.__name__ + params_s = [annotation(arg) for arg in args] + s_result = annotation(result) class FunEntry(ExtFuncEntry): _about_ = function safe_not_sandboxed = sandboxsafe - - if args is None: - def normalize_args(self, *args_s): - return args_s # accept any argument unmodified - elif callable(args): - # custom annotation normalizer (see e.g. 
os.utime()) - normalize_args = staticmethod(args) - else: # use common case behavior - signature_args = args - - signature_result = annotation(result, None) + signature_args = params_s + signature_result = s_result name = export_name if llimpl: lltypeimpl = staticmethod(llimpl) if llfakeimpl: lltypefakeimpl = staticmethod(llfakeimpl) - if export_name: - FunEntry.__name__ = export_name - else: - FunEntry.__name__ = function.func_name - def is_external(func): if hasattr(func, 'value'): func = func.value diff --git a/rpython/rtyper/rtyper.py b/rpython/rtyper/rtyper.py --- a/rpython/rtyper/rtyper.py +++ b/rpython/rtyper/rtyper.py @@ -32,11 +32,24 @@ from rpython.translator.sandbox.rsandbox import make_sandbox_trampoline +class RTyperBackend(object): + pass + +class GenCBackend(RTyperBackend): + pass +genc_backend = GenCBackend() + +class LLInterpBackend(RTyperBackend): + pass +llinterp_backend = LLInterpBackend() + + class RPythonTyper(object): from rpython.rtyper.rmodel import log - def __init__(self, annotator): + def __init__(self, annotator, backend=genc_backend): self.annotator = annotator + self.backend = backend self.lowlevel_ann_policy = LowLevelAnnotatorPolicy(self) self.reprs = {} self._reprs_must_call_setup = [] diff --git a/rpython/rtyper/test/test_extfunc.py b/rpython/rtyper/test/test_extfunc.py --- a/rpython/rtyper/test/test_extfunc.py +++ b/rpython/rtyper/test/test_extfunc.py @@ -1,7 +1,6 @@ import py -from rpython.rtyper.extfunc import ExtFuncEntry, register_external,\ - is_external +from rpython.rtyper.extfunc import register_external from rpython.annotator.model import SomeInteger, SomeString, AnnotatorError from rpython.annotator.annrpython import RPythonAnnotator from rpython.annotator.policy import AnnotatorPolicy @@ -19,11 +18,7 @@ "NOT_RPYTHON" return eval("x+40") - class BTestFuncEntry(ExtFuncEntry): - _about_ = b - name = 'b' - signature_args = [SomeInteger()] - signature_result = SomeInteger() + register_external(b, [int], result=int) def f(): 
return b(2) @@ -43,15 +38,11 @@ def c(y, x): yyy - class CTestFuncEntry(ExtFuncEntry): - _about_ = c - name = 'ccc' - signature_args = [SomeInteger()] * 2 - signature_result = SomeInteger() + def llimpl(y, x): + return y + x - def lltypeimpl(y, x): - return y + x - lltypeimpl = staticmethod(lltypeimpl) + register_external(c, [int, int], result=int, llimpl=llimpl, + export_name='ccc') def f(): return c(3, 4) @@ -59,22 +50,6 @@ res = interpret(f, []) assert res == 7 - def test_register_external_signature(self): - """ - Test the standard interface for external functions. - """ - def dd(): - pass - register_external(dd, [int], int) - - def f(): - return dd(3) - - policy = AnnotatorPolicy() - a = RPythonAnnotator(policy=policy) - s = a.build_types(f, []) - assert isinstance(s, SomeInteger) - def test_register_external_tuple_args(self): """ Verify the annotation of a registered external function which takes a @@ -121,23 +96,6 @@ s = a.build_types(f, []) assert isinstance(s, SomeInteger) - def test_register_external_specialcase(self): - """ - When args=None, the external function accepts any arguments unmodified. 
- """ - def function_withspecialcase(arg): - return repr(arg) - register_external(function_withspecialcase, args=None, result=str) - - def f(): - x = function_withspecialcase - return x(33) + x("aaa") + x([]) + "\n" - - policy = AnnotatorPolicy() - a = RPythonAnnotator(policy=policy) - s = a.build_types(f, []) - assert isinstance(s, SomeString) - def test_str0(self): str0 = SomeString(no_nul=True) def os_open(s): @@ -182,3 +140,22 @@ # fails with TooLateForChange a.build_types(g, [[str]]) a.build_types(g, [[str0]]) # Does not raise + + def test_register_external_llfakeimpl(self): + def a(i): + return i + def a_llimpl(i): + return i * 2 + def a_llfakeimpl(i): + return i * 3 + register_external(a, [int], int, llimpl=a_llimpl, + llfakeimpl=a_llfakeimpl) + def f(i): + return a(i) + + res = interpret(f, [7]) + assert res == 21 + + from rpython.translator.c.test.test_genc import compile + fc = compile(f, [int]) + assert fc(7) == 14 diff --git a/rpython/rtyper/test/test_llinterp.py b/rpython/rtyper/test/test_llinterp.py --- a/rpython/rtyper/test/test_llinterp.py +++ b/rpython/rtyper/test/test_llinterp.py @@ -13,7 +13,7 @@ from rpython.rlib.rarithmetic import r_uint, ovfcheck from rpython.tool import leakfinder from rpython.conftest import option - +from rpython.rtyper.rtyper import llinterp_backend # switch on logging of interp to show more info on failing tests @@ -39,6 +39,7 @@ t.view() global typer # we need it for find_exception typer = t.buildrtyper() + typer.backend = llinterp_backend typer.specialize() #t.view() t.checkgraphs() diff --git a/rpython/rtyper/test/test_rbuiltin.py b/rpython/rtyper/test/test_rbuiltin.py --- a/rpython/rtyper/test/test_rbuiltin.py +++ b/rpython/rtyper/test/test_rbuiltin.py @@ -3,8 +3,7 @@ import py -from rpython.rlib.debug import llinterpcall -from rpython.rlib.objectmodel import instantiate, running_on_llinterp, compute_unique_id, current_object_addr_as_int +from rpython.rlib.objectmodel import instantiate, compute_unique_id, 
current_object_addr_as_int from rpython.rlib.rarithmetic import (intmask, longlongmask, r_int64, is_valid_int, r_int, r_uint, r_longlong, r_ulonglong) from rpython.rlib.rstring import StringBuilder, UnicodeBuilder @@ -456,26 +455,6 @@ res = self.interpret(fn, [3.25]) assert res == 7.25 - def test_debug_llinterpcall(self): - S = lltype.Struct('S', ('m', lltype.Signed)) - SPTR = lltype.Ptr(S) - def foo(n): - "NOT_RPYTHON" - s = lltype.malloc(S, immortal=True) - s.m = eval("n*6", locals()) - return s - def fn(n): - if running_on_llinterp: - return llinterpcall(SPTR, foo, n).m - else: - return 321 - res = self.interpret(fn, [7]) - assert res == 42 - from rpython.translator.c.test.test_genc import compile - f = compile(fn, [int]) - res = f(7) - assert res == 321 - def test_id(self): class A: pass diff --git a/rpython/rtyper/test/test_rpbc.py b/rpython/rtyper/test/test_rpbc.py --- a/rpython/rtyper/test/test_rpbc.py +++ b/rpython/rtyper/test/test_rpbc.py @@ -1,7 +1,7 @@ import py from rpython.annotator import model as annmodel -from rpython.annotator import policy, specialize +from rpython.annotator import specialize from rpython.rtyper.lltypesystem.lltype import typeOf from rpython.rtyper.test.tool import BaseRtypingTest from rpython.rtyper.llannotation import SomePtr, lltype_to_annotation @@ -1690,59 +1690,6 @@ # ____________________________________________________________ -class TestRPBCExtra(BaseRtypingTest): - - def test_folding_specialize_support(self): - - class S(object): - - def w(s, x): - if isinstance(x, int): - return x - if isinstance(x, str): - return len(x) - return -1 - w._annspecialcase_ = "specialize:w" - - def _freeze_(self): - return True - - s = S() - - def f(i, n): - w = s.w - if i == 0: - return w(0) - elif i == 1: - return w("abc") - elif i == 2: - return w(3*n) - elif i == 3: - return w(str(n)) - return -1 - - class P(policy.AnnotatorPolicy): - def specialize__w(pol, funcdesc, args_s): - typ = args_s[1].knowntype - if args_s[0].is_constant() and 
args_s[1].is_constant(): - x = args_s[1].const - v = s.w(x) - builder = specialize.make_constgraphbuilder(2, v) - return funcdesc.cachedgraph(x, builder=builder) - return funcdesc.cachedgraph(typ) - - p = P() - - res = self.interpret(f, [0, 66], policy=p) - assert res == 0 - res = self.interpret(f, [1, 66], policy=p) - assert res == 3 - res = self.interpret(f, [2, 4], policy=p) - assert res == 12 - res = self.interpret(f, [3, 5555], policy=p) - assert res == 4 - - def test_hlinvoke_simple(): def f(a,b): return a + b diff --git a/rpython/translator/c/node.py b/rpython/translator/c/node.py --- a/rpython/translator/c/node.py +++ b/rpython/translator/c/node.py @@ -913,6 +913,7 @@ return [] def new_funcnode(db, T, obj, forcename=None): + from rpython.rtyper.rtyper import llinterp_backend if db.sandbox: if (getattr(obj, 'external', None) is not None and not obj._safe_not_sandboxed): @@ -934,6 +935,9 @@ return ExternalFuncNode(db, T, obj, name) elif hasattr(obj._callable, "c_name"): return ExternalFuncNode(db, T, obj, name) # this case should only be used for entrypoints + elif db.translator.rtyper.backend is llinterp_backend: + # on llinterp, anything goes + return ExternalFuncNode(db, T, obj, name) else: raise ValueError("don't know how to generate code for %r" % (obj,)) diff --git a/rpython/translator/driver.py b/rpython/translator/driver.py --- a/rpython/translator/driver.py +++ b/rpython/translator/driver.py @@ -487,7 +487,15 @@ exe = py.path.local(exename) exename = exe.new(purebasename=exe.purebasename + 'w') shutil_copy(str(exename), str(newexename)) - ext_to_copy = ['lib', 'pdb'] + # for pypy, the import library is renamed and moved to + # libs/python32.lib, according to the pragma in pyconfig.h + libname = self.config.translation.libname + libname = libname or soname.new(ext='lib').basename + libname = str(newsoname.dirpath().join(libname)) + shutil.copyfile(str(soname.new(ext='lib')), libname) + self.log.info("copied: %s" % (libname,)) + # the pdb file goes in 
the same place as pypy(w).exe + ext_to_copy = ['pdb',] for ext in ext_to_copy: name = soname.new(ext=ext) newname = newexename.new(basename=soname.basename) diff --git a/rpython/translator/sandbox/test/test_sandbox.py b/rpython/translator/sandbox/test/test_sandbox.py --- a/rpython/translator/sandbox/test/test_sandbox.py +++ b/rpython/translator/sandbox/test/test_sandbox.py @@ -292,6 +292,21 @@ rescode = pipe.wait() assert rescode == 0 +def test_environ_items(): + def entry_point(argv): + print os.environ.items() + return 0 + + exe = compile(entry_point) + g, f = run_in_subprocess(exe) + expect(f, g, "ll_os.ll_os_envitems", (), []) + expect(f, g, "ll_os.ll_os_write", (1, "[]\n"), 3) + g.close() + tail = f.read() + f.close() + assert tail == "" + + class TestPrintedResults: def run(self, entry_point, args, expected): From pypy.commits at gmail.com Fri Feb 19 17:29:13 2016 From: pypy.commits at gmail.com (mjacob) Date: Fri, 19 Feb 2016 14:29:13 -0800 (PST) Subject: [pypy-commit] pypy py3.3: Check for SystemError instead of ValueError. Message-ID: <56c79739.034cc20a.d605e.ffff95f8@mx.google.com> Author: Manuel Jacob Branch: py3.3 Changeset: r82334:8ed10a343cd5 Date: 2016-02-19 23:11 +0100 http://bitbucket.org/pypy/pypy/changeset/8ed10a343cd5/ Log: Check for SystemError instead of ValueError. SystemError is raised here on CPython as well, although this is "wrong" and was fixed to be ImportError in CPython's 3.6 branch. diff --git a/pypy/module/imp/test/test_import.py b/pypy/module/imp/test/test_import.py --- a/pypy/module/imp/test/test_import.py +++ b/pypy/module/imp/test/test_import.py @@ -458,14 +458,14 @@ print('__name__ =', __name__) from .struct import inpackage """, ns) - raises(ValueError, ns['imp']) + raises(SystemError, ns['imp']) def test_future_relative_import_error_when_in_non_package2(self): ns = {'__name__': __name__} exec("""def imp(): from .. 
import inpackage """, ns) - raises(ValueError, ns['imp']) + raises(SystemError, ns['imp']) def test_relative_import_with___name__(self): import sys From pypy.commits at gmail.com Fri Feb 19 17:29:11 2016 From: pypy.commits at gmail.com (mjacob) Date: Fri, 19 Feb 2016 14:29:11 -0800 (PST) Subject: [pypy-commit] pypy py3.3: Backport patch from CPython Issue 26367. Message-ID: <56c79737.02931c0a.1a676.4631@mx.google.com> Author: Manuel Jacob Branch: py3.3 Changeset: r82333:01b8a2d215a6 Date: 2016-02-19 22:40 +0100 http://bitbucket.org/pypy/pypy/changeset/01b8a2d215a6/ Log: Backport patch from CPython Issue 26367. diff --git a/lib-python/3/importlib/_bootstrap.py b/lib-python/3/importlib/_bootstrap.py --- a/lib-python/3/importlib/_bootstrap.py +++ b/lib-python/3/importlib/_bootstrap.py @@ -1496,7 +1496,7 @@ raise TypeError("module name must be str, not {}".format(type(name))) if level < 0: raise ValueError('level must be >= 0') - if package: + if level > 0: if not isinstance(package, str): raise TypeError("__package__ not set to a string") elif package not in sys.modules: diff --git a/lib-python/3/test/test_importlib/import_/test_relative_imports.py b/lib-python/3/test/test_importlib/import_/test_relative_imports.py --- a/lib-python/3/test/test_importlib/import_/test_relative_imports.py +++ b/lib-python/3/test/test_importlib/import_/test_relative_imports.py @@ -208,6 +208,11 @@ with self.assertRaises(KeyError): import_util.import_('sys', level=1) + def test_relative_import_no_package_exists_absolute(self): + with self.assertRaises(SystemError): + self.__import__('sys', {'__package__': '', '__spec__': None}, + level=1) + def test_main(): from test.support import run_unittest From pypy.commits at gmail.com Fri Feb 19 17:38:50 2016 From: pypy.commits at gmail.com (mjacob) Date: Fri, 19 Feb 2016 14:38:50 -0800 (PST) Subject: [pypy-commit] pypy py3.3: Fix test by pleasing sanity check. 
Message-ID: <56c7997a.d22e1c0a.640b7.49cf@mx.google.com> Author: Manuel Jacob Branch: py3.3 Changeset: r82335:99af7e9c0c5f Date: 2016-02-19 23:36 +0100 http://bitbucket.org/pypy/pypy/changeset/99af7e9c0c5f/ Log: Fix test by pleasing sanity check. diff --git a/pypy/module/imp/test/test_import.py b/pypy/module/imp/test/test_import.py --- a/pypy/module/imp/test/test_import.py +++ b/pypy/module/imp/test/test_import.py @@ -487,6 +487,7 @@ import imp pkg = imp.new_module('newpkg') sys.modules['newpkg'] = pkg + sys.modules['newpkg.foo'] = imp.new_module('newpkg.foo') mydict = {'__name__': 'newpkg.foo', '__path__': '/some/path'} res = __import__('', mydict, None, ['bar'], 2) assert res is pkg From pypy.commits at gmail.com Sat Feb 20 02:17:08 2016 From: pypy.commits at gmail.com (mattip) Date: Fri, 19 Feb 2016 23:17:08 -0800 (PST) Subject: [pypy-commit] pypy default: test, fix indexing ndarray with scalar, single boolean array Message-ID: <56c812f4.29cec20a.603b7.fffffa98@mx.google.com> Author: mattip Branch: Changeset: r82336:d77888929462 Date: 2016-02-19 16:29 +0200 http://bitbucket.org/pypy/pypy/changeset/d77888929462/ Log: test, fix indexing ndarray with scalar, single boolean array diff --git a/pypy/module/micronumpy/concrete.py b/pypy/module/micronumpy/concrete.py --- a/pypy/module/micronumpy/concrete.py +++ b/pypy/module/micronumpy/concrete.py @@ -12,8 +12,8 @@ ArrayArgumentException, W_NumpyObject from pypy.module.micronumpy.iterators import ArrayIter from pypy.module.micronumpy.strides import ( - IntegerChunk, SliceChunk, NewAxisChunk, EllipsisChunk, new_view, - calc_strides, calc_new_strides, shape_agreement, + IntegerChunk, SliceChunk, NewAxisChunk, EllipsisChunk, BooleanChunk, + new_view, calc_strides, calc_new_strides, shape_agreement, calculate_broadcast_strides, calc_backstrides, calc_start, is_c_contiguous, is_f_contiguous) from rpython.rlib.objectmodel import keepalive_until_here @@ -236,6 +236,8 @@ @jit.unroll_safe def _prepare_slice_args(self, space, 
w_idx): + print '_prepare_slice_args', w_idx + from pypy.module.micronumpy import boxes if space.isinstance_w(w_idx, space.w_str): raise oefmt(space.w_IndexError, "only integers, slices (`:`), " "ellipsis (`...`), numpy.newaxis (`None`) and integer or " @@ -258,6 +260,7 @@ result = [] i = 0 has_ellipsis = False + has_filter = False for w_item in space.fixedview(w_idx): if space.is_w(w_item, space.w_Ellipsis): if has_ellipsis: @@ -272,6 +275,16 @@ elif space.isinstance_w(w_item, space.w_slice): result.append(SliceChunk(w_item)) i += 1 + elif isinstance(w_item, W_NDimArray) and w_item.get_dtype().is_bool(): + if has_filter: + # in CNumPy, the support for this is incomplete + raise oefmt(space.w_ValueError, + "an index can only have a single boolean mask; " + "use np.take or create a sinlge mask array") + has_filter = True + result.append(BooleanChunk(w_item)) + elif isinstance(w_item, boxes.W_GenericBox): + result.append(IntegerChunk(w_item.descr_int(space))) else: result.append(IntegerChunk(w_item)) i += 1 @@ -280,11 +293,14 @@ return result def descr_getitem(self, space, orig_arr, w_index): + print 'concrete descr_gettiem %s' % str(w_index)[:35] try: item = self._single_item_index(space, w_index) + print 'concrete descr_gettiem _single_item_index succeeded' return self.getitem(item) except IndexError: # not a single result + print 'concrete descr_gettiem _single_item_index failed' chunks = self._prepare_slice_args(space, w_index) return new_view(space, orig_arr, chunks) diff --git a/pypy/module/micronumpy/ndarray.py b/pypy/module/micronumpy/ndarray.py --- a/pypy/module/micronumpy/ndarray.py +++ b/pypy/module/micronumpy/ndarray.py @@ -107,8 +107,9 @@ arr = W_NDimArray(self.implementation.transpose(self, None)) return space.wrap(loop.tostring(space, arr)) - def getitem_filter(self, space, arr): - if arr.ndims() > 1 and arr.get_shape() != self.get_shape(): + def getitem_filter(self, space, arr, axis=0): + shape = self.get_shape() + if arr.ndims() > 1 and 
arr.get_shape() != shape: raise OperationError(space.w_IndexError, space.wrap( "boolean index array should have 1 dimension")) if arr.get_size() > self.get_size(): @@ -116,14 +117,14 @@ "index out of range for array")) size = loop.count_all_true(arr) if arr.ndims() == 1: - if self.ndims() > 1 and arr.get_shape()[0] != self.get_shape()[0]: + if self.ndims() > 1 and arr.get_shape()[0] != shape[axis]: msg = ("boolean index did not match indexed array along" - " dimension 0; dimension is %d but corresponding" - " boolean dimension is %d" % (self.get_shape()[0], + " dimension %d; dimension is %d but corresponding" + " boolean dimension is %d" % (axis, shape[axis], arr.get_shape()[0])) #warning = space.gettypefor(support.W_VisibleDeprecationWarning) space.warn(space.wrap(msg), space.w_VisibleDeprecationWarning) - res_shape = [size] + self.get_shape()[1:] + res_shape = shape[:axis] + [size] + shape[axis+1:] else: res_shape = [size] w_res = W_NDimArray.from_shape(space, res_shape, self.get_dtype(), @@ -149,6 +150,8 @@ def _prepare_array_index(self, space, w_index): if isinstance(w_index, W_NDimArray): return [], w_index.get_shape(), w_index.get_shape(), [w_index] + if isinstance(w_index, boxes.W_GenericBox): + return [], [1], [1], [w_index] w_lst = space.listview(w_index) for w_item in w_lst: if not (space.isinstance_w(w_item, space.w_int) or space.isinstance_w(w_item, space.w_float)): @@ -162,7 +165,14 @@ arr_index_in_shape = False prefix = [] for i, w_item in enumerate(w_lst): - if (isinstance(w_item, W_NDimArray) or + if isinstance(w_item, W_NDimArray) and w_item.get_dtype().is_bool(): + if w_item.ndims() > 0: + indexes_w[i] = w_item + else: + raise oefmt(space.w_IndexError, + "in the future, 0-d boolean arrays will be " + "interpreted as a valid boolean index") + elif (isinstance(w_item, W_NDimArray) or space.isinstance_w(w_item, space.w_list)): w_item = convert_to_array(space, w_item) if shape is None: @@ -232,6 +242,8 @@ raise oefmt(space.w_IndexError, "in the 
future, 0-d boolean arrays will be " "interpreted as a valid boolean index") + elif isinstance(w_idx, boxes.W_GenericBox): + w_ret = self.getitem_array_int(space, w_idx) else: try: w_ret = self.implementation.descr_getitem(space, self, w_idx) diff --git a/pypy/module/micronumpy/strides.py b/pypy/module/micronumpy/strides.py --- a/pypy/module/micronumpy/strides.py +++ b/pypy/module/micronumpy/strides.py @@ -77,14 +77,40 @@ backstride = base_stride * max(0, base_length - 1) return 0, base_length, base_stride, backstride +class BooleanChunk(BaseChunk): + input_dim = 1 + out_dim = 1 + def __init__(self, w_idx): + self.w_idx = w_idx + + def compute(self, space, base_length, base_stride): + raise oefmt(space.w_NotImplementedError, 'cannot reach') def new_view(space, w_arr, chunks): arr = w_arr.implementation - r = calculate_slice_strides(space, arr.shape, arr.start, arr.get_strides(), - arr.get_backstrides(), chunks) + dim = -1 + for i, c in enumerate(chunks): + if isinstance(c, BooleanChunk): + dim = i + break + if dim >= 0: + # filter by axis r + filtr = chunks.pop(dim) + assert isinstance(filtr, BooleanChunk) + w_arr = w_arr.getitem_filter(space, filtr.w_idx, axis=dim) + arr = w_arr.implementation + r = calculate_slice_strides(space, arr.shape, arr.start, + arr.get_strides(), arr.get_backstrides(), chunks) + else: + r = calculate_slice_strides(space, arr.shape, arr.start, + arr.get_strides(), arr.get_backstrides(), chunks) shape, start, strides, backstrides = r - return W_NDimArray.new_slice(space, start, strides[:], backstrides[:], + w_ret = W_NDimArray.new_slice(space, start, strides[:], backstrides[:], shape[:], arr, w_arr) + if dim == 0: + # Do not return a view + return w_ret.descr_copy(space, space.wrap(w_ret.get_order())) + return w_ret @jit.unroll_safe def _extend_shape(old_shape, chunks): @@ -127,7 +153,7 @@ jit.isconstant(len(chunks))) def calculate_slice_strides(space, shape, start, strides, backstrides, chunks): """ - Note: `chunks` must contain exactly 
one EllipsisChunk object. + Note: `chunks` can contain at most one EllipsisChunk object. """ size = 0 used_dims = 0 diff --git a/pypy/module/micronumpy/test/test_deprecations.py b/pypy/module/micronumpy/test/test_deprecations.py --- a/pypy/module/micronumpy/test/test_deprecations.py +++ b/pypy/module/micronumpy/test/test_deprecations.py @@ -24,7 +24,7 @@ # boolean indexing matches the dims in index # to the first index.ndims in arr, not implemented in pypy yet raises(IndexError, arr.__getitem__, index) - raises(TypeError, arr.__getitem__, (slice(None), index)) + raises(IndexError, arr.__getitem__, (slice(None), index)) else: raises(np.VisibleDeprecationWarning, arr.__getitem__, index) raises(np.VisibleDeprecationWarning, arr.__getitem__, (slice(None), index)) diff --git a/pypy/module/micronumpy/test/test_ndarray.py b/pypy/module/micronumpy/test/test_ndarray.py --- a/pypy/module/micronumpy/test/test_ndarray.py +++ b/pypy/module/micronumpy/test/test_ndarray.py @@ -2541,6 +2541,23 @@ a[b] = np.array([[4.]]) assert (a == [[4., 4., 4.]]).all() + def test_indexing_by_boolean(self): + import numpy as np + a = np.arange(6).reshape(2,3) + assert (a[[True, False], :] == [[3, 4, 5], [0, 1, 2]]).all() + b = a[np.array([True, False]), :] + assert (b == [[0, 1, 2]]).all() + assert b.base is None + b = a[:, np.array([True, False, True])] + assert b.base is not None + + def test_scalar_indexing(self): + import numpy as np + a = np.arange(6).reshape(2,3) + i = np.dtype('int32').type(0) + assert (a[0] == a[i]).all() + + def test_ellipsis_indexing(self): import numpy as np import sys From pypy.commits at gmail.com Sat Feb 20 02:17:10 2016 From: pypy.commits at gmail.com (mattip) Date: Fri, 19 Feb 2016 23:17:10 -0800 (PST) Subject: [pypy-commit] pypy default: remove debug cruft Message-ID: <56c812f6.45631c0a.26560.ffffa886@mx.google.com> Author: mattip Branch: Changeset: r82337:39fb9fc4c967 Date: 2016-02-19 16:31 +0200 http://bitbucket.org/pypy/pypy/changeset/39fb9fc4c967/ Log: remove 
debug cruft diff --git a/pypy/module/micronumpy/concrete.py b/pypy/module/micronumpy/concrete.py --- a/pypy/module/micronumpy/concrete.py +++ b/pypy/module/micronumpy/concrete.py @@ -236,7 +236,6 @@ @jit.unroll_safe def _prepare_slice_args(self, space, w_idx): - print '_prepare_slice_args', w_idx from pypy.module.micronumpy import boxes if space.isinstance_w(w_idx, space.w_str): raise oefmt(space.w_IndexError, "only integers, slices (`:`), " @@ -293,14 +292,11 @@ return result def descr_getitem(self, space, orig_arr, w_index): - print 'concrete descr_gettiem %s' % str(w_index)[:35] try: item = self._single_item_index(space, w_index) - print 'concrete descr_gettiem _single_item_index succeeded' return self.getitem(item) except IndexError: # not a single result - print 'concrete descr_gettiem _single_item_index failed' chunks = self._prepare_slice_args(space, w_index) return new_view(space, orig_arr, chunks) From pypy.commits at gmail.com Sat Feb 20 02:59:15 2016 From: pypy.commits at gmail.com (arigo) Date: Fri, 19 Feb 2016 23:59:15 -0800 (PST) Subject: [pypy-commit] pypy cpyext-gc-support-2: bah, forgot this debugging value Message-ID: <56c81cd3.0c2d1c0a.957fe.ffffb22b@mx.google.com> Author: Armin Rigo Branch: cpyext-gc-support-2 Changeset: r82338:8c6a66dcb994 Date: 2016-02-20 08:58 +0100 http://bitbucket.org/pypy/pypy/changeset/8c6a66dcb994/ Log: bah, forgot this debugging value diff --git a/rpython/rlib/rawrefcount.py b/rpython/rlib/rawrefcount.py --- a/rpython/rlib/rawrefcount.py +++ b/rpython/rlib/rawrefcount.py @@ -11,8 +11,8 @@ from rpython.rlib import rgc -REFCNT_FROM_PYPY = 80 -REFCNT_FROM_PYPY_LIGHT = REFCNT_FROM_PYPY + (sys.maxint//2+1) +REFCNT_FROM_PYPY = sys.maxint // 4 + 1 +REFCNT_FROM_PYPY_LIGHT = REFCNT_FROM_PYPY + (sys.maxint // 2 + 1) RAWREFCOUNT_DEALLOC_TRIGGER = lltype.Ptr(lltype.FuncType([], lltype.Void)) From pypy.commits at gmail.com Sat Feb 20 05:20:05 2016 From: pypy.commits at gmail.com (rlamy) Date: Sat, 20 Feb 2016 02:20:05 -0800 (PST) 
Subject: [pypy-commit] pypy desc-specialize: Initialize funcdesc.specializer in FunctionDesc.__init__ and ensure the right annotator policy is active at the time Message-ID: <56c83dd5.4c181c0a.d330b.ffffe841@mx.google.com> Author: Ronan Lamy Branch: desc-specialize Changeset: r82339:cf1b6e045f41 Date: 2016-02-20 10:19 +0000 http://bitbucket.org/pypy/pypy/changeset/cf1b6e045f41/ Log: Initialize funcdesc.specializer in FunctionDesc.__init__ and ensure the right annotator policy is active at the time diff --git a/rpython/annotator/annrpython.py b/rpython/annotator/annrpython.py --- a/rpython/annotator/annrpython.py +++ b/rpython/annotator/annrpython.py @@ -82,17 +82,17 @@ annmodel.TLS.check_str_without_nul = ( self.translator.config.translation.check_str_without_nul) - flowgraph, inputs_s = self.get_call_parameters(function, args_s, policy) + with self.using_policy(policy): + flowgraph, inputs_s = self.get_call_parameters(function, args_s) if main_entry_point: self.translator.entry_point_graph = flowgraph return self.build_graph_types(flowgraph, inputs_s, complete_now=complete_now) - def get_call_parameters(self, function, args_s, policy): - desc = self.bookkeeper.getdesc(function) - with self.using_policy(policy): - with self.bookkeeper.at_position(None): - return desc.get_call_parameters(args_s) + def get_call_parameters(self, function, args_s): + with self.bookkeeper.at_position(None): + desc = self.bookkeeper.getdesc(function) + return desc.get_call_parameters(args_s) def annotate_helper(self, function, args_s, policy=None): if policy is None: @@ -101,21 +101,21 @@ # XXX hack annmodel.TLS.check_str_without_nul = ( self.translator.config.translation.check_str_without_nul) - graph, inputcells = self.get_call_parameters(function, args_s, policy) - self.build_graph_types(graph, inputcells, complete_now=False) - self.complete_helpers(policy) + with self.using_policy(policy): + graph, inputcells = self.get_call_parameters(function, args_s) + 
self.build_graph_types(graph, inputcells, complete_now=False) + self.complete_helpers() return graph - def complete_helpers(self, policy): + def complete_helpers(self): saved = self.added_blocks self.added_blocks = {} - with self.using_policy(policy): - try: - self.complete() - # invoke annotation simplifications for the new blocks - self.simplify(block_subset=self.added_blocks) - finally: - self.added_blocks = saved + try: + self.complete() + # invoke annotation simplifications for the new blocks + self.simplify(block_subset=self.added_blocks) + finally: + self.added_blocks = saved @contextmanager def using_policy(self, policy): diff --git a/rpython/annotator/description.py b/rpython/annotator/description.py --- a/rpython/annotator/description.py +++ b/rpython/annotator/description.py @@ -214,6 +214,7 @@ # specializer(funcdesc, args_s) => graph # or => s_result (overridden/memo cases) self.specializer = specializer + self.init_specializer() self._cache = {} # convenience for the specializer def buildgraph(self, alt_name=None, builder=None): @@ -296,7 +297,6 @@ getattr(self.bookkeeper, "position_key", None) is not None): _, block, i = self.bookkeeper.position_key op = block.operations[i] - self.init_specializer() enforceargs = getattr(self.pyobj, '_annenforceargs_', None) signature = getattr(self.pyobj, '_signature_', None) if enforceargs and signature: diff --git a/rpython/rtyper/annlowlevel.py b/rpython/rtyper/annlowlevel.py --- a/rpython/rtyper/annlowlevel.py +++ b/rpython/rtyper/annlowlevel.py @@ -138,11 +138,12 @@ # get the graph of the mix-level helper ll_function and prepare it for # being annotated. Annotation and RTyping should be done in a single shot # at the end with finish(). 
- graph, args_s = self.rtyper.annotator.get_call_parameters( - ll_function, args_s, policy = self.policy) + ann = self.rtyper.annotator + with ann.using_policy(self.policy): + graph, args_s = ann.get_call_parameters(ll_function, args_s) for v_arg, s_arg in zip(graph.getargs(), args_s): - self.rtyper.annotator.setbinding(v_arg, s_arg) - self.rtyper.annotator.setbinding(graph.getreturnvar(), s_result) + ann.setbinding(v_arg, s_arg) + ann.setbinding(graph.getreturnvar(), s_result) #self.rtyper.annotator.annotated[graph.returnblock] = graph self.pending.append((ll_function, graph, args_s, s_result)) return graph @@ -224,16 +225,17 @@ bk = ann.bookkeeper translator = ann.translator original_graph_count = len(translator.graphs) - for ll_function, graph, args_s, s_result in self.pending: - # mark the return block as already annotated, because the return var - # annotation was forced in getgraph() above. This prevents temporary - # less general values reaching the return block from crashing the - # annotator (on the assert-that-new-binding-is-not-less-general). - ann.annotated[graph.returnblock] = graph - s_function = bk.immutablevalue(ll_function) - bk.emulate_pbc_call(graph, s_function, args_s) - self.newgraphs.add(graph) - ann.complete_helpers(self.policy) + with ann.using_policy(self.policy): + for ll_function, graph, args_s, s_result in self.pending: + # mark the return block as already annotated, because the return var + # annotation was forced in getgraph() above. This prevents temporary + # less general values reaching the return block from crashing the + # annotator (on the assert-that-new-binding-is-not-less-general). 
+ ann.annotated[graph.returnblock] = graph + s_function = bk.immutablevalue(ll_function) + bk.emulate_pbc_call(graph, s_function, args_s) + self.newgraphs.add(graph) + ann.complete_helpers() for ll_function, graph, args_s, s_result in self.pending: s_real_result = ann.binding(graph.getreturnvar()) if s_real_result != s_result: diff --git a/rpython/rtyper/rtyper.py b/rpython/rtyper/rtyper.py --- a/rpython/rtyper/rtyper.py +++ b/rpython/rtyper/rtyper.py @@ -852,28 +852,29 @@ rtyper = self.rtyper args_s = [] newargs_v = [] - for v in args_v: - if v.concretetype is Void: - s_value = rtyper.annotation(v) - if s_value is None: - s_value = annmodel.s_None - if not s_value.is_constant(): - raise TyperError("non-constant variable of type Void") - if not isinstance(s_value, (annmodel.SomePBC, annmodel.SomeNone)): - raise TyperError("non-PBC Void argument: %r", (s_value,)) - args_s.append(s_value) - else: - args_s.append(lltype_to_annotation(v.concretetype)) - newargs_v.append(v) + with rtyper.annotator.using_policy(rtyper.lowlevel_ann_policy): + for v in args_v: + if v.concretetype is Void: + s_value = rtyper.annotation(v) + if s_value is None: + s_value = annmodel.s_None + if not s_value.is_constant(): + raise TyperError("non-constant variable of type Void") + if not isinstance(s_value, (annmodel.SomePBC, annmodel.SomeNone)): + raise TyperError("non-PBC Void argument: %r", (s_value,)) + args_s.append(s_value) + else: + args_s.append(lltype_to_annotation(v.concretetype)) + newargs_v.append(v) - self.rtyper.call_all_setups() # compute ForwardReferences now + self.rtyper.call_all_setups() # compute ForwardReferences now - # hack for bound methods - if hasattr(ll_function, 'im_func'): - bk = rtyper.annotator.bookkeeper - args_s.insert(0, bk.immutablevalue(ll_function.im_self)) - newargs_v.insert(0, inputconst(Void, ll_function.im_self)) - ll_function = ll_function.im_func + # hack for bound methods + if hasattr(ll_function, 'im_func'): + bk = rtyper.annotator.bookkeeper + 
args_s.insert(0, bk.immutablevalue(ll_function.im_self)) + newargs_v.insert(0, inputconst(Void, ll_function.im_self)) + ll_function = ll_function.im_func graph = annotate_lowlevel_helper(rtyper.annotator, ll_function, args_s, rtyper.lowlevel_ann_policy) From pypy.commits at gmail.com Sat Feb 20 06:30:29 2016 From: pypy.commits at gmail.com (mjacob) Date: Sat, 20 Feb 2016 03:30:29 -0800 (PST) Subject: [pypy-commit] pypy default: Add more entries to .hgignore. Message-ID: <56c84e55.11301c0a.9815a.fffffe61@mx.google.com> Author: Manuel Jacob Branch: Changeset: r82340:12cad570a035 Date: 2016-02-20 12:29 +0100 http://bitbucket.org/pypy/pypy/changeset/12cad570a035/ Log: Add more entries to .hgignore. diff --git a/.hgignore b/.hgignore --- a/.hgignore +++ b/.hgignore @@ -49,6 +49,7 @@ ^rpython/translator/jvm/\.classpath$ ^rpython/translator/jvm/eclipse-bin$ ^rpython/translator/jvm/src/pypy/.+\.class$ +^rpython/translator/llvm/.+\.so$ ^rpython/translator/benchmark/docutils$ ^rpython/translator/benchmark/templess$ ^rpython/translator/benchmark/gadfly$ @@ -72,6 +73,7 @@ ^rpython/translator/cli/src/pypylib\.dll$ ^rpython/translator/cli/src/query\.exe$ ^rpython/translator/cli/src/main\.exe$ +^lib-python/2.7/lib2to3/.+\.pickle$ ^lib_pypy/__pycache__$ ^lib_pypy/ctypes_config_cache/_.+_cache\.py$ ^lib_pypy/ctypes_config_cache/_.+_.+_\.py$ From pypy.commits at gmail.com Sat Feb 20 07:23:02 2016 From: pypy.commits at gmail.com (mjacob) Date: Sat, 20 Feb 2016 04:23:02 -0800 (PST) Subject: [pypy-commit] pypy default: Move StringTraits and UnicodeTraits classes from rpython/rtyper/module/support.py to rpython/rlib/rposix.py. Message-ID: <56c85aa6.6bb8c20a.27bc4.54d0@mx.google.com> Author: Manuel Jacob Branch: Changeset: r82342:dd1e021c36b6 Date: 2016-02-20 12:55 +0100 http://bitbucket.org/pypy/pypy/changeset/dd1e021c36b6/ Log: Move StringTraits and UnicodeTraits classes from rpython/rtyper/module/support.py to rpython/rlib/rposix.py. 
diff --git a/rpython/rlib/rposix.py b/rpython/rlib/rposix.py --- a/rpython/rlib/rposix.py +++ b/rpython/rlib/rposix.py @@ -3,7 +3,6 @@ import errno from rpython.rtyper.lltypesystem.rffi import CConstant, CExternVariable, INT from rpython.rtyper.lltypesystem import lltype, ll2ctypes, rffi -from rpython.rtyper.module.support import StringTraits, UnicodeTraits from rpython.rtyper.tool import rffi_platform from rpython.tool.sourcetools import func_renamer from rpython.translator.tool.cbuild import ExternalCompilationInfo @@ -12,7 +11,7 @@ specialize, enforceargs, register_replacement_for, NOT_CONSTANT) from rpython.rlib.signature import signature from rpython.rlib import types -from rpython.annotator.model import s_Str0 +from rpython.annotator.model import s_Str0, s_Unicode0 from rpython.rlib import jit from rpython.translator.platform import platform from rpython.rlib import rstring @@ -342,6 +341,87 @@ rstring.check_str0(res) return res + +class StringTraits: + str = str + str0 = s_Str0 + CHAR = rffi.CHAR + CCHARP = rffi.CCHARP + charp2str = staticmethod(rffi.charp2str) + charpsize2str = staticmethod(rffi.charpsize2str) + scoped_str2charp = staticmethod(rffi.scoped_str2charp) + str2charp = staticmethod(rffi.str2charp) + free_charp = staticmethod(rffi.free_charp) + scoped_alloc_buffer = staticmethod(rffi.scoped_alloc_buffer) + + @staticmethod + def posix_function_name(name): + return UNDERSCORE_ON_WIN32 + name + + @staticmethod + def ll_os_name(name): + return 'll_os.ll_os_' + name + + @staticmethod + @specialize.argtype(0) + def as_str(path): + assert path is not None + if isinstance(path, str): + return path + elif isinstance(path, unicode): + # This never happens in PyPy's Python interpreter! + # Only in raw RPython code that uses unicode strings. + # We implement python2 behavior: silently convert to ascii. 
+ return path.encode('ascii') + else: + return path.as_bytes() + + @staticmethod + @specialize.argtype(0) + def as_str0(path): + res = StringTraits.as_str(path) + rstring.check_str0(res) + return res + + +class UnicodeTraits: + str = unicode + str0 = s_Unicode0 + CHAR = rffi.WCHAR_T + CCHARP = rffi.CWCHARP + charp2str = staticmethod(rffi.wcharp2unicode) + charpsize2str = staticmethod(rffi.wcharpsize2unicode) + str2charp = staticmethod(rffi.unicode2wcharp) + scoped_str2charp = staticmethod(rffi.scoped_unicode2wcharp) + free_charp = staticmethod(rffi.free_wcharp) + scoped_alloc_buffer = staticmethod(rffi.scoped_alloc_unicodebuffer) + + @staticmethod + def posix_function_name(name): + return UNDERSCORE_ON_WIN32 + 'w' + name + + @staticmethod + @specialize.argtype(0) + def ll_os_name(name): + return 'll_os.ll_os_w' + name + + @staticmethod + @specialize.argtype(0) + def as_str(path): + assert path is not None + if isinstance(path, unicode): + return path + else: + return path.as_unicode() + + @staticmethod + @specialize.argtype(0) + def as_str0(path): + res = UnicodeTraits.as_str(path) + rstring.check_str0(res) + return res + + # Returns True when the unicode function should be called: # - on Windows # - if the path is Unicode. 
diff --git a/rpython/rlib/rposix_environ.py b/rpython/rlib/rposix_environ.py --- a/rpython/rlib/rposix_environ.py +++ b/rpython/rlib/rposix_environ.py @@ -2,10 +2,10 @@ import sys from rpython.annotator import model as annmodel from rpython.rlib.objectmodel import enforceargs +from rpython.rlib.rposix import _WIN32, StringTraits, UnicodeTraits from rpython.rtyper.controllerentry import Controller from rpython.rtyper.extfunc import register_external from rpython.rtyper.lltypesystem import rffi, lltype -from rpython.rtyper.module.support import _WIN32, StringTraits, UnicodeTraits from rpython.translator.tool.cbuild import ExternalCompilationInfo str0 = annmodel.s_Str0 diff --git a/rpython/rtyper/module/support.py b/rpython/rtyper/module/support.py --- a/rpython/rtyper/module/support.py +++ b/rpython/rtyper/module/support.py @@ -1,6 +1,5 @@ import sys -from rpython.annotator import model as annmodel from rpython.rtyper.lltypesystem import lltype, rffi from rpython.rlib.objectmodel import specialize from rpython.rlib import rstring @@ -8,85 +7,6 @@ _WIN32 = sys.platform.startswith('win') UNDERSCORE_ON_WIN32 = '_' if _WIN32 else '' - -class StringTraits: - str = str - str0 = annmodel.s_Str0 - CHAR = rffi.CHAR - CCHARP = rffi.CCHARP - charp2str = staticmethod(rffi.charp2str) - charpsize2str = staticmethod(rffi.charpsize2str) - scoped_str2charp = staticmethod(rffi.scoped_str2charp) - str2charp = staticmethod(rffi.str2charp) - free_charp = staticmethod(rffi.free_charp) - scoped_alloc_buffer = staticmethod(rffi.scoped_alloc_buffer) - - @staticmethod - def posix_function_name(name): - return UNDERSCORE_ON_WIN32 + name - - @staticmethod - def ll_os_name(name): - return 'll_os.ll_os_' + name - - @staticmethod - @specialize.argtype(0) - def as_str(path): - assert path is not None - if isinstance(path, str): - return path - elif isinstance(path, unicode): - # This never happens in PyPy's Python interpreter! - # Only in raw RPython code that uses unicode strings. 
- # We implement python2 behavior: silently convert to ascii. - return path.encode('ascii') - else: - return path.as_bytes() - - @staticmethod - @specialize.argtype(0) - def as_str0(path): - res = StringTraits.as_str(path) - rstring.check_str0(res) - return res - -class UnicodeTraits: - str = unicode - str0 = annmodel.s_Unicode0 - CHAR = rffi.WCHAR_T - CCHARP = rffi.CWCHARP - charp2str = staticmethod(rffi.wcharp2unicode) - charpsize2str = staticmethod(rffi.wcharpsize2unicode) - str2charp = staticmethod(rffi.unicode2wcharp) - scoped_str2charp = staticmethod(rffi.scoped_unicode2wcharp) - free_charp = staticmethod(rffi.free_wcharp) - scoped_alloc_buffer = staticmethod(rffi.scoped_alloc_unicodebuffer) - - @staticmethod - def posix_function_name(name): - return UNDERSCORE_ON_WIN32 + 'w' + name - - @staticmethod - @specialize.argtype(0) - def ll_os_name(name): - return 'll_os.ll_os_w' + name - - @staticmethod - @specialize.argtype(0) - def as_str(path): - assert path is not None - if isinstance(path, unicode): - return path - else: - return path.as_unicode() - - @staticmethod - @specialize.argtype(0) - def as_str0(path): - res = UnicodeTraits.as_str(path) - rstring.check_str0(res) - return res - def ll_strcpy(dst_s, src_s, n): dstchars = dst_s.chars srcchars = src_s.chars From pypy.commits at gmail.com Sat Feb 20 07:23:06 2016 From: pypy.commits at gmail.com (mjacob) Date: Sat, 20 Feb 2016 04:23:06 -0800 (PST) Subject: [pypy-commit] pypy default: Kill rpython/rtyper/module/test/test_ll_strtod.py. Message-ID: <56c85aaa.42711c0a.a3701.15d7@mx.google.com> Author: Manuel Jacob Branch: Changeset: r82344:b9270e154cea Date: 2016-02-20 13:13 +0100 http://bitbucket.org/pypy/pypy/changeset/b9270e154cea/ Log: Kill rpython/rtyper/module/test/test_ll_strtod.py. 
diff --git a/rpython/rtyper/module/test/test_ll_strtod.py b/rpython/rtyper/module/test/test_ll_strtod.py deleted file mode 100644 --- a/rpython/rtyper/module/test/test_ll_strtod.py +++ /dev/null @@ -1,13 +0,0 @@ -import py - -from rpython.rtyper.test.tool import BaseRtypingTest -from rpython.rlib import rfloat - -class TestStrtod(BaseRtypingTest): - def test_formatd(self): - for flags in [0, - rfloat.DTSF_ADD_DOT_0]: - def f(y): - return rfloat.formatd(y, 'g', 2, flags) - - assert self.ll_to_string(self.interpret(f, [3.0])) == f(3.0) diff --git a/rpython/rtyper/test/test_rfloat.py b/rpython/rtyper/test/test_rfloat.py --- a/rpython/rtyper/test/test_rfloat.py +++ b/rpython/rtyper/test/test_rfloat.py @@ -204,6 +204,13 @@ res = self.ll_to_string(self.interpret(f, [10/3.0])) assert res == '3.33' + def test_formatd_g(self): + for flags in [0, rfloat.DTSF_ADD_DOT_0]: + def f(y): + return rfloat.formatd(y, 'g', 2, flags) + + assert self.ll_to_string(self.interpret(f, [3.0])) == f(3.0) + def test_formatd_repr(self): from rpython.rlib.rfloat import formatd def f(x): From pypy.commits at gmail.com Sat Feb 20 07:23:00 2016 From: pypy.commits at gmail.com (mjacob) Date: Sat, 20 Feb 2016 04:23:00 -0800 (PST) Subject: [pypy-commit] pypy default: Kill LLSupport from rpython/rtyper/module/support.py. Message-ID: <56c85aa4.85b01c0a.2f328.17c9@mx.google.com> Author: Manuel Jacob Branch: Changeset: r82341:ecb5a8d71071 Date: 2016-02-20 12:41 +0100 http://bitbucket.org/pypy/pypy/changeset/ecb5a8d71071/ Log: Kill LLSupport from rpython/rtyper/module/support.py. 
diff --git a/rpython/jit/metainterp/test/test_tlc.py b/rpython/jit/metainterp/test/test_tlc.py --- a/rpython/jit/metainterp/test/test_tlc.py +++ b/rpython/jit/metainterp/test/test_tlc.py @@ -1,5 +1,4 @@ import py -from rpython.rtyper.module.support import LLSupport from rpython.jit.tl import tlc diff --git a/rpython/rtyper/module/support.py b/rpython/rtyper/module/support.py --- a/rpython/rtyper/module/support.py +++ b/rpython/rtyper/module/support.py @@ -8,42 +8,6 @@ _WIN32 = sys.platform.startswith('win') UNDERSCORE_ON_WIN32 = '_' if _WIN32 else '' -# utility conversion functions -class LLSupport: - _mixin_ = True - - def to_rstr(s): - from rpython.rtyper.lltypesystem.rstr import STR, mallocstr - if s is None: - return lltype.nullptr(STR) - p = mallocstr(len(s)) - for i in range(len(s)): - p.chars[i] = s[i] - return p - to_rstr = staticmethod(to_rstr) - - def to_runicode(s): - from rpython.rtyper.lltypesystem.rstr import UNICODE, mallocunicode - if s is None: - return lltype.nullptr(UNICODE) - p = mallocunicode(len(s)) - for i in range(len(s)): - p.chars[i] = s[i] - return p - to_runicode = staticmethod(to_runicode) - - def from_rstr(rs): - if not rs: # null pointer - return None - else: - return ''.join([rs.chars[i] for i in range(len(rs.chars))]) - from_rstr = staticmethod(from_rstr) - - def from_rstr_nonnull(rs): - assert rs - return ''.join([rs.chars[i] for i in range(len(rs.chars))]) - from_rstr_nonnull = staticmethod(from_rstr_nonnull) - class StringTraits: str = str diff --git a/rpython/rtyper/test/tool.py b/rpython/rtyper/test/tool.py --- a/rpython/rtyper/test/tool.py +++ b/rpython/rtyper/test/tool.py @@ -46,12 +46,22 @@ return u''.join(s.chars) def string_to_ll(self, s): - from rpython.rtyper.module.support import LLSupport - return LLSupport.to_rstr(s) + from rpython.rtyper.lltypesystem.rstr import STR, mallocstr + if s is None: + return lltype.nullptr(STR) + p = mallocstr(len(s)) + for i in range(len(s)): + p.chars[i] = s[i] + return p def 
unicode_to_ll(self, s): - from rpython.rtyper.module.support import LLSupport - return LLSupport.to_runicode(s) + from rpython.rtyper.lltypesystem.rstr import UNICODE, mallocunicode + if s is None: + return lltype.nullptr(UNICODE) + p = mallocunicode(len(s)) + for i in range(len(s)): + p.chars[i] = s[i] + return p def ll_to_list(self, l): r = [] From pypy.commits at gmail.com Sat Feb 20 07:23:04 2016 From: pypy.commits at gmail.com (mjacob) Date: Sat, 20 Feb 2016 04:23:04 -0800 (PST) Subject: [pypy-commit] pypy default: Kill rpython/rtyper/module/support.py. Message-ID: <56c85aa8.cf0b1c0a.7e1aa.17a9@mx.google.com> Author: Manuel Jacob Branch: Changeset: r82343:92e0c2d63e6f Date: 2016-02-20 13:00 +0100 http://bitbucket.org/pypy/pypy/changeset/92e0c2d63e6f/ Log: Kill rpython/rtyper/module/support.py. diff --git a/rpython/rtyper/lltypesystem/module/ll_math.py b/rpython/rtyper/lltypesystem/module/ll_math.py --- a/rpython/rtyper/lltypesystem/module/ll_math.py +++ b/rpython/rtyper/lltypesystem/module/ll_math.py @@ -6,8 +6,8 @@ from rpython.translator import cdir from rpython.rlib import jit, rposix from rpython.rlib.rfloat import INFINITY, NAN, isfinite, isinf, isnan +from rpython.rlib.rposix import UNDERSCORE_ON_WIN32 from rpython.rtyper.lltypesystem import lltype, rffi -from rpython.rtyper.module.support import UNDERSCORE_ON_WIN32 from rpython.tool.sourcetools import func_with_new_name from rpython.translator.tool.cbuild import ExternalCompilationInfo from rpython.translator.platform import platform diff --git a/rpython/rtyper/lltypesystem/test/test_ll2ctypes.py b/rpython/rtyper/lltypesystem/test/test_ll2ctypes.py --- a/rpython/rtyper/lltypesystem/test/test_ll2ctypes.py +++ b/rpython/rtyper/lltypesystem/test/test_ll2ctypes.py @@ -11,12 +11,12 @@ from rpython.rtyper.lltypesystem.ll2ctypes import _llgcopaque from rpython.rtyper.annlowlevel import llhelper from rpython.rlib import rposix +from rpython.rlib.rposix import UNDERSCORE_ON_WIN32 from 
rpython.translator.tool.cbuild import ExternalCompilationInfo from rpython.translator import cdir from rpython.tool.udir import udir from rpython.rtyper.test.test_llinterp import interpret from rpython.annotator.annrpython import RPythonAnnotator -from rpython.rtyper.module.support import UNDERSCORE_ON_WIN32 from rpython.rtyper.rtyper import RPythonTyper from rpython.rlib.rarithmetic import r_uint, get_long_pattern, is_emulated_long from rpython.rlib.rarithmetic import is_valid_int diff --git a/rpython/rtyper/module/support.py b/rpython/rtyper/module/support.py deleted file mode 100644 --- a/rpython/rtyper/module/support.py +++ /dev/null @@ -1,23 +0,0 @@ -import sys - -from rpython.rtyper.lltypesystem import lltype, rffi -from rpython.rlib.objectmodel import specialize -from rpython.rlib import rstring - -_WIN32 = sys.platform.startswith('win') -UNDERSCORE_ON_WIN32 = '_' if _WIN32 else '' - -def ll_strcpy(dst_s, src_s, n): - dstchars = dst_s.chars - srcchars = src_s.chars - i = 0 - while i < n: - dstchars[i] = srcchars[i] - i += 1 - -def _ll_strfill(dst_s, srcchars, n): - dstchars = dst_s.chars - i = 0 - while i < n: - dstchars[i] = srcchars[i] - i += 1 From pypy.commits at gmail.com Sat Feb 20 07:29:51 2016 From: pypy.commits at gmail.com (mjacob) Date: Sat, 20 Feb 2016 04:29:51 -0800 (PST) Subject: [pypy-commit] pypy default: Move rpython/rtyper/module/test/test_posix.py to rpython/rlib/test/test_posix.py. Message-ID: <56c85c3f.45631c0a.26560.06c2@mx.google.com> Author: Manuel Jacob Branch: Changeset: r82345:3382e5a65bd7 Date: 2016-02-20 13:28 +0100 http://bitbucket.org/pypy/pypy/changeset/3382e5a65bd7/ Log: Move rpython/rtyper/module/test/test_posix.py to rpython/rlib/test/test_posix.py. 
diff --git a/rpython/rtyper/module/test/test_posix.py b/rpython/rlib/test/test_posix.py rename from rpython/rtyper/module/test/test_posix.py rename to rpython/rlib/test/test_posix.py From pypy.commits at gmail.com Sat Feb 20 07:29:53 2016 From: pypy.commits at gmail.com (mjacob) Date: Sat, 20 Feb 2016 04:29:53 -0800 (PST) Subject: [pypy-commit] pypy default: Kill empty python/rtyper/module. Message-ID: <56c85c41.2815c20a.7465a.5508@mx.google.com> Author: Manuel Jacob Branch: Changeset: r82346:a7d549957c0a Date: 2016-02-20 13:29 +0100 http://bitbucket.org/pypy/pypy/changeset/a7d549957c0a/ Log: Kill empty python/rtyper/module. diff --git a/rpython/rtyper/module/__init__.py b/rpython/rtyper/module/__init__.py deleted file mode 100644 --- a/rpython/rtyper/module/__init__.py +++ /dev/null @@ -1,1 +0,0 @@ -# diff --git a/rpython/rtyper/module/test/__init__.py b/rpython/rtyper/module/test/__init__.py deleted file mode 100644 --- a/rpython/rtyper/module/test/__init__.py +++ /dev/null @@ -1,1 +0,0 @@ -# From pypy.commits at gmail.com Sat Feb 20 08:18:37 2016 From: pypy.commits at gmail.com (mjacob) Date: Sat, 20 Feb 2016 05:18:37 -0800 (PST) Subject: [pypy-commit] pypy default: Remove some outdated entries from .hgignore. Message-ID: <56c867ad.cf0b1c0a.7e1aa.28c3@mx.google.com> Author: Manuel Jacob Branch: Changeset: r82347:90c52cca5101 Date: 2016-02-20 14:18 +0100 http://bitbucket.org/pypy/pypy/changeset/90c52cca5101/ Log: Remove some outdated entries from .hgignore. 
diff --git a/.hgignore b/.hgignore --- a/.hgignore +++ b/.hgignore @@ -22,6 +22,7 @@ ^pypy/module/cpyext/test/.+\.obj$ ^pypy/module/cpyext/test/.+\.manifest$ ^pypy/module/test_lib_pypy/ctypes_tests/.+\.o$ +^pypy/module/test_lib_pypy/ctypes_tests/_ctypes_test\.o$ ^pypy/module/cppyy/src/.+\.o$ ^pypy/module/cppyy/bench/.+\.so$ ^pypy/module/cppyy/bench/.+\.root$ @@ -35,7 +36,6 @@ ^pypy/module/test_lib_pypy/cffi_tests/__pycache__.+$ ^pypy/doc/.+\.html$ ^pypy/doc/config/.+\.rst$ -^pypy/doc/basicblock\.asc$ ^pypy/doc/.+\.svninfo$ ^rpython/translator/c/src/libffi_msvc/.+\.obj$ ^rpython/translator/c/src/libffi_msvc/.+\.dll$ @@ -45,55 +45,33 @@ ^rpython/translator/c/src/cjkcodecs/.+\.obj$ ^rpython/translator/c/src/stacklet/.+\.o$ ^rpython/translator/c/src/.+\.o$ -^rpython/translator/jvm/\.project$ -^rpython/translator/jvm/\.classpath$ -^rpython/translator/jvm/eclipse-bin$ -^rpython/translator/jvm/src/pypy/.+\.class$ ^rpython/translator/llvm/.+\.so$ -^rpython/translator/benchmark/docutils$ -^rpython/translator/benchmark/templess$ -^rpython/translator/benchmark/gadfly$ -^rpython/translator/benchmark/mako$ -^rpython/translator/benchmark/bench-custom\.benchmark_result$ -^rpython/translator/benchmark/shootout_benchmarks$ ^rpython/translator/goal/target.+-c$ ^rpython/translator/goal/.+\.exe$ ^rpython/translator/goal/.+\.dll$ ^pypy/goal/pypy-translation-snapshot$ ^pypy/goal/pypy-c -^pypy/goal/pypy-jvm -^pypy/goal/pypy-jvm.jar ^pypy/goal/.+\.exe$ ^pypy/goal/.+\.dll$ ^pypy/goal/.+\.lib$ ^pypy/_cache$ -^pypy/doc/statistic/.+\.html$ -^pypy/doc/statistic/.+\.eps$ -^pypy/doc/statistic/.+\.pdf$ -^rpython/translator/cli/src/pypylib\.dll$ -^rpython/translator/cli/src/query\.exe$ -^rpython/translator/cli/src/main\.exe$ ^lib-python/2.7/lib2to3/.+\.pickle$ ^lib_pypy/__pycache__$ ^lib_pypy/ctypes_config_cache/_.+_cache\.py$ ^lib_pypy/ctypes_config_cache/_.+_.+_\.py$ ^lib_pypy/_libmpdec/.+.o$ -^rpython/translator/cli/query-descriptions$ ^pypy/doc/discussion/.+\.html$ ^include/.+\.h$ 
^include/.+\.inl$ ^pypy/doc/_build/.*$ ^pypy/doc/config/.+\.html$ ^pypy/doc/config/style\.css$ -^pypy/doc/jit/.+\.html$ -^pypy/doc/jit/style\.css$ ^pypy/doc/image/lattice1\.png$ ^pypy/doc/image/lattice2\.png$ ^pypy/doc/image/lattice3\.png$ ^pypy/doc/image/stackless_informal\.png$ ^pypy/doc/image/parsing_example.+\.png$ ^rpython/doc/_build/.*$ -^pypy/module/test_lib_pypy/ctypes_tests/_ctypes_test\.o$ ^compiled ^.git/ ^release/ From pypy.commits at gmail.com Sat Feb 20 17:28:56 2016 From: pypy.commits at gmail.com (mjacob) Date: Sat, 20 Feb 2016 14:28:56 -0800 (PST) Subject: [pypy-commit] pypy default: Convert BaseRtypingTest methods to staticmethods / classmethods to make it possible to use them without instantiation of BaseRtypingTest. Message-ID: <56c8e8a8.0357c20a.b1898.08ae@mx.google.com> Author: Manuel Jacob Branch: Changeset: r82349:2244ee1ca7ce Date: 2016-02-20 23:25 +0100 http://bitbucket.org/pypy/pypy/changeset/2244ee1ca7ce/ Log: Convert BaseRtypingTest methods to staticmethods / classmethods to make it possible to use them without instantiation of BaseRtypingTest. 
diff --git a/rpython/rtyper/test/test_rpbc.py b/rpython/rtyper/test/test_rpbc.py --- a/rpython/rtyper/test/test_rpbc.py +++ b/rpython/rtyper/test/test_rpbc.py @@ -1945,7 +1945,7 @@ def interpret(self, fn, args, **kwds): kwds['config'] = self.config - return TestRPBC.interpret(self, fn, args, **kwds) + return TestRPBC.interpret(fn, args, **kwds) def test_smallfuncsets_basic(): from rpython.translator.translator import TranslationContext, graphof diff --git a/rpython/rtyper/test/tool.py b/rpython/rtyper/test/tool.py --- a/rpython/rtyper/test/tool.py +++ b/rpython/rtyper/test/tool.py @@ -5,22 +5,27 @@ class BaseRtypingTest(object): FLOAT_PRECISION = 8 - def gengraph(self, func, argtypes=[], viewbefore='auto', policy=None, + @staticmethod + def gengraph(func, argtypes=[], viewbefore='auto', policy=None, backendopt=False, config=None): return gengraph(func, argtypes, viewbefore, policy, backendopt=backendopt, config=config) - def interpret(self, fn, args, **kwds): + @staticmethod + def interpret(fn, args, **kwds): return interpret(fn, args, **kwds) - def interpret_raises(self, exc, fn, args, **kwds): + @staticmethod + def interpret_raises(exc, fn, args, **kwds): return interpret_raises(exc, fn, args, **kwds) - def float_eq(self, x, y): + @staticmethod + def float_eq(x, y): return x == y - def float_eq_approx(self, x, y): - maxError = 10**-self.FLOAT_PRECISION + @classmethod + def float_eq_approx(cls, x, y): + maxError = 10**-cls.FLOAT_PRECISION if abs(x-y) < maxError: return True @@ -31,21 +36,26 @@ return relativeError < maxError - def is_of_type(self, x, type_): + @staticmethod + def is_of_type(x, type_): return type(x) is type_ - def _skip_llinterpreter(self, reason): + @staticmethod + def _skip_llinterpreter(reason): py.test.skip("lltypesystem doesn't support %s, yet" % reason) - def ll_to_string(self, s): + @staticmethod + def ll_to_string(s): if not s: return None return ''.join(s.chars) - def ll_to_unicode(self, s): + @staticmethod + def ll_to_unicode(s): return 
u''.join(s.chars) - def string_to_ll(self, s): + @staticmethod + def string_to_ll(s): from rpython.rtyper.lltypesystem.rstr import STR, mallocstr if s is None: return lltype.nullptr(STR) @@ -54,7 +64,8 @@ p.chars[i] = s[i] return p - def unicode_to_ll(self, s): + @staticmethod + def unicode_to_ll(s): from rpython.rtyper.lltypesystem.rstr import UNICODE, mallocunicode if s is None: return lltype.nullptr(UNICODE) @@ -63,23 +74,28 @@ p.chars[i] = s[i] return p - def ll_to_list(self, l): + @staticmethod + def ll_to_list(l): r = [] items = l.ll_items() for i in range(l.ll_length()): r.append(items[i]) return r - def ll_unpack_tuple(self, t, length): + @staticmethod + def ll_unpack_tuple(t, length): return tuple([getattr(t, 'item%d' % i) for i in range(length)]) - def get_callable(self, fnptr): + @staticmethod + def get_callable(fnptr): return fnptr._obj._callable - def class_name(self, value): + @staticmethod + def class_name(value): return ''.join(value.super.typeptr.name.chars) - def read_attr(self, value, attr_name): + @staticmethod + def read_attr(value, attr_name): value = value._obj while value is not None: attr = getattr(value, "inst_" + attr_name, None) @@ -89,6 +105,7 @@ return attr raise AttributeError() - def is_of_instance_type(self, val): + @staticmethod + def is_of_instance_type(val): T = lltype.typeOf(val) return isinstance(T, lltype.Ptr) and isinstance(T.TO, lltype.GcStruct) From pypy.commits at gmail.com Sat Feb 20 17:28:54 2016 From: pypy.commits at gmail.com (mjacob) Date: Sat, 20 Feb 2016 14:28:54 -0800 (PST) Subject: [pypy-commit] pypy default: Fix. Message-ID: <56c8e8a6.657bc20a.4c046.08d2@mx.google.com> Author: Manuel Jacob Branch: Changeset: r82348:0f32b277e2ec Date: 2016-02-20 23:22 +0100 http://bitbucket.org/pypy/pypy/changeset/0f32b277e2ec/ Log: Fix. 
diff --git a/rpython/rtyper/test/test_rfloat.py b/rpython/rtyper/test/test_rfloat.py --- a/rpython/rtyper/test/test_rfloat.py +++ b/rpython/rtyper/test/test_rfloat.py @@ -205,6 +205,7 @@ assert res == '3.33' def test_formatd_g(self): + from rpython.rlib import rfloat for flags in [0, rfloat.DTSF_ADD_DOT_0]: def f(y): return rfloat.formatd(y, 'g', 2, flags) From pypy.commits at gmail.com Sun Feb 21 04:16:56 2016 From: pypy.commits at gmail.com (plan_rich) Date: Sun, 21 Feb 2016 01:16:56 -0800 (PST) Subject: [pypy-commit] pypy s390x-backend: do not use registers other than r2->r11 for that test Message-ID: <56c98088.a3abc20a.adb05.ffff9525@mx.google.com> Author: Richard Plangger Branch: s390x-backend Changeset: r82350:c2d76c8ce81a Date: 2016-02-21 10:16 +0100 http://bitbucket.org/pypy/pypy/changeset/c2d76c8ce81a/ Log: do not use registers other than r2->r11 for that test diff --git a/rpython/jit/backend/zarch/test/test_assembler.py b/rpython/jit/backend/zarch/test/test_assembler.py --- a/rpython/jit/backend/zarch/test/test_assembler.py +++ b/rpython/jit/backend/zarch/test/test_assembler.py @@ -563,43 +563,43 @@ loaded = [] # two sequences 10-11, 13-14 - self.pushpop_jitframe([r.r10, r.r11, r.r13, r.r14]) - assert stored == [(r.r10, r.r11), (r.r13, r.r14)] + self.pushpop_jitframe([r.r2, r.r3, r.r10, r.r11]) + assert stored == [(r.r2, r.r3), (r.r10, r.r11)] assert stored == loaded stored = [] loaded = [] # one sequence and on single - self.pushpop_jitframe([r.r0, r.r1, r.r3]) - assert stored == [(r.r0, r.r1), (r.r3,)] + self.pushpop_jitframe([r.r2, r.r3, r.r5]) + assert stored == [(r.r2, r.r3), (r.r5,)] assert stored == loaded stored = [] loaded = [] # single items - self.pushpop_jitframe(r.registers[::2]) - assert stored == [(x,) for x in r.registers[::2]] + self.pushpop_jitframe(r.MANAGED_REGS[::2]) + assert stored == [(x,) for x in r.MANAGED_REGS[::2]] assert stored == loaded stored = [] loaded = [] # large sequence 0-5 and one hole between - 
self.pushpop_jitframe([r.r0, r.r1, r.r2, r.r3, - r.r4, r.r5, r.r12, r.r13]) - assert stored == [(r.r0, r.r5), (r.r12, r.r13)] + self.pushpop_jitframe([r.r2, r.r3, + r.r4, r.r5, r.r10, r.r11]) + assert stored == [(r.r2, r.r5), (r.r10, r.r11)] assert stored == loaded stored = [] loaded = [] # ensure there is just on instruction for the 'best case' - self.pushpop_jitframe(r.registers) - assert stored == [(r.r0, r.r15)] + self.pushpop_jitframe(r.MANAGED_REGS) + assert stored == [(r.r2, r.r11)] assert stored == loaded stored = [] loaded = [] # just one single - for x in [r.r14, r.r0, r.r1, r.r15]: + for x in [r.r10, r.r3, r.r2, r.r11]: self.pushpop_jitframe([x]) assert stored == [(x,)] assert stored == loaded @@ -607,8 +607,8 @@ loaded = [] # unordered - self.pushpop_jitframe([r.r14, r.r8, r.r4, r.r0]) - assert stored == [(r.r14,), (r.r8,), (r.r4,), (r.r0,)] + self.pushpop_jitframe([r.r11, r.r8, r.r4, r.r2]) + assert stored == [(r.r11,), (r.r8,), (r.r4,), (r.r2,)] assert stored == loaded stored = [] loaded = [] From pypy.commits at gmail.com Sun Feb 21 04:52:09 2016 From: pypy.commits at gmail.com (mjacob) Date: Sun, 21 Feb 2016 01:52:09 -0800 (PST) Subject: [pypy-commit] pypy default: Fix. Message-ID: <56c988c9.9a6f1c0a.977b5.5f37@mx.google.com> Author: Manuel Jacob Branch: Changeset: r82351:73de30973361 Date: 2016-02-21 10:52 +0100 http://bitbucket.org/pypy/pypy/changeset/73de30973361/ Log: Fix. 
diff --git a/rpython/rlib/test/test_rerased.py b/rpython/rlib/test/test_rerased.py --- a/rpython/rlib/test/test_rerased.py +++ b/rpython/rlib/test/test_rerased.py @@ -192,7 +192,7 @@ def interpret(self, *args, **kwargs): kwargs["taggedpointers"] = True - return BaseRtypingTest.interpret(self, *args, **kwargs) + return BaseRtypingTest.interpret(*args, **kwargs) def test_rtype_1(self): def f(): return eraseX(X()) From pypy.commits at gmail.com Sun Feb 21 04:54:19 2016 From: pypy.commits at gmail.com (rlamy) Date: Sun, 21 Feb 2016 01:54:19 -0800 (PST) Subject: [pypy-commit] pypy desc-specialize: Extract new function all_values() from memo() Message-ID: <56c9894b.418f1c0a.892d5.5dca@mx.google.com> Author: Ronan Lamy Branch: desc-specialize Changeset: r82353:0e58f82655b2 Date: 2016-02-20 16:48 +0100 http://bitbucket.org/pypy/pypy/changeset/0e58f82655b2/ Log: Extract new function all_values() from memo() diff --git a/rpython/annotator/specialize.py b/rpython/annotator/specialize.py --- a/rpython/annotator/specialize.py +++ b/rpython/annotator/specialize.py @@ -3,11 +3,13 @@ from rpython.tool.sourcetools import func_with_new_name from rpython.tool.algo.unionfind import UnionFind -from rpython.flowspace.model import Block, Link, Variable, SpaceOperation +from rpython.flowspace.model import Block, Link, Variable from rpython.flowspace.model import checkgraph from rpython.flowspace.operation import op from rpython.annotator import model as annmodel from rpython.flowspace.argument import Signature +from rpython.annotator.model import SomePBC, SomeImpossibleValue, SomeBool +from rpython.annotator.model import unionof def flatten_star_args(funcdesc, args_s): argnames, vararg, kwarg = funcdesc.signature @@ -127,7 +129,6 @@ def finish(self): if self.do_not_process: return - from rpython.annotator.model import unionof assert self.graph is None, "MemoTable already finished" # list of which argument positions can take more than one value example_args, example_value = 
self.table.iteritems().next() @@ -246,34 +247,36 @@ args_s.append(unionof(*values_s)) annotator.addpendinggraph(self.graph, args_s) +def all_values(s): + """Return the exhaustive list of possible values matching annotation `s`. -def memo(funcdesc, arglist_s): - from rpython.annotator.model import SomePBC, SomeImpossibleValue, SomeBool - from rpython.annotator.model import unionof + Raises `AnnotatorError` if no such (reasonably small) finite list exists. + """ + if s.is_constant(): + return [s.const] + elif isinstance(s, SomePBC): + values = [] + assert not s.can_be_None, "memo call: cannot mix None and PBCs" + for desc in s.descriptions: + if desc.pyobj is None: + raise annmodel.AnnotatorError( + "memo call with a class or PBC that has no " + "corresponding Python object (%r)" % (desc,)) + values.append(desc.pyobj) + return values + elif isinstance(s, SomeImpossibleValue): + return [] + elif isinstance(s, SomeBool): + return [False, True] + else: + raise annmodel.AnnotatorError("memo call: argument must be a class " + "or a frozen PBC, got %r" % (s,)) + +def memo(funcdesc, args_s): # call the function now, and collect possible results - argvalues = [] - for s in arglist_s: - if s.is_constant(): - values = [s.const] - elif isinstance(s, SomePBC): - values = [] - assert not s.can_be_None, "memo call: cannot mix None and PBCs" - for desc in s.descriptions: - if desc.pyobj is None: - raise annmodel.AnnotatorError( - "memo call with a class or PBC that has no " - "corresponding Python object (%r)" % (desc,)) - values.append(desc.pyobj) - elif isinstance(s, SomeImpossibleValue): - return s # we will probably get more possible args later - elif isinstance(s, SomeBool): - values = [False, True] - else: - raise annmodel.AnnotatorError("memo call: argument must be a class " - "or a frozen PBC, got %r" % (s,)) - argvalues.append(values) + # the list of all possible tuples of arguments to give to the memo function - possiblevalues = cartesian_product(argvalues) + 
possiblevalues = cartesian_product([all_values(s_arg) for s_arg in args_s]) # a MemoTable factory -- one MemoTable per family of arguments that can # be called together, merged via a UnionFind. From pypy.commits at gmail.com Sun Feb 21 04:54:17 2016 From: pypy.commits at gmail.com (rlamy) Date: Sun, 21 Feb 2016 01:54:17 -0800 (PST) Subject: [pypy-commit] pypy desc-specialize: Simplify FunctionDesc.__init__ and use factory method bk.newfuncdesc() instead Message-ID: <56c98949.e6bbc20a.d2a3a.54ec@mx.google.com> Author: Ronan Lamy Branch: desc-specialize Changeset: r82352:fe52b15deb73 Date: 2016-02-20 13:51 +0000 http://bitbucket.org/pypy/pypy/changeset/fe52b15deb73/ Log: Simplify FunctionDesc.__init__ and use factory method bk.newfuncdesc() instead diff --git a/rpython/annotator/bookkeeper.py b/rpython/annotator/bookkeeper.py --- a/rpython/annotator/bookkeeper.py +++ b/rpython/annotator/bookkeeper.py @@ -9,6 +9,7 @@ from collections import OrderedDict from rpython.flowspace.model import Constant +from rpython.flowspace.bytecode import cpython_code_signature from rpython.annotator.model import ( SomeOrderedDict, SomeString, SomeChar, SomeFloat, unionof, SomeInstance, SomeDict, SomeBuiltin, SomePBC, SomeInteger, TLS, SomeUnicodeCodePoint, @@ -358,7 +359,7 @@ return self.descs[obj_key] except KeyError: if isinstance(pyobj, types.FunctionType): - result = description.FunctionDesc(self, pyobj) + result = self.newfuncdesc(pyobj) elif isinstance(pyobj, (type, types.ClassType)): if pyobj is object: raise Exception("ClassDesc for object not supported") @@ -403,6 +404,21 @@ self.descs[obj_key] = result return result + def newfuncdesc(self, pyfunc): + name = pyfunc.__name__ + if hasattr(pyfunc, '_generator_next_method_of_'): + from rpython.flowspace.argument import Signature + signature = Signature(['entry']) # haaaaaack + defaults = () + else: + signature = cpython_code_signature(pyfunc.func_code) + defaults = pyfunc.func_defaults + # get the specializer based on the tag of 
the 'pyobj' + # (if any), according to the current policy + tag = getattr(pyfunc, '_annspecialcase_', None) + specializer = self.annotator.policy.get_specializer(tag) + return description.FunctionDesc(self, pyfunc, name, signature, defaults, specializer) + def getfrozen(self, pyobj): return description.FrozenDesc(self, pyobj) diff --git a/rpython/annotator/classdesc.py b/rpython/annotator/classdesc.py --- a/rpython/annotator/classdesc.py +++ b/rpython/annotator/classdesc.py @@ -600,7 +600,7 @@ if mixin: # make a new copy of the FunctionDesc for this class, # but don't specialize further for all subclasses - funcdesc = FunctionDesc(self.bookkeeper, value) + funcdesc = self.bookkeeper.newfuncdesc(value) self.classdict[name] = funcdesc return # NB. if value is, say, AssertionError.__init__, then we diff --git a/rpython/annotator/description.py b/rpython/annotator/description.py --- a/rpython/annotator/description.py +++ b/rpython/annotator/description.py @@ -3,7 +3,6 @@ from rpython.annotator.signature import ( enforce_signature_args, enforce_signature_return, finish_type) from rpython.flowspace.model import FunctionGraph -from rpython.flowspace.bytecode import cpython_code_signature from rpython.annotator.argument import rawshape, ArgErr, simple_args from rpython.tool.sourcetools import valid_identifier from rpython.tool.pairtype import extendabletype @@ -192,29 +191,16 @@ class FunctionDesc(Desc): knowntype = types.FunctionType - def __init__(self, bookkeeper, pyobj=None, - name=None, signature=None, defaults=None, + def __init__(self, bookkeeper, pyobj, name, signature, defaults, specializer=None): super(FunctionDesc, self).__init__(bookkeeper, pyobj) - if name is None: - name = pyobj.func_name - if signature is None: - if hasattr(pyobj, '_generator_next_method_of_'): - from rpython.flowspace.argument import Signature - signature = Signature(['entry']) # haaaaaack - defaults = () - else: - signature = cpython_code_signature(pyobj.func_code) - if defaults is None: - 
defaults = pyobj.func_defaults self.name = name self.signature = signature - self.defaults = defaults or () + self.defaults = defaults # 'specializer' is a function with the following signature: # specializer(funcdesc, args_s) => graph # or => s_result (overridden/memo cases) self.specializer = specializer - self.init_specializer() self._cache = {} # convenience for the specializer def buildgraph(self, alt_name=None, builder=None): @@ -284,14 +270,6 @@ (self.name, e.getmsg())) return inputcells - def init_specializer(self): - if self.specializer is None: - # get the specializer based on the tag of the 'pyobj' - # (if any), according to the current policy - tag = getattr(self.pyobj, '_annspecialcase_', None) - policy = self.bookkeeper.annotator.policy - self.specializer = policy.get_specializer(tag) - def specialize(self, inputcells, op=None): if (op is None and getattr(self.bookkeeper, "position_key", None) is not None): From pypy.commits at gmail.com Sun Feb 21 04:54:21 2016 From: pypy.commits at gmail.com (rlamy) Date: Sun, 21 Feb 2016 01:54:21 -0800 (PST) Subject: [pypy-commit] pypy desc-specialize: normalise funcdesc.defaults Message-ID: <56c9894d.2968c20a.bf25f.ffff9da9@mx.google.com> Author: Ronan Lamy Branch: desc-specialize Changeset: r82354:8eba3a267336 Date: 2016-02-20 17:03 +0100 http://bitbucket.org/pypy/pypy/changeset/8eba3a267336/ Log: normalise funcdesc.defaults diff --git a/rpython/annotator/description.py b/rpython/annotator/description.py --- a/rpython/annotator/description.py +++ b/rpython/annotator/description.py @@ -196,7 +196,7 @@ super(FunctionDesc, self).__init__(bookkeeper, pyobj) self.name = name self.signature = signature - self.defaults = defaults + self.defaults = defaults if defaults is not None else () # 'specializer' is a function with the following signature: # specializer(funcdesc, args_s) => graph # or => s_result (overridden/memo cases) From pypy.commits at gmail.com Sun Feb 21 05:26:31 2016 From: pypy.commits at gmail.com 
(cfbolz) Date: Sun, 21 Feb 2016 02:26:31 -0800 (PST) Subject: [pypy-commit] extradoc extradoc: (all) planning for today Message-ID: <56c990d7.8e811c0a.91fcf.5e85@mx.google.com> Author: Carl Friedrich Bolz Branch: extradoc Changeset: r5608:f79b3e22a319 Date: 2016-02-21 11:06 +0100 http://bitbucket.org/pypy/extradoc/changeset/f79b3e22a319/ Log: (all) planning for today diff --git a/sprintinfo/leysin-winter-2016/planning.txt b/sprintinfo/leysin-winter-2016/planning.txt new file mode 100644 --- /dev/null +++ b/sprintinfo/leysin-winter-2016/planning.txt @@ -0,0 +1,32 @@ +Tasks +===== + +- mercurial benchmarks on PyPy (fijal, marmoute, cfbolz around) +- merging cpyext-ext, numpy-on-cpyext (mattip, ronan, arigo around) +- jit leaner frontend +- maps reordering (cfbolz, fijal around) +- py3k work (manuel, richard) +- register allocation +- live ranges in JIT viewer +- VMProf on OS X, fix bugs +- continuing to refactoring annotator +- general wizardry (cfbolz, arigo, samuele not around) +- do we have warmup time to VMProf + + + +Discussions +========= + +- tooling +- too many bridges +- cpyext+numpy +- unstucking benchmarking +- Python3 +- mercurial stuff (how pypy use it) +- update work areas +- code quality +- vmprof memory +- summer of code +- future of rffi +- think about resizable lists From pypy.commits at gmail.com Sun Feb 21 06:55:00 2016 From: pypy.commits at gmail.com (mattip) Date: Sun, 21 Feb 2016 03:55:00 -0800 (PST) Subject: [pypy-commit] pypy cpyext-ext: create larger c files by indexing directories together (previously each file was seperate) Message-ID: <56c9a594.c1b3c20a.9ea7d.ffffcc33@mx.google.com> Author: mattip Branch: cpyext-ext Changeset: r82355:30ba30f75823 Date: 2016-02-08 21:53 +0200 http://bitbucket.org/pypy/pypy/changeset/30ba30f75823/ Log: create larger c files by indexing directories together (previously each file was seperate) diff --git a/rpython/translator/c/genc.py b/rpython/translator/c/genc.py --- a/rpython/translator/c/genc.py +++ 
b/rpython/translator/c/genc.py @@ -547,6 +547,8 @@ relpypath = localpath.relto(pypkgpath.dirname) assert relpypath, ("%r should be relative to %r" % (localpath, pypkgpath.dirname)) + if len(relpypath.split(os.path.sep)) > 2: + return os.path.split(relpypath)[0] + '.c' return relpypath.replace('.py', '.c') return None if hasattr(node.obj, 'graph'): diff --git a/rpython/translator/c/test/test_standalone.py b/rpython/translator/c/test/test_standalone.py --- a/rpython/translator/c/test/test_standalone.py +++ b/rpython/translator/c/test/test_standalone.py @@ -123,9 +123,9 @@ # Verify that the generated C files have sane names: gen_c_files = [str(f) for f in cbuilder.extrafiles] - for expfile in ('rpython_rlib_rposix.c', - 'rpython_rtyper_lltypesystem_rstr.c', - 'rpython_translator_c_test_test_standalone.c'): + for expfile in ('rpython_rlib.c', + 'rpython_rtyper_lltypesystem.c', + 'rpython_translator_c_test.c'): assert cbuilder.targetdir.join(expfile) in gen_c_files def test_print(self): From pypy.commits at gmail.com Sun Feb 21 06:55:02 2016 From: pypy.commits at gmail.com (mattip) Date: Sun, 21 Feb 2016 03:55:02 -0800 (PST) Subject: [pypy-commit] pypy cpyext-gc-support-2: merge default into branch Message-ID: <56c9a596.a2afc20a.b3f17.ffffc580@mx.google.com> Author: mattip Branch: cpyext-gc-support-2 Changeset: r82356:1e38809f93be Date: 2016-02-20 13:38 +0100 http://bitbucket.org/pypy/pypy/changeset/1e38809f93be/ Log: merge default into branch diff too long, truncating to 2000 out of 7656 lines diff --git a/.hgignore b/.hgignore --- a/.hgignore +++ b/.hgignore @@ -75,6 +75,7 @@ ^lib_pypy/__pycache__$ ^lib_pypy/ctypes_config_cache/_.+_cache\.py$ ^lib_pypy/ctypes_config_cache/_.+_.+_\.py$ +^lib_pypy/_libmpdec/.+.o$ ^rpython/translator/cli/query-descriptions$ ^pypy/doc/discussion/.+\.html$ ^include/.+\.h$ diff --git a/dotviewer/drawgraph.py b/dotviewer/drawgraph.py --- a/dotviewer/drawgraph.py +++ b/dotviewer/drawgraph.py @@ -14,12 +14,661 @@ FONT = os.path.join(this_dir, 
'font', 'DroidSans.ttf') FIXEDFONT = os.path.join(this_dir, 'font', 'DroidSansMono.ttf') COLOR = { - 'black': (0,0,0), - 'white': (255,255,255), - 'red': (255,0,0), - 'green': (0,255,0), - 'blue': (0,0,255), - 'yellow': (255,255,0), + 'aliceblue': (240, 248, 255), + 'antiquewhite': (250, 235, 215), + 'antiquewhite1': (255, 239, 219), + 'antiquewhite2': (238, 223, 204), + 'antiquewhite3': (205, 192, 176), + 'antiquewhite4': (139, 131, 120), + 'aquamarine': (127, 255, 212), + 'aquamarine1': (127, 255, 212), + 'aquamarine2': (118, 238, 198), + 'aquamarine3': (102, 205, 170), + 'aquamarine4': (69, 139, 116), + 'azure': (240, 255, 255), + 'azure1': (240, 255, 255), + 'azure2': (224, 238, 238), + 'azure3': (193, 205, 205), + 'azure4': (131, 139, 139), + 'beige': (245, 245, 220), + 'bisque': (255, 228, 196), + 'bisque1': (255, 228, 196), + 'bisque2': (238, 213, 183), + 'bisque3': (205, 183, 158), + 'bisque4': (139, 125, 107), + 'black': (0, 0, 0), + 'blanchedalmond': (255, 235, 205), + 'blue': (0, 0, 255), + 'blue1': (0, 0, 255), + 'blue2': (0, 0, 238), + 'blue3': (0, 0, 205), + 'blue4': (0, 0, 139), + 'blueviolet': (138, 43, 226), + 'brown': (165, 42, 42), + 'brown1': (255, 64, 64), + 'brown2': (238, 59, 59), + 'brown3': (205, 51, 51), + 'brown4': (139, 35, 35), + 'burlywood': (222, 184, 135), + 'burlywood1': (255, 211, 155), + 'burlywood2': (238, 197, 145), + 'burlywood3': (205, 170, 125), + 'burlywood4': (139, 115, 85), + 'cadetblue': (95, 158, 160), + 'cadetblue1': (152, 245, 255), + 'cadetblue2': (142, 229, 238), + 'cadetblue3': (122, 197, 205), + 'cadetblue4': (83, 134, 139), + 'chartreuse': (127, 255, 0), + 'chartreuse1': (127, 255, 0), + 'chartreuse2': (118, 238, 0), + 'chartreuse3': (102, 205, 0), + 'chartreuse4': (69, 139, 0), + 'chocolate': (210, 105, 30), + 'chocolate1': (255, 127, 36), + 'chocolate2': (238, 118, 33), + 'chocolate3': (205, 102, 29), + 'chocolate4': (139, 69, 19), + 'coral': (255, 127, 80), + 'coral1': (255, 114, 86), + 'coral2': (238, 106, 
80), + 'coral3': (205, 91, 69), + 'coral4': (139, 62, 47), + 'cornflowerblue': (100, 149, 237), + 'cornsilk': (255, 248, 220), + 'cornsilk1': (255, 248, 220), + 'cornsilk2': (238, 232, 205), + 'cornsilk3': (205, 200, 177), + 'cornsilk4': (139, 136, 120), + 'crimson': (220, 20, 60), + 'cyan': (0, 255, 255), + 'cyan1': (0, 255, 255), + 'cyan2': (0, 238, 238), + 'cyan3': (0, 205, 205), + 'cyan4': (0, 139, 139), + 'darkgoldenrod': (184, 134, 11), + 'darkgoldenrod1': (255, 185, 15), + 'darkgoldenrod2': (238, 173, 14), + 'darkgoldenrod3': (205, 149, 12), + 'darkgoldenrod4': (139, 101, 8), + 'darkgreen': (0, 100, 0), + 'darkkhaki': (189, 183, 107), + 'darkolivegreen': (85, 107, 47), + 'darkolivegreen1': (202, 255, 112), + 'darkolivegreen2': (188, 238, 104), + 'darkolivegreen3': (162, 205, 90), + 'darkolivegreen4': (110, 139, 61), + 'darkorange': (255, 140, 0), + 'darkorange1': (255, 127, 0), + 'darkorange2': (238, 118, 0), + 'darkorange3': (205, 102, 0), + 'darkorange4': (139, 69, 0), + 'darkorchid': (153, 50, 204), + 'darkorchid1': (191, 62, 255), + 'darkorchid2': (178, 58, 238), + 'darkorchid3': (154, 50, 205), + 'darkorchid4': (104, 34, 139), + 'darksalmon': (233, 150, 122), + 'darkseagreen': (143, 188, 143), + 'darkseagreen1': (193, 255, 193), + 'darkseagreen2': (180, 238, 180), + 'darkseagreen3': (155, 205, 155), + 'darkseagreen4': (105, 139, 105), + 'darkslateblue': (72, 61, 139), + 'darkslategray': (47, 79, 79), + 'darkslategray1': (151, 255, 255), + 'darkslategray2': (141, 238, 238), + 'darkslategray3': (121, 205, 205), + 'darkslategray4': (82, 139, 139), + 'darkslategrey': (47, 79, 79), + 'darkturquoise': (0, 206, 209), + 'darkviolet': (148, 0, 211), + 'deeppink': (255, 20, 147), + 'deeppink1': (255, 20, 147), + 'deeppink2': (238, 18, 137), + 'deeppink3': (205, 16, 118), + 'deeppink4': (139, 10, 80), + 'deepskyblue': (0, 191, 255), + 'deepskyblue1': (0, 191, 255), + 'deepskyblue2': (0, 178, 238), + 'deepskyblue3': (0, 154, 205), + 'deepskyblue4': (0, 104, 139), + 
'dimgray': (105, 105, 105), + 'dimgrey': (105, 105, 105), + 'dodgerblue': (30, 144, 255), + 'dodgerblue1': (30, 144, 255), + 'dodgerblue2': (28, 134, 238), + 'dodgerblue3': (24, 116, 205), + 'dodgerblue4': (16, 78, 139), + 'firebrick': (178, 34, 34), + 'firebrick1': (255, 48, 48), + 'firebrick2': (238, 44, 44), + 'firebrick3': (205, 38, 38), + 'firebrick4': (139, 26, 26), + 'floralwhite': (255, 250, 240), + 'forestgreen': (34, 139, 34), + 'gainsboro': (220, 220, 220), + 'ghostwhite': (248, 248, 255), + 'gold': (255, 215, 0), + 'gold1': (255, 215, 0), + 'gold2': (238, 201, 0), + 'gold3': (205, 173, 0), + 'gold4': (139, 117, 0), + 'goldenrod': (218, 165, 32), + 'goldenrod1': (255, 193, 37), + 'goldenrod2': (238, 180, 34), + 'goldenrod3': (205, 155, 29), + 'goldenrod4': (139, 105, 20), + 'gray': (192, 192, 192), + 'gray0': (0, 0, 0), + 'gray1': (3, 3, 3), + 'gray10': (26, 26, 26), + 'gray100': (255, 255, 255), + 'gray11': (28, 28, 28), + 'gray12': (31, 31, 31), + 'gray13': (33, 33, 33), + 'gray14': (36, 36, 36), + 'gray15': (38, 38, 38), + 'gray16': (41, 41, 41), + 'gray17': (43, 43, 43), + 'gray18': (46, 46, 46), + 'gray19': (48, 48, 48), + 'gray2': (5, 5, 5), + 'gray20': (51, 51, 51), + 'gray21': (54, 54, 54), + 'gray22': (56, 56, 56), + 'gray23': (59, 59, 59), + 'gray24': (61, 61, 61), + 'gray25': (64, 64, 64), + 'gray26': (66, 66, 66), + 'gray27': (69, 69, 69), + 'gray28': (71, 71, 71), + 'gray29': (74, 74, 74), + 'gray3': (8, 8, 8), + 'gray30': (77, 77, 77), + 'gray31': (79, 79, 79), + 'gray32': (82, 82, 82), + 'gray33': (84, 84, 84), + 'gray34': (87, 87, 87), + 'gray35': (89, 89, 89), + 'gray36': (92, 92, 92), + 'gray37': (94, 94, 94), + 'gray38': (97, 97, 97), + 'gray39': (99, 99, 99), + 'gray4': (10, 10, 10), + 'gray40': (102, 102, 102), + 'gray41': (105, 105, 105), + 'gray42': (107, 107, 107), + 'gray43': (110, 110, 110), + 'gray44': (112, 112, 112), + 'gray45': (115, 115, 115), + 'gray46': (117, 117, 117), + 'gray47': (120, 120, 120), + 'gray48': (122, 122, 
122), + 'gray49': (125, 125, 125), + 'gray5': (13, 13, 13), + 'gray50': (127, 127, 127), + 'gray51': (130, 130, 130), + 'gray52': (133, 133, 133), + 'gray53': (135, 135, 135), + 'gray54': (138, 138, 138), + 'gray55': (140, 140, 140), + 'gray56': (143, 143, 143), + 'gray57': (145, 145, 145), + 'gray58': (148, 148, 148), + 'gray59': (150, 150, 150), + 'gray6': (15, 15, 15), + 'gray60': (153, 153, 153), + 'gray61': (156, 156, 156), + 'gray62': (158, 158, 158), + 'gray63': (161, 161, 161), + 'gray64': (163, 163, 163), + 'gray65': (166, 166, 166), + 'gray66': (168, 168, 168), + 'gray67': (171, 171, 171), + 'gray68': (173, 173, 173), + 'gray69': (176, 176, 176), + 'gray7': (18, 18, 18), + 'gray70': (179, 179, 179), + 'gray71': (181, 181, 181), + 'gray72': (184, 184, 184), + 'gray73': (186, 186, 186), + 'gray74': (189, 189, 189), + 'gray75': (191, 191, 191), + 'gray76': (194, 194, 194), + 'gray77': (196, 196, 196), + 'gray78': (199, 199, 199), + 'gray79': (201, 201, 201), + 'gray8': (20, 20, 20), + 'gray80': (204, 204, 204), + 'gray81': (207, 207, 207), + 'gray82': (209, 209, 209), + 'gray83': (212, 212, 212), + 'gray84': (214, 214, 214), + 'gray85': (217, 217, 217), + 'gray86': (219, 219, 219), + 'gray87': (222, 222, 222), + 'gray88': (224, 224, 224), + 'gray89': (227, 227, 227), + 'gray9': (23, 23, 23), + 'gray90': (229, 229, 229), + 'gray91': (232, 232, 232), + 'gray92': (235, 235, 235), + 'gray93': (237, 237, 237), + 'gray94': (240, 240, 240), + 'gray95': (242, 242, 242), + 'gray96': (245, 245, 245), + 'gray97': (247, 247, 247), + 'gray98': (250, 250, 250), + 'gray99': (252, 252, 252), + 'green': (0, 255, 0), + 'green1': (0, 255, 0), + 'green2': (0, 238, 0), + 'green3': (0, 205, 0), + 'green4': (0, 139, 0), + 'greenyellow': (173, 255, 47), + 'grey': (192, 192, 192), + 'grey0': (0, 0, 0), + 'grey1': (3, 3, 3), + 'grey10': (26, 26, 26), + 'grey100': (255, 255, 255), + 'grey11': (28, 28, 28), + 'grey12': (31, 31, 31), + 'grey13': (33, 33, 33), + 'grey14': (36, 36, 36), + 
'grey15': (38, 38, 38), + 'grey16': (41, 41, 41), + 'grey17': (43, 43, 43), + 'grey18': (46, 46, 46), + 'grey19': (48, 48, 48), + 'grey2': (5, 5, 5), + 'grey20': (51, 51, 51), + 'grey21': (54, 54, 54), + 'grey22': (56, 56, 56), + 'grey23': (59, 59, 59), + 'grey24': (61, 61, 61), + 'grey25': (64, 64, 64), + 'grey26': (66, 66, 66), + 'grey27': (69, 69, 69), + 'grey28': (71, 71, 71), + 'grey29': (74, 74, 74), + 'grey3': (8, 8, 8), + 'grey30': (77, 77, 77), + 'grey31': (79, 79, 79), + 'grey32': (82, 82, 82), + 'grey33': (84, 84, 84), + 'grey34': (87, 87, 87), + 'grey35': (89, 89, 89), + 'grey36': (92, 92, 92), + 'grey37': (94, 94, 94), + 'grey38': (97, 97, 97), + 'grey39': (99, 99, 99), + 'grey4': (10, 10, 10), + 'grey40': (102, 102, 102), + 'grey41': (105, 105, 105), + 'grey42': (107, 107, 107), + 'grey43': (110, 110, 110), + 'grey44': (112, 112, 112), + 'grey45': (115, 115, 115), + 'grey46': (117, 117, 117), + 'grey47': (120, 120, 120), + 'grey48': (122, 122, 122), + 'grey49': (125, 125, 125), + 'grey5': (13, 13, 13), + 'grey50': (127, 127, 127), + 'grey51': (130, 130, 130), + 'grey52': (133, 133, 133), + 'grey53': (135, 135, 135), + 'grey54': (138, 138, 138), + 'grey55': (140, 140, 140), + 'grey56': (143, 143, 143), + 'grey57': (145, 145, 145), + 'grey58': (148, 148, 148), + 'grey59': (150, 150, 150), + 'grey6': (15, 15, 15), + 'grey60': (153, 153, 153), + 'grey61': (156, 156, 156), + 'grey62': (158, 158, 158), + 'grey63': (161, 161, 161), + 'grey64': (163, 163, 163), + 'grey65': (166, 166, 166), + 'grey66': (168, 168, 168), + 'grey67': (171, 171, 171), + 'grey68': (173, 173, 173), + 'grey69': (176, 176, 176), + 'grey7': (18, 18, 18), + 'grey70': (179, 179, 179), + 'grey71': (181, 181, 181), + 'grey72': (184, 184, 184), + 'grey73': (186, 186, 186), + 'grey74': (189, 189, 189), + 'grey75': (191, 191, 191), + 'grey76': (194, 194, 194), + 'grey77': (196, 196, 196), + 'grey78': (199, 199, 199), + 'grey79': (201, 201, 201), + 'grey8': (20, 20, 20), + 'grey80': (204, 204, 
204), + 'grey81': (207, 207, 207), + 'grey82': (209, 209, 209), + 'grey83': (212, 212, 212), + 'grey84': (214, 214, 214), + 'grey85': (217, 217, 217), + 'grey86': (219, 219, 219), + 'grey87': (222, 222, 222), + 'grey88': (224, 224, 224), + 'grey89': (227, 227, 227), + 'grey9': (23, 23, 23), + 'grey90': (229, 229, 229), + 'grey91': (232, 232, 232), + 'grey92': (235, 235, 235), + 'grey93': (237, 237, 237), + 'grey94': (240, 240, 240), + 'grey95': (242, 242, 242), + 'grey96': (245, 245, 245), + 'grey97': (247, 247, 247), + 'grey98': (250, 250, 250), + 'grey99': (252, 252, 252), + 'honeydew': (240, 255, 240), + 'honeydew1': (240, 255, 240), + 'honeydew2': (224, 238, 224), + 'honeydew3': (193, 205, 193), + 'honeydew4': (131, 139, 131), + 'hotpink': (255, 105, 180), + 'hotpink1': (255, 110, 180), + 'hotpink2': (238, 106, 167), + 'hotpink3': (205, 96, 144), + 'hotpink4': (139, 58, 98), + 'indianred': (205, 92, 92), + 'indianred1': (255, 106, 106), + 'indianred2': (238, 99, 99), + 'indianred3': (205, 85, 85), + 'indianred4': (139, 58, 58), + 'indigo': (75, 0, 130), + 'invis': (255, 255, 254), + 'ivory': (255, 255, 240), + 'ivory1': (255, 255, 240), + 'ivory2': (238, 238, 224), + 'ivory3': (205, 205, 193), + 'ivory4': (139, 139, 131), + 'khaki': (240, 230, 140), + 'khaki1': (255, 246, 143), + 'khaki2': (238, 230, 133), + 'khaki3': (205, 198, 115), + 'khaki4': (139, 134, 78), + 'lavender': (230, 230, 250), + 'lavenderblush': (255, 240, 245), + 'lavenderblush1': (255, 240, 245), + 'lavenderblush2': (238, 224, 229), + 'lavenderblush3': (205, 193, 197), + 'lavenderblush4': (139, 131, 134), + 'lawngreen': (124, 252, 0), + 'lemonchiffon': (255, 250, 205), + 'lemonchiffon1': (255, 250, 205), + 'lemonchiffon2': (238, 233, 191), + 'lemonchiffon3': (205, 201, 165), + 'lemonchiffon4': (139, 137, 112), + 'lightblue': (173, 216, 230), + 'lightblue1': (191, 239, 255), + 'lightblue2': (178, 223, 238), + 'lightblue3': (154, 192, 205), + 'lightblue4': (104, 131, 139), + 'lightcoral': (240, 
128, 128), + 'lightcyan': (224, 255, 255), + 'lightcyan1': (224, 255, 255), + 'lightcyan2': (209, 238, 238), + 'lightcyan3': (180, 205, 205), + 'lightcyan4': (122, 139, 139), + 'lightgoldenrod': (238, 221, 130), + 'lightgoldenrod1': (255, 236, 139), + 'lightgoldenrod2': (238, 220, 130), + 'lightgoldenrod3': (205, 190, 112), + 'lightgoldenrod4': (139, 129, 76), + 'lightgoldenrodyellow': (250, 250, 210), + 'lightgray': (211, 211, 211), + 'lightgrey': (211, 211, 211), + 'lightpink': (255, 182, 193), + 'lightpink1': (255, 174, 185), + 'lightpink2': (238, 162, 173), + 'lightpink3': (205, 140, 149), + 'lightpink4': (139, 95, 101), + 'lightsalmon': (255, 160, 122), + 'lightsalmon1': (255, 160, 122), + 'lightsalmon2': (238, 149, 114), + 'lightsalmon3': (205, 129, 98), + 'lightsalmon4': (139, 87, 66), + 'lightseagreen': (32, 178, 170), + 'lightskyblue': (135, 206, 250), + 'lightskyblue1': (176, 226, 255), + 'lightskyblue2': (164, 211, 238), + 'lightskyblue3': (141, 182, 205), + 'lightskyblue4': (96, 123, 139), + 'lightslateblue': (132, 112, 255), + 'lightslategray': (119, 136, 153), + 'lightslategrey': (119, 136, 153), + 'lightsteelblue': (176, 196, 222), + 'lightsteelblue1': (202, 225, 255), + 'lightsteelblue2': (188, 210, 238), + 'lightsteelblue3': (162, 181, 205), + 'lightsteelblue4': (110, 123, 139), + 'lightyellow': (255, 255, 224), + 'lightyellow1': (255, 255, 224), + 'lightyellow2': (238, 238, 209), + 'lightyellow3': (205, 205, 180), + 'lightyellow4': (139, 139, 122), + 'limegreen': (50, 205, 50), + 'linen': (250, 240, 230), + 'magenta': (255, 0, 255), + 'magenta1': (255, 0, 255), + 'magenta2': (238, 0, 238), + 'magenta3': (205, 0, 205), + 'magenta4': (139, 0, 139), + 'maroon': (176, 48, 96), + 'maroon1': (255, 52, 179), + 'maroon2': (238, 48, 167), + 'maroon3': (205, 41, 144), + 'maroon4': (139, 28, 98), + 'mediumaquamarine': (102, 205, 170), + 'mediumblue': (0, 0, 205), + 'mediumorchid': (186, 85, 211), + 'mediumorchid1': (224, 102, 255), + 'mediumorchid2': (209, 
95, 238), + 'mediumorchid3': (180, 82, 205), + 'mediumorchid4': (122, 55, 139), + 'mediumpurple': (147, 112, 219), + 'mediumpurple1': (171, 130, 255), + 'mediumpurple2': (159, 121, 238), + 'mediumpurple3': (137, 104, 205), + 'mediumpurple4': (93, 71, 139), + 'mediumseagreen': (60, 179, 113), + 'mediumslateblue': (123, 104, 238), + 'mediumspringgreen': (0, 250, 154), + 'mediumturquoise': (72, 209, 204), + 'mediumvioletred': (199, 21, 133), + 'midnightblue': (25, 25, 112), + 'mintcream': (245, 255, 250), + 'mistyrose': (255, 228, 225), + 'mistyrose1': (255, 228, 225), + 'mistyrose2': (238, 213, 210), + 'mistyrose3': (205, 183, 181), + 'mistyrose4': (139, 125, 123), + 'moccasin': (255, 228, 181), + 'navajowhite': (255, 222, 173), + 'navajowhite1': (255, 222, 173), + 'navajowhite2': (238, 207, 161), + 'navajowhite3': (205, 179, 139), + 'navajowhite4': (139, 121, 94), + 'navy': (0, 0, 128), + 'navyblue': (0, 0, 128), + 'none': (255, 255, 254), + 'oldlace': (253, 245, 230), + 'olivedrab': (107, 142, 35), + 'olivedrab1': (192, 255, 62), + 'olivedrab2': (179, 238, 58), + 'olivedrab3': (154, 205, 50), + 'olivedrab4': (105, 139, 34), + 'orange': (255, 165, 0), + 'orange1': (255, 165, 0), + 'orange2': (238, 154, 0), + 'orange3': (205, 133, 0), + 'orange4': (139, 90, 0), + 'orangered': (255, 69, 0), + 'orangered1': (255, 69, 0), + 'orangered2': (238, 64, 0), + 'orangered3': (205, 55, 0), + 'orangered4': (139, 37, 0), + 'orchid': (218, 112, 214), + 'orchid1': (255, 131, 250), + 'orchid2': (238, 122, 233), + 'orchid3': (205, 105, 201), + 'orchid4': (139, 71, 137), + 'palegoldenrod': (238, 232, 170), + 'palegreen': (152, 251, 152), + 'palegreen1': (154, 255, 154), + 'palegreen2': (144, 238, 144), + 'palegreen3': (124, 205, 124), + 'palegreen4': (84, 139, 84), + 'paleturquoise': (175, 238, 238), + 'paleturquoise1': (187, 255, 255), + 'paleturquoise2': (174, 238, 238), + 'paleturquoise3': (150, 205, 205), + 'paleturquoise4': (102, 139, 139), + 'palevioletred': (219, 112, 147), + 
'palevioletred1': (255, 130, 171), + 'palevioletred2': (238, 121, 159), + 'palevioletred3': (205, 104, 137), + 'palevioletred4': (139, 71, 93), + 'papayawhip': (255, 239, 213), + 'peachpuff': (255, 218, 185), + 'peachpuff1': (255, 218, 185), + 'peachpuff2': (238, 203, 173), + 'peachpuff3': (205, 175, 149), + 'peachpuff4': (139, 119, 101), + 'peru': (205, 133, 63), + 'pink': (255, 192, 203), + 'pink1': (255, 181, 197), + 'pink2': (238, 169, 184), + 'pink3': (205, 145, 158), + 'pink4': (139, 99, 108), + 'plum': (221, 160, 221), + 'plum1': (255, 187, 255), + 'plum2': (238, 174, 238), + 'plum3': (205, 150, 205), + 'plum4': (139, 102, 139), + 'powderblue': (176, 224, 230), + 'purple': (160, 32, 240), + 'purple1': (155, 48, 255), + 'purple2': (145, 44, 238), + 'purple3': (125, 38, 205), + 'purple4': (85, 26, 139), + 'red': (255, 0, 0), + 'red1': (255, 0, 0), + 'red2': (238, 0, 0), + 'red3': (205, 0, 0), + 'red4': (139, 0, 0), + 'rosybrown': (188, 143, 143), + 'rosybrown1': (255, 193, 193), + 'rosybrown2': (238, 180, 180), + 'rosybrown3': (205, 155, 155), + 'rosybrown4': (139, 105, 105), + 'royalblue': (65, 105, 225), + 'royalblue1': (72, 118, 255), + 'royalblue2': (67, 110, 238), + 'royalblue3': (58, 95, 205), + 'royalblue4': (39, 64, 139), + 'saddlebrown': (139, 69, 19), + 'salmon': (250, 128, 114), + 'salmon1': (255, 140, 105), + 'salmon2': (238, 130, 98), + 'salmon3': (205, 112, 84), + 'salmon4': (139, 76, 57), + 'sandybrown': (244, 164, 96), + 'seagreen': (46, 139, 87), + 'seagreen1': (84, 255, 159), + 'seagreen2': (78, 238, 148), + 'seagreen3': (67, 205, 128), + 'seagreen4': (46, 139, 87), + 'seashell': (255, 245, 238), + 'seashell1': (255, 245, 238), + 'seashell2': (238, 229, 222), + 'seashell3': (205, 197, 191), + 'seashell4': (139, 134, 130), + 'sienna': (160, 82, 45), + 'sienna1': (255, 130, 71), + 'sienna2': (238, 121, 66), + 'sienna3': (205, 104, 57), + 'sienna4': (139, 71, 38), + 'skyblue': (135, 206, 235), + 'skyblue1': (135, 206, 255), + 'skyblue2': (126, 
192, 238), + 'skyblue3': (108, 166, 205), + 'skyblue4': (74, 112, 139), + 'slateblue': (106, 90, 205), + 'slateblue1': (131, 111, 255), + 'slateblue2': (122, 103, 238), + 'slateblue3': (105, 89, 205), + 'slateblue4': (71, 60, 139), + 'slategray': (112, 128, 144), + 'slategray1': (198, 226, 255), + 'slategray2': (185, 211, 238), + 'slategray3': (159, 182, 205), + 'slategray4': (108, 123, 139), + 'slategrey': (112, 128, 144), + 'snow': (255, 250, 250), + 'snow1': (255, 250, 250), + 'snow2': (238, 233, 233), + 'snow3': (205, 201, 201), + 'snow4': (139, 137, 137), + 'springgreen': (0, 255, 127), + 'springgreen1': (0, 255, 127), + 'springgreen2': (0, 238, 118), + 'springgreen3': (0, 205, 102), + 'springgreen4': (0, 139, 69), + 'steelblue': (70, 130, 180), + 'steelblue1': (99, 184, 255), + 'steelblue2': (92, 172, 238), + 'steelblue3': (79, 148, 205), + 'steelblue4': (54, 100, 139), + 'tan': (210, 180, 140), + 'tan1': (255, 165, 79), + 'tan2': (238, 154, 73), + 'tan3': (205, 133, 63), + 'tan4': (139, 90, 43), + 'thistle': (216, 191, 216), + 'thistle1': (255, 225, 255), + 'thistle2': (238, 210, 238), + 'thistle3': (205, 181, 205), + 'thistle4': (139, 123, 139), + 'tomato': (255, 99, 71), + 'tomato1': (255, 99, 71), + 'tomato2': (238, 92, 66), + 'tomato3': (205, 79, 57), + 'tomato4': (139, 54, 38), + 'transparent': (255, 255, 254), + 'turquoise': (64, 224, 208), + 'turquoise1': (0, 245, 255), + 'turquoise2': (0, 229, 238), + 'turquoise3': (0, 197, 205), + 'turquoise4': (0, 134, 139), + 'violet': (238, 130, 238), + 'violetred': (208, 32, 144), + 'violetred1': (255, 62, 150), + 'violetred2': (238, 58, 140), + 'violetred3': (205, 50, 120), + 'violetred4': (139, 34, 82), + 'wheat': (245, 222, 179), + 'wheat1': (255, 231, 186), + 'wheat2': (238, 216, 174), + 'wheat3': (205, 186, 150), + 'wheat4': (139, 126, 102), + 'white': (255, 255, 255), + 'whitesmoke': (245, 245, 245), + 'yellow': (255, 255, 0), + 'yellow1': (255, 255, 0), + 'yellow2': (238, 238, 0), + 'yellow3': (205, 205, 
0), + 'yellow4': (139, 139, 0), + 'yellowgreen': (154, 205, 50), } re_nonword=re.compile(r'([^0-9a-zA-Z_.]+)') re_linewidth=re.compile(r'setlinewidth\((\d+(\.\d*)?|\.\d+)\)') diff --git a/lib-python/2.7/distutils/command/build_ext.py b/lib-python/2.7/distutils/command/build_ext.py --- a/lib-python/2.7/distutils/command/build_ext.py +++ b/lib-python/2.7/distutils/command/build_ext.py @@ -188,7 +188,7 @@ # the 'libs' directory is for binary installs - we assume that # must be the *native* platform. But we don't really support # cross-compiling via a binary install anyway, so we let it go. - self.library_dirs.append(os.path.join(sys.exec_prefix, 'include')) + self.library_dirs.append(os.path.join(sys.exec_prefix, 'libs')) if self.debug: self.build_temp = os.path.join(self.build_temp, "Debug") else: diff --git a/lib_pypy/_pypy_testcapi.py b/lib_pypy/_pypy_testcapi.py --- a/lib_pypy/_pypy_testcapi.py +++ b/lib_pypy/_pypy_testcapi.py @@ -63,7 +63,7 @@ if sys.platform == 'win32': # XXX pyconfig.h uses a pragma to link to the import library, # which is currently python27.lib - library = os.path.join(thisdir, '..', 'include', 'python27') + library = os.path.join(thisdir, '..', 'libs', 'python27') if not os.path.exists(library + '.lib'): # For a local translation or nightly build library = os.path.join(thisdir, '..', 'pypy', 'goal', 'python27') diff --git a/lib_pypy/cffi.egg-info/PKG-INFO b/lib_pypy/cffi.egg-info/PKG-INFO --- a/lib_pypy/cffi.egg-info/PKG-INFO +++ b/lib_pypy/cffi.egg-info/PKG-INFO @@ -1,6 +1,6 @@ Metadata-Version: 1.1 Name: cffi -Version: 1.5.0 +Version: 1.5.2 Summary: Foreign Function Interface for Python calling C code. 
Home-page: http://cffi.readthedocs.org Author: Armin Rigo, Maciej Fijalkowski diff --git a/lib_pypy/cffi/__init__.py b/lib_pypy/cffi/__init__.py --- a/lib_pypy/cffi/__init__.py +++ b/lib_pypy/cffi/__init__.py @@ -4,8 +4,8 @@ from .api import FFI, CDefError, FFIError from .ffiplatform import VerificationError, VerificationMissing -__version__ = "1.5.0" -__version_info__ = (1, 5, 0) +__version__ = "1.5.2" +__version_info__ = (1, 5, 2) # The verifier module file names are based on the CRC32 of a string that # contains the following version number. It may be older than __version__ diff --git a/lib_pypy/cffi/_cffi_include.h b/lib_pypy/cffi/_cffi_include.h --- a/lib_pypy/cffi/_cffi_include.h +++ b/lib_pypy/cffi/_cffi_include.h @@ -231,6 +231,12 @@ ((got_nonpos) == (expected <= 0) && \ (got) == (unsigned long long)expected) +#ifdef MS_WIN32 +# define _cffi_stdcall __stdcall +#else +# define _cffi_stdcall /* nothing */ +#endif + #ifdef __cplusplus } #endif diff --git a/lib_pypy/cffi/_embedding.h b/lib_pypy/cffi/_embedding.h new file mode 100644 --- /dev/null +++ b/lib_pypy/cffi/_embedding.h @@ -0,0 +1,517 @@ + +/***** Support code for embedding *****/ + +#if defined(_MSC_VER) +# define CFFI_DLLEXPORT __declspec(dllexport) +#elif defined(__GNUC__) +# define CFFI_DLLEXPORT __attribute__((visibility("default"))) +#else +# define CFFI_DLLEXPORT /* nothing */ +#endif + + +/* There are two global variables of type _cffi_call_python_fnptr: + + * _cffi_call_python, which we declare just below, is the one called + by ``extern "Python"`` implementations. + + * _cffi_call_python_org, which on CPython is actually part of the + _cffi_exports[] array, is the function pointer copied from + _cffi_backend. + + After initialization is complete, both are equal. 
However, the + first one remains equal to &_cffi_start_and_call_python until the + very end of initialization, when we are (or should be) sure that + concurrent threads also see a completely initialized world, and + only then is it changed. +*/ +#undef _cffi_call_python +typedef void (*_cffi_call_python_fnptr)(struct _cffi_externpy_s *, char *); +static void _cffi_start_and_call_python(struct _cffi_externpy_s *, char *); +static _cffi_call_python_fnptr _cffi_call_python = &_cffi_start_and_call_python; + + +#ifndef _MSC_VER + /* --- Assuming a GCC not infinitely old --- */ +# define cffi_compare_and_swap(l,o,n) __sync_bool_compare_and_swap(l,o,n) +# define cffi_write_barrier() __sync_synchronize() +# if !defined(__amd64__) && !defined(__x86_64__) && \ + !defined(__i386__) && !defined(__i386) +# define cffi_read_barrier() __sync_synchronize() +# else +# define cffi_read_barrier() (void)0 +# endif +#else + /* --- Windows threads version --- */ +# include +# define cffi_compare_and_swap(l,o,n) \ + (InterlockedCompareExchangePointer(l,n,o) == (o)) +# define cffi_write_barrier() InterlockedCompareExchange(&_cffi_dummy,0,0) +# define cffi_read_barrier() (void)0 +static volatile LONG _cffi_dummy; +#endif + +#ifdef WITH_THREAD +# ifndef _MSC_VER +# include + static pthread_mutex_t _cffi_embed_startup_lock; +# else + static CRITICAL_SECTION _cffi_embed_startup_lock; +# endif + static char _cffi_embed_startup_lock_ready = 0; +#endif + +static void _cffi_acquire_reentrant_mutex(void) +{ + static void *volatile lock = NULL; + + while (!cffi_compare_and_swap(&lock, NULL, (void *)1)) { + /* should ideally do a spin loop instruction here, but + hard to do it portably and doesn't really matter I + think: pthread_mutex_init() should be very fast, and + this is only run at start-up anyway. 
*/ + } + +#ifdef WITH_THREAD + if (!_cffi_embed_startup_lock_ready) { +# ifndef _MSC_VER + pthread_mutexattr_t attr; + pthread_mutexattr_init(&attr); + pthread_mutexattr_settype(&attr, PTHREAD_MUTEX_RECURSIVE); + pthread_mutex_init(&_cffi_embed_startup_lock, &attr); +# else + InitializeCriticalSection(&_cffi_embed_startup_lock); +# endif + _cffi_embed_startup_lock_ready = 1; + } +#endif + + while (!cffi_compare_and_swap(&lock, (void *)1, NULL)) + ; + +#ifndef _MSC_VER + pthread_mutex_lock(&_cffi_embed_startup_lock); +#else + EnterCriticalSection(&_cffi_embed_startup_lock); +#endif +} + +static void _cffi_release_reentrant_mutex(void) +{ +#ifndef _MSC_VER + pthread_mutex_unlock(&_cffi_embed_startup_lock); +#else + LeaveCriticalSection(&_cffi_embed_startup_lock); +#endif +} + + +/********** CPython-specific section **********/ +#ifndef PYPY_VERSION + + +#define _cffi_call_python_org _cffi_exports[_CFFI_CPIDX] + +PyMODINIT_FUNC _CFFI_PYTHON_STARTUP_FUNC(void); /* forward */ + +static void _cffi_py_initialize(void) +{ + /* XXX use initsigs=0, which "skips initialization registration of + signal handlers, which might be useful when Python is + embedded" according to the Python docs. But review and think + if it should be a user-controllable setting. + + XXX we should also give a way to write errors to a buffer + instead of to stderr. + + XXX if importing 'site' fails, CPython (any version) calls + exit(). Should we try to work around this behavior here? + */ + Py_InitializeEx(0); +} + +static int _cffi_initialize_python(void) +{ + /* This initializes Python, imports _cffi_backend, and then the + present .dll/.so is set up as a CPython C extension module. + */ + int result; + PyGILState_STATE state; + PyObject *pycode=NULL, *global_dict=NULL, *x; + +#if PY_MAJOR_VERSION >= 3 + /* see comments in _cffi_carefully_make_gil() about the + Python2/Python3 difference + */ +#else + /* Acquire the GIL. We have no threadstate here. 
If Python is + already initialized, it is possible that there is already one + existing for this thread, but it is not made current now. + */ + PyEval_AcquireLock(); + + _cffi_py_initialize(); + + /* The Py_InitializeEx() sometimes made a threadstate for us, but + not always. Indeed Py_InitializeEx() could be called and do + nothing. So do we have a threadstate, or not? We don't know, + but we can replace it with NULL in all cases. + */ + (void)PyThreadState_Swap(NULL); + + /* Now we can release the GIL and re-acquire immediately using the + logic of PyGILState(), which handles making or installing the + correct threadstate. + */ + PyEval_ReleaseLock(); +#endif + state = PyGILState_Ensure(); + + /* Call the initxxx() function from the present module. It will + create and initialize us as a CPython extension module, instead + of letting the startup Python code do it---it might reimport + the same .dll/.so and get maybe confused on some platforms. + It might also have troubles locating the .dll/.so again for all + I know. + */ + (void)_CFFI_PYTHON_STARTUP_FUNC(); + if (PyErr_Occurred()) + goto error; + + /* Now run the Python code provided to ffi.embedding_init_code(). + */ + pycode = Py_CompileString(_CFFI_PYTHON_STARTUP_CODE, + "", + Py_file_input); + if (pycode == NULL) + goto error; + global_dict = PyDict_New(); + if (global_dict == NULL) + goto error; + if (PyDict_SetItemString(global_dict, "__builtins__", + PyThreadState_GET()->interp->builtins) < 0) + goto error; + x = PyEval_EvalCode( +#if PY_MAJOR_VERSION < 3 + (PyCodeObject *) +#endif + pycode, global_dict, global_dict); + if (x == NULL) + goto error; + Py_DECREF(x); + + /* Done! Now if we've been called from + _cffi_start_and_call_python() in an ``extern "Python"``, we can + only hope that the Python code did correctly set up the + corresponding @ffi.def_extern() function. 
Otherwise, the + general logic of ``extern "Python"`` functions (inside the + _cffi_backend module) will find that the reference is still + missing and print an error. + */ + result = 0; + done: + Py_XDECREF(pycode); + Py_XDECREF(global_dict); + PyGILState_Release(state); + return result; + + error:; + { + /* Print as much information as potentially useful. + Debugging load-time failures with embedding is not fun + */ + PyObject *exception, *v, *tb, *f, *modules, *mod; + PyErr_Fetch(&exception, &v, &tb); + if (exception != NULL) { + PyErr_NormalizeException(&exception, &v, &tb); + PyErr_Display(exception, v, tb); + } + Py_XDECREF(exception); + Py_XDECREF(v); + Py_XDECREF(tb); + + f = PySys_GetObject((char *)"stderr"); + if (f != NULL && f != Py_None) { + PyFile_WriteString("\nFrom: " _CFFI_MODULE_NAME + "\ncompiled with cffi version: 1.5.2" + "\n_cffi_backend module: ", f); + modules = PyImport_GetModuleDict(); + mod = PyDict_GetItemString(modules, "_cffi_backend"); + if (mod == NULL) { + PyFile_WriteString("not loaded", f); + } + else { + v = PyObject_GetAttrString(mod, "__file__"); + PyFile_WriteObject(v, f, 0); + Py_XDECREF(v); + } + PyFile_WriteString("\nsys.path: ", f); + PyFile_WriteObject(PySys_GetObject((char *)"path"), f, 0); + PyFile_WriteString("\n\n", f); + } + } + result = -1; + goto done; +} + +PyAPI_DATA(char *) _PyParser_TokenNames[]; /* from CPython */ + +static int _cffi_carefully_make_gil(void) +{ + /* This does the basic initialization of Python. It can be called + completely concurrently from unrelated threads. It assumes + that we don't hold the GIL before (if it exists), and we don't + hold it afterwards. + + What it really does is completely different in Python 2 and + Python 3. + + Python 2 + ======== + + Initialize the GIL, without initializing the rest of Python, + by calling PyEval_InitThreads(). + + PyEval_InitThreads() must not be called concurrently at all. + So we use a global variable as a simple spin lock. 
This global + variable must be from 'libpythonX.Y.so', not from this + cffi-based extension module, because it must be shared from + different cffi-based extension modules. We choose + _PyParser_TokenNames[0] as a completely arbitrary pointer value + that is never written to. The default is to point to the + string "ENDMARKER". We change it temporarily to point to the + next character in that string. (Yes, I know it's REALLY + obscure.) + + Python 3 + ======== + + In Python 3, PyEval_InitThreads() cannot be called before + Py_InitializeEx() any more. So this function calls + Py_InitializeEx() first. It uses the same obscure logic to + make sure we never call it concurrently. + + Arguably, this is less good on the spinlock, because + Py_InitializeEx() takes much longer to run than + PyEval_InitThreads(). But I didn't find a way around it. + */ + +#ifdef WITH_THREAD + char *volatile *lock = (char *volatile *)_PyParser_TokenNames; + char *old_value; + + while (1) { /* spin loop */ + old_value = *lock; + if (old_value[0] == 'E') { + assert(old_value[1] == 'N'); + if (cffi_compare_and_swap(lock, old_value, old_value + 1)) + break; + } + else { + assert(old_value[0] == 'N'); + /* should ideally do a spin loop instruction here, but + hard to do it portably and doesn't really matter I + think: PyEval_InitThreads() should be very fast, and + this is only run at start-up anyway. 
*/ + } + } +#endif + +#if PY_MAJOR_VERSION >= 3 + /* Python 3: call Py_InitializeEx() */ + { + PyGILState_STATE state = PyGILState_UNLOCKED; + if (!Py_IsInitialized()) + _cffi_py_initialize(); + else + state = PyGILState_Ensure(); + + PyEval_InitThreads(); + PyGILState_Release(state); + } +#else + /* Python 2: call PyEval_InitThreads() */ +# ifdef WITH_THREAD + if (!PyEval_ThreadsInitialized()) { + PyEval_InitThreads(); /* makes the GIL */ + PyEval_ReleaseLock(); /* then release it */ + } + /* else: there is already a GIL, but we still needed to do the + spinlock dance to make sure that we see it as fully ready */ +# endif +#endif + +#ifdef WITH_THREAD + /* release the lock */ + while (!cffi_compare_and_swap(lock, old_value + 1, old_value)) + ; +#endif + + return 0; +} + +/********** end CPython-specific section **********/ + + +#else + + +/********** PyPy-specific section **********/ + +PyMODINIT_FUNC _CFFI_PYTHON_STARTUP_FUNC(const void *[]); /* forward */ + +static struct _cffi_pypy_init_s { + const char *name; + void (*func)(const void *[]); + const char *code; +} _cffi_pypy_init = { + _CFFI_MODULE_NAME, + _CFFI_PYTHON_STARTUP_FUNC, + _CFFI_PYTHON_STARTUP_CODE, +}; + +extern int pypy_carefully_make_gil(const char *); +extern int pypy_init_embedded_cffi_module(int, struct _cffi_pypy_init_s *); + +static int _cffi_carefully_make_gil(void) +{ + return pypy_carefully_make_gil(_CFFI_MODULE_NAME); +} + +static int _cffi_initialize_python(void) +{ + return pypy_init_embedded_cffi_module(0xB011, &_cffi_pypy_init); +} + +/********** end PyPy-specific section **********/ + + +#endif + + +#ifdef __GNUC__ +__attribute__((noinline)) +#endif +static _cffi_call_python_fnptr _cffi_start_python(void) +{ + /* Delicate logic to initialize Python. This function can be + called multiple times concurrently, e.g. when the process calls + its first ``extern "Python"`` functions in multiple threads at + once. It can also be called recursively, in which case we must + ignore it. 
We also have to consider what occurs if several + different cffi-based extensions reach this code in parallel + threads---it is a different copy of the code, then, and we + can't have any shared global variable unless it comes from + 'libpythonX.Y.so'. + + Idea: + + * _cffi_carefully_make_gil(): "carefully" call + PyEval_InitThreads() (possibly with Py_InitializeEx() first). + + * then we use a (local) custom lock to make sure that a call to this + cffi-based extension will wait if another call to the *same* + extension is running the initialization in another thread. + It is reentrant, so that a recursive call will not block, but + only one from a different thread. + + * then we grab the GIL and (Python 2) we call Py_InitializeEx(). + At this point, concurrent calls to Py_InitializeEx() are not + possible: we have the GIL. + + * do the rest of the specific initialization, which may + temporarily release the GIL but not the custom lock. + Only release the custom lock when we are done. + */ + static char called = 0; + + if (_cffi_carefully_make_gil() != 0) + return NULL; + + _cffi_acquire_reentrant_mutex(); + + /* Here the GIL exists, but we don't have it. We're only protected + from concurrency by the reentrant mutex. */ + + /* This file only initializes the embedded module once, the first + time this is called, even if there are subinterpreters. */ + if (!called) { + called = 1; /* invoke _cffi_initialize_python() only once, + but don't set '_cffi_call_python' right now, + otherwise concurrent threads won't call + this function at all (we need them to wait) */ + if (_cffi_initialize_python() == 0) { + /* now initialization is finished. Switch to the fast-path. */ + + /* We would like nobody to see the new value of + '_cffi_call_python' without also seeing the rest of the + data initialized. However, this is not possible. But + the new value of '_cffi_call_python' is the function + 'cffi_call_python()' from _cffi_backend. 
So: */ + cffi_write_barrier(); + /* ^^^ we put a write barrier here, and a corresponding + read barrier at the start of cffi_call_python(). This + ensures that after that read barrier, we see everything + done here before the write barrier. + */ + + assert(_cffi_call_python_org != NULL); + _cffi_call_python = (_cffi_call_python_fnptr)_cffi_call_python_org; + } + else { + /* initialization failed. Reset this to NULL, even if it was + already set to some other value. Future calls to + _cffi_start_python() are still forced to occur, and will + always return NULL from now on. */ + _cffi_call_python_org = NULL; + } + } + + _cffi_release_reentrant_mutex(); + + return (_cffi_call_python_fnptr)_cffi_call_python_org; +} + +static +void _cffi_start_and_call_python(struct _cffi_externpy_s *externpy, char *args) +{ + _cffi_call_python_fnptr fnptr; + int current_err = errno; +#ifdef _MSC_VER + int current_lasterr = GetLastError(); +#endif + fnptr = _cffi_start_python(); + if (fnptr == NULL) { + fprintf(stderr, "function %s() called, but initialization code " + "failed. Returning 0.\n", externpy->name); + memset(args, 0, externpy->size_of_result); + } +#ifdef _MSC_VER + SetLastError(current_lasterr); +#endif + errno = current_err; + + if (fnptr != NULL) + fnptr(externpy, args); +} + + +/* The cffi_start_python() function makes sure Python is initialized + and our cffi module is set up. It can be called manually from the + user C code. The same effect is obtained automatically from any + dll-exported ``extern "Python"`` function. This function returns + -1 if initialization failed, 0 if all is OK. 
*/ +_CFFI_UNUSED_FN +static int cffi_start_python(void) +{ + if (_cffi_call_python == &_cffi_start_and_call_python) { + if (_cffi_start_python() == NULL) + return -1; + } + cffi_read_barrier(); + return 0; +} + +#undef cffi_compare_and_swap +#undef cffi_write_barrier +#undef cffi_read_barrier diff --git a/lib_pypy/cffi/api.py b/lib_pypy/cffi/api.py --- a/lib_pypy/cffi/api.py +++ b/lib_pypy/cffi/api.py @@ -544,28 +544,50 @@ def _apply_embedding_fix(self, kwds): # must include an argument like "-lpython2.7" for the compiler + def ensure(key, value): + lst = kwds.setdefault(key, []) + if value not in lst: + lst.append(value) + # if '__pypy__' in sys.builtin_module_names: + import os + if sys.platform == "win32": + # we need 'libpypy-c.lib'. Current distributions of + # pypy (>= 4.1) contain it as 'libs/python27.lib'. + pythonlib = "python27" + if hasattr(sys, 'prefix'): + ensure('library_dirs', os.path.join(sys.prefix, 'libs')) + else: + # we need 'libpypy-c.{so,dylib}', which should be by + # default located in 'sys.prefix/bin' for installed + # systems. + pythonlib = "pypy-c" + if hasattr(sys, 'prefix'): + ensure('library_dirs', os.path.join(sys.prefix, 'bin')) + # On uninstalled pypy's, the libpypy-c is typically found in + # .../pypy/goal/. 
if hasattr(sys, 'prefix'): - import os - libdir = os.path.join(sys.prefix, 'bin') - dirs = kwds.setdefault('library_dirs', []) - if libdir not in dirs: - dirs.append(libdir) - pythonlib = "pypy-c" + ensure('library_dirs', os.path.join(sys.prefix, 'pypy', 'goal')) else: if sys.platform == "win32": template = "python%d%d" - if sys.flags.debug: - template = template + '_d' + if hasattr(sys, 'gettotalrefcount'): + template += '_d' else: + try: + import sysconfig + except ImportError: # 2.6 + from distutils import sysconfig template = "python%d.%d" + if sysconfig.get_config_var('DEBUG_EXT'): + template += sysconfig.get_config_var('DEBUG_EXT') pythonlib = (template % (sys.hexversion >> 24, (sys.hexversion >> 16) & 0xff)) if hasattr(sys, 'abiflags'): pythonlib += sys.abiflags - libraries = kwds.setdefault('libraries', []) - if pythonlib not in libraries: - libraries.append(pythonlib) + ensure('libraries', pythonlib) + if sys.platform == "win32": + ensure('extra_link_args', '/MANIFEST') def set_source(self, module_name, source, source_extension='.c', **kwds): if hasattr(self, '_assigned_source'): @@ -631,7 +653,7 @@ compiled DLL. Use '*' to force distutils' choice, suitable for regular CPython C API modules. Use a file name ending in '.*' to ask for the system's default extension for dynamic libraries - (.so/.dll). + (.so/.dll/.dylib). The default is '*' when building a non-embedded C API extension, and (module_name + '.*') when building an embedded library. 
@@ -695,6 +717,10 @@ # self._embedding = pysource + def def_extern(self, *args, **kwds): + raise ValueError("ffi.def_extern() is only available on API-mode FFI " + "objects") + def _load_backend_lib(backend, name, flags): if name is None: diff --git a/lib_pypy/cffi/cparser.py b/lib_pypy/cffi/cparser.py --- a/lib_pypy/cffi/cparser.py +++ b/lib_pypy/cffi/cparser.py @@ -220,7 +220,7 @@ self._included_declarations = set() self._anonymous_counter = 0 self._structnode2type = weakref.WeakKeyDictionary() - self._options = None + self._options = {} self._int_constants = {} self._recomplete = [] self._uses_new_feature = None @@ -374,7 +374,7 @@ def _declare_function(self, tp, quals, decl): tp = self._get_type_pointer(tp, quals) - if self._options['dllexport']: + if self._options.get('dllexport'): tag = 'dllexport_python ' elif self._inside_extern_python: tag = 'extern_python ' @@ -450,7 +450,7 @@ prevobj, prevquals = self._declarations[name] if prevobj is obj and prevquals == quals: return - if not self._options['override']: + if not self._options.get('override'): raise api.FFIError( "multiple declarations of %s (for interactive usage, " "try cdef(xx, override=True))" % (name,)) @@ -729,7 +729,7 @@ if isinstance(tp, model.StructType) and tp.partial: raise NotImplementedError("%s: using both bitfields and '...;'" % (tp,)) - tp.packed = self._options['packed'] + tp.packed = self._options.get('packed') if tp.completed: # must be re-completed: it is not opaque any more tp.completed = 0 self._recomplete.append(tp) diff --git a/lib_pypy/cffi/ffiplatform.py b/lib_pypy/cffi/ffiplatform.py --- a/lib_pypy/cffi/ffiplatform.py +++ b/lib_pypy/cffi/ffiplatform.py @@ -21,14 +21,12 @@ allsources.append(os.path.normpath(src)) return Extension(name=modname, sources=allsources, **kwds) -def compile(tmpdir, ext, compiler_verbose=0, target_extension=None, - embedding=False): +def compile(tmpdir, ext, compiler_verbose=0): """Compile a C extension module using distutils.""" saved_environ = 
os.environ.copy() try: - outputfilename = _build(tmpdir, ext, compiler_verbose, - target_extension, embedding) + outputfilename = _build(tmpdir, ext, compiler_verbose) outputfilename = os.path.abspath(outputfilename) finally: # workaround for a distutils bugs where some env vars can @@ -38,32 +36,7 @@ os.environ[key] = value return outputfilename -def _save_val(name): - import distutils.sysconfig - config_vars = distutils.sysconfig.get_config_vars() - return config_vars.get(name, Ellipsis) - -def _restore_val(name, value): - import distutils.sysconfig - config_vars = distutils.sysconfig.get_config_vars() - config_vars[name] = value - if value is Ellipsis: - del config_vars[name] - -def _win32_hack_for_embedding(): - from distutils.msvc9compiler import MSVCCompiler - if not hasattr(MSVCCompiler, '_remove_visual_c_ref_CFFI_BAK'): - MSVCCompiler._remove_visual_c_ref_CFFI_BAK = \ - MSVCCompiler._remove_visual_c_ref - MSVCCompiler._remove_visual_c_ref = lambda self,manifest_file: manifest_file - -def _win32_unhack_for_embedding(): - from distutils.msvc9compiler import MSVCCompiler - MSVCCompiler._remove_visual_c_ref = \ - MSVCCompiler._remove_visual_c_ref_CFFI_BAK - -def _build(tmpdir, ext, compiler_verbose=0, target_extension=None, - embedding=False): +def _build(tmpdir, ext, compiler_verbose=0): # XXX compact but horrible :-( from distutils.core import Distribution import distutils.errors, distutils.log @@ -76,25 +49,14 @@ options['build_temp'] = ('ffiplatform', tmpdir) # try: - if sys.platform == 'win32' and embedding: - _win32_hack_for_embedding() old_level = distutils.log.set_threshold(0) or 0 - old_SO = _save_val('SO') - old_EXT_SUFFIX = _save_val('EXT_SUFFIX') try: - if target_extension is not None: - _restore_val('SO', target_extension) - _restore_val('EXT_SUFFIX', target_extension) distutils.log.set_verbosity(compiler_verbose) dist.run_command('build_ext') cmd_obj = dist.get_command_obj('build_ext') [soname] = cmd_obj.get_outputs() finally: 
distutils.log.set_threshold(old_level) - _restore_val('SO', old_SO) - _restore_val('EXT_SUFFIX', old_EXT_SUFFIX) - if sys.platform == 'win32' and embedding: - _win32_unhack_for_embedding() except (distutils.errors.CompileError, distutils.errors.LinkError) as e: raise VerificationError('%s: %s' % (e.__class__.__name__, e)) diff --git a/lib_pypy/cffi/recompiler.py b/lib_pypy/cffi/recompiler.py --- a/lib_pypy/cffi/recompiler.py +++ b/lib_pypy/cffi/recompiler.py @@ -1170,6 +1170,8 @@ repr_arguments = ', '.join(arguments) repr_arguments = repr_arguments or 'void' name_and_arguments = '%s(%s)' % (name, repr_arguments) + if tp.abi == "__stdcall": + name_and_arguments = '_cffi_stdcall ' + name_and_arguments # def may_need_128_bits(tp): return (isinstance(tp, model.PrimitiveType) and @@ -1357,6 +1359,58 @@ parts[-1] += extension return os.path.join(outputdir, *parts), parts + +# Aaargh. Distutils is not tested at all for the purpose of compiling +# DLLs that are not extension modules. Here are some hacks to work +# around that, in the _patch_for_*() functions... + +def _patch_meth(patchlist, cls, name, new_meth): + old = getattr(cls, name) + patchlist.append((cls, name, old)) + setattr(cls, name, new_meth) + return old + +def _unpatch_meths(patchlist): + for cls, name, old_meth in reversed(patchlist): + setattr(cls, name, old_meth) + +def _patch_for_embedding(patchlist): + if sys.platform == 'win32': + # we must not remove the manifest when building for embedding! 
+ from distutils.msvc9compiler import MSVCCompiler + _patch_meth(patchlist, MSVCCompiler, '_remove_visual_c_ref', + lambda self, manifest_file: manifest_file) + + if sys.platform == 'darwin': + # we must not make a '-bundle', but a '-dynamiclib' instead + from distutils.ccompiler import CCompiler + def my_link_shared_object(self, *args, **kwds): + if '-bundle' in self.linker_so: + self.linker_so = list(self.linker_so) + i = self.linker_so.index('-bundle') + self.linker_so[i] = '-dynamiclib' + return old_link_shared_object(self, *args, **kwds) + old_link_shared_object = _patch_meth(patchlist, CCompiler, + 'link_shared_object', + my_link_shared_object) + +def _patch_for_target(patchlist, target): + from distutils.command.build_ext import build_ext + # if 'target' is different from '*', we need to patch some internal + # method to just return this 'target' value, instead of having it + # built from module_name + if target.endswith('.*'): + target = target[:-2] + if sys.platform == 'win32': + target += '.dll' + elif sys.platform == 'darwin': + target += '.dylib' + else: + target += '.so' + _patch_meth(patchlist, build_ext, 'get_ext_filename', + lambda self, ext_name: target) + + def recompile(ffi, module_name, preamble, tmpdir='.', call_c_compiler=True, c_file=None, source_extension='.c', extradir=None, compiler_verbose=1, target=None, **kwds): @@ -1382,36 +1436,22 @@ target = '%s.*' % module_name else: target = '*' - if target == '*': - target_module_name = module_name - target_extension = None # use default - else: - if target.endswith('.*'): - target = target[:-2] - if sys.platform == 'win32': - target += '.dll' - else: - target += '.so' - # split along the first '.' 
(not the last one, otherwise the - # preceeding dots are interpreted as splitting package names) - index = target.find('.') - if index < 0: - raise ValueError("target argument %r should be a file name " - "containing a '.'" % (target,)) - target_module_name = target[:index] - target_extension = target[index:] # - ext = ffiplatform.get_extension(ext_c_file, target_module_name, **kwds) + ext = ffiplatform.get_extension(ext_c_file, module_name, **kwds) updated = make_c_source(ffi, module_name, preamble, c_file) if call_c_compiler: + patchlist = [] cwd = os.getcwd() try: + if embedding: + _patch_for_embedding(patchlist) + if target != '*': + _patch_for_target(patchlist, target) os.chdir(tmpdir) - outputfilename = ffiplatform.compile('.', ext, compiler_verbose, - target_extension, - embedding=embedding) + outputfilename = ffiplatform.compile('.', ext, compiler_verbose) finally: os.chdir(cwd) + _unpatch_meths(patchlist) return outputfilename else: return ext, updated diff --git a/lib_pypy/cffi/vengine_cpy.py b/lib_pypy/cffi/vengine_cpy.py --- a/lib_pypy/cffi/vengine_cpy.py +++ b/lib_pypy/cffi/vengine_cpy.py @@ -1,3 +1,6 @@ +# +# DEPRECATED: implementation for ffi.verify() +# import sys, imp from . import model, ffiplatform diff --git a/lib_pypy/cffi/vengine_gen.py b/lib_pypy/cffi/vengine_gen.py --- a/lib_pypy/cffi/vengine_gen.py +++ b/lib_pypy/cffi/vengine_gen.py @@ -1,3 +1,6 @@ +# +# DEPRECATED: implementation for ffi.verify() +# import sys, os import types diff --git a/lib_pypy/cffi/verifier.py b/lib_pypy/cffi/verifier.py --- a/lib_pypy/cffi/verifier.py +++ b/lib_pypy/cffi/verifier.py @@ -1,3 +1,6 @@ +# +# DEPRECATED: implementation for ffi.verify() +# import sys, os, binascii, shutil, io from . import __version_verifier_modules__ from . 
import ffiplatform diff --git a/pypy/config/pypyoption.py b/pypy/config/pypyoption.py --- a/pypy/config/pypyoption.py +++ b/pypy/config/pypyoption.py @@ -36,13 +36,16 @@ "cStringIO", "thread", "itertools", "pyexpat", "_ssl", "cpyext", "array", "binascii", "_multiprocessing", '_warnings', "_collections", "_multibytecodec", "micronumpy", "_continuation", "_cffi_backend", - "_csv", "cppyy", "_pypyjson" + "_csv", "cppyy", "_pypyjson", ]) -if ((sys.platform.startswith('linux') or sys.platform == 'darwin') - and os.uname()[4] == 'x86_64' and sys.maxint > 2**32): - # it's not enough that we get x86_64 - working_modules.add('_vmprof') +from rpython.jit.backend import detect_cpu +try: + if detect_cpu.autodetect().startswith('x86'): + working_modules.add('_vmprof') +except detect_cpu.ProcessorAutodetectError: + pass + translation_modules = default_modules.copy() translation_modules.update([ @@ -85,8 +88,7 @@ module_dependencies = { '_multiprocessing': [('objspace.usemodules.time', True), ('objspace.usemodules.thread', True)], - 'cpyext': [('objspace.usemodules.array', True), - ('objspace.usemodules.micronumpy', True)], + 'cpyext': [('objspace.usemodules.array', True)], 'cppyy': [('objspace.usemodules.cpyext', True)], } module_suggests = { diff --git a/pypy/doc/whatsnew-head.rst b/pypy/doc/whatsnew-head.rst --- a/pypy/doc/whatsnew-head.rst +++ b/pypy/doc/whatsnew-head.rst @@ -7,6 +7,9 @@ Fixed ``_PyLong_FromByteArray()``, which was buggy. +Fixed a crash with stacklets (or greenlets) on non-Linux machines +which showed up if you forget stacklets without resuming them. + .. branch: numpy-1.10 Fix tests to run cleanly with -A and start to fix micronumpy for upstream numpy @@ -38,7 +41,8 @@ .. branch: compress-numbering -Improve the memory signature of numbering instances in the JIT. +Improve the memory signature of numbering instances in the JIT. This should massively +decrease the amount of memory consumed by the JIT, which is significant for most programs. .. 
branch: fix-trace-too-long-heuristic @@ -133,3 +137,29 @@ `rpython/jit/metainterp/optimizeopt/pure.py`, which can result in better codegen for traces containing a large number of pure getfield operations. +.. branch: exctrans + +Try to ensure that no new functions get annotated during the 'source_c' phase. +Refactor sandboxing to operate at a higher level. + +.. branch: cpyext-bootstrap + +.. branch: vmprof-newstack + +Refactor vmprof to work cross-operating-system. + +.. branch: seperate-strucmember_h + +Seperate structmember.h from Python.h Also enhance creating api functions +to specify which header file they appear in (previously only pypy_decl.h) + +.. branch: llimpl + +Refactor register_external(), remove running_on_llinterp mechanism and +apply sandbox transform on externals at the end of annotation. + +.. branch: cffi-embedding-win32 + +.. branch: windows-vmprof-support + +vmprof should work on Windows. diff --git a/pypy/goal/targetpypystandalone.py b/pypy/goal/targetpypystandalone.py --- a/pypy/goal/targetpypystandalone.py +++ b/pypy/goal/targetpypystandalone.py @@ -239,6 +239,9 @@ raise Exception("Cannot use the --output option with PyPy " "when --shared is on (it is by default). 
" "See issue #1971.") + if sys.platform == 'win32': + config.translation.libname = '..\\..\\libs\\python27.lib' + thisdir.join('..', '..', 'libs').ensure(dir=1) if config.translation.thread: config.objspace.usemodules.thread = True diff --git a/pypy/interpreter/baseobjspace.py b/pypy/interpreter/baseobjspace.py --- a/pypy/interpreter/baseobjspace.py +++ b/pypy/interpreter/baseobjspace.py @@ -306,7 +306,7 @@ return None -class W_InterpIterable(W_Root): +class InterpIterable(object): def __init__(self, space, w_iterable): self.w_iter = space.iter(w_iterable) self.space = space @@ -745,9 +745,13 @@ return self.int_w(self.hash(w_obj)) def len_w(self, w_obj): - """shotcut for space.int_w(space.len(w_obj))""" + """shortcut for space.int_w(space.len(w_obj))""" return self.int_w(self.len(w_obj)) + def contains_w(self, w_container, w_item): + """shortcut for space.is_true(space.contains(w_container, w_item))""" + return self.is_true(self.contains(w_container, w_item)) + def setitem_str(self, w_obj, key, w_value): return self.setitem(w_obj, self.wrap(key), w_value) @@ -846,7 +850,7 @@ return lst_w[:] # make the resulting list resizable def iteriterable(self, w_iterable): - return W_InterpIterable(self, w_iterable) + return InterpIterable(self, w_iterable) def _unpackiterable_unknown_length(self, w_iterator, w_iterable): """Unpack an iterable of unknown length into an interp-level @@ -1237,7 +1241,7 @@ if not isinstance(statement, PyCode): raise TypeError('space.exec_(): expected a string, code or PyCode object') w_key = self.wrap('__builtins__') - if not self.is_true(self.contains(w_globals, w_key)): + if not self.contains_w(w_globals, w_key): self.setitem(w_globals, w_key, self.wrap(self.builtin)) return statement.exec_code(self, w_globals, w_locals) diff --git a/pypy/module/__builtin__/interp_classobj.py b/pypy/module/__builtin__/interp_classobj.py --- a/pypy/module/__builtin__/interp_classobj.py +++ b/pypy/module/__builtin__/interp_classobj.py @@ -20,7 +20,7 @@ if not 
space.isinstance_w(w_dict, space.w_dict): raise_type_err(space, 'bases', 'tuple', w_bases) - if not space.is_true(space.contains(w_dict, space.wrap("__doc__"))): + if not space.contains_w(w_dict, space.wrap("__doc__")): space.setitem(w_dict, space.wrap("__doc__"), space.w_None) # XXX missing: lengthy and obscure logic about "__module__" diff --git a/pypy/module/_cffi_backend/__init__.py b/pypy/module/_cffi_backend/__init__.py --- a/pypy/module/_cffi_backend/__init__.py +++ b/pypy/module/_cffi_backend/__init__.py @@ -3,7 +3,7 @@ from rpython.rlib import rdynload, clibffi, entrypoint from rpython.rtyper.lltypesystem import rffi -VERSION = "1.5.0" +VERSION = "1.5.2" FFI_DEFAULT_ABI = clibffi.FFI_DEFAULT_ABI try: @@ -69,6 +69,7 @@ def startup(self, space): from pypy.module._cffi_backend import embedding embedding.glob.space = space + embedding.glob.patched_sys = False def get_dict_rtld_constants(): diff --git a/pypy/module/_cffi_backend/embedding.py b/pypy/module/_cffi_backend/embedding.py --- a/pypy/module/_cffi_backend/embedding.py +++ b/pypy/module/_cffi_backend/embedding.py @@ -45,6 +45,26 @@ pass glob = Global() +def patch_sys(space): + # Annoying: CPython would just use the C-level std{in,out,err} as + # configured by the main application, for example in binary mode + # on Windows or with buffering turned off. We can't easily do the + # same. Instead, go for the safest bet (but possibly bad for + # performance) and open sys.std{in,out,err} unbuffered. On + # Windows I guess binary mode is a better default choice. + # + # XXX if needed, we could add support for a flag passed to + # pypy_init_embedded_cffi_module(). 
+ if not glob.patched_sys: + space.appexec([], """(): + import os, sys + sys.stdin = sys.__stdin__ = os.fdopen(0, 'rb', 0) + sys.stdout = sys.__stdout__ = os.fdopen(1, 'wb', 0) + sys.stderr = sys.__stderr__ = os.fdopen(2, 'wb', 0) + """) + glob.patched_sys = True + + def pypy_init_embedded_cffi_module(version, init_struct): # called from __init__.py name = "?" @@ -56,6 +76,7 @@ must_leave = False try: must_leave = space.threadlocals.try_enter_thread(space) + patch_sys(space) load_embedded_cffi_module(space, version, init_struct) res = 0 except OperationError, operr: @@ -85,14 +106,86 @@ # ____________________________________________________________ +if os.name == 'nt': -eci = ExternalCompilationInfo(separate_module_sources=[ -r""" -/* XXX Windows missing */ -#include + do_includes = r""" +#define _WIN32_WINNT 0x0501 +#include + +#define CFFI_INIT_HOME_PATH_MAX _MAX_PATH +static void _cffi_init(void); +static void _cffi_init_error(const char *msg, const char *extra); + +static int _cffi_init_home(char *output_home_path) +{ + HMODULE hModule = 0; + DWORD res; + + GetModuleHandleEx(GET_MODULE_HANDLE_EX_FLAG_FROM_ADDRESS | + GET_MODULE_HANDLE_EX_FLAG_UNCHANGED_REFCOUNT, + (LPCTSTR)&_cffi_init, &hModule); + + if (hModule == 0 ) { + _cffi_init_error("GetModuleHandleEx() failed", ""); + return -1; + } + res = GetModuleFileName(hModule, output_home_path, CFFI_INIT_HOME_PATH_MAX); + if (res >= CFFI_INIT_HOME_PATH_MAX) { + return -1; + } + return 0; +} + +static void _cffi_init_once(void) +{ + static LONG volatile lock = 0; + static int _init_called = 0; + + while (InterlockedCompareExchange(&lock, 1, 0) != 0) { + SwitchToThread(); /* spin loop */ + } + if (!_init_called) { + _cffi_init(); + _init_called = 1; + } + InterlockedCompareExchange(&lock, 0, 1); +} +""" + +else: + + do_includes = r""" #include #include +#define CFFI_INIT_HOME_PATH_MAX PATH_MAX +static void _cffi_init(void); +static void _cffi_init_error(const char *msg, const char *extra); + +static int 
_cffi_init_home(char *output_home_path) +{ + Dl_info info; + dlerror(); /* reset */ + if (dladdr(&_cffi_init, &info) == 0) { + _cffi_init_error("dladdr() failed: ", dlerror()); + return -1; + } + if (realpath(info.dli_fname, output_home_path) == NULL) { + perror("realpath() failed"); + _cffi_init_error("realpath() failed", ""); + return -1; + } + return 0; +} + +static void _cffi_init_once(void) +{ + static pthread_once_t once_control = PTHREAD_ONCE_INIT; + pthread_once(&once_control, _cffi_init); +} +""" + +do_startup = do_includes + r""" RPY_EXPORTED void rpython_startup_code(void); RPY_EXPORTED int pypy_setup_home(char *, int); @@ -108,17 +201,13 @@ static void _cffi_init(void) { - Dl_info info; - char *home; + char home[CFFI_INIT_HOME_PATH_MAX + 1]; rpython_startup_code(); RPyGilAllocate(); - if (dladdr(&_cffi_init, &info) == 0) { - _cffi_init_error("dladdr() failed: ", dlerror()); + if (_cffi_init_home(home) != 0) return; - } - home = realpath(info.dli_fname, NULL); if (pypy_setup_home(home, 1) != 0) { _cffi_init_error("pypy_setup_home() failed", ""); return; @@ -134,13 +223,12 @@ It assumes that we don't hold the GIL before (if it exists), and we don't hold it afterwards. 
*/ - static pthread_once_t once_control = PTHREAD_ONCE_INIT; - _cffi_module_name = name; /* not really thread-safe, but better than nothing */ - pthread_once(&once_control, _cffi_init); + _cffi_init_once(); return (int)_cffi_ready - 1; } -"""]) +""" +eci = ExternalCompilationInfo(separate_module_sources=[do_startup]) declare_c_function = rffi.llexternal_use_eci(eci) diff --git a/pypy/module/_cffi_backend/test/_backend_test_c.py b/pypy/module/_cffi_backend/test/_backend_test_c.py --- a/pypy/module/_cffi_backend/test/_backend_test_c.py +++ b/pypy/module/_cffi_backend/test/_backend_test_c.py @@ -1,7 +1,7 @@ # ____________________________________________________________ import sys -assert __version__ == "1.5.0", ("This test_c.py file is for testing a version" +assert __version__ == "1.5.2", ("This test_c.py file is for testing a version" " of cffi that differs from the one that we" " get from 'import _cffi_backend'") if sys.version_info < (3,): diff --git a/pypy/module/_cffi_backend/test/test_ztranslation.py b/pypy/module/_cffi_backend/test/test_ztranslation.py --- a/pypy/module/_cffi_backend/test/test_ztranslation.py +++ b/pypy/module/_cffi_backend/test/test_ztranslation.py @@ -4,15 +4,18 @@ # side-effect: FORMAT_LONGDOUBLE must be built before test_checkmodule() from pypy.module._cffi_backend import misc -from pypy.module._cffi_backend import cffi1_module +from pypy.module._cffi_backend import embedding def test_checkmodule(): # prepare_file_argument() is not working without translating the _file # module too def dummy_prepare_file_argument(space, fileobj): - # call load_cffi1_module() too, from a random place like here - cffi1_module.load_cffi1_module(space, "foo", "foo", 42) + # call pypy_init_embedded_cffi_module() from a random place like here + # --- this calls load_cffi1_module(), too + embedding.pypy_init_embedded_cffi_module( + rffi.cast(rffi.INT, embedding.EMBED_VERSION_MIN), + 42) return lltype.nullptr(rffi.CCHARP.TO) old = ctypeptr.prepare_file_argument 
try: From pypy.commits at gmail.com Sun Feb 21 06:55:06 2016 From: pypy.commits at gmail.com (mattip) Date: Sun, 21 Feb 2016 03:55:06 -0800 (PST) Subject: [pypy-commit] pypy cpyext-ext: fix merge Message-ID: <56c9a59a.a118c20a.2a649.ffffc00c@mx.google.com> Author: mattip Branch: cpyext-ext Changeset: r82358:8b4f91899232 Date: 2016-02-20 14:28 +0100 http://bitbucket.org/pypy/pypy/changeset/8b4f91899232/ Log: fix merge diff --git a/pypy/module/cpyext/api.py b/pypy/module/cpyext/api.py --- a/pypy/module/cpyext/api.py +++ b/pypy/module/cpyext/api.py @@ -119,6 +119,8 @@ def is_valid_fp(fp): return is_valid_fd(fileno(fp)) +pypy_decl = 'pypy_decl.h' + constant_names = """ Py_TPFLAGS_READY Py_TPFLAGS_READYING Py_TPFLAGS_HAVE_GETCHARBUFFER METH_COEXIST METH_STATIC METH_CLASS @@ -128,7 +130,7 @@ """.split() for name in constant_names: setattr(CConfig_constants, name, rffi_platform.ConstantInteger(name)) -udir.join('pypy_decl.h').write("/* Will be filled later */\n") +udir.join(pypy_decl).write("/* Will be filled later */\n") udir.join('pypy_structmember_decl.h').write("/* Will be filled later */\n") udir.join('pypy_macros.h').write("/* Will be filled later */\n") globals().update(rffi_platform.configure(CConfig_constants)) @@ -233,7 +235,7 @@ wrapper.c_name = cpyext_namespace.uniquename(self.c_name) return wrapper -def cpython_api(argtypes, restype, error=_NOT_SPECIFIED, header='pypy_decl.h', +def cpython_api(argtypes, restype, error=_NOT_SPECIFIED, header=pypy_decl, gil=None): """ Declares a function to be exported. 
@@ -964,7 +966,7 @@ # implement function callbacks and generate function decls functions = [] decls = {} - pypy_decls = decls['pypy_decl.h'] = [] + pypy_decls = decls[pypy_decl] = [] pypy_decls.append("#ifndef _PYPY_PYPY_DECL_H\n") pypy_decls.append("#define _PYPY_PYPY_DECL_H\n") pypy_decls.append("#ifndef PYPY_STANDALONE\n") diff --git a/pypy/module/cpyext/slotdefs.py b/pypy/module/cpyext/slotdefs.py --- a/pypy/module/cpyext/slotdefs.py +++ b/pypy/module/cpyext/slotdefs.py @@ -5,7 +5,7 @@ from rpython.rtyper.lltypesystem import rffi, lltype from pypy.module.cpyext.api import ( cpython_api, generic_cpy_call, PyObject, Py_ssize_t, Py_TPFLAGS_CHECKTYPES, - mangle_name) + mangle_name, pypy_decl) from pypy.module.cpyext.typeobjectdefs import ( unaryfunc, wrapperfunc, ternaryfunc, PyTypeObjectPtr, binaryfunc, getattrfunc, getattrofunc, setattrofunc, lenfunc, ssizeargfunc, inquiry, @@ -365,7 +365,9 @@ def build_slot_tp_function(space, typedef, name): w_type = space.gettypeobject(typedef) - external = mangle_name('', typedef.name) is not None + header = pypy_decl + if mangle_name('', typedef.name) is None: + header = None if name == 'tp_setattro': setattr_fn = w_type.getdictvalue(space, '__setattr__') delattr_fn = w_type.getdictvalue(space, '__delattr__') @@ -373,7 +375,7 @@ return @cpython_api([PyObject, PyObject, PyObject], rffi.INT_real, - error=-1, external=external) + error=-1, header=header) @func_renamer("cpyext_tp_setattro_%s" % (typedef.name,)) def slot_tp_setattro(space, w_self, w_name, w_value): if w_value is not None: @@ -387,8 +389,7 @@ if getattr_fn is None: return - @cpython_api([PyObject, PyObject], PyObject, - external=external) + @cpython_api([PyObject, PyObject], PyObject, header=header) @func_renamer("cpyext_tp_getattro_%s" % (typedef.name,)) def slot_tp_getattro(space, w_self, w_name): return space.call_function(getattr_fn, w_self, w_name) diff --git a/pypy/module/cpyext/test/test_stringobject.py b/pypy/module/cpyext/test/test_stringobject.py --- 
a/pypy/module/cpyext/test/test_stringobject.py +++ b/pypy/module/cpyext/test/test_stringobject.py @@ -366,9 +366,3 @@ w_joined = api._PyString_Join(w_sep, w_seq) assert space.unwrap(w_joined) == 'ab' - def test_type(self, space, api): - py_str = make_ref(space, space.w_str) - py_unicode = make_ref(space, space.w_unicode) - py_basestr = make_ref(space, space.w_basestring) - #import pdb - #pdb.set_trace() From pypy.commits at gmail.com Sun Feb 21 06:55:10 2016 From: pypy.commits at gmail.com (mattip) Date: Sun, 21 Feb 2016 03:55:10 -0800 (PST) Subject: [pypy-commit] pypy cpyext-ext: unicode tests pass Message-ID: <56c9a59e.82561c0a.32111.7a99@mx.google.com> Author: mattip Branch: cpyext-ext Changeset: r82360:74587c2585fe Date: 2016-02-20 23:03 +0100 http://bitbucket.org/pypy/pypy/changeset/74587c2585fe/ Log: unicode tests pass diff --git a/pypy/module/cpyext/api.py b/pypy/module/cpyext/api.py --- a/pypy/module/cpyext/api.py +++ b/pypy/module/cpyext/api.py @@ -473,7 +473,7 @@ "PyUnicode_Type": "space.w_unicode", "PyBaseString_Type": "space.w_basestring", "PyDict_Type": "space.w_dict", - "PyDictProxy_Type": "space.type(space.w_NotImplemented)", + #"PyDictProxy_Type": "space.type(space.w_NotImplemented)", "PyTuple_Type": "space.w_tuple", "PyList_Type": "space.w_list", "PySet_Type": "space.w_set", diff --git a/pypy/module/cpyext/floatobject.py b/pypy/module/cpyext/floatobject.py --- a/pypy/module/cpyext/floatobject.py +++ b/pypy/module/cpyext/floatobject.py @@ -3,7 +3,7 @@ cpython_struct, CANNOT_FAIL, cpython_api, PyObject, build_type_checkers, CONST_STRING) from pypy.module.cpyext.pyobject import ( - make_typedescr, track_reference, RefcountState, from_ref) + make_typedescr, track_reference, from_ref) from pypy.interpreter.error import OperationError from rpython.rlib.rstruct import runpack from pypy.objspace.std.floatobject import W_FloatObject @@ -36,8 +36,6 @@ w_obj = space.allocate_instance(W_FloatObject, w_type) w_obj.__init__(floatval) track_reference(space, obj, 
w_obj) - state = space.fromcache(RefcountState) - state.set_lifeline(w_obj, obj) return w_obj PyFloat_Check, PyFloat_CheckExact = build_type_checkers("Float") diff --git a/pypy/module/cpyext/test/test_unicodeobject.py b/pypy/module/cpyext/test/test_unicodeobject.py --- a/pypy/module/cpyext/test/test_unicodeobject.py +++ b/pypy/module/cpyext/test/test_unicodeobject.py @@ -1,4 +1,4 @@ -# encoding: iso-8859-15 +# encoding: utf-8 from pypy.module.cpyext.test.test_api import BaseApiTest from pypy.module.cpyext.test.test_cpyext import AppTestCpythonExtensionBase from pypy.module.cpyext.unicodeobject import ( @@ -21,13 +21,13 @@ PyObject* s = PyUnicode_FromString("Hello world"); int result = 0; - if(PyUnicode_GetSize(s) == 11) { - result = 1; + if(PyUnicode_GetSize(s) != 11) { + result = -PyUnicode_GetSize(s); } - if(s->ob_type->tp_basicsize != sizeof(void*)*6) - result = 0; + if(s->ob_type->tp_basicsize != sizeof(void*)*7) + result = s->ob_type->tp_basicsize; Py_DECREF(s); - return PyBool_FromLong(result); + return PyLong_FromLong(result); """), ("test_GetSize_exception", "METH_NOARGS", """ @@ -42,7 +42,7 @@ return PyBool_FromLong(PyUnicode_Check(PyTuple_GetItem(args, 0))); """)]) assert module.get_hello1() == u'Hello world' - assert module.test_GetSize() + assert module.test_GetSize() == 0 raises(TypeError, module.test_GetSize_exception) assert module.test_is_unicode(u"") @@ -73,7 +73,8 @@ ]) s = module.getunicode() assert len(s) == 4 - assert s == u'a�\x00c' + assert s == u'a\xe9\x00c' + def test_hash(self): module = self.import_extension('foo', [ @@ -130,7 +131,7 @@ utf_8 = rffi.str2charp('utf-8') encoded = api.PyUnicode_AsEncodedString(space.wrap(u'sp�m'), utf_8, None) - assert space.unwrap(encoded) == 'sp\xc3\xa4m' + assert space.unwrap(encoded) == 'sp\xef\xbf\xbdm' encoded_obj = api.PyUnicode_AsEncodedObject(space.wrap(u'sp�m'), utf_8, None) assert space.eq_w(encoded, encoded_obj) @@ -155,14 +156,14 @@ rffi.free_wcharp(buf) def test_fromstring(self, space, api): - 
s = rffi.str2charp(u'sp�m'.encode("utf-8")) + s = rffi.str2charp(u'sp\x09m'.encode("utf-8")) w_res = api.PyUnicode_FromString(s) - assert space.unwrap(w_res) == u'sp�m' + assert space.unwrap(w_res) == u'sp\x09m' res = api.PyUnicode_FromStringAndSize(s, 4) w_res = from_ref(space, res) api.Py_DecRef(res) - assert space.unwrap(w_res) == u'sp�' + assert space.unwrap(w_res) == u'sp\x09m' rffi.free_charp(s) def test_unicode_resize(self, space, api): @@ -188,16 +189,16 @@ lltype.free(ar, flavor='raw') def test_AsUTF8String(self, space, api): - w_u = space.wrap(u'sp�m') + w_u = space.wrap(u'sp\x09m') w_res = api.PyUnicode_AsUTF8String(w_u) assert space.type(w_res) is space.w_str - assert space.unwrap(w_res) == 'sp\xc3\xa4m' + assert space.unwrap(w_res) == 'sp\tm' def test_decode_utf8(self, space, api): - u = rffi.str2charp(u'sp�m'.encode("utf-8")) + u = rffi.str2charp(u'sp\x134m'.encode("utf-8")) w_u = api.PyUnicode_DecodeUTF8(u, 5, None) assert space.type(w_u) is space.w_unicode - assert space.unwrap(w_u) == u'sp�m' + assert space.unwrap(w_u) == u'sp\x134m' w_u = api.PyUnicode_DecodeUTF8(u, 2, None) assert space.type(w_u) is space.w_unicode @@ -205,9 +206,9 @@ rffi.free_charp(u) def test_encode_utf8(self, space, api): - u = rffi.unicode2wcharp(u'sp�m') + u = rffi.unicode2wcharp(u'sp\x09m') w_s = api.PyUnicode_EncodeUTF8(u, 4, None) - assert space.unwrap(w_s) == u'sp�m'.encode('utf-8') + assert space.unwrap(w_s) == u'sp\x09m'.encode('utf-8') rffi.free_wcharp(u) def test_encode_decimal(self, space, api): @@ -269,13 +270,11 @@ for char in [0x0a, 0x0d, 0x1c, 0x1d, 0x1e, 0x85, 0x2028, 0x2029]: assert api.Py_UNICODE_ISLINEBREAK(unichr(char)) - assert api.Py_UNICODE_ISLOWER(u'�') - assert not api.Py_UNICODE_ISUPPER(u'�') + assert api.Py_UNICODE_ISLOWER(u'\xdf') # sharp s + assert api.Py_UNICODE_ISUPPER(u'\xde') # capital thorn assert api.Py_UNICODE_ISLOWER(u'a') assert not api.Py_UNICODE_ISUPPER(u'a') - assert not api.Py_UNICODE_ISLOWER(u'�') - assert 
api.Py_UNICODE_ISUPPER(u'�') - assert not api.Py_UNICODE_ISTITLE(u'A') + assert not api.Py_UNICODE_ISTITLE(u'\xce') assert api.Py_UNICODE_ISTITLE( u'\N{LATIN CAPITAL LETTER L WITH SMALL LETTER J}') diff --git a/pypy/module/cpyext/tupleobject.py b/pypy/module/cpyext/tupleobject.py --- a/pypy/module/cpyext/tupleobject.py +++ b/pypy/module/cpyext/tupleobject.py @@ -101,7 +101,7 @@ track_reference(space, py_obj, w_obj) return w_obj - at cpython_api([PyObject], lltype.Void, external=False) + at cpython_api([PyObject], lltype.Void, header=None) def tuple_dealloc(space, py_obj): """Frees allocated PyTupleObject resources. """ diff --git a/pypy/module/cpyext/typeobject.py b/pypy/module/cpyext/typeobject.py --- a/pypy/module/cpyext/typeobject.py +++ b/pypy/module/cpyext/typeobject.py @@ -16,7 +16,8 @@ cpython_api, cpython_struct, bootstrap_function, Py_ssize_t, Py_ssize_tP, generic_cpy_call, Py_TPFLAGS_READY, Py_TPFLAGS_READYING, Py_TPFLAGS_HEAPTYPE, METH_VARARGS, METH_KEYWORDS, CANNOT_FAIL, - Py_TPFLAGS_HAVE_GETCHARBUFFER, build_type_checkers, StaticObjectBuilder) + Py_TPFLAGS_HAVE_GETCHARBUFFER, build_type_checkers, StaticObjectBuilder, + PyObjectFields) from pypy.module.cpyext.methodobject import ( PyDescr_NewWrapper, PyCFunction_NewEx, PyCFunction_typedef) from pypy.module.cpyext.modsupport import convert_method_defs From pypy.commits at gmail.com Sun Feb 21 06:55:04 2016 From: pypy.commits at gmail.com (mattip) Date: Sun, 21 Feb 2016 03:55:04 -0800 (PST) Subject: [pypy-commit] pypy cpyext-ext: merge default into branch Message-ID: <56c9a598.8673c20a.9e569.ffffca61@mx.google.com> Author: mattip Branch: cpyext-ext Changeset: r82357:56c22c3dbaea Date: 2016-02-20 13:49 +0100 http://bitbucket.org/pypy/pypy/changeset/56c22c3dbaea/ Log: merge default into branch diff too long, truncating to 2000 out of 7485 lines diff --git a/.hgignore b/.hgignore --- a/.hgignore +++ b/.hgignore @@ -75,6 +75,7 @@ ^lib_pypy/__pycache__$ ^lib_pypy/ctypes_config_cache/_.+_cache\.py$ 
^lib_pypy/ctypes_config_cache/_.+_.+_\.py$ +^lib_pypy/_libmpdec/.+.o$ ^rpython/translator/cli/query-descriptions$ ^pypy/doc/discussion/.+\.html$ ^include/.+\.h$ diff --git a/dotviewer/drawgraph.py b/dotviewer/drawgraph.py --- a/dotviewer/drawgraph.py +++ b/dotviewer/drawgraph.py @@ -14,12 +14,661 @@ FONT = os.path.join(this_dir, 'font', 'DroidSans.ttf') FIXEDFONT = os.path.join(this_dir, 'font', 'DroidSansMono.ttf') COLOR = { - 'black': (0,0,0), - 'white': (255,255,255), - 'red': (255,0,0), - 'green': (0,255,0), - 'blue': (0,0,255), - 'yellow': (255,255,0), + 'aliceblue': (240, 248, 255), + 'antiquewhite': (250, 235, 215), + 'antiquewhite1': (255, 239, 219), + 'antiquewhite2': (238, 223, 204), + 'antiquewhite3': (205, 192, 176), + 'antiquewhite4': (139, 131, 120), + 'aquamarine': (127, 255, 212), + 'aquamarine1': (127, 255, 212), + 'aquamarine2': (118, 238, 198), + 'aquamarine3': (102, 205, 170), + 'aquamarine4': (69, 139, 116), + 'azure': (240, 255, 255), + 'azure1': (240, 255, 255), + 'azure2': (224, 238, 238), + 'azure3': (193, 205, 205), + 'azure4': (131, 139, 139), + 'beige': (245, 245, 220), + 'bisque': (255, 228, 196), + 'bisque1': (255, 228, 196), + 'bisque2': (238, 213, 183), + 'bisque3': (205, 183, 158), + 'bisque4': (139, 125, 107), + 'black': (0, 0, 0), + 'blanchedalmond': (255, 235, 205), + 'blue': (0, 0, 255), + 'blue1': (0, 0, 255), + 'blue2': (0, 0, 238), + 'blue3': (0, 0, 205), + 'blue4': (0, 0, 139), + 'blueviolet': (138, 43, 226), + 'brown': (165, 42, 42), + 'brown1': (255, 64, 64), + 'brown2': (238, 59, 59), + 'brown3': (205, 51, 51), + 'brown4': (139, 35, 35), + 'burlywood': (222, 184, 135), + 'burlywood1': (255, 211, 155), + 'burlywood2': (238, 197, 145), + 'burlywood3': (205, 170, 125), + 'burlywood4': (139, 115, 85), + 'cadetblue': (95, 158, 160), + 'cadetblue1': (152, 245, 255), + 'cadetblue2': (142, 229, 238), + 'cadetblue3': (122, 197, 205), + 'cadetblue4': (83, 134, 139), + 'chartreuse': (127, 255, 0), + 'chartreuse1': (127, 255, 0), + 
'chartreuse2': (118, 238, 0), + 'chartreuse3': (102, 205, 0), + 'chartreuse4': (69, 139, 0), + 'chocolate': (210, 105, 30), + 'chocolate1': (255, 127, 36), + 'chocolate2': (238, 118, 33), + 'chocolate3': (205, 102, 29), + 'chocolate4': (139, 69, 19), + 'coral': (255, 127, 80), + 'coral1': (255, 114, 86), + 'coral2': (238, 106, 80), + 'coral3': (205, 91, 69), + 'coral4': (139, 62, 47), + 'cornflowerblue': (100, 149, 237), + 'cornsilk': (255, 248, 220), + 'cornsilk1': (255, 248, 220), + 'cornsilk2': (238, 232, 205), + 'cornsilk3': (205, 200, 177), + 'cornsilk4': (139, 136, 120), + 'crimson': (220, 20, 60), + 'cyan': (0, 255, 255), + 'cyan1': (0, 255, 255), + 'cyan2': (0, 238, 238), + 'cyan3': (0, 205, 205), + 'cyan4': (0, 139, 139), + 'darkgoldenrod': (184, 134, 11), + 'darkgoldenrod1': (255, 185, 15), + 'darkgoldenrod2': (238, 173, 14), + 'darkgoldenrod3': (205, 149, 12), + 'darkgoldenrod4': (139, 101, 8), + 'darkgreen': (0, 100, 0), + 'darkkhaki': (189, 183, 107), + 'darkolivegreen': (85, 107, 47), + 'darkolivegreen1': (202, 255, 112), + 'darkolivegreen2': (188, 238, 104), + 'darkolivegreen3': (162, 205, 90), + 'darkolivegreen4': (110, 139, 61), + 'darkorange': (255, 140, 0), + 'darkorange1': (255, 127, 0), + 'darkorange2': (238, 118, 0), + 'darkorange3': (205, 102, 0), + 'darkorange4': (139, 69, 0), + 'darkorchid': (153, 50, 204), + 'darkorchid1': (191, 62, 255), + 'darkorchid2': (178, 58, 238), + 'darkorchid3': (154, 50, 205), + 'darkorchid4': (104, 34, 139), + 'darksalmon': (233, 150, 122), + 'darkseagreen': (143, 188, 143), + 'darkseagreen1': (193, 255, 193), + 'darkseagreen2': (180, 238, 180), + 'darkseagreen3': (155, 205, 155), + 'darkseagreen4': (105, 139, 105), + 'darkslateblue': (72, 61, 139), + 'darkslategray': (47, 79, 79), + 'darkslategray1': (151, 255, 255), + 'darkslategray2': (141, 238, 238), + 'darkslategray3': (121, 205, 205), + 'darkslategray4': (82, 139, 139), + 'darkslategrey': (47, 79, 79), + 'darkturquoise': (0, 206, 209), + 'darkviolet': 
(148, 0, 211), + 'deeppink': (255, 20, 147), + 'deeppink1': (255, 20, 147), + 'deeppink2': (238, 18, 137), + 'deeppink3': (205, 16, 118), + 'deeppink4': (139, 10, 80), + 'deepskyblue': (0, 191, 255), + 'deepskyblue1': (0, 191, 255), + 'deepskyblue2': (0, 178, 238), + 'deepskyblue3': (0, 154, 205), + 'deepskyblue4': (0, 104, 139), + 'dimgray': (105, 105, 105), + 'dimgrey': (105, 105, 105), + 'dodgerblue': (30, 144, 255), + 'dodgerblue1': (30, 144, 255), + 'dodgerblue2': (28, 134, 238), + 'dodgerblue3': (24, 116, 205), + 'dodgerblue4': (16, 78, 139), + 'firebrick': (178, 34, 34), + 'firebrick1': (255, 48, 48), + 'firebrick2': (238, 44, 44), + 'firebrick3': (205, 38, 38), + 'firebrick4': (139, 26, 26), + 'floralwhite': (255, 250, 240), + 'forestgreen': (34, 139, 34), + 'gainsboro': (220, 220, 220), + 'ghostwhite': (248, 248, 255), + 'gold': (255, 215, 0), + 'gold1': (255, 215, 0), + 'gold2': (238, 201, 0), + 'gold3': (205, 173, 0), + 'gold4': (139, 117, 0), + 'goldenrod': (218, 165, 32), + 'goldenrod1': (255, 193, 37), + 'goldenrod2': (238, 180, 34), + 'goldenrod3': (205, 155, 29), + 'goldenrod4': (139, 105, 20), + 'gray': (192, 192, 192), + 'gray0': (0, 0, 0), + 'gray1': (3, 3, 3), + 'gray10': (26, 26, 26), + 'gray100': (255, 255, 255), + 'gray11': (28, 28, 28), + 'gray12': (31, 31, 31), + 'gray13': (33, 33, 33), + 'gray14': (36, 36, 36), + 'gray15': (38, 38, 38), + 'gray16': (41, 41, 41), + 'gray17': (43, 43, 43), + 'gray18': (46, 46, 46), + 'gray19': (48, 48, 48), + 'gray2': (5, 5, 5), + 'gray20': (51, 51, 51), + 'gray21': (54, 54, 54), + 'gray22': (56, 56, 56), + 'gray23': (59, 59, 59), + 'gray24': (61, 61, 61), + 'gray25': (64, 64, 64), + 'gray26': (66, 66, 66), + 'gray27': (69, 69, 69), + 'gray28': (71, 71, 71), + 'gray29': (74, 74, 74), + 'gray3': (8, 8, 8), + 'gray30': (77, 77, 77), + 'gray31': (79, 79, 79), + 'gray32': (82, 82, 82), + 'gray33': (84, 84, 84), + 'gray34': (87, 87, 87), + 'gray35': (89, 89, 89), + 'gray36': (92, 92, 92), + 'gray37': (94, 94, 
94), + 'gray38': (97, 97, 97), + 'gray39': (99, 99, 99), + 'gray4': (10, 10, 10), + 'gray40': (102, 102, 102), + 'gray41': (105, 105, 105), + 'gray42': (107, 107, 107), + 'gray43': (110, 110, 110), + 'gray44': (112, 112, 112), + 'gray45': (115, 115, 115), + 'gray46': (117, 117, 117), + 'gray47': (120, 120, 120), + 'gray48': (122, 122, 122), + 'gray49': (125, 125, 125), + 'gray5': (13, 13, 13), + 'gray50': (127, 127, 127), + 'gray51': (130, 130, 130), + 'gray52': (133, 133, 133), + 'gray53': (135, 135, 135), + 'gray54': (138, 138, 138), + 'gray55': (140, 140, 140), + 'gray56': (143, 143, 143), + 'gray57': (145, 145, 145), + 'gray58': (148, 148, 148), + 'gray59': (150, 150, 150), + 'gray6': (15, 15, 15), + 'gray60': (153, 153, 153), + 'gray61': (156, 156, 156), + 'gray62': (158, 158, 158), + 'gray63': (161, 161, 161), + 'gray64': (163, 163, 163), + 'gray65': (166, 166, 166), + 'gray66': (168, 168, 168), + 'gray67': (171, 171, 171), + 'gray68': (173, 173, 173), + 'gray69': (176, 176, 176), + 'gray7': (18, 18, 18), + 'gray70': (179, 179, 179), + 'gray71': (181, 181, 181), + 'gray72': (184, 184, 184), + 'gray73': (186, 186, 186), + 'gray74': (189, 189, 189), + 'gray75': (191, 191, 191), + 'gray76': (194, 194, 194), + 'gray77': (196, 196, 196), + 'gray78': (199, 199, 199), + 'gray79': (201, 201, 201), + 'gray8': (20, 20, 20), + 'gray80': (204, 204, 204), + 'gray81': (207, 207, 207), + 'gray82': (209, 209, 209), + 'gray83': (212, 212, 212), + 'gray84': (214, 214, 214), + 'gray85': (217, 217, 217), + 'gray86': (219, 219, 219), + 'gray87': (222, 222, 222), + 'gray88': (224, 224, 224), + 'gray89': (227, 227, 227), + 'gray9': (23, 23, 23), + 'gray90': (229, 229, 229), + 'gray91': (232, 232, 232), + 'gray92': (235, 235, 235), + 'gray93': (237, 237, 237), + 'gray94': (240, 240, 240), + 'gray95': (242, 242, 242), + 'gray96': (245, 245, 245), + 'gray97': (247, 247, 247), + 'gray98': (250, 250, 250), + 'gray99': (252, 252, 252), + 'green': (0, 255, 0), + 'green1': (0, 255, 0), + 
'green2': (0, 238, 0), + 'green3': (0, 205, 0), + 'green4': (0, 139, 0), + 'greenyellow': (173, 255, 47), + 'grey': (192, 192, 192), + 'grey0': (0, 0, 0), + 'grey1': (3, 3, 3), + 'grey10': (26, 26, 26), + 'grey100': (255, 255, 255), + 'grey11': (28, 28, 28), + 'grey12': (31, 31, 31), + 'grey13': (33, 33, 33), + 'grey14': (36, 36, 36), + 'grey15': (38, 38, 38), + 'grey16': (41, 41, 41), + 'grey17': (43, 43, 43), + 'grey18': (46, 46, 46), + 'grey19': (48, 48, 48), + 'grey2': (5, 5, 5), + 'grey20': (51, 51, 51), + 'grey21': (54, 54, 54), + 'grey22': (56, 56, 56), + 'grey23': (59, 59, 59), + 'grey24': (61, 61, 61), + 'grey25': (64, 64, 64), + 'grey26': (66, 66, 66), + 'grey27': (69, 69, 69), + 'grey28': (71, 71, 71), + 'grey29': (74, 74, 74), + 'grey3': (8, 8, 8), + 'grey30': (77, 77, 77), + 'grey31': (79, 79, 79), + 'grey32': (82, 82, 82), + 'grey33': (84, 84, 84), + 'grey34': (87, 87, 87), + 'grey35': (89, 89, 89), + 'grey36': (92, 92, 92), + 'grey37': (94, 94, 94), + 'grey38': (97, 97, 97), + 'grey39': (99, 99, 99), + 'grey4': (10, 10, 10), + 'grey40': (102, 102, 102), + 'grey41': (105, 105, 105), + 'grey42': (107, 107, 107), + 'grey43': (110, 110, 110), + 'grey44': (112, 112, 112), + 'grey45': (115, 115, 115), + 'grey46': (117, 117, 117), + 'grey47': (120, 120, 120), + 'grey48': (122, 122, 122), + 'grey49': (125, 125, 125), + 'grey5': (13, 13, 13), + 'grey50': (127, 127, 127), + 'grey51': (130, 130, 130), + 'grey52': (133, 133, 133), + 'grey53': (135, 135, 135), + 'grey54': (138, 138, 138), + 'grey55': (140, 140, 140), + 'grey56': (143, 143, 143), + 'grey57': (145, 145, 145), + 'grey58': (148, 148, 148), + 'grey59': (150, 150, 150), + 'grey6': (15, 15, 15), + 'grey60': (153, 153, 153), + 'grey61': (156, 156, 156), + 'grey62': (158, 158, 158), + 'grey63': (161, 161, 161), + 'grey64': (163, 163, 163), + 'grey65': (166, 166, 166), + 'grey66': (168, 168, 168), + 'grey67': (171, 171, 171), + 'grey68': (173, 173, 173), + 'grey69': (176, 176, 176), + 'grey7': (18, 18, 
18), + 'grey70': (179, 179, 179), + 'grey71': (181, 181, 181), + 'grey72': (184, 184, 184), + 'grey73': (186, 186, 186), + 'grey74': (189, 189, 189), + 'grey75': (191, 191, 191), + 'grey76': (194, 194, 194), + 'grey77': (196, 196, 196), + 'grey78': (199, 199, 199), + 'grey79': (201, 201, 201), + 'grey8': (20, 20, 20), + 'grey80': (204, 204, 204), + 'grey81': (207, 207, 207), + 'grey82': (209, 209, 209), + 'grey83': (212, 212, 212), + 'grey84': (214, 214, 214), + 'grey85': (217, 217, 217), + 'grey86': (219, 219, 219), + 'grey87': (222, 222, 222), + 'grey88': (224, 224, 224), + 'grey89': (227, 227, 227), + 'grey9': (23, 23, 23), + 'grey90': (229, 229, 229), + 'grey91': (232, 232, 232), + 'grey92': (235, 235, 235), + 'grey93': (237, 237, 237), + 'grey94': (240, 240, 240), + 'grey95': (242, 242, 242), + 'grey96': (245, 245, 245), + 'grey97': (247, 247, 247), + 'grey98': (250, 250, 250), + 'grey99': (252, 252, 252), + 'honeydew': (240, 255, 240), + 'honeydew1': (240, 255, 240), + 'honeydew2': (224, 238, 224), + 'honeydew3': (193, 205, 193), + 'honeydew4': (131, 139, 131), + 'hotpink': (255, 105, 180), + 'hotpink1': (255, 110, 180), + 'hotpink2': (238, 106, 167), + 'hotpink3': (205, 96, 144), + 'hotpink4': (139, 58, 98), + 'indianred': (205, 92, 92), + 'indianred1': (255, 106, 106), + 'indianred2': (238, 99, 99), + 'indianred3': (205, 85, 85), + 'indianred4': (139, 58, 58), + 'indigo': (75, 0, 130), + 'invis': (255, 255, 254), + 'ivory': (255, 255, 240), + 'ivory1': (255, 255, 240), + 'ivory2': (238, 238, 224), + 'ivory3': (205, 205, 193), + 'ivory4': (139, 139, 131), + 'khaki': (240, 230, 140), + 'khaki1': (255, 246, 143), + 'khaki2': (238, 230, 133), + 'khaki3': (205, 198, 115), + 'khaki4': (139, 134, 78), + 'lavender': (230, 230, 250), + 'lavenderblush': (255, 240, 245), + 'lavenderblush1': (255, 240, 245), + 'lavenderblush2': (238, 224, 229), + 'lavenderblush3': (205, 193, 197), + 'lavenderblush4': (139, 131, 134), + 'lawngreen': (124, 252, 0), + 'lemonchiffon': 
(255, 250, 205), + 'lemonchiffon1': (255, 250, 205), + 'lemonchiffon2': (238, 233, 191), + 'lemonchiffon3': (205, 201, 165), + 'lemonchiffon4': (139, 137, 112), + 'lightblue': (173, 216, 230), + 'lightblue1': (191, 239, 255), + 'lightblue2': (178, 223, 238), + 'lightblue3': (154, 192, 205), + 'lightblue4': (104, 131, 139), + 'lightcoral': (240, 128, 128), + 'lightcyan': (224, 255, 255), + 'lightcyan1': (224, 255, 255), + 'lightcyan2': (209, 238, 238), + 'lightcyan3': (180, 205, 205), + 'lightcyan4': (122, 139, 139), + 'lightgoldenrod': (238, 221, 130), + 'lightgoldenrod1': (255, 236, 139), + 'lightgoldenrod2': (238, 220, 130), + 'lightgoldenrod3': (205, 190, 112), + 'lightgoldenrod4': (139, 129, 76), + 'lightgoldenrodyellow': (250, 250, 210), + 'lightgray': (211, 211, 211), + 'lightgrey': (211, 211, 211), + 'lightpink': (255, 182, 193), + 'lightpink1': (255, 174, 185), + 'lightpink2': (238, 162, 173), + 'lightpink3': (205, 140, 149), + 'lightpink4': (139, 95, 101), + 'lightsalmon': (255, 160, 122), + 'lightsalmon1': (255, 160, 122), + 'lightsalmon2': (238, 149, 114), + 'lightsalmon3': (205, 129, 98), + 'lightsalmon4': (139, 87, 66), + 'lightseagreen': (32, 178, 170), + 'lightskyblue': (135, 206, 250), + 'lightskyblue1': (176, 226, 255), + 'lightskyblue2': (164, 211, 238), + 'lightskyblue3': (141, 182, 205), + 'lightskyblue4': (96, 123, 139), + 'lightslateblue': (132, 112, 255), + 'lightslategray': (119, 136, 153), + 'lightslategrey': (119, 136, 153), + 'lightsteelblue': (176, 196, 222), + 'lightsteelblue1': (202, 225, 255), + 'lightsteelblue2': (188, 210, 238), + 'lightsteelblue3': (162, 181, 205), + 'lightsteelblue4': (110, 123, 139), + 'lightyellow': (255, 255, 224), + 'lightyellow1': (255, 255, 224), + 'lightyellow2': (238, 238, 209), + 'lightyellow3': (205, 205, 180), + 'lightyellow4': (139, 139, 122), + 'limegreen': (50, 205, 50), + 'linen': (250, 240, 230), + 'magenta': (255, 0, 255), + 'magenta1': (255, 0, 255), + 'magenta2': (238, 0, 238), + 'magenta3': 
(205, 0, 205), + 'magenta4': (139, 0, 139), + 'maroon': (176, 48, 96), + 'maroon1': (255, 52, 179), + 'maroon2': (238, 48, 167), + 'maroon3': (205, 41, 144), + 'maroon4': (139, 28, 98), + 'mediumaquamarine': (102, 205, 170), + 'mediumblue': (0, 0, 205), + 'mediumorchid': (186, 85, 211), + 'mediumorchid1': (224, 102, 255), + 'mediumorchid2': (209, 95, 238), + 'mediumorchid3': (180, 82, 205), + 'mediumorchid4': (122, 55, 139), + 'mediumpurple': (147, 112, 219), + 'mediumpurple1': (171, 130, 255), + 'mediumpurple2': (159, 121, 238), + 'mediumpurple3': (137, 104, 205), + 'mediumpurple4': (93, 71, 139), + 'mediumseagreen': (60, 179, 113), + 'mediumslateblue': (123, 104, 238), + 'mediumspringgreen': (0, 250, 154), + 'mediumturquoise': (72, 209, 204), + 'mediumvioletred': (199, 21, 133), + 'midnightblue': (25, 25, 112), + 'mintcream': (245, 255, 250), + 'mistyrose': (255, 228, 225), + 'mistyrose1': (255, 228, 225), + 'mistyrose2': (238, 213, 210), + 'mistyrose3': (205, 183, 181), + 'mistyrose4': (139, 125, 123), + 'moccasin': (255, 228, 181), + 'navajowhite': (255, 222, 173), + 'navajowhite1': (255, 222, 173), + 'navajowhite2': (238, 207, 161), + 'navajowhite3': (205, 179, 139), + 'navajowhite4': (139, 121, 94), + 'navy': (0, 0, 128), + 'navyblue': (0, 0, 128), + 'none': (255, 255, 254), + 'oldlace': (253, 245, 230), + 'olivedrab': (107, 142, 35), + 'olivedrab1': (192, 255, 62), + 'olivedrab2': (179, 238, 58), + 'olivedrab3': (154, 205, 50), + 'olivedrab4': (105, 139, 34), + 'orange': (255, 165, 0), + 'orange1': (255, 165, 0), + 'orange2': (238, 154, 0), + 'orange3': (205, 133, 0), + 'orange4': (139, 90, 0), + 'orangered': (255, 69, 0), + 'orangered1': (255, 69, 0), + 'orangered2': (238, 64, 0), + 'orangered3': (205, 55, 0), + 'orangered4': (139, 37, 0), + 'orchid': (218, 112, 214), + 'orchid1': (255, 131, 250), + 'orchid2': (238, 122, 233), + 'orchid3': (205, 105, 201), + 'orchid4': (139, 71, 137), + 'palegoldenrod': (238, 232, 170), + 'palegreen': (152, 251, 152), + 
'palegreen1': (154, 255, 154), + 'palegreen2': (144, 238, 144), + 'palegreen3': (124, 205, 124), + 'palegreen4': (84, 139, 84), + 'paleturquoise': (175, 238, 238), + 'paleturquoise1': (187, 255, 255), + 'paleturquoise2': (174, 238, 238), + 'paleturquoise3': (150, 205, 205), + 'paleturquoise4': (102, 139, 139), + 'palevioletred': (219, 112, 147), + 'palevioletred1': (255, 130, 171), + 'palevioletred2': (238, 121, 159), + 'palevioletred3': (205, 104, 137), + 'palevioletred4': (139, 71, 93), + 'papayawhip': (255, 239, 213), + 'peachpuff': (255, 218, 185), + 'peachpuff1': (255, 218, 185), + 'peachpuff2': (238, 203, 173), + 'peachpuff3': (205, 175, 149), + 'peachpuff4': (139, 119, 101), + 'peru': (205, 133, 63), + 'pink': (255, 192, 203), + 'pink1': (255, 181, 197), + 'pink2': (238, 169, 184), + 'pink3': (205, 145, 158), + 'pink4': (139, 99, 108), + 'plum': (221, 160, 221), + 'plum1': (255, 187, 255), + 'plum2': (238, 174, 238), + 'plum3': (205, 150, 205), + 'plum4': (139, 102, 139), + 'powderblue': (176, 224, 230), + 'purple': (160, 32, 240), + 'purple1': (155, 48, 255), + 'purple2': (145, 44, 238), + 'purple3': (125, 38, 205), + 'purple4': (85, 26, 139), + 'red': (255, 0, 0), + 'red1': (255, 0, 0), + 'red2': (238, 0, 0), + 'red3': (205, 0, 0), + 'red4': (139, 0, 0), + 'rosybrown': (188, 143, 143), + 'rosybrown1': (255, 193, 193), + 'rosybrown2': (238, 180, 180), + 'rosybrown3': (205, 155, 155), + 'rosybrown4': (139, 105, 105), + 'royalblue': (65, 105, 225), + 'royalblue1': (72, 118, 255), + 'royalblue2': (67, 110, 238), + 'royalblue3': (58, 95, 205), + 'royalblue4': (39, 64, 139), + 'saddlebrown': (139, 69, 19), + 'salmon': (250, 128, 114), + 'salmon1': (255, 140, 105), + 'salmon2': (238, 130, 98), + 'salmon3': (205, 112, 84), + 'salmon4': (139, 76, 57), + 'sandybrown': (244, 164, 96), + 'seagreen': (46, 139, 87), + 'seagreen1': (84, 255, 159), + 'seagreen2': (78, 238, 148), + 'seagreen3': (67, 205, 128), + 'seagreen4': (46, 139, 87), + 'seashell': (255, 245, 238), + 
'seashell1': (255, 245, 238), + 'seashell2': (238, 229, 222), + 'seashell3': (205, 197, 191), + 'seashell4': (139, 134, 130), + 'sienna': (160, 82, 45), + 'sienna1': (255, 130, 71), + 'sienna2': (238, 121, 66), + 'sienna3': (205, 104, 57), + 'sienna4': (139, 71, 38), + 'skyblue': (135, 206, 235), + 'skyblue1': (135, 206, 255), + 'skyblue2': (126, 192, 238), + 'skyblue3': (108, 166, 205), + 'skyblue4': (74, 112, 139), + 'slateblue': (106, 90, 205), + 'slateblue1': (131, 111, 255), + 'slateblue2': (122, 103, 238), + 'slateblue3': (105, 89, 205), + 'slateblue4': (71, 60, 139), + 'slategray': (112, 128, 144), + 'slategray1': (198, 226, 255), + 'slategray2': (185, 211, 238), + 'slategray3': (159, 182, 205), + 'slategray4': (108, 123, 139), + 'slategrey': (112, 128, 144), + 'snow': (255, 250, 250), + 'snow1': (255, 250, 250), + 'snow2': (238, 233, 233), + 'snow3': (205, 201, 201), + 'snow4': (139, 137, 137), + 'springgreen': (0, 255, 127), + 'springgreen1': (0, 255, 127), + 'springgreen2': (0, 238, 118), + 'springgreen3': (0, 205, 102), + 'springgreen4': (0, 139, 69), + 'steelblue': (70, 130, 180), + 'steelblue1': (99, 184, 255), + 'steelblue2': (92, 172, 238), + 'steelblue3': (79, 148, 205), + 'steelblue4': (54, 100, 139), + 'tan': (210, 180, 140), + 'tan1': (255, 165, 79), + 'tan2': (238, 154, 73), + 'tan3': (205, 133, 63), + 'tan4': (139, 90, 43), + 'thistle': (216, 191, 216), + 'thistle1': (255, 225, 255), + 'thistle2': (238, 210, 238), + 'thistle3': (205, 181, 205), + 'thistle4': (139, 123, 139), + 'tomato': (255, 99, 71), + 'tomato1': (255, 99, 71), + 'tomato2': (238, 92, 66), + 'tomato3': (205, 79, 57), + 'tomato4': (139, 54, 38), + 'transparent': (255, 255, 254), + 'turquoise': (64, 224, 208), + 'turquoise1': (0, 245, 255), + 'turquoise2': (0, 229, 238), + 'turquoise3': (0, 197, 205), + 'turquoise4': (0, 134, 139), + 'violet': (238, 130, 238), + 'violetred': (208, 32, 144), + 'violetred1': (255, 62, 150), + 'violetred2': (238, 58, 140), + 'violetred3': (205, 50, 
120), + 'violetred4': (139, 34, 82), + 'wheat': (245, 222, 179), + 'wheat1': (255, 231, 186), + 'wheat2': (238, 216, 174), + 'wheat3': (205, 186, 150), + 'wheat4': (139, 126, 102), + 'white': (255, 255, 255), + 'whitesmoke': (245, 245, 245), + 'yellow': (255, 255, 0), + 'yellow1': (255, 255, 0), + 'yellow2': (238, 238, 0), + 'yellow3': (205, 205, 0), + 'yellow4': (139, 139, 0), + 'yellowgreen': (154, 205, 50), } re_nonword=re.compile(r'([^0-9a-zA-Z_.]+)') re_linewidth=re.compile(r'setlinewidth\((\d+(\.\d*)?|\.\d+)\)') diff --git a/lib-python/2.7/distutils/command/build_ext.py b/lib-python/2.7/distutils/command/build_ext.py --- a/lib-python/2.7/distutils/command/build_ext.py +++ b/lib-python/2.7/distutils/command/build_ext.py @@ -188,7 +188,7 @@ # the 'libs' directory is for binary installs - we assume that # must be the *native* platform. But we don't really support # cross-compiling via a binary install anyway, so we let it go. - self.library_dirs.append(os.path.join(sys.exec_prefix, 'include')) + self.library_dirs.append(os.path.join(sys.exec_prefix, 'libs')) if self.debug: self.build_temp = os.path.join(self.build_temp, "Debug") else: diff --git a/lib_pypy/_pypy_testcapi.py b/lib_pypy/_pypy_testcapi.py --- a/lib_pypy/_pypy_testcapi.py +++ b/lib_pypy/_pypy_testcapi.py @@ -62,7 +62,7 @@ if sys.platform == 'win32': # XXX pyconfig.h uses a pragma to link to the import library, # which is currently python27.lib - library = os.path.join(thisdir, '..', 'include', 'python27') + library = os.path.join(thisdir, '..', 'libs', 'python27') if not os.path.exists(library + '.lib'): # For a local translation or nightly build library = os.path.join(thisdir, '..', 'pypy', 'goal', 'python27') diff --git a/lib_pypy/cffi.egg-info/PKG-INFO b/lib_pypy/cffi.egg-info/PKG-INFO --- a/lib_pypy/cffi.egg-info/PKG-INFO +++ b/lib_pypy/cffi.egg-info/PKG-INFO @@ -1,6 +1,6 @@ Metadata-Version: 1.1 Name: cffi -Version: 1.5.0 +Version: 1.5.2 Summary: Foreign Function Interface for Python calling C 
code. Home-page: http://cffi.readthedocs.org Author: Armin Rigo, Maciej Fijalkowski diff --git a/lib_pypy/cffi/__init__.py b/lib_pypy/cffi/__init__.py --- a/lib_pypy/cffi/__init__.py +++ b/lib_pypy/cffi/__init__.py @@ -4,8 +4,8 @@ from .api import FFI, CDefError, FFIError from .ffiplatform import VerificationError, VerificationMissing -__version__ = "1.5.0" -__version_info__ = (1, 5, 0) +__version__ = "1.5.2" +__version_info__ = (1, 5, 2) # The verifier module file names are based on the CRC32 of a string that # contains the following version number. It may be older than __version__ diff --git a/lib_pypy/cffi/_cffi_include.h b/lib_pypy/cffi/_cffi_include.h --- a/lib_pypy/cffi/_cffi_include.h +++ b/lib_pypy/cffi/_cffi_include.h @@ -231,6 +231,12 @@ ((got_nonpos) == (expected <= 0) && \ (got) == (unsigned long long)expected) +#ifdef MS_WIN32 +# define _cffi_stdcall __stdcall +#else +# define _cffi_stdcall /* nothing */ +#endif + #ifdef __cplusplus } #endif diff --git a/lib_pypy/cffi/_embedding.h b/lib_pypy/cffi/_embedding.h new file mode 100644 --- /dev/null +++ b/lib_pypy/cffi/_embedding.h @@ -0,0 +1,517 @@ + +/***** Support code for embedding *****/ + +#if defined(_MSC_VER) +# define CFFI_DLLEXPORT __declspec(dllexport) +#elif defined(__GNUC__) +# define CFFI_DLLEXPORT __attribute__((visibility("default"))) +#else +# define CFFI_DLLEXPORT /* nothing */ +#endif + + +/* There are two global variables of type _cffi_call_python_fnptr: + + * _cffi_call_python, which we declare just below, is the one called + by ``extern "Python"`` implementations. + + * _cffi_call_python_org, which on CPython is actually part of the + _cffi_exports[] array, is the function pointer copied from + _cffi_backend. + + After initialization is complete, both are equal. 
However, the + first one remains equal to &_cffi_start_and_call_python until the + very end of initialization, when we are (or should be) sure that + concurrent threads also see a completely initialized world, and + only then is it changed. +*/ +#undef _cffi_call_python +typedef void (*_cffi_call_python_fnptr)(struct _cffi_externpy_s *, char *); +static void _cffi_start_and_call_python(struct _cffi_externpy_s *, char *); +static _cffi_call_python_fnptr _cffi_call_python = &_cffi_start_and_call_python; + + +#ifndef _MSC_VER + /* --- Assuming a GCC not infinitely old --- */ +# define cffi_compare_and_swap(l,o,n) __sync_bool_compare_and_swap(l,o,n) +# define cffi_write_barrier() __sync_synchronize() +# if !defined(__amd64__) && !defined(__x86_64__) && \ + !defined(__i386__) && !defined(__i386) +# define cffi_read_barrier() __sync_synchronize() +# else +# define cffi_read_barrier() (void)0 +# endif +#else + /* --- Windows threads version --- */ +# include +# define cffi_compare_and_swap(l,o,n) \ + (InterlockedCompareExchangePointer(l,n,o) == (o)) +# define cffi_write_barrier() InterlockedCompareExchange(&_cffi_dummy,0,0) +# define cffi_read_barrier() (void)0 +static volatile LONG _cffi_dummy; +#endif + +#ifdef WITH_THREAD +# ifndef _MSC_VER +# include + static pthread_mutex_t _cffi_embed_startup_lock; +# else + static CRITICAL_SECTION _cffi_embed_startup_lock; +# endif + static char _cffi_embed_startup_lock_ready = 0; +#endif + +static void _cffi_acquire_reentrant_mutex(void) +{ + static void *volatile lock = NULL; + + while (!cffi_compare_and_swap(&lock, NULL, (void *)1)) { + /* should ideally do a spin loop instruction here, but + hard to do it portably and doesn't really matter I + think: pthread_mutex_init() should be very fast, and + this is only run at start-up anyway. 
*/ + } + +#ifdef WITH_THREAD + if (!_cffi_embed_startup_lock_ready) { +# ifndef _MSC_VER + pthread_mutexattr_t attr; + pthread_mutexattr_init(&attr); + pthread_mutexattr_settype(&attr, PTHREAD_MUTEX_RECURSIVE); + pthread_mutex_init(&_cffi_embed_startup_lock, &attr); +# else + InitializeCriticalSection(&_cffi_embed_startup_lock); +# endif + _cffi_embed_startup_lock_ready = 1; + } +#endif + + while (!cffi_compare_and_swap(&lock, (void *)1, NULL)) + ; + +#ifndef _MSC_VER + pthread_mutex_lock(&_cffi_embed_startup_lock); +#else + EnterCriticalSection(&_cffi_embed_startup_lock); +#endif +} + +static void _cffi_release_reentrant_mutex(void) +{ +#ifndef _MSC_VER + pthread_mutex_unlock(&_cffi_embed_startup_lock); +#else + LeaveCriticalSection(&_cffi_embed_startup_lock); +#endif +} + + +/********** CPython-specific section **********/ +#ifndef PYPY_VERSION + + +#define _cffi_call_python_org _cffi_exports[_CFFI_CPIDX] + +PyMODINIT_FUNC _CFFI_PYTHON_STARTUP_FUNC(void); /* forward */ + +static void _cffi_py_initialize(void) +{ + /* XXX use initsigs=0, which "skips initialization registration of + signal handlers, which might be useful when Python is + embedded" according to the Python docs. But review and think + if it should be a user-controllable setting. + + XXX we should also give a way to write errors to a buffer + instead of to stderr. + + XXX if importing 'site' fails, CPython (any version) calls + exit(). Should we try to work around this behavior here? + */ + Py_InitializeEx(0); +} + +static int _cffi_initialize_python(void) +{ + /* This initializes Python, imports _cffi_backend, and then the + present .dll/.so is set up as a CPython C extension module. + */ + int result; + PyGILState_STATE state; + PyObject *pycode=NULL, *global_dict=NULL, *x; + +#if PY_MAJOR_VERSION >= 3 + /* see comments in _cffi_carefully_make_gil() about the + Python2/Python3 difference + */ +#else + /* Acquire the GIL. We have no threadstate here. 
If Python is + already initialized, it is possible that there is already one + existing for this thread, but it is not made current now. + */ + PyEval_AcquireLock(); + + _cffi_py_initialize(); + + /* The Py_InitializeEx() sometimes made a threadstate for us, but + not always. Indeed Py_InitializeEx() could be called and do + nothing. So do we have a threadstate, or not? We don't know, + but we can replace it with NULL in all cases. + */ + (void)PyThreadState_Swap(NULL); + + /* Now we can release the GIL and re-acquire immediately using the + logic of PyGILState(), which handles making or installing the + correct threadstate. + */ + PyEval_ReleaseLock(); +#endif + state = PyGILState_Ensure(); + + /* Call the initxxx() function from the present module. It will + create and initialize us as a CPython extension module, instead + of letting the startup Python code do it---it might reimport + the same .dll/.so and get maybe confused on some platforms. + It might also have troubles locating the .dll/.so again for all + I know. + */ + (void)_CFFI_PYTHON_STARTUP_FUNC(); + if (PyErr_Occurred()) + goto error; + + /* Now run the Python code provided to ffi.embedding_init_code(). + */ + pycode = Py_CompileString(_CFFI_PYTHON_STARTUP_CODE, + "", + Py_file_input); + if (pycode == NULL) + goto error; + global_dict = PyDict_New(); + if (global_dict == NULL) + goto error; + if (PyDict_SetItemString(global_dict, "__builtins__", + PyThreadState_GET()->interp->builtins) < 0) + goto error; + x = PyEval_EvalCode( +#if PY_MAJOR_VERSION < 3 + (PyCodeObject *) +#endif + pycode, global_dict, global_dict); + if (x == NULL) + goto error; + Py_DECREF(x); + + /* Done! Now if we've been called from + _cffi_start_and_call_python() in an ``extern "Python"``, we can + only hope that the Python code did correctly set up the + corresponding @ffi.def_extern() function. 
Otherwise, the + general logic of ``extern "Python"`` functions (inside the + _cffi_backend module) will find that the reference is still + missing and print an error. + */ + result = 0; + done: + Py_XDECREF(pycode); + Py_XDECREF(global_dict); + PyGILState_Release(state); + return result; + + error:; + { + /* Print as much information as potentially useful. + Debugging load-time failures with embedding is not fun + */ + PyObject *exception, *v, *tb, *f, *modules, *mod; + PyErr_Fetch(&exception, &v, &tb); + if (exception != NULL) { + PyErr_NormalizeException(&exception, &v, &tb); + PyErr_Display(exception, v, tb); + } + Py_XDECREF(exception); + Py_XDECREF(v); + Py_XDECREF(tb); + + f = PySys_GetObject((char *)"stderr"); + if (f != NULL && f != Py_None) { + PyFile_WriteString("\nFrom: " _CFFI_MODULE_NAME + "\ncompiled with cffi version: 1.5.2" + "\n_cffi_backend module: ", f); + modules = PyImport_GetModuleDict(); + mod = PyDict_GetItemString(modules, "_cffi_backend"); + if (mod == NULL) { + PyFile_WriteString("not loaded", f); + } + else { + v = PyObject_GetAttrString(mod, "__file__"); + PyFile_WriteObject(v, f, 0); + Py_XDECREF(v); + } + PyFile_WriteString("\nsys.path: ", f); + PyFile_WriteObject(PySys_GetObject((char *)"path"), f, 0); + PyFile_WriteString("\n\n", f); + } + } + result = -1; + goto done; +} + +PyAPI_DATA(char *) _PyParser_TokenNames[]; /* from CPython */ + +static int _cffi_carefully_make_gil(void) +{ + /* This does the basic initialization of Python. It can be called + completely concurrently from unrelated threads. It assumes + that we don't hold the GIL before (if it exists), and we don't + hold it afterwards. + + What it really does is completely different in Python 2 and + Python 3. + + Python 2 + ======== + + Initialize the GIL, without initializing the rest of Python, + by calling PyEval_InitThreads(). + + PyEval_InitThreads() must not be called concurrently at all. + So we use a global variable as a simple spin lock. 
This global + variable must be from 'libpythonX.Y.so', not from this + cffi-based extension module, because it must be shared from + different cffi-based extension modules. We choose + _PyParser_TokenNames[0] as a completely arbitrary pointer value + that is never written to. The default is to point to the + string "ENDMARKER". We change it temporarily to point to the + next character in that string. (Yes, I know it's REALLY + obscure.) + + Python 3 + ======== + + In Python 3, PyEval_InitThreads() cannot be called before + Py_InitializeEx() any more. So this function calls + Py_InitializeEx() first. It uses the same obscure logic to + make sure we never call it concurrently. + + Arguably, this is less good on the spinlock, because + Py_InitializeEx() takes much longer to run than + PyEval_InitThreads(). But I didn't find a way around it. + */ + +#ifdef WITH_THREAD + char *volatile *lock = (char *volatile *)_PyParser_TokenNames; + char *old_value; + + while (1) { /* spin loop */ + old_value = *lock; + if (old_value[0] == 'E') { + assert(old_value[1] == 'N'); + if (cffi_compare_and_swap(lock, old_value, old_value + 1)) + break; + } + else { + assert(old_value[0] == 'N'); + /* should ideally do a spin loop instruction here, but + hard to do it portably and doesn't really matter I + think: PyEval_InitThreads() should be very fast, and + this is only run at start-up anyway. 
*/ + } + } +#endif + +#if PY_MAJOR_VERSION >= 3 + /* Python 3: call Py_InitializeEx() */ + { + PyGILState_STATE state = PyGILState_UNLOCKED; + if (!Py_IsInitialized()) + _cffi_py_initialize(); + else + state = PyGILState_Ensure(); + + PyEval_InitThreads(); + PyGILState_Release(state); + } +#else + /* Python 2: call PyEval_InitThreads() */ +# ifdef WITH_THREAD + if (!PyEval_ThreadsInitialized()) { + PyEval_InitThreads(); /* makes the GIL */ + PyEval_ReleaseLock(); /* then release it */ + } + /* else: there is already a GIL, but we still needed to do the + spinlock dance to make sure that we see it as fully ready */ +# endif +#endif + +#ifdef WITH_THREAD + /* release the lock */ + while (!cffi_compare_and_swap(lock, old_value + 1, old_value)) + ; +#endif + + return 0; +} + +/********** end CPython-specific section **********/ + + +#else + + +/********** PyPy-specific section **********/ + +PyMODINIT_FUNC _CFFI_PYTHON_STARTUP_FUNC(const void *[]); /* forward */ + +static struct _cffi_pypy_init_s { + const char *name; + void (*func)(const void *[]); + const char *code; +} _cffi_pypy_init = { + _CFFI_MODULE_NAME, + _CFFI_PYTHON_STARTUP_FUNC, + _CFFI_PYTHON_STARTUP_CODE, +}; + +extern int pypy_carefully_make_gil(const char *); +extern int pypy_init_embedded_cffi_module(int, struct _cffi_pypy_init_s *); + +static int _cffi_carefully_make_gil(void) +{ + return pypy_carefully_make_gil(_CFFI_MODULE_NAME); +} + +static int _cffi_initialize_python(void) +{ + return pypy_init_embedded_cffi_module(0xB011, &_cffi_pypy_init); +} + +/********** end PyPy-specific section **********/ + + +#endif + + +#ifdef __GNUC__ +__attribute__((noinline)) +#endif +static _cffi_call_python_fnptr _cffi_start_python(void) +{ + /* Delicate logic to initialize Python. This function can be + called multiple times concurrently, e.g. when the process calls + its first ``extern "Python"`` functions in multiple threads at + once. It can also be called recursively, in which case we must + ignore it. 
We also have to consider what occurs if several + different cffi-based extensions reach this code in parallel + threads---it is a different copy of the code, then, and we + can't have any shared global variable unless it comes from + 'libpythonX.Y.so'. + + Idea: + + * _cffi_carefully_make_gil(): "carefully" call + PyEval_InitThreads() (possibly with Py_InitializeEx() first). + + * then we use a (local) custom lock to make sure that a call to this + cffi-based extension will wait if another call to the *same* + extension is running the initialization in another thread. + It is reentrant, so that a recursive call will not block, but + only one from a different thread. + + * then we grab the GIL and (Python 2) we call Py_InitializeEx(). + At this point, concurrent calls to Py_InitializeEx() are not + possible: we have the GIL. + + * do the rest of the specific initialization, which may + temporarily release the GIL but not the custom lock. + Only release the custom lock when we are done. + */ + static char called = 0; + + if (_cffi_carefully_make_gil() != 0) + return NULL; + + _cffi_acquire_reentrant_mutex(); + + /* Here the GIL exists, but we don't have it. We're only protected + from concurrency by the reentrant mutex. */ + + /* This file only initializes the embedded module once, the first + time this is called, even if there are subinterpreters. */ + if (!called) { + called = 1; /* invoke _cffi_initialize_python() only once, + but don't set '_cffi_call_python' right now, + otherwise concurrent threads won't call + this function at all (we need them to wait) */ + if (_cffi_initialize_python() == 0) { + /* now initialization is finished. Switch to the fast-path. */ + + /* We would like nobody to see the new value of + '_cffi_call_python' without also seeing the rest of the + data initialized. However, this is not possible. But + the new value of '_cffi_call_python' is the function + 'cffi_call_python()' from _cffi_backend. 
So: */ + cffi_write_barrier(); + /* ^^^ we put a write barrier here, and a corresponding + read barrier at the start of cffi_call_python(). This + ensures that after that read barrier, we see everything + done here before the write barrier. + */ + + assert(_cffi_call_python_org != NULL); + _cffi_call_python = (_cffi_call_python_fnptr)_cffi_call_python_org; + } + else { + /* initialization failed. Reset this to NULL, even if it was + already set to some other value. Future calls to + _cffi_start_python() are still forced to occur, and will + always return NULL from now on. */ + _cffi_call_python_org = NULL; + } + } + + _cffi_release_reentrant_mutex(); + + return (_cffi_call_python_fnptr)_cffi_call_python_org; +} + +static +void _cffi_start_and_call_python(struct _cffi_externpy_s *externpy, char *args) +{ + _cffi_call_python_fnptr fnptr; + int current_err = errno; +#ifdef _MSC_VER + int current_lasterr = GetLastError(); +#endif + fnptr = _cffi_start_python(); + if (fnptr == NULL) { + fprintf(stderr, "function %s() called, but initialization code " + "failed. Returning 0.\n", externpy->name); + memset(args, 0, externpy->size_of_result); + } +#ifdef _MSC_VER + SetLastError(current_lasterr); +#endif + errno = current_err; + + if (fnptr != NULL) + fnptr(externpy, args); +} + + +/* The cffi_start_python() function makes sure Python is initialized + and our cffi module is set up. It can be called manually from the + user C code. The same effect is obtained automatically from any + dll-exported ``extern "Python"`` function. This function returns + -1 if initialization failed, 0 if all is OK. 
*/ +_CFFI_UNUSED_FN +static int cffi_start_python(void) +{ + if (_cffi_call_python == &_cffi_start_and_call_python) { + if (_cffi_start_python() == NULL) + return -1; + } + cffi_read_barrier(); + return 0; +} + +#undef cffi_compare_and_swap +#undef cffi_write_barrier +#undef cffi_read_barrier diff --git a/lib_pypy/cffi/api.py b/lib_pypy/cffi/api.py --- a/lib_pypy/cffi/api.py +++ b/lib_pypy/cffi/api.py @@ -544,28 +544,50 @@ def _apply_embedding_fix(self, kwds): # must include an argument like "-lpython2.7" for the compiler + def ensure(key, value): + lst = kwds.setdefault(key, []) + if value not in lst: + lst.append(value) + # if '__pypy__' in sys.builtin_module_names: + import os + if sys.platform == "win32": + # we need 'libpypy-c.lib'. Current distributions of + # pypy (>= 4.1) contain it as 'libs/python27.lib'. + pythonlib = "python27" + if hasattr(sys, 'prefix'): + ensure('library_dirs', os.path.join(sys.prefix, 'libs')) + else: + # we need 'libpypy-c.{so,dylib}', which should be by + # default located in 'sys.prefix/bin' for installed + # systems. + pythonlib = "pypy-c" + if hasattr(sys, 'prefix'): + ensure('library_dirs', os.path.join(sys.prefix, 'bin')) + # On uninstalled pypy's, the libpypy-c is typically found in + # .../pypy/goal/. 
if hasattr(sys, 'prefix'): - import os - libdir = os.path.join(sys.prefix, 'bin') - dirs = kwds.setdefault('library_dirs', []) - if libdir not in dirs: - dirs.append(libdir) - pythonlib = "pypy-c" + ensure('library_dirs', os.path.join(sys.prefix, 'pypy', 'goal')) else: if sys.platform == "win32": template = "python%d%d" - if sys.flags.debug: - template = template + '_d' + if hasattr(sys, 'gettotalrefcount'): + template += '_d' else: + try: + import sysconfig + except ImportError: # 2.6 + from distutils import sysconfig template = "python%d.%d" + if sysconfig.get_config_var('DEBUG_EXT'): + template += sysconfig.get_config_var('DEBUG_EXT') pythonlib = (template % (sys.hexversion >> 24, (sys.hexversion >> 16) & 0xff)) if hasattr(sys, 'abiflags'): pythonlib += sys.abiflags - libraries = kwds.setdefault('libraries', []) - if pythonlib not in libraries: - libraries.append(pythonlib) + ensure('libraries', pythonlib) + if sys.platform == "win32": + ensure('extra_link_args', '/MANIFEST') def set_source(self, module_name, source, source_extension='.c', **kwds): if hasattr(self, '_assigned_source'): @@ -631,7 +653,7 @@ compiled DLL. Use '*' to force distutils' choice, suitable for regular CPython C API modules. Use a file name ending in '.*' to ask for the system's default extension for dynamic libraries - (.so/.dll). + (.so/.dll/.dylib). The default is '*' when building a non-embedded C API extension, and (module_name + '.*') when building an embedded library. 
@@ -695,6 +717,10 @@ # self._embedding = pysource + def def_extern(self, *args, **kwds): + raise ValueError("ffi.def_extern() is only available on API-mode FFI " + "objects") + def _load_backend_lib(backend, name, flags): if name is None: diff --git a/lib_pypy/cffi/cparser.py b/lib_pypy/cffi/cparser.py --- a/lib_pypy/cffi/cparser.py +++ b/lib_pypy/cffi/cparser.py @@ -220,7 +220,7 @@ self._included_declarations = set() self._anonymous_counter = 0 self._structnode2type = weakref.WeakKeyDictionary() - self._options = None + self._options = {} self._int_constants = {} self._recomplete = [] self._uses_new_feature = None @@ -374,7 +374,7 @@ def _declare_function(self, tp, quals, decl): tp = self._get_type_pointer(tp, quals) - if self._options['dllexport']: + if self._options.get('dllexport'): tag = 'dllexport_python ' elif self._inside_extern_python: tag = 'extern_python ' @@ -450,7 +450,7 @@ prevobj, prevquals = self._declarations[name] if prevobj is obj and prevquals == quals: return - if not self._options['override']: + if not self._options.get('override'): raise api.FFIError( "multiple declarations of %s (for interactive usage, " "try cdef(xx, override=True))" % (name,)) @@ -729,7 +729,7 @@ if isinstance(tp, model.StructType) and tp.partial: raise NotImplementedError("%s: using both bitfields and '...;'" % (tp,)) - tp.packed = self._options['packed'] + tp.packed = self._options.get('packed') if tp.completed: # must be re-completed: it is not opaque any more tp.completed = 0 self._recomplete.append(tp) diff --git a/lib_pypy/cffi/ffiplatform.py b/lib_pypy/cffi/ffiplatform.py --- a/lib_pypy/cffi/ffiplatform.py +++ b/lib_pypy/cffi/ffiplatform.py @@ -21,14 +21,12 @@ allsources.append(os.path.normpath(src)) return Extension(name=modname, sources=allsources, **kwds) -def compile(tmpdir, ext, compiler_verbose=0, target_extension=None, - embedding=False): +def compile(tmpdir, ext, compiler_verbose=0): """Compile a C extension module using distutils.""" saved_environ = 
os.environ.copy() try: - outputfilename = _build(tmpdir, ext, compiler_verbose, - target_extension, embedding) + outputfilename = _build(tmpdir, ext, compiler_verbose) outputfilename = os.path.abspath(outputfilename) finally: # workaround for a distutils bugs where some env vars can @@ -38,32 +36,7 @@ os.environ[key] = value return outputfilename -def _save_val(name): - import distutils.sysconfig - config_vars = distutils.sysconfig.get_config_vars() - return config_vars.get(name, Ellipsis) - -def _restore_val(name, value): - import distutils.sysconfig - config_vars = distutils.sysconfig.get_config_vars() - config_vars[name] = value - if value is Ellipsis: - del config_vars[name] - -def _win32_hack_for_embedding(): - from distutils.msvc9compiler import MSVCCompiler - if not hasattr(MSVCCompiler, '_remove_visual_c_ref_CFFI_BAK'): - MSVCCompiler._remove_visual_c_ref_CFFI_BAK = \ - MSVCCompiler._remove_visual_c_ref - MSVCCompiler._remove_visual_c_ref = lambda self,manifest_file: manifest_file - -def _win32_unhack_for_embedding(): - from distutils.msvc9compiler import MSVCCompiler - MSVCCompiler._remove_visual_c_ref = \ - MSVCCompiler._remove_visual_c_ref_CFFI_BAK - -def _build(tmpdir, ext, compiler_verbose=0, target_extension=None, - embedding=False): +def _build(tmpdir, ext, compiler_verbose=0): # XXX compact but horrible :-( from distutils.core import Distribution import distutils.errors, distutils.log @@ -76,25 +49,14 @@ options['build_temp'] = ('ffiplatform', tmpdir) # try: - if sys.platform == 'win32' and embedding: - _win32_hack_for_embedding() old_level = distutils.log.set_threshold(0) or 0 - old_SO = _save_val('SO') - old_EXT_SUFFIX = _save_val('EXT_SUFFIX') try: - if target_extension is not None: - _restore_val('SO', target_extension) - _restore_val('EXT_SUFFIX', target_extension) distutils.log.set_verbosity(compiler_verbose) dist.run_command('build_ext') cmd_obj = dist.get_command_obj('build_ext') [soname] = cmd_obj.get_outputs() finally: 
distutils.log.set_threshold(old_level) - _restore_val('SO', old_SO) - _restore_val('EXT_SUFFIX', old_EXT_SUFFIX) - if sys.platform == 'win32' and embedding: - _win32_unhack_for_embedding() except (distutils.errors.CompileError, distutils.errors.LinkError) as e: raise VerificationError('%s: %s' % (e.__class__.__name__, e)) diff --git a/lib_pypy/cffi/recompiler.py b/lib_pypy/cffi/recompiler.py --- a/lib_pypy/cffi/recompiler.py +++ b/lib_pypy/cffi/recompiler.py @@ -1170,6 +1170,8 @@ repr_arguments = ', '.join(arguments) repr_arguments = repr_arguments or 'void' name_and_arguments = '%s(%s)' % (name, repr_arguments) + if tp.abi == "__stdcall": + name_and_arguments = '_cffi_stdcall ' + name_and_arguments # def may_need_128_bits(tp): return (isinstance(tp, model.PrimitiveType) and @@ -1357,6 +1359,58 @@ parts[-1] += extension return os.path.join(outputdir, *parts), parts + +# Aaargh. Distutils is not tested at all for the purpose of compiling +# DLLs that are not extension modules. Here are some hacks to work +# around that, in the _patch_for_*() functions... + +def _patch_meth(patchlist, cls, name, new_meth): + old = getattr(cls, name) + patchlist.append((cls, name, old)) + setattr(cls, name, new_meth) + return old + +def _unpatch_meths(patchlist): + for cls, name, old_meth in reversed(patchlist): + setattr(cls, name, old_meth) + +def _patch_for_embedding(patchlist): + if sys.platform == 'win32': + # we must not remove the manifest when building for embedding! 
+ from distutils.msvc9compiler import MSVCCompiler + _patch_meth(patchlist, MSVCCompiler, '_remove_visual_c_ref', + lambda self, manifest_file: manifest_file) + + if sys.platform == 'darwin': + # we must not make a '-bundle', but a '-dynamiclib' instead + from distutils.ccompiler import CCompiler + def my_link_shared_object(self, *args, **kwds): + if '-bundle' in self.linker_so: + self.linker_so = list(self.linker_so) + i = self.linker_so.index('-bundle') + self.linker_so[i] = '-dynamiclib' + return old_link_shared_object(self, *args, **kwds) + old_link_shared_object = _patch_meth(patchlist, CCompiler, + 'link_shared_object', + my_link_shared_object) + +def _patch_for_target(patchlist, target): + from distutils.command.build_ext import build_ext + # if 'target' is different from '*', we need to patch some internal + # method to just return this 'target' value, instead of having it + # built from module_name + if target.endswith('.*'): + target = target[:-2] + if sys.platform == 'win32': + target += '.dll' + elif sys.platform == 'darwin': + target += '.dylib' + else: + target += '.so' + _patch_meth(patchlist, build_ext, 'get_ext_filename', + lambda self, ext_name: target) + + def recompile(ffi, module_name, preamble, tmpdir='.', call_c_compiler=True, c_file=None, source_extension='.c', extradir=None, compiler_verbose=1, target=None, **kwds): @@ -1382,36 +1436,22 @@ target = '%s.*' % module_name else: target = '*' - if target == '*': - target_module_name = module_name - target_extension = None # use default - else: - if target.endswith('.*'): - target = target[:-2] - if sys.platform == 'win32': - target += '.dll' - else: - target += '.so' - # split along the first '.' 
(not the last one, otherwise the - # preceeding dots are interpreted as splitting package names) - index = target.find('.') - if index < 0: - raise ValueError("target argument %r should be a file name " - "containing a '.'" % (target,)) - target_module_name = target[:index] - target_extension = target[index:] # - ext = ffiplatform.get_extension(ext_c_file, target_module_name, **kwds) + ext = ffiplatform.get_extension(ext_c_file, module_name, **kwds) updated = make_c_source(ffi, module_name, preamble, c_file) if call_c_compiler: + patchlist = [] cwd = os.getcwd() try: + if embedding: + _patch_for_embedding(patchlist) + if target != '*': + _patch_for_target(patchlist, target) os.chdir(tmpdir) - outputfilename = ffiplatform.compile('.', ext, compiler_verbose, - target_extension, - embedding=embedding) + outputfilename = ffiplatform.compile('.', ext, compiler_verbose) finally: os.chdir(cwd) + _unpatch_meths(patchlist) return outputfilename else: return ext, updated diff --git a/lib_pypy/cffi/vengine_cpy.py b/lib_pypy/cffi/vengine_cpy.py --- a/lib_pypy/cffi/vengine_cpy.py +++ b/lib_pypy/cffi/vengine_cpy.py @@ -1,3 +1,6 @@ +# +# DEPRECATED: implementation for ffi.verify() +# import sys, imp from . import model, ffiplatform diff --git a/lib_pypy/cffi/vengine_gen.py b/lib_pypy/cffi/vengine_gen.py --- a/lib_pypy/cffi/vengine_gen.py +++ b/lib_pypy/cffi/vengine_gen.py @@ -1,3 +1,6 @@ +# +# DEPRECATED: implementation for ffi.verify() +# import sys, os import types diff --git a/lib_pypy/cffi/verifier.py b/lib_pypy/cffi/verifier.py --- a/lib_pypy/cffi/verifier.py +++ b/lib_pypy/cffi/verifier.py @@ -1,3 +1,6 @@ +# +# DEPRECATED: implementation for ffi.verify() +# import sys, os, binascii, shutil, io from . import __version_verifier_modules__ from . 
import ffiplatform diff --git a/pypy/config/pypyoption.py b/pypy/config/pypyoption.py --- a/pypy/config/pypyoption.py +++ b/pypy/config/pypyoption.py @@ -36,13 +36,16 @@ "cStringIO", "thread", "itertools", "pyexpat", "_ssl", "cpyext", "array", "binascii", "_multiprocessing", '_warnings', "_collections", "_multibytecodec", "micronumpy", "_continuation", "_cffi_backend", - "_csv", "cppyy", "_pypyjson" + "_csv", "cppyy", "_pypyjson", ]) -if ((sys.platform.startswith('linux') or sys.platform == 'darwin') - and os.uname()[4] == 'x86_64' and sys.maxint > 2**32): - # it's not enough that we get x86_64 - working_modules.add('_vmprof') +from rpython.jit.backend import detect_cpu +try: + if detect_cpu.autodetect().startswith('x86'): + working_modules.add('_vmprof') +except detect_cpu.ProcessorAutodetectError: + pass + translation_modules = default_modules.copy() translation_modules.update([ diff --git a/pypy/doc/whatsnew-head.rst b/pypy/doc/whatsnew-head.rst --- a/pypy/doc/whatsnew-head.rst +++ b/pypy/doc/whatsnew-head.rst @@ -7,6 +7,9 @@ Fixed ``_PyLong_FromByteArray()``, which was buggy. +Fixed a crash with stacklets (or greenlets) on non-Linux machines +which showed up if you forget stacklets without resuming them. + .. branch: numpy-1.10 Fix tests to run cleanly with -A and start to fix micronumpy for upstream numpy @@ -38,7 +41,8 @@ .. branch: compress-numbering -Improve the memory signature of numbering instances in the JIT. +Improve the memory signature of numbering instances in the JIT. This should massively +decrease the amount of memory consumed by the JIT, which is significant for most programs. .. branch: fix-trace-too-long-heuristic @@ -139,3 +143,23 @@ Refactor sandboxing to operate at a higher level. .. branch: cpyext-bootstrap + +.. branch: vmprof-newstack + +Refactor vmprof to work cross-operating-system. + +.. 
branch: seperate-strucmember_h + +Seperate structmember.h from Python.h Also enhance creating api functions +to specify which header file they appear in (previously only pypy_decl.h) + +.. branch: llimpl + +Refactor register_external(), remove running_on_llinterp mechanism and +apply sandbox transform on externals at the end of annotation. + +.. branch: cffi-embedding-win32 + +.. branch: windows-vmprof-support + +vmprof should work on Windows. diff --git a/pypy/goal/targetpypystandalone.py b/pypy/goal/targetpypystandalone.py --- a/pypy/goal/targetpypystandalone.py +++ b/pypy/goal/targetpypystandalone.py @@ -239,6 +239,9 @@ raise Exception("Cannot use the --output option with PyPy " "when --shared is on (it is by default). " "See issue #1971.") + if sys.platform == 'win32': + config.translation.libname = '..\\..\\libs\\python27.lib' + thisdir.join('..', '..', 'libs').ensure(dir=1) if config.translation.thread: config.objspace.usemodules.thread = True diff --git a/pypy/interpreter/baseobjspace.py b/pypy/interpreter/baseobjspace.py --- a/pypy/interpreter/baseobjspace.py +++ b/pypy/interpreter/baseobjspace.py @@ -306,7 +306,7 @@ return None -class W_InterpIterable(W_Root): +class InterpIterable(object): def __init__(self, space, w_iterable): self.w_iter = space.iter(w_iterable) self.space = space @@ -745,9 +745,13 @@ return self.int_w(self.hash(w_obj)) def len_w(self, w_obj): - """shotcut for space.int_w(space.len(w_obj))""" + """shortcut for space.int_w(space.len(w_obj))""" return self.int_w(self.len(w_obj)) + def contains_w(self, w_container, w_item): + """shortcut for space.is_true(space.contains(w_container, w_item))""" + return self.is_true(self.contains(w_container, w_item)) + def setitem_str(self, w_obj, key, w_value): return self.setitem(w_obj, self.wrap(key), w_value) @@ -846,7 +850,7 @@ return lst_w[:] # make the resulting list resizable def iteriterable(self, w_iterable): - return W_InterpIterable(self, w_iterable) + return InterpIterable(self, w_iterable) 
def _unpackiterable_unknown_length(self, w_iterator, w_iterable): """Unpack an iterable of unknown length into an interp-level @@ -1237,7 +1241,7 @@ if not isinstance(statement, PyCode): raise TypeError('space.exec_(): expected a string, code or PyCode object') w_key = self.wrap('__builtins__') - if not self.is_true(self.contains(w_globals, w_key)): + if not self.contains_w(w_globals, w_key): self.setitem(w_globals, w_key, self.wrap(self.builtin)) return statement.exec_code(self, w_globals, w_locals) diff --git a/pypy/module/__builtin__/interp_classobj.py b/pypy/module/__builtin__/interp_classobj.py --- a/pypy/module/__builtin__/interp_classobj.py +++ b/pypy/module/__builtin__/interp_classobj.py @@ -20,7 +20,7 @@ if not space.isinstance_w(w_dict, space.w_dict): raise_type_err(space, 'bases', 'tuple', w_bases) - if not space.is_true(space.contains(w_dict, space.wrap("__doc__"))): + if not space.contains_w(w_dict, space.wrap("__doc__")): space.setitem(w_dict, space.wrap("__doc__"), space.w_None) # XXX missing: lengthy and obscure logic about "__module__" diff --git a/pypy/module/_cffi_backend/__init__.py b/pypy/module/_cffi_backend/__init__.py --- a/pypy/module/_cffi_backend/__init__.py +++ b/pypy/module/_cffi_backend/__init__.py @@ -3,7 +3,7 @@ from rpython.rlib import rdynload, clibffi, entrypoint from rpython.rtyper.lltypesystem import rffi -VERSION = "1.5.0" +VERSION = "1.5.2" FFI_DEFAULT_ABI = clibffi.FFI_DEFAULT_ABI try: @@ -69,6 +69,7 @@ def startup(self, space): from pypy.module._cffi_backend import embedding embedding.glob.space = space + embedding.glob.patched_sys = False def get_dict_rtld_constants(): diff --git a/pypy/module/_cffi_backend/embedding.py b/pypy/module/_cffi_backend/embedding.py --- a/pypy/module/_cffi_backend/embedding.py +++ b/pypy/module/_cffi_backend/embedding.py @@ -45,6 +45,26 @@ pass glob = Global() +def patch_sys(space): + # Annoying: CPython would just use the C-level std{in,out,err} as + # configured by the main application, for 
example in binary mode + # on Windows or with buffering turned off. We can't easily do the + # same. Instead, go for the safest bet (but possibly bad for + # performance) and open sys.std{in,out,err} unbuffered. On + # Windows I guess binary mode is a better default choice. + # + # XXX if needed, we could add support for a flag passed to + # pypy_init_embedded_cffi_module(). + if not glob.patched_sys: + space.appexec([], """(): + import os, sys + sys.stdin = sys.__stdin__ = os.fdopen(0, 'rb', 0) + sys.stdout = sys.__stdout__ = os.fdopen(1, 'wb', 0) + sys.stderr = sys.__stderr__ = os.fdopen(2, 'wb', 0) + """) + glob.patched_sys = True + + def pypy_init_embedded_cffi_module(version, init_struct): # called from __init__.py name = "?" @@ -56,6 +76,7 @@ must_leave = False try: must_leave = space.threadlocals.try_enter_thread(space) + patch_sys(space) load_embedded_cffi_module(space, version, init_struct) res = 0 except OperationError, operr: @@ -85,14 +106,86 @@ # ____________________________________________________________ +if os.name == 'nt': -eci = ExternalCompilationInfo(separate_module_sources=[ -r""" -/* XXX Windows missing */ -#include + do_includes = r""" +#define _WIN32_WINNT 0x0501 +#include + +#define CFFI_INIT_HOME_PATH_MAX _MAX_PATH +static void _cffi_init(void); +static void _cffi_init_error(const char *msg, const char *extra); + +static int _cffi_init_home(char *output_home_path) +{ + HMODULE hModule = 0; + DWORD res; + + GetModuleHandleEx(GET_MODULE_HANDLE_EX_FLAG_FROM_ADDRESS | + GET_MODULE_HANDLE_EX_FLAG_UNCHANGED_REFCOUNT, + (LPCTSTR)&_cffi_init, &hModule); + + if (hModule == 0 ) { + _cffi_init_error("GetModuleHandleEx() failed", ""); + return -1; + } + res = GetModuleFileName(hModule, output_home_path, CFFI_INIT_HOME_PATH_MAX); + if (res >= CFFI_INIT_HOME_PATH_MAX) { + return -1; + } + return 0; +} + +static void _cffi_init_once(void) +{ + static LONG volatile lock = 0; + static int _init_called = 0; + + while (InterlockedCompareExchange(&lock, 1, 0) 
!= 0) { + SwitchToThread(); /* spin loop */ + } + if (!_init_called) { + _cffi_init(); + _init_called = 1; + } + InterlockedCompareExchange(&lock, 0, 1); +} +""" + +else: + + do_includes = r""" #include #include +#define CFFI_INIT_HOME_PATH_MAX PATH_MAX +static void _cffi_init(void); +static void _cffi_init_error(const char *msg, const char *extra); + +static int _cffi_init_home(char *output_home_path) +{ + Dl_info info; + dlerror(); /* reset */ + if (dladdr(&_cffi_init, &info) == 0) { + _cffi_init_error("dladdr() failed: ", dlerror()); + return -1; + } + if (realpath(info.dli_fname, output_home_path) == NULL) { + perror("realpath() failed"); + _cffi_init_error("realpath() failed", ""); + return -1; + } + return 0; +} + +static void _cffi_init_once(void) +{ + static pthread_once_t once_control = PTHREAD_ONCE_INIT; + pthread_once(&once_control, _cffi_init); +} +""" + +do_startup = do_includes + r""" RPY_EXPORTED void rpython_startup_code(void); RPY_EXPORTED int pypy_setup_home(char *, int); @@ -108,17 +201,13 @@ static void _cffi_init(void) { - Dl_info info; - char *home; + char home[CFFI_INIT_HOME_PATH_MAX + 1]; rpython_startup_code(); RPyGilAllocate(); - if (dladdr(&_cffi_init, &info) == 0) { - _cffi_init_error("dladdr() failed: ", dlerror()); + if (_cffi_init_home(home) != 0) return; - } - home = realpath(info.dli_fname, NULL); if (pypy_setup_home(home, 1) != 0) { _cffi_init_error("pypy_setup_home() failed", ""); return; @@ -134,13 +223,12 @@ It assumes that we don't hold the GIL before (if it exists), and we don't hold it afterwards. 
*/ - static pthread_once_t once_control = PTHREAD_ONCE_INIT; - _cffi_module_name = name; /* not really thread-safe, but better than nothing */ - pthread_once(&once_control, _cffi_init); + _cffi_init_once(); return (int)_cffi_ready - 1; } -"""]) +""" +eci = ExternalCompilationInfo(separate_module_sources=[do_startup]) declare_c_function = rffi.llexternal_use_eci(eci) diff --git a/pypy/module/_cffi_backend/test/_backend_test_c.py b/pypy/module/_cffi_backend/test/_backend_test_c.py --- a/pypy/module/_cffi_backend/test/_backend_test_c.py +++ b/pypy/module/_cffi_backend/test/_backend_test_c.py @@ -1,7 +1,7 @@ # ____________________________________________________________ import sys -assert __version__ == "1.5.0", ("This test_c.py file is for testing a version" +assert __version__ == "1.5.2", ("This test_c.py file is for testing a version" " of cffi that differs from the one that we" " get from 'import _cffi_backend'") if sys.version_info < (3,): diff --git a/pypy/module/_cffi_backend/test/test_ztranslation.py b/pypy/module/_cffi_backend/test/test_ztranslation.py --- a/pypy/module/_cffi_backend/test/test_ztranslation.py +++ b/pypy/module/_cffi_backend/test/test_ztranslation.py @@ -4,15 +4,18 @@ # side-effect: FORMAT_LONGDOUBLE must be built before test_checkmodule() from pypy.module._cffi_backend import misc -from pypy.module._cffi_backend import cffi1_module +from pypy.module._cffi_backend import embedding def test_checkmodule(): # prepare_file_argument() is not working without translating the _file # module too def dummy_prepare_file_argument(space, fileobj): - # call load_cffi1_module() too, from a random place like here - cffi1_module.load_cffi1_module(space, "foo", "foo", 42) + # call pypy_init_embedded_cffi_module() from a random place like here + # --- this calls load_cffi1_module(), too + embedding.pypy_init_embedded_cffi_module( + rffi.cast(rffi.INT, embedding.EMBED_VERSION_MIN), + 42) return lltype.nullptr(rffi.CCHARP.TO) old = ctypeptr.prepare_file_argument 
try: diff --git a/pypy/module/_demo/test/test_import.py b/pypy/module/_demo/test/test_import.py --- a/pypy/module/_demo/test/test_import.py +++ b/pypy/module/_demo/test/test_import.py @@ -12,8 +12,7 @@ w_modules = space.sys.get('modules') assert _demo.Module.demo_events == ['setup'] - assert not space.is_true(space.contains(w_modules, - space.wrap('_demo'))) + assert not space.contains_w(w_modules, space.wrap('_demo')) # first import w_import = space.builtin.get('__import__') diff --git a/pypy/module/_socket/test/test_sock_app.py b/pypy/module/_socket/test/test_sock_app.py --- a/pypy/module/_socket/test/test_sock_app.py +++ b/pypy/module/_socket/test/test_sock_app.py From pypy.commits at gmail.com Sun Feb 21 06:55:12 2016 From: pypy.commits at gmail.com (mattip) Date: Sun, 21 Feb 2016 03:55:12 -0800 (PST) Subject: [pypy-commit] pypy cpyext-ext: fixes Message-ID: <56c9a5a0.c1b3c20a.9ea7d.ffffcc48@mx.google.com> Author: mattip Branch: cpyext-ext Changeset: r82361:3472ea77ece7 Date: 2016-02-21 10:12 +0100 http://bitbucket.org/pypy/pypy/changeset/3472ea77ece7/ Log: fixes diff --git a/pypy/module/cpyext/sequence.py b/pypy/module/cpyext/sequence.py --- a/pypy/module/cpyext/sequence.py +++ b/pypy/module/cpyext/sequence.py @@ -4,7 +4,7 @@ from pypy.objspace.std.listobject import ListStrategy from pypy.module.cpyext.api import ( cpython_api, CANNOT_FAIL, CONST_STRING, Py_ssize_t, PyObject, PyObjectP) -from pypy.module.cpyext.pyobject import PyObject +from pypy.module.cpyext.pyobject import PyObject, make_ref, from_ref from rpython.rtyper.lltypesystem import rffi, lltype from pypy.objspace.std import listobject, tupleobject diff --git a/pypy/module/cpyext/test/test_stringobject.py b/pypy/module/cpyext/test/test_stringobject.py --- a/pypy/module/cpyext/test/test_stringobject.py +++ b/pypy/module/cpyext/test/test_stringobject.py @@ -30,7 +30,7 @@ result = 1; } #ifdef PYPY_VERSION - size_t expected_size = sizeof(void*)*6; + size_t expected_size = sizeof(void*)*7; #else size_t 
expected_size = 37; #endif diff --git a/pypy/module/cpyext/typeobject.py b/pypy/module/cpyext/typeobject.py --- a/pypy/module/cpyext/typeobject.py +++ b/pypy/module/cpyext/typeobject.py @@ -154,8 +154,6 @@ w_obj = space.allocate_instance(W_MemberDescr, w_type) w_obj.__init__(member, w_type) track_reference(space, obj, w_obj) - state = space.fromcache(RefcountState) - state.set_lifeline(w_obj, obj) return w_obj def getsetdescr_attach(space, py_obj, w_obj): From pypy.commits at gmail.com Sun Feb 21 06:55:13 2016 From: pypy.commits at gmail.com (mattip) Date: Sun, 21 Feb 2016 03:55:13 -0800 (PST) Subject: [pypy-commit] pypy cpyext-gc-support-2: merge heads Message-ID: <56c9a5a1.4c181c0a.d330b.ffff823f@mx.google.com> Author: mattip Branch: cpyext-gc-support-2 Changeset: r82362:fdbbb1763463 Date: 2016-02-21 12:54 +0100 http://bitbucket.org/pypy/pypy/changeset/fdbbb1763463/ Log: merge heads diff --git a/rpython/rlib/rawrefcount.py b/rpython/rlib/rawrefcount.py --- a/rpython/rlib/rawrefcount.py +++ b/rpython/rlib/rawrefcount.py @@ -11,8 +11,8 @@ from rpython.rlib import rgc -REFCNT_FROM_PYPY = 80 -REFCNT_FROM_PYPY_LIGHT = REFCNT_FROM_PYPY + (sys.maxint//2+1) +REFCNT_FROM_PYPY = sys.maxint // 4 + 1 +REFCNT_FROM_PYPY_LIGHT = REFCNT_FROM_PYPY + (sys.maxint // 2 + 1) RAWREFCOUNT_DEALLOC_TRIGGER = lltype.Ptr(lltype.FuncType([], lltype.Void)) From pypy.commits at gmail.com Sun Feb 21 06:55:08 2016 From: pypy.commits at gmail.com (mattip) Date: Sun, 21 Feb 2016 03:55:08 -0800 (PST) Subject: [pypy-commit] pypy cpyext-ext: merge cpyext-gc-support-2 into brancj Message-ID: <56c9a59c.05e41c0a.86363.ffff8664@mx.google.com> Author: mattip Branch: cpyext-ext Changeset: r82359:48408805395d Date: 2016-02-20 14:33 +0100 http://bitbucket.org/pypy/pypy/changeset/48408805395d/ Log: merge cpyext-gc-support-2 into brancj diff too long, truncating to 2000 out of 4643 lines diff --git a/lib_pypy/_pypy_testcapi.py b/lib_pypy/_pypy_testcapi.py --- a/lib_pypy/_pypy_testcapi.py +++ 
b/lib_pypy/_pypy_testcapi.py @@ -7,6 +7,7 @@ content = fid.read() # from cffi's Verifier() key = '\x00'.join([sys.version[:3], content]) + key += 'cpyext-gc-support-2' # this branch requires recompilation! if sys.version_info >= (3,): key = key.encode('utf-8') k1 = hex(binascii.crc32(key[0::2]) & 0xffffffff) diff --git a/pypy/doc/discussion/rawrefcount.rst b/pypy/doc/discussion/rawrefcount.rst new file mode 100644 --- /dev/null +++ b/pypy/doc/discussion/rawrefcount.rst @@ -0,0 +1,158 @@ +====================== +Rawrefcount and the GC +====================== + + +GC Interface +------------ + +"PyObject" is a raw structure with at least two fields, ob_refcnt and +ob_pypy_link. The ob_refcnt is the reference counter as used on +CPython. If the PyObject structure is linked to a live PyPy object, +its current address is stored in ob_pypy_link and ob_refcnt is bumped +by either the constant REFCNT_FROM_PYPY, or the constant +REFCNT_FROM_PYPY_LIGHT (== REFCNT_FROM_PYPY + SOME_HUGE_VALUE) +(to mean "light finalizer"). + +Most PyPy objects exist outside cpyext, and conversely in cpyext it is +possible that a lot of PyObjects exist without being seen by the rest +of PyPy. At the interface, however, we can "link" a PyPy object and a +PyObject. There are two kinds of link: + +rawrefcount.create_link_pypy(p, ob) + + Makes a link between an exising object gcref 'p' and a newly + allocated PyObject structure 'ob'. ob->ob_refcnt must be + initialized to either REFCNT_FROM_PYPY, or + REFCNT_FROM_PYPY_LIGHT. (The second case is an optimization: + when the GC finds the PyPy object and PyObject no longer + referenced, it can just free() the PyObject.) + +rawrefcount.create_link_pyobj(p, ob) + + Makes a link from an existing PyObject structure 'ob' to a newly + allocated W_CPyExtPlaceHolderObject 'p'. You must also add + REFCNT_FROM_PYPY to ob->ob_refcnt. For cases where the PyObject + contains all the data, and the PyPy object is just a proxy. 
The + W_CPyExtPlaceHolderObject should have only a field that contains + the address of the PyObject, but that's outside the scope of the + GC. + +rawrefcount.from_obj(p) + + If there is a link from object 'p' made with create_link_pypy(), + returns the corresponding 'ob'. Otherwise, returns NULL. + +rawrefcount.to_obj(Class, ob) + + Returns ob->ob_pypy_link, cast to an instance of 'Class'. + + +Collection logic +---------------- + +Objects existing purely on the C side have ob->ob_pypy_link == 0; +these are purely reference counted. On the other hand, if +ob->ob_pypy_link != 0, then ob->ob_refcnt is at least REFCNT_FROM_PYPY +and the object is part of a "link". + +The idea is that links whose 'p' is not reachable from other PyPy +objects *and* whose 'ob->ob_refcnt' is REFCNT_FROM_PYPY or +REFCNT_FROM_PYPY_LIGHT are the ones who die. But it is more messy +because PyObjects still (usually) need to have a tp_dealloc called, +and this cannot occur immediately (and can do random things like +accessing other references this object points to, or resurrecting the +object). + +Let P = list of links created with rawrefcount.create_link_pypy() +and O = list of links created with rawrefcount.create_link_pyobj(). +The PyPy objects in the list O are all W_CPyExtPlaceHolderObject: all +the data is in the PyObjects, and all outsite references (if any) are +in C, as "PyObject *" fields. 
+ +So, during the collection we do this about P links: + + for (p, ob) in P: + if ob->ob_refcnt != REFCNT_FROM_PYPY + and ob->ob_refcnt != REFCNT_FROM_PYPY_LIGHT: + mark 'p' as surviving, as well as all its dependencies + +At the end of the collection, the P and O links are both handled like +this: + + for (p, ob) in P + O: + if p is not surviving: # even if 'ob' might be surviving + unlink p and ob + if ob->ob_refcnt == REFCNT_FROM_PYPY_LIGHT: + free(ob) + elif ob->ob_refcnt > REFCNT_FROM_PYPY_LIGHT: + ob->ob_refcnt -= REFCNT_FROM_PYPY_LIGHT + else: + ob->ob_refcnt -= REFCNT_FROM_PYPY + if ob->ob_refcnt == 0: + invoke _Py_Dealloc(ob) later, outside the GC + + +GC Implementation +----------------- + +We need two copies of both the P list and O list, for young or old +objects. All four lists can be regular AddressLists of 'ob' objects. + +We also need an AddressDict mapping 'p' to 'ob' for all links in the P +list, and update it when PyPy objects move. + + +Further notes +------------- + +XXX +XXX the rest is the ideal world, but as a first step, we'll look +XXX for the minimal tweaks needed to adapt the existing cpyext +XXX + +For objects that are opaque in CPython, like , we always create +a PyPy object, and then when needed we make an empty PyObject and +attach it with create_link_pypy()/REFCNT_FROM_PYPY_LIGHT. + +For and objects, the corresponding PyObjects contain a +"long" or "double" field too. We link them with create_link_pypy() +and we can use REFCNT_FROM_PYPY_LIGHT too: 'tp_dealloc' doesn't +need to be called, and instead just calling free() is fine. + +For objects, we need both a PyPy and a PyObject side. These +are made with create_link_pypy()/REFCNT_FROM_PYPY. + +For custom PyXxxObjects allocated from the C extension module, we +need create_link_pyobj(). + +For or objects coming from PyPy, we use +create_link_pypy()/REFCNT_FROM_PYPY_LIGHT with a PyObject +preallocated with the size of the string. 
We copy the string +lazily into that area if PyString_AS_STRING() is called. + +For , , or objects in the C extension +module, we first allocate it as only a PyObject, which supports +mutation of the data from C, like CPython. When it is exported to +PyPy we could make a W_CPyExtPlaceHolderObject with +create_link_pyobj(). + +For objects coming from PyPy, if they are not specialized, +then the PyPy side holds a regular reference to the items. Then we +can allocate a PyTupleObject and store in it borrowed PyObject +pointers to the items. Such a case is created with +create_link_pypy()/REFCNT_FROM_PYPY_LIGHT. If it is specialized, +then it doesn't work because the items are created just-in-time on the +PyPy side. In this case, the PyTupleObject needs to hold real +references to the PyObject items, and we use create_link_pypy()/ +REFCNT_FROM_PYPY. In all cases, we have a C array of PyObjects +that we can directly return from PySequence_Fast_ITEMS, PyTuple_ITEMS, +PyTuple_GetItem, and so on. + +For objects coming from PyPy, we can use a cpyext list +strategy. The list turns into a PyListObject, as if it had been +allocated from C in the first place. The special strategy can hold +(only) a direct reference to the PyListObject, and we can use either +create_link_pyobj() or create_link_pypy() (to be decided). +PySequence_Fast_ITEMS then works for lists too, and PyList_GetItem +can return a borrowed reference, and so on. 
diff --git a/pypy/interpreter/baseobjspace.py b/pypy/interpreter/baseobjspace.py --- a/pypy/interpreter/baseobjspace.py +++ b/pypy/interpreter/baseobjspace.py @@ -27,7 +27,7 @@ class W_Root(object): """This is the abstract root class of all wrapped objects that live in a 'normal' object space like StdObjSpace.""" - __slots__ = () + __slots__ = ('__weakref__',) user_overridden_class = False def getdict(self, space): diff --git a/pypy/module/cpyext/__init__.py b/pypy/module/cpyext/__init__.py --- a/pypy/module/cpyext/__init__.py +++ b/pypy/module/cpyext/__init__.py @@ -60,7 +60,6 @@ import pypy.module.cpyext.funcobject import pypy.module.cpyext.frameobject import pypy.module.cpyext.classobject -import pypy.module.cpyext.pypyintf import pypy.module.cpyext.memoryobject import pypy.module.cpyext.codecs import pypy.module.cpyext.pyfile diff --git a/pypy/module/cpyext/api.py b/pypy/module/cpyext/api.py --- a/pypy/module/cpyext/api.py +++ b/pypy/module/cpyext/api.py @@ -9,7 +9,7 @@ from rpython.rtyper.tool import rffi_platform from rpython.rtyper.lltypesystem import ll2ctypes from rpython.rtyper.annlowlevel import llhelper -from rpython.rlib.objectmodel import we_are_translated +from rpython.rlib.objectmodel import we_are_translated, keepalive_until_here from rpython.translator import cdir from rpython.translator.tool.cbuild import ExternalCompilationInfo from rpython.translator.gensupp import NameManager @@ -30,13 +30,13 @@ from rpython.rlib.rposix import is_valid_fd, validate_fd from rpython.rlib.unroll import unrolling_iterable from rpython.rlib.objectmodel import specialize -from rpython.rlib.exports import export_struct from pypy.module import exceptions from pypy.module.exceptions import interp_exceptions # CPython 2.4 compatibility from py.builtin import BaseException from rpython.tool.sourcetools import func_with_new_name from rpython.rtyper.lltypesystem.lloperation import llop +from rpython.rlib import rawrefcount DEBUG_WRAPPER = True @@ -196,7 +196,7 @@ class 
ApiFunction: def __init__(self, argtypes, restype, callable, error=_NOT_SPECIFIED, - c_name=None, gil=None): + c_name=None, gil=None, result_borrowed=False): self.argtypes = argtypes self.restype = restype self.functype = lltype.Ptr(lltype.FuncType(argtypes, restype)) @@ -213,17 +213,15 @@ self.argnames = argnames[1:] assert len(self.argnames) == len(self.argtypes) self.gil = gil + self.result_borrowed = result_borrowed + # + def get_llhelper(space): + return llhelper(self.functype, self.get_wrapper(space)) + self.get_llhelper = get_llhelper def _freeze_(self): return True - def get_llhelper(self, space): - llh = getattr(self, '_llhelper', None) - if llh is None: - llh = llhelper(self.functype, self.get_wrapper(space)) - self._llhelper = llh - return llh - @specialize.memo() def get_wrapper(self, space): wrapper = getattr(self, '_wrapper', None) @@ -236,7 +234,7 @@ return wrapper def cpython_api(argtypes, restype, error=_NOT_SPECIFIED, header=pypy_decl, - gil=None): + gil=None, result_borrowed=False): """ Declares a function to be exported. - `argtypes`, `restype` are lltypes and describe the function signature. @@ -265,13 +263,15 @@ rffi.cast(restype, 0) == 0) def decorate(func): + func._always_inline_ = 'try' func_name = func.func_name if header is not None: c_name = None else: c_name = func_name api_function = ApiFunction(argtypes, restype, func, error, - c_name=c_name, gil=gil) + c_name=c_name, gil=gil, + result_borrowed=result_borrowed) func.api_func = api_function if header is not None: @@ -282,6 +282,10 @@ raise ValueError("function %s has no return value for exceptions" % func) def make_unwrapper(catch_exception): + # ZZZ is this whole logic really needed??? It seems to be only + # for RPython code calling PyXxx() functions directly. 
I would + # think that usually directly calling the function is clean + # enough now names = api_function.argnames types_names_enum_ui = unrolling_iterable(enumerate( zip(api_function.argtypes, @@ -289,56 +293,58 @@ @specialize.ll() def unwrapper(space, *args): - from pypy.module.cpyext.pyobject import Py_DecRef - from pypy.module.cpyext.pyobject import make_ref, from_ref - from pypy.module.cpyext.pyobject import Reference + from pypy.module.cpyext.pyobject import Py_DecRef, is_pyobj + from pypy.module.cpyext.pyobject import from_ref, as_pyobj newargs = () - to_decref = [] + keepalives = () assert len(args) == len(api_function.argtypes) for i, (ARG, is_wrapped) in types_names_enum_ui: input_arg = args[i] if is_PyObject(ARG) and not is_wrapped: - # build a reference - if input_arg is None: - arg = lltype.nullptr(PyObject.TO) - elif isinstance(input_arg, W_Root): - ref = make_ref(space, input_arg) - to_decref.append(ref) - arg = rffi.cast(ARG, ref) + # build a 'PyObject *' (not holding a reference) + if not is_pyobj(input_arg): + keepalives += (input_arg,) + arg = rffi.cast(ARG, as_pyobj(space, input_arg)) + else: + arg = rffi.cast(ARG, input_arg) + elif is_PyObject(ARG) and is_wrapped: + # build a W_Root, possibly from a 'PyObject *' + if is_pyobj(input_arg): + arg = from_ref(space, input_arg) else: arg = input_arg - elif is_PyObject(ARG) and is_wrapped: - # convert to a wrapped object - if input_arg is None: - arg = input_arg - elif isinstance(input_arg, W_Root): - arg = input_arg - else: - try: - arg = from_ref(space, - rffi.cast(PyObject, input_arg)) - except TypeError, e: - err = OperationError(space.w_TypeError, - space.wrap( - "could not cast arg to PyObject")) - if not catch_exception: - raise err - state = space.fromcache(State) - state.set_exception(err) - if is_PyObject(restype): - return None - else: - return api_function.error_value + + ## ZZZ: for is_pyobj: + ## try: + ## arg = from_ref(space, + ## rffi.cast(PyObject, input_arg)) + ## except TypeError, 
e: + ## err = OperationError(space.w_TypeError, + ## space.wrap( + ## "could not cast arg to PyObject")) + ## if not catch_exception: + ## raise err + ## state = space.fromcache(State) + ## state.set_exception(err) + ## if is_PyObject(restype): + ## return None + ## else: + ## return api_function.error_value else: - # convert to a wrapped object + # arg is not declared as PyObject, no magic arg = input_arg newargs += (arg, ) - try: + if not catch_exception: + try: + res = func(space, *newargs) + finally: + keepalive_until_here(*keepalives) + else: + # non-rpython variant + assert not we_are_translated() try: res = func(space, *newargs) except OperationError, e: - if not catch_exception: - raise if not hasattr(api_function, "error_value"): raise state = space.fromcache(State) @@ -347,21 +353,13 @@ return None else: return api_function.error_value - if not we_are_translated(): - got_integer = isinstance(res, (int, long, float)) - assert got_integer == expect_integer,'got %r not integer' % res - if res is None: - return None - elif isinstance(res, Reference): - return res.get_wrapped(space) - else: - return res - finally: - for arg in to_decref: - Py_DecRef(space, arg) + # 'keepalives' is alive here (it's not rpython) + got_integer = isinstance(res, (int, long, float)) + assert got_integer == expect_integer, ( + 'got %r not integer' % (res,)) + return res unwrapper.func = func unwrapper.api_func = api_function - unwrapper._always_inline_ = 'try' return unwrapper unwrapper_catch = make_unwrapper(True) @@ -502,7 +500,7 @@ GLOBALS['%s#' % (cpyname, )] = ('PyTypeObject*', pypyexpr) for cpyname in '''PyMethodObject PyListObject PyLongObject - PyDictObject PyTupleObject PyClassObject'''.split(): + PyDictObject PyClassObject'''.split(): FORWARD_DECLS.append('typedef struct { PyObject_HEAD } %s' % (cpyname, )) build_exported_objects() @@ -515,14 +513,16 @@ "PyIntObject*": PyIntObject, "PyDateTime_CAPI*": lltype.Ptr(PyDateTime_CAPI)}[ctype] +# Note: as a special case, 
"PyObject" is the pointer type in RPython, +# corresponding to "PyObject *" in C. We do that only for PyObject. +# For example, "PyTypeObject" is the struct type even in RPython. PyTypeObject = lltype.ForwardReference() PyTypeObjectPtr = lltype.Ptr(PyTypeObject) -# It is important that these PyObjects are allocated in a raw fashion -# Thus we cannot save a forward pointer to the wrapped object -# So we need a forward and backward mapping in our State instance PyObjectStruct = lltype.ForwardReference() PyObject = lltype.Ptr(PyObjectStruct) -PyObjectFields = (("ob_refcnt", lltype.Signed), ("ob_type", PyTypeObjectPtr)) +PyObjectFields = (("ob_refcnt", lltype.Signed), + ("ob_pypy_link", lltype.Signed), + ("ob_type", PyTypeObjectPtr)) PyVarObjectFields = PyObjectFields + (("ob_size", Py_ssize_t), ) cpython_struct('PyObject', PyObjectFields, PyObjectStruct) PyVarObjectStruct = cpython_struct("PyVarObject", PyVarObjectFields) @@ -619,8 +619,8 @@ @specialize.ll() def wrapper(*args): - from pypy.module.cpyext.pyobject import make_ref, from_ref - from pypy.module.cpyext.pyobject import Reference + from pypy.module.cpyext.pyobject import make_ref, from_ref, is_pyobj + from pypy.module.cpyext.pyobject import as_pyobj # we hope that malloc removal removes the newtuple() that is # inserted exactly here by the varargs specializer if gil_acquire: @@ -629,6 +629,7 @@ llop.gc_stack_bottom(lltype.Void) # marker for trackgcroot.py retval = fatal_value boxed_args = () + tb = None try: if not we_are_translated() and DEBUG_WRAPPER: print >>sys.stderr, callable, @@ -636,10 +637,8 @@ for i, (typ, is_wrapped) in argtypes_enum_ui: arg = args[i] if is_PyObject(typ) and is_wrapped: - if arg: - arg_conv = from_ref(space, rffi.cast(PyObject, arg)) - else: - arg_conv = None + assert is_pyobj(arg) + arg_conv = from_ref(space, rffi.cast(PyObject, arg)) else: arg_conv = arg boxed_args += (arg_conv, ) @@ -654,6 +653,7 @@ except BaseException, e: failed = True if not we_are_translated(): + tb = 
sys.exc_info()[2] message = repr(e) import traceback traceback.print_exc() @@ -672,29 +672,34 @@ retval = error_value elif is_PyObject(callable.api_func.restype): - if result is None: - retval = rffi.cast(callable.api_func.restype, - make_ref(space, None)) - elif isinstance(result, Reference): - retval = result.get_ref(space) - elif not rffi._isllptr(result): - retval = rffi.cast(callable.api_func.restype, - make_ref(space, result)) + if is_pyobj(result): + retval = result else: - retval = result + if result is not None: + if callable.api_func.result_borrowed: + retval = as_pyobj(space, result) + else: + retval = make_ref(space, result) + retval = rffi.cast(callable.api_func.restype, retval) + else: + retval = lltype.nullptr(PyObject.TO) elif callable.api_func.restype is not lltype.Void: retval = rffi.cast(callable.api_func.restype, result) except Exception, e: print 'Fatal error in cpyext, CPython compatibility layer, calling', callable.__name__ print 'Either report a bug or consider not using this particular extension' if not we_are_translated(): + if tb is None: + tb = sys.exc_info()[2] import traceback traceback.print_exc() - print str(e) + if sys.stdout == sys.__stdout__: + import pdb; pdb.post_mortem(tb) # we can't do much here, since we're in ctypes, swallow else: print str(e) pypy_debug_catch_fatal_exception() + assert False rffi.stackcounter.stacks_counter -= 1 if gil_release: rgil.release() @@ -828,6 +833,19 @@ outputfilename=str(udir / "module_cache" / "pypyapi")) modulename = py.path.local(eci.libraries[-1]) + def dealloc_trigger(): + from pypy.module.cpyext.pyobject import _Py_Dealloc + print 'dealloc_trigger...' 
+ while True: + ob = rawrefcount.next_dead(PyObject) + if not ob: + break + print ob + _Py_Dealloc(space, ob) + print 'dealloc_trigger DONE' + return "RETRY" + rawrefcount.init(dealloc_trigger) + run_bootstrap_functions(space) # load the bridge, and init structure @@ -837,9 +855,9 @@ space.fromcache(State).install_dll(eci) # populate static data - builder = StaticObjectBuilder(space) + builder = space.fromcache(StaticObjectBuilder) for name, (typ, expr) in GLOBALS.iteritems(): - from pypy.module import cpyext + from pypy.module import cpyext # for the eval() below w_obj = eval(expr) if name.endswith('#'): name = name[:-1] @@ -895,27 +913,44 @@ class StaticObjectBuilder: def __init__(self, space): self.space = space - self.to_attach = [] + self.static_pyobjs = [] + self.static_objs_w = [] + self.cpyext_type_init = None + # + # add a "method" that is overridden in setup_library() + # ('self.static_pyobjs' is completely ignored in that case) + self.get_static_pyobjs = lambda: self.static_pyobjs def prepare(self, py_obj, w_obj): - from pypy.module.cpyext.pyobject import track_reference - py_obj.c_ob_refcnt = 1 - track_reference(self.space, py_obj, w_obj) - self.to_attach.append((py_obj, w_obj)) + "NOT_RPYTHON" + if py_obj: + py_obj.c_ob_refcnt = 1 # 1 for kept immortal + self.static_pyobjs.append(py_obj) + self.static_objs_w.append(w_obj) def attach_all(self): + # this is RPython, called once in pypy-c when it imports cpyext from pypy.module.cpyext.pyobject import get_typedescr, make_ref from pypy.module.cpyext.typeobject import finish_type_1, finish_type_2 + from pypy.module.cpyext.pyobject import track_reference + # space = self.space - space._cpyext_type_init = [] - for py_obj, w_obj in self.to_attach: + static_pyobjs = self.get_static_pyobjs() + static_objs_w = self.static_objs_w + for i in range(len(static_objs_w)): + track_reference(space, static_pyobjs[i], static_objs_w[i]) + # + self.cpyext_type_init = [] + for i in range(len(static_objs_w)): + py_obj = 
static_pyobjs[i] + w_obj = static_objs_w[i] w_type = space.type(w_obj) typedescr = get_typedescr(w_type.instancetypedef) py_obj.c_ob_type = rffi.cast(PyTypeObjectPtr, make_ref(space, w_type)) typedescr.attach(space, py_obj, w_obj) - cpyext_type_init = space._cpyext_type_init - del space._cpyext_type_init + cpyext_type_init = self.cpyext_type_init + self.cpyext_type_init = None for pto, w_type in cpyext_type_init: finish_type_1(space, pto) finish_type_2(space, pto, w_type) @@ -1069,7 +1104,7 @@ if name.endswith('#'): structs.append('%s %s;' % (typ[:-1], name[:-1])) elif name.startswith('PyExc_'): - structs.append('extern PyTypeObject _%s;' % (name,)) + structs.append('PyTypeObject _%s;' % (name,)) structs.append('PyObject* %s = (PyObject*)&_%s;' % (name, name)) elif typ == 'PyDateTime_CAPI*': structs.append('%s %s = NULL;' % (typ, name)) @@ -1118,10 +1153,8 @@ def setup_library(space): "NOT_RPYTHON" - from pypy.module.cpyext.pyobject import make_ref + export_symbols = sorted(FUNCTIONS) + sorted(SYMBOLS_C) + sorted(GLOBALS) use_micronumpy = setup_micronumpy(space) - - export_symbols = list(FUNCTIONS) + SYMBOLS_C + list(GLOBALS) from rpython.translator.c.database import LowLevelDatabase db = LowLevelDatabase() @@ -1137,41 +1170,37 @@ run_bootstrap_functions(space) setup_va_functions(eci) - from pypy.module import cpyext # for eval() below - - # Set up the types. Needs a special case, because of the - # immediate cycle involving 'c_ob_type', and because we don't - # want these types to be Py_TPFLAGS_HEAPTYPE. 
- static_types = {} - for name, (typ, expr) in GLOBALS.items(): - if typ == 'PyTypeObject*': - pto = lltype.malloc(PyTypeObject, immortal=True, - zero=True, flavor='raw') - pto.c_ob_refcnt = 1 - pto.c_tp_basicsize = -1 - static_types[name] = pto - builder = StaticObjectBuilder(space) - for name, pto in static_types.items(): - pto.c_ob_type = static_types['PyType_Type#'] - w_type = eval(GLOBALS[name][1]) - builder.prepare(rffi.cast(PyObject, pto), w_type) - builder.attach_all() - - # populate static data - for name, (typ, expr) in GLOBALS.iteritems(): - name = name.replace("#", "") - if name.startswith('PyExc_'): + # emit uninitialized static data + builder = space.fromcache(StaticObjectBuilder) + lines = ['PyObject *pypy_static_pyobjs[] = {\n'] + include_lines = ['RPY_EXTERN PyObject *pypy_static_pyobjs[];\n'] + for name, (typ, expr) in sorted(GLOBALS.items()): + if name.endswith('#'): + assert typ in ('PyObject*', 'PyTypeObject*', 'PyIntObject*') + typ, name = typ[:-1], name[:-1] + elif name.startswith('PyExc_'): + typ = 'PyTypeObject' name = '_' + name - w_obj = eval(expr) - if typ in ('PyObject*', 'PyTypeObject*', 'PyIntObject*'): - struct_ptr = make_ref(space, w_obj) elif typ == 'PyDateTime_CAPI*': continue else: assert False, "Unknown static data: %s %s" % (typ, name) - struct = rffi.cast(get_structtype_for_ctype(typ), struct_ptr)._obj - struct._compilation_info = eci - export_struct(name, struct) + + from pypy.module import cpyext # for the eval() below + w_obj = eval(expr) + builder.prepare(None, w_obj) + lines.append('\t(PyObject *)&%s,\n' % (name,)) + include_lines.append('RPY_EXPORTED %s %s;\n' % (typ, name)) + + lines.append('};\n') + eci2 = CConfig._compilation_info_.merge(ExternalCompilationInfo( + separate_module_sources = [''.join(lines)], + post_include_bits = [''.join(include_lines)], + )) + # override this method to return a pointer to this C array directly + builder.get_static_pyobjs = rffi.CExternVariable( + PyObjectP, 'pypy_static_pyobjs', 
eci2, c_type='PyObject **', + getter_only=True, declare_as_extern=False) for name, func in FUNCTIONS.iteritems(): newname = mangle_name('PyPy', name) or name @@ -1182,6 +1211,10 @@ trunk_include = pypydir.dirpath() / 'include' copy_header_files(trunk_include, use_micronumpy) +def init_static_data_translated(space): + builder = space.fromcache(StaticObjectBuilder) + builder.attach_all() + def _load_from_cffi(space, name, path, initptr): from pypy.module._cffi_backend import cffi1_module cffi1_module.load_cffi1_module(space, name, path, initptr) @@ -1264,22 +1297,18 @@ @specialize.ll() def generic_cpy_call(space, func, *args): FT = lltype.typeOf(func).TO - return make_generic_cpy_call(FT, True, False)(space, func, *args) - - at specialize.ll() -def generic_cpy_call_dont_decref(space, func, *args): - FT = lltype.typeOf(func).TO - return make_generic_cpy_call(FT, False, False)(space, func, *args) + return make_generic_cpy_call(FT, False)(space, func, *args) @specialize.ll() def generic_cpy_call_expect_null(space, func, *args): FT = lltype.typeOf(func).TO - return make_generic_cpy_call(FT, True, True)(space, func, *args) + return make_generic_cpy_call(FT, True)(space, func, *args) @specialize.memo() -def make_generic_cpy_call(FT, decref_args, expect_null): +def make_generic_cpy_call(FT, expect_null): from pypy.module.cpyext.pyobject import make_ref, from_ref, Py_DecRef - from pypy.module.cpyext.pyobject import RefcountState + from pypy.module.cpyext.pyobject import is_pyobj, as_pyobj + from pypy.module.cpyext.pyobject import get_w_obj_and_decref from pypy.module.cpyext.pyerrors import PyErr_Occurred unrolling_arg_types = unrolling_iterable(enumerate(FT.ARGS)) RESULT_TYPE = FT.RESULT @@ -1307,65 +1336,49 @@ @specialize.ll() def generic_cpy_call(space, func, *args): boxed_args = () - to_decref = [] + keepalives = () assert len(args) == len(FT.ARGS) for i, ARG in unrolling_arg_types: arg = args[i] if is_PyObject(ARG): - if arg is None: - boxed_args += 
(lltype.nullptr(PyObject.TO),) - elif isinstance(arg, W_Root): - ref = make_ref(space, arg) - boxed_args += (ref,) - if decref_args: - to_decref.append(ref) - else: - boxed_args += (arg,) - else: - boxed_args += (arg,) + if not is_pyobj(arg): + keepalives += (arg,) + arg = as_pyobj(space, arg) + boxed_args += (arg,) try: - # create a new container for borrowed references - state = space.fromcache(RefcountState) - old_container = state.swap_borrow_container(None) - try: - # Call the function - result = call_external_function(func, *boxed_args) - finally: - state.swap_borrow_container(old_container) + # Call the function + result = call_external_function(func, *boxed_args) + finally: + keepalive_until_here(*keepalives) - if is_PyObject(RESULT_TYPE): - if result is None: - ret = result - elif isinstance(result, W_Root): - ret = result + if is_PyObject(RESULT_TYPE): + if not is_pyobj(result): + ret = result + else: + # The object reference returned from a C function + # that is called from Python must be an owned reference + # - ownership is transferred from the function to its caller. + if result: + ret = get_w_obj_and_decref(space, result) else: - ret = from_ref(space, result) - # The object reference returned from a C function - # that is called from Python must be an owned reference - # - ownership is transferred from the function to its caller. 
- if result: - Py_DecRef(space, result) + ret = None - # Check for exception consistency - has_error = PyErr_Occurred(space) is not None - has_result = ret is not None - if has_error and has_result: - raise OperationError(space.w_SystemError, space.wrap( - "An exception was set, but function returned a value")) - elif not expect_null and not has_error and not has_result: - raise OperationError(space.w_SystemError, space.wrap( - "Function returned a NULL result without setting an exception")) + # Check for exception consistency + has_error = PyErr_Occurred(space) is not None + has_result = ret is not None + if has_error and has_result: + raise OperationError(space.w_SystemError, space.wrap( + "An exception was set, but function returned a value")) + elif not expect_null and not has_error and not has_result: + raise OperationError(space.w_SystemError, space.wrap( + "Function returned a NULL result without setting an exception")) - if has_error: - state = space.fromcache(State) - state.check_and_raise_exception() + if has_error: + state = space.fromcache(State) + state.check_and_raise_exception() - return ret - return result - finally: - if decref_args: - for ref in to_decref: - Py_DecRef(space, ref) + return ret + return result + return generic_cpy_call - diff --git a/pypy/module/cpyext/complexobject.py b/pypy/module/cpyext/complexobject.py --- a/pypy/module/cpyext/complexobject.py +++ b/pypy/module/cpyext/complexobject.py @@ -43,7 +43,7 @@ # lltype does not handle functions returning a structure. This implements a # helper function, which takes as argument a reference to the return value. - at cpython_api([PyObject, Py_complex_ptr], lltype.Void) + at cpython_api([PyObject, Py_complex_ptr], rffi.INT_real, error=-1) def _PyComplex_AsCComplex(space, w_obj, result): """Return the Py_complex value of the complex number op. @@ -60,7 +60,7 @@ # if the above did not work, interpret obj as a float giving the # real part of the result, and fill in the imaginary part as 0. 
result.c_real = PyFloat_AsDouble(space, w_obj) # -1 on failure - return + return 0 if not PyComplex_Check(space, w_obj): raise OperationError(space.w_TypeError, space.wrap( @@ -69,3 +69,4 @@ assert isinstance(w_obj, W_ComplexObject) result.c_real = w_obj.realval result.c_imag = w_obj.imagval + return 0 diff --git a/pypy/module/cpyext/dictobject.py b/pypy/module/cpyext/dictobject.py --- a/pypy/module/cpyext/dictobject.py +++ b/pypy/module/cpyext/dictobject.py @@ -2,8 +2,7 @@ from pypy.module.cpyext.api import ( cpython_api, CANNOT_FAIL, build_type_checkers, Py_ssize_t, Py_ssize_tP, CONST_STRING) -from pypy.module.cpyext.pyobject import PyObject, PyObjectP, borrow_from -from pypy.module.cpyext.pyobject import RefcountState +from pypy.module.cpyext.pyobject import PyObject, PyObjectP, as_pyobj from pypy.module.cpyext.pyerrors import PyErr_BadInternalCall from pypy.interpreter.error import OperationError from rpython.rlib.objectmodel import specialize @@ -14,13 +13,17 @@ PyDict_Check, PyDict_CheckExact = build_type_checkers("Dict") - at cpython_api([PyObject, PyObject], PyObject, error=CANNOT_FAIL) + at cpython_api([PyObject, PyObject], PyObject, error=CANNOT_FAIL, + result_borrowed=True) def PyDict_GetItem(space, w_dict, w_key): try: w_res = space.getitem(w_dict, w_key) except: return None - return borrow_from(w_dict, w_res) + # NOTE: this works so far because all our dict strategies store + # *values* as full objects, which stay alive as long as the dict is + # alive and not modified. So we can return a borrowed ref. 
+ return w_res @cpython_api([PyObject, PyObject, PyObject], rffi.INT_real, error=-1) def PyDict_SetItem(space, w_dict, w_key, w_obj): @@ -47,7 +50,8 @@ else: PyErr_BadInternalCall(space) - at cpython_api([PyObject, CONST_STRING], PyObject, error=CANNOT_FAIL) + at cpython_api([PyObject, CONST_STRING], PyObject, error=CANNOT_FAIL, + result_borrowed=True) def PyDict_GetItemString(space, w_dict, key): """This is the same as PyDict_GetItem(), but key is specified as a char*, rather than a PyObject*.""" @@ -55,9 +59,10 @@ w_res = space.finditem_str(w_dict, rffi.charp2str(key)) except: w_res = None - if w_res is None: - return None - return borrow_from(w_dict, w_res) + # NOTE: this works so far because all our dict strategies store + # *values* as full objects, which stay alive as long as the dict is + # alive and not modified. So we can return a borrowed ref. + return w_res @cpython_api([PyObject, CONST_STRING], rffi.INT_real, error=-1) def PyDict_DelItemString(space, w_dict, key_ptr): @@ -196,10 +201,13 @@ if w_dict is None: return 0 - # Note: this is not efficient. Storing an iterator would probably + # XXX XXX PyDict_Next is not efficient. Storing an iterator would probably # work, but we can't work out how to not leak it if iteration does - # not complete. + # not complete. Alternatively, we could add some RPython-only + # dict-iterator method to move forward by N steps. 
+ w_dict.ensure_object_strategy() # make sure both keys and values can + # be borrowed try: w_iter = space.call_method(space.w_dict, "iteritems", w_dict) pos = ppos[0] @@ -209,11 +217,10 @@ w_item = space.call_method(w_iter, "next") w_key, w_value = space.fixedview(w_item, 2) - state = space.fromcache(RefcountState) if pkey: - pkey[0] = state.make_borrowed(w_dict, w_key) + pkey[0] = as_pyobj(space, w_key) if pvalue: - pvalue[0] = state.make_borrowed(w_dict, w_value) + pvalue[0] = as_pyobj(space, w_value) ppos[0] += 1 except OperationError, e: if not e.match(space, space.w_StopIteration): diff --git a/pypy/module/cpyext/eval.py b/pypy/module/cpyext/eval.py --- a/pypy/module/cpyext/eval.py +++ b/pypy/module/cpyext/eval.py @@ -4,7 +4,7 @@ from pypy.module.cpyext.api import ( cpython_api, CANNOT_FAIL, CONST_STRING, FILEP, fread, feof, Py_ssize_tP, cpython_struct, is_valid_fp) -from pypy.module.cpyext.pyobject import PyObject, borrow_from +from pypy.module.cpyext.pyobject import PyObject from pypy.module.cpyext.pyerrors import PyErr_SetFromErrno from pypy.module.cpyext.funcobject import PyCodeObject from pypy.module.__builtin__ import compiling @@ -23,7 +23,7 @@ def PyEval_CallObjectWithKeywords(space, w_obj, w_arg, w_kwds): return space.call(w_obj, w_arg, w_kwds) - at cpython_api([], PyObject) + at cpython_api([], PyObject, result_borrowed=True) def PyEval_GetBuiltins(space): """Return a dictionary of the builtins in the current execution frame, or the interpreter of the thread state if no frame is 
space.getexecutioncontext().gettopframe_nohidden() if caller is None: return None - return borrow_from(None, caller.getdictscope()) + return caller.getdictscope() # borrowed ref - at cpython_api([], PyObject, error=CANNOT_FAIL) + at cpython_api([], PyObject, error=CANNOT_FAIL, result_borrowed=True) def PyEval_GetGlobals(space): """Return a dictionary of the global variables in the current execution frame, or NULL if no frame is currently executing.""" caller = space.getexecutioncontext().gettopframe_nohidden() if caller is None: return None - return borrow_from(None, caller.get_w_globals()) + return caller.get_w_globals() # borrowed ref @cpython_api([PyCodeObject, PyObject, PyObject], PyObject) def PyEval_EvalCode(space, w_code, w_globals, w_locals): diff --git a/pypy/module/cpyext/funcobject.py b/pypy/module/cpyext/funcobject.py --- a/pypy/module/cpyext/funcobject.py +++ b/pypy/module/cpyext/funcobject.py @@ -3,7 +3,7 @@ PyObjectFields, generic_cpy_call, CONST_STRING, CANNOT_FAIL, Py_ssize_t, cpython_api, bootstrap_function, cpython_struct, build_type_checkers) from pypy.module.cpyext.pyobject import ( - PyObject, make_ref, from_ref, Py_DecRef, make_typedescr, borrow_from) + PyObject, make_ref, from_ref, Py_DecRef, make_typedescr) from rpython.rlib.unroll import unrolling_iterable from pypy.interpreter.error import OperationError from pypy.interpreter.function import Function, Method @@ -83,12 +83,12 @@ from pypy.module.cpyext.object import PyObject_dealloc PyObject_dealloc(space, py_obj) - at cpython_api([PyObject], PyObject) + at cpython_api([PyObject], PyObject, result_borrowed=True) def PyFunction_GetCode(space, w_func): """Return the code object associated with the function object op.""" func = space.interp_w(Function, w_func) w_code = space.wrap(func.code) - return borrow_from(w_func, w_code) + return w_code # borrowed ref @cpython_api([PyObject, PyObject, PyObject], PyObject) def PyMethod_New(space, w_func, w_self, w_cls): @@ -99,25 +99,25 @@ class which 
provides the unbound method.""" return Method(space, w_func, w_self, w_cls) - at cpython_api([PyObject], PyObject) + at cpython_api([PyObject], PyObject, result_borrowed=True) def PyMethod_Function(space, w_method): """Return the function object associated with the method meth.""" assert isinstance(w_method, Method) - return borrow_from(w_method, w_method.w_function) + return w_method.w_function # borrowed ref - at cpython_api([PyObject], PyObject) + at cpython_api([PyObject], PyObject, result_borrowed=True) def PyMethod_Self(space, w_method): """Return the instance associated with the method meth if it is bound, otherwise return NULL.""" assert isinstance(w_method, Method) - return borrow_from(w_method, w_method.w_instance) + return w_method.w_instance # borrowed ref - at cpython_api([PyObject], PyObject) + at cpython_api([PyObject], PyObject, result_borrowed=True) def PyMethod_Class(space, w_method): """Return the class object from which the method meth was created; if this was created from an instance, it will be the class of the instance.""" assert isinstance(w_method, Method) - return borrow_from(w_method, w_method.w_class) + return w_method.w_class # borrowed ref def unwrap_list_of_strings(space, w_list): return [space.str_w(w_item) for w_item in space.fixedview(w_list)] diff --git a/pypy/module/cpyext/import_.py b/pypy/module/cpyext/import_.py --- a/pypy/module/cpyext/import_.py +++ b/pypy/module/cpyext/import_.py @@ -1,7 +1,6 @@ from pypy.interpreter import module from pypy.module.cpyext.api import ( generic_cpy_call, cpython_api, PyObject, CONST_STRING) -from pypy.module.cpyext.pyobject import borrow_from from rpython.rtyper.lltypesystem import lltype, rffi from pypy.interpreter.error import OperationError from pypy.interpreter.module import Module @@ -56,7 +55,7 @@ from pypy.module.imp.importing import reload return reload(space, w_mod) - at cpython_api([CONST_STRING], PyObject) + at cpython_api([CONST_STRING], PyObject, result_borrowed=True) def 
PyImport_AddModule(space, name): """Return the module object corresponding to a module name. The name argument may be of the form package.module. First check the modules @@ -74,14 +73,16 @@ w_mod = check_sys_modules_w(space, modulename) if not w_mod or space.is_w(w_mod, space.w_None): w_mod = Module(space, space.wrap(modulename)) - return borrow_from(None, w_mod) + space.setitem(space.sys.get('modules'), space.wrap(modulename), w_mod) + # return a borrowed ref --- assumes one copy in sys.modules + return w_mod - at cpython_api([], PyObject) + at cpython_api([], PyObject, result_borrowed=True) def PyImport_GetModuleDict(space): """Return the dictionary used for the module administration (a.k.a. sys.modules). Note that this is a per-interpreter variable.""" w_modulesDict = space.sys.get('modules') - return borrow_from(None, w_modulesDict) + return w_modulesDict # borrowed ref @cpython_api([rffi.CCHARP, PyObject], PyObject) def PyImport_ExecCodeModule(space, name, w_code): diff --git a/pypy/module/cpyext/include/complexobject.h b/pypy/module/cpyext/include/complexobject.h --- a/pypy/module/cpyext/include/complexobject.h +++ b/pypy/module/cpyext/include/complexobject.h @@ -15,7 +15,7 @@ } Py_complex; /* generated function */ -PyAPI_FUNC(void) _PyComplex_AsCComplex(PyObject *, Py_complex *); +PyAPI_FUNC(int) _PyComplex_AsCComplex(PyObject *, Py_complex *); PyAPI_FUNC(PyObject *) _PyComplex_FromCComplex(Py_complex *); Py_LOCAL_INLINE(Py_complex) PyComplex_AsCComplex(PyObject *obj) diff --git a/pypy/module/cpyext/include/object.h b/pypy/module/cpyext/include/object.h --- a/pypy/module/cpyext/include/object.h +++ b/pypy/module/cpyext/include/object.h @@ -17,7 +17,8 @@ #define staticforward static #define PyObject_HEAD \ - long ob_refcnt; \ + Py_ssize_t ob_refcnt; \ + Py_ssize_t ob_pypy_link; \ struct _typeobject *ob_type; #define PyObject_VAR_HEAD \ @@ -25,7 +26,7 @@ Py_ssize_t ob_size; /* Number of items in variable part */ #define PyObject_HEAD_INIT(type) \ - 1, type, + 
1, 0, type, #define PyVarObject_HEAD_INIT(type, size) \ PyObject_HEAD_INIT(type) size, @@ -40,19 +41,19 @@ #ifdef PYPY_DEBUG_REFCOUNT /* Slow version, but useful for debugging */ -#define Py_INCREF(ob) (Py_IncRef((PyObject *)ob)) -#define Py_DECREF(ob) (Py_DecRef((PyObject *)ob)) -#define Py_XINCREF(ob) (Py_IncRef((PyObject *)ob)) -#define Py_XDECREF(ob) (Py_DecRef((PyObject *)ob)) +#define Py_INCREF(ob) (Py_IncRef((PyObject *)(ob))) +#define Py_DECREF(ob) (Py_DecRef((PyObject *)(ob))) +#define Py_XINCREF(ob) (Py_IncRef((PyObject *)(ob))) +#define Py_XDECREF(ob) (Py_DecRef((PyObject *)(ob))) #else /* Fast version */ -#define Py_INCREF(ob) (((PyObject *)ob)->ob_refcnt++) -#define Py_DECREF(ob) \ +#define Py_INCREF(ob) (((PyObject *)(ob))->ob_refcnt++) +#define Py_DECREF(op) \ do { \ - if (((PyObject *)ob)->ob_refcnt > 1) \ - ((PyObject *)ob)->ob_refcnt--; \ + if (--((PyObject *)(op))->ob_refcnt != 0) \ + ; \ else \ - Py_DecRef((PyObject *)ob); \ + _Py_Dealloc((PyObject *)(op)); \ } while (0) #define Py_XINCREF(op) do { if ((op) == NULL) ; else Py_INCREF(op); } while (0) diff --git a/pypy/module/cpyext/include/patchlevel.h b/pypy/module/cpyext/include/patchlevel.h --- a/pypy/module/cpyext/include/patchlevel.h +++ b/pypy/module/cpyext/include/patchlevel.h @@ -30,6 +30,13 @@ /* PyPy version as a string */ #define PYPY_VERSION "4.1.0-alpha0" +#define PYPY_VERSION_NUM 0x04010000 + +/* Defined to mean a PyPy where cpyext holds more regular references + to PyObjects, e.g. staying alive as long as the internal PyPy object + stays alive. */ +#define PYPY_CPYEXT_GC 1 +#define PyPy_Borrow(a, b) ((void) 0) /* Subversion Revision number of this file (not of the repository). * Empty since Mercurial migration. 
*/ diff --git a/pypy/module/cpyext/include/tupleobject.h b/pypy/module/cpyext/include/tupleobject.h --- a/pypy/module/cpyext/include/tupleobject.h +++ b/pypy/module/cpyext/include/tupleobject.h @@ -7,11 +7,21 @@ extern "C" { #endif +typedef struct { + PyObject_HEAD + Py_ssize_t ob_size; + PyObject **ob_item; /* XXX optimize to ob_item[] */ +} PyTupleObject; + /* defined in varargswrapper.c */ PyAPI_FUNC(PyObject *) PyTuple_Pack(Py_ssize_t, ...); -#define PyTuple_SET_ITEM PyTuple_SetItem -#define PyTuple_GET_ITEM PyTuple_GetItem +/* Macro, trading safety for speed */ +#define PyTuple_GET_ITEM(op, i) (((PyTupleObject *)(op))->ob_item[i]) +#define PyTuple_GET_SIZE(op) Py_SIZE(op) + +/* Macro, *only* to be used to fill in brand new tuples */ +#define PyTuple_SET_ITEM(op, i, v) (((PyTupleObject *)(op))->ob_item[i] = v) #ifdef __cplusplus diff --git a/pypy/module/cpyext/intobject.py b/pypy/module/cpyext/intobject.py --- a/pypy/module/cpyext/intobject.py +++ b/pypy/module/cpyext/intobject.py @@ -5,7 +5,7 @@ cpython_api, cpython_struct, build_type_checkers, bootstrap_function, PyObject, PyObjectFields, CONST_STRING, CANNOT_FAIL, Py_ssize_t) from pypy.module.cpyext.pyobject import ( - make_typedescr, track_reference, RefcountState, from_ref) + make_typedescr, track_reference, from_ref) from rpython.rlib.rarithmetic import r_uint, intmask, LONG_TEST, r_ulonglong from pypy.objspace.std.intobject import W_IntObject import sys @@ -38,8 +38,6 @@ w_obj = space.allocate_instance(W_IntObject, w_type) w_obj.__init__(intval) track_reference(space, obj, w_obj) - state = space.fromcache(RefcountState) - state.set_lifeline(w_obj, obj) return w_obj PyInt_Check, PyInt_CheckExact = build_type_checkers("Int") diff --git a/pypy/module/cpyext/listobject.py b/pypy/module/cpyext/listobject.py --- a/pypy/module/cpyext/listobject.py +++ b/pypy/module/cpyext/listobject.py @@ -3,7 +3,7 @@ from pypy.module.cpyext.api import (cpython_api, CANNOT_FAIL, Py_ssize_t, build_type_checkers) from 
pypy.module.cpyext.pyerrors import PyErr_BadInternalCall -from pypy.module.cpyext.pyobject import Py_DecRef, PyObject, borrow_from +from pypy.module.cpyext.pyobject import Py_DecRef, PyObject from pypy.objspace.std.listobject import W_ListObject from pypy.interpreter.error import OperationError @@ -38,7 +38,7 @@ w_list.setitem(index, w_item) return 0 - at cpython_api([PyObject, Py_ssize_t], PyObject) + at cpython_api([PyObject, Py_ssize_t], PyObject, result_borrowed=True) def PyList_GetItem(space, w_list, index): """Return the object at position pos in the list pointed to by p. The position must be positive, indexing from the end of the list is not @@ -49,8 +49,10 @@ if index < 0 or index >= w_list.length(): raise OperationError(space.w_IndexError, space.wrap( "list index out of range")) - w_item = w_list.getitem(index) - return borrow_from(w_list, w_item) + w_list.ensure_object_strategy() # make sure we can return a borrowed obj + # XXX ^^^ how does this interact with CPyListStrategy? + w_res = w_list.getitem(index) + return w_res # borrowed ref @cpython_api([PyObject, PyObject], rffi.INT_real, error=-1) diff --git a/pypy/module/cpyext/modsupport.py b/pypy/module/cpyext/modsupport.py --- a/pypy/module/cpyext/modsupport.py +++ b/pypy/module/cpyext/modsupport.py @@ -1,7 +1,7 @@ from rpython.rtyper.lltypesystem import rffi, lltype from pypy.module.cpyext.api import cpython_api, cpython_struct, \ METH_STATIC, METH_CLASS, METH_COEXIST, CANNOT_FAIL, CONST_STRING -from pypy.module.cpyext.pyobject import PyObject, borrow_from +from pypy.module.cpyext.pyobject import PyObject from pypy.interpreter.module import Module from pypy.module.cpyext.methodobject import ( W_PyCFunctionObject, PyCFunction_NewEx, PyDescr_NewMethod, @@ -34,7 +34,7 @@ # This is actually the Py_InitModule4 function, # renamed to refuse modules built against CPython headers. 
@cpython_api([CONST_STRING, lltype.Ptr(PyMethodDef), CONST_STRING, - PyObject, rffi.INT_real], PyObject) + PyObject, rffi.INT_real], PyObject, result_borrowed=True) def _Py_InitPyPyModule(space, name, methods, doc, w_self, apiver): """ Create a new module object based on a name and table of functions, returning @@ -69,7 +69,7 @@ if doc: space.setattr(w_mod, space.wrap("__doc__"), space.wrap(rffi.charp2str(doc))) - return borrow_from(None, w_mod) + return w_mod # borrowed result kept alive in PyImport_AddModule() def convert_method_defs(space, dict_w, methods, w_type, w_self=None, name=None): @@ -114,12 +114,12 @@ return int(space.is_w(w_type, w_obj_type) or space.is_true(space.issubtype(w_obj_type, w_type))) - at cpython_api([PyObject], PyObject) + at cpython_api([PyObject], PyObject, result_borrowed=True) def PyModule_GetDict(space, w_mod): if PyModule_Check(space, w_mod): assert isinstance(w_mod, Module) w_dict = w_mod.getdict(space) - return borrow_from(w_mod, w_dict) + return w_dict # borrowed reference, likely from w_mod.w_dict else: PyErr_BadInternalCall(space) diff --git a/pypy/module/cpyext/object.py b/pypy/module/cpyext/object.py --- a/pypy/module/cpyext/object.py +++ b/pypy/module/cpyext/object.py @@ -6,7 +6,7 @@ Py_GE, CONST_STRING, FILEP, fwrite) from pypy.module.cpyext.pyobject import ( PyObject, PyObjectP, create_ref, from_ref, Py_IncRef, Py_DecRef, - track_reference, get_typedescr, _Py_NewReference, RefcountState) + get_typedescr, _Py_NewReference) from pypy.module.cpyext.typeobject import PyTypeObjectPtr from pypy.module.cpyext.pyerrors import PyErr_NoMemory, PyErr_BadInternalCall from pypy.objspace.std.typeobject import W_TypeObject @@ -33,7 +33,7 @@ assert isinstance(w_type, W_TypeObject) typedescr = get_typedescr(w_type.instancetypedef) py_obj = typedescr.allocate(space, w_type, itemcount=itemcount) - py_obj.c_ob_refcnt = 0 + #py_obj.c_ob_refcnt = 0 --- will be set to 1 again by PyObject_Init{Var} if type.c_tp_itemsize == 0: w_obj = 
PyObject_Init(space, py_obj, type) else: diff --git a/pypy/module/cpyext/pyerrors.py b/pypy/module/cpyext/pyerrors.py --- a/pypy/module/cpyext/pyerrors.py +++ b/pypy/module/cpyext/pyerrors.py @@ -6,7 +6,7 @@ from pypy.module.cpyext.api import cpython_api, CANNOT_FAIL, CONST_STRING from pypy.module.exceptions.interp_exceptions import W_RuntimeWarning from pypy.module.cpyext.pyobject import ( - PyObject, PyObjectP, make_ref, from_ref, Py_DecRef, borrow_from) + PyObject, PyObjectP, make_ref, from_ref, Py_DecRef) from pypy.module.cpyext.state import State from pypy.module.cpyext.import_ import PyImport_Import from rpython.rlib import rposix, jit @@ -28,12 +28,12 @@ """This is a shorthand for PyErr_SetObject(type, Py_None).""" PyErr_SetObject(space, w_type, space.w_None) - at cpython_api([], PyObject) + at cpython_api([], PyObject, result_borrowed=True) def PyErr_Occurred(space): state = space.fromcache(State) if state.operror is None: return None - return borrow_from(None, state.operror.w_type) + return state.operror.w_type # borrowed ref @cpython_api([], lltype.Void) def PyErr_Clear(space): diff --git a/pypy/module/cpyext/pyfile.py b/pypy/module/cpyext/pyfile.py --- a/pypy/module/cpyext/pyfile.py +++ b/pypy/module/cpyext/pyfile.py @@ -1,7 +1,7 @@ from rpython.rtyper.lltypesystem import rffi, lltype from pypy.module.cpyext.api import ( cpython_api, CANNOT_FAIL, CONST_STRING, FILEP, build_type_checkers) -from pypy.module.cpyext.pyobject import PyObject, borrow_from +from pypy.module.cpyext.pyobject import PyObject from pypy.module.cpyext.object import Py_PRINT_RAW from pypy.interpreter.error import OperationError from pypy.module._file.interp_file import W_File @@ -83,7 +83,8 @@ @cpython_api([PyObject], PyObject) def PyFile_Name(space, w_p): """Return the name of the file specified by p as a string object.""" - return borrow_from(w_p, space.getattr(w_p, space.wrap("name"))) + w_name = space.getattr(w_p, space.wrap("name")) + return w_name # borrowed ref, should be a 
W_StringObject from the file @cpython_api([PyObject, rffi.INT_real], rffi.INT_real, error=CANNOT_FAIL) def PyFile_SoftSpace(space, w_p, newflag): diff --git a/pypy/module/cpyext/pyobject.py b/pypy/module/cpyext/pyobject.py --- a/pypy/module/cpyext/pyobject.py +++ b/pypy/module/cpyext/pyobject.py @@ -2,15 +2,19 @@ from pypy.interpreter.baseobjspace import W_Root, SpaceCache from rpython.rtyper.lltypesystem import rffi, lltype +from rpython.rtyper.extregistry import ExtRegistryEntry from pypy.module.cpyext.api import ( cpython_api, bootstrap_function, PyObject, PyObjectP, ADDR, - CANNOT_FAIL, Py_TPFLAGS_HEAPTYPE, PyTypeObjectPtr) + CANNOT_FAIL, Py_TPFLAGS_HEAPTYPE, PyTypeObjectPtr, is_PyObject, + INTERPLEVEL_API) from pypy.module.cpyext.state import State from pypy.objspace.std.typeobject import W_TypeObject from pypy.objspace.std.objectobject import W_ObjectObject from rpython.rlib.objectmodel import specialize, we_are_translated -from rpython.rlib.rweakref import RWeakKeyDictionary +from rpython.rlib.objectmodel import keepalive_until_here from rpython.rtyper.annlowlevel import llhelper +from rpython.rlib import rawrefcount + #________________________________________________________ # type description @@ -28,13 +32,15 @@ def allocate(self, space, w_type, itemcount=0): # similar to PyType_GenericAlloc? # except that it's not related to any pypy object. + # this returns a PyObject with ob_refcnt == 1. 
- pytype = rffi.cast(PyTypeObjectPtr, make_ref(space, w_type)) + pytype = as_pyobj(space, w_type) + pytype = rffi.cast(PyTypeObjectPtr, pytype) + assert pytype # Don't increase refcount for non-heaptypes - if pytype: - flags = rffi.cast(lltype.Signed, pytype.c_tp_flags) - if not flags & Py_TPFLAGS_HEAPTYPE: - Py_DecRef(space, w_type) + flags = rffi.cast(lltype.Signed, pytype.c_tp_flags) + if flags & Py_TPFLAGS_HEAPTYPE: + Py_IncRef(space, w_type) if pytype: size = pytype.c_tp_basicsize @@ -42,6 +48,7 @@ size = rffi.sizeof(self.basestruct) if itemcount: size += itemcount * pytype.c_tp_itemsize + assert size >= rffi.sizeof(PyObject.TO) buf = lltype.malloc(rffi.VOIDP.TO, size, flavor='raw', zero=True) pyobj = rffi.cast(PyObject, buf) @@ -56,9 +63,6 @@ w_type = from_ref(space, rffi.cast(PyObject, obj.c_ob_type)) w_obj = space.allocate_instance(self.W_BaseObject, w_type) track_reference(space, obj, w_obj) - if w_type is not space.gettypefor(self.W_BaseObject): - state = space.fromcache(RefcountState) - state.set_lifeline(w_obj, obj) return w_obj typedescr_cache = {} @@ -134,104 +138,6 @@ #________________________________________________________ # refcounted object support -class RefcountState: - def __init__(self, space): - self.space = space - self.py_objects_w2r = {} # { w_obj -> raw PyObject } - self.py_objects_r2w = {} # { addr of raw PyObject -> w_obj } - - self.lifeline_dict = RWeakKeyDictionary(W_Root, PyOLifeline) - - self.borrow_mapping = {None: {}} - # { w_container -> { w_containee -> None } } - # the None entry manages references borrowed during a call to - # generic_cpy_call() - - # For tests - self.non_heaptypes_w = [] - - def _cleanup_(self): - assert self.borrow_mapping == {None: {}} - self.py_objects_r2w.clear() # is not valid anymore after translation - - def init_r2w_from_w2r(self): - """Rebuilds the dict py_objects_r2w on startup""" - for w_obj, obj in self.py_objects_w2r.items(): - ptr = rffi.cast(ADDR, obj) - self.py_objects_r2w[ptr] = w_obj - - 
def print_refcounts(self): - print "REFCOUNTS" - for w_obj, obj in self.py_objects_w2r.items(): - print "%r: %i" % (w_obj, obj.c_ob_refcnt) - - def get_from_lifeline(self, w_obj): - lifeline = self.lifeline_dict.get(w_obj) - if lifeline is not None: # make old PyObject ready for use in C code - py_obj = lifeline.pyo - assert py_obj.c_ob_refcnt == 0 - return py_obj - else: - return lltype.nullptr(PyObject.TO) - - def set_lifeline(self, w_obj, py_obj): - self.lifeline_dict.set(w_obj, - PyOLifeline(self.space, py_obj)) - - def make_borrowed(self, w_container, w_borrowed): - """ - Create a borrowed reference, which will live as long as the container - has a living reference (as a PyObject!) - """ - ref = make_ref(self.space, w_borrowed) - obj_ptr = rffi.cast(ADDR, ref) - - borrowees = self.borrow_mapping.setdefault(w_container, {}) - if w_borrowed in borrowees: - Py_DecRef(self.space, w_borrowed) # cancel incref from make_ref() - else: - borrowees[w_borrowed] = None - - return ref - - def reset_borrowed_references(self): - "Used in tests" - for w_container, w_borrowed in self.borrow_mapping.items(): - Py_DecRef(self.space, w_borrowed) - self.borrow_mapping = {None: {}} - - def delete_borrower(self, w_obj): - """ - Called when a potential container for borrowed references has lost its - last reference. Removes the borrowed references it contains. 
- """ - if w_obj in self.borrow_mapping: # move to lifeline __del__ - for w_containee in self.borrow_mapping[w_obj]: - self.forget_borrowee(w_containee) - del self.borrow_mapping[w_obj] - - def swap_borrow_container(self, container): - """switch the current default contained with the given one.""" - if container is None: - old_container = self.borrow_mapping[None] - self.borrow_mapping[None] = {} - return old_container - else: - old_container = self.borrow_mapping[None] - self.borrow_mapping[None] = container - for w_containee in old_container: - self.forget_borrowee(w_containee) - - def forget_borrowee(self, w_obj): - "De-register an object from the list of borrowed references" - ref = self.py_objects_w2r.get(w_obj, lltype.nullptr(PyObject.TO)) - if not ref: - if DEBUG_REFCOUNT: - print >>sys.stderr, "Borrowed object is already gone!" - return - - Py_DecRef(self.space, ref) - class InvalidPointerException(Exception): pass @@ -249,55 +155,37 @@ def create_ref(space, w_obj, itemcount=0): """ Allocates a PyObject, and fills its fields with info from the given - intepreter object. + interpreter object. """ - state = space.fromcache(RefcountState) w_type = space.type(w_obj) - if w_type.is_cpytype(): - py_obj = state.get_from_lifeline(w_obj) - if py_obj: - Py_IncRef(space, py_obj) - return py_obj - typedescr = get_typedescr(w_obj.typedef) py_obj = typedescr.allocate(space, w_type, itemcount=itemcount) - if w_type.is_cpytype(): - state.set_lifeline(w_obj, py_obj) + track_reference(space, py_obj, w_obj) + # + # py_obj.c_ob_refcnt should be exactly REFCNT_FROM_PYPY + 1 here, + # and we want only REFCNT_FROM_PYPY, i.e. only count as attached + # to the W_Root but not with any reference from the py_obj side. 
+ assert py_obj.c_ob_refcnt > rawrefcount.REFCNT_FROM_PYPY + py_obj.c_ob_refcnt -= 1 + # typedescr.attach(space, py_obj, w_obj) return py_obj -def track_reference(space, py_obj, w_obj, replace=False): +def track_reference(space, py_obj, w_obj): """ Ties together a PyObject and an interpreter object. + The PyObject's refcnt is increased by REFCNT_FROM_PYPY. + The reference in 'py_obj' is not stolen! Remember to Py_DecRef() + it is you need to. """ # XXX looks like a PyObject_GC_TRACK - ptr = rffi.cast(ADDR, py_obj) - state = space.fromcache(RefcountState) + assert py_obj.c_ob_refcnt < rawrefcount.REFCNT_FROM_PYPY + py_obj.c_ob_refcnt += rawrefcount.REFCNT_FROM_PYPY if DEBUG_REFCOUNT: debug_refcount("MAKREF", py_obj, w_obj) - if not replace: - assert w_obj not in state.py_objects_w2r - assert ptr not in state.py_objects_r2w - state.py_objects_w2r[w_obj] = py_obj - if ptr: # init_typeobject() bootstraps with NULL references - state.py_objects_r2w[ptr] = w_obj - -def make_ref(space, w_obj): - """ - Returns a new reference to an intepreter object. - """ - if w_obj is None: - return lltype.nullptr(PyObject.TO) - assert isinstance(w_obj, W_Root) - state = space.fromcache(RefcountState) - try: - py_obj = state.py_objects_w2r[w_obj] - except KeyError: - py_obj = create_ref(space, w_obj) - track_reference(space, py_obj, w_obj) - else: - Py_IncRef(space, py_obj) - return py_obj + assert w_obj + assert py_obj + rawrefcount.create_link_pypy(w_obj, py_obj) def from_ref(space, ref): @@ -305,16 +193,12 @@ Finds the interpreter object corresponding to the given reference. If the object is not yet realized (see stringobject.py), creates it. 
""" - assert lltype.typeOf(ref) == PyObject + assert is_pyobj(ref) if not ref: return None - state = space.fromcache(RefcountState) - ptr = rffi.cast(ADDR, ref) - - try: - return state.py_objects_r2w[ptr] - except KeyError: - pass + w_obj = rawrefcount.to_obj(W_Root, ref) + if w_obj is not None: + return w_obj # This reference is not yet a real interpreter object. # Realize it. @@ -326,47 +210,117 @@ return get_typedescr(w_type.instancetypedef).realize(space, ref) -# XXX Optimize these functions and put them into macro definitions - at cpython_api([PyObject], lltype.Void) -def Py_DecRef(space, obj): - if not obj: - return - assert lltype.typeOf(obj) == PyObject +def debug_collect(): + rawrefcount._collect() - obj.c_ob_refcnt -= 1 - if DEBUG_REFCOUNT: - debug_refcount("DECREF", obj, obj.c_ob_refcnt, frame_stackdepth=3) - if obj.c_ob_refcnt == 0: - state = space.fromcache(RefcountState) - ptr = rffi.cast(ADDR, obj) - if ptr not in state.py_objects_r2w: - # this is a half-allocated object, lets call the deallocator - # without modifying the r2w/w2r dicts - _Py_Dealloc(space, obj) - else: - w_obj = state.py_objects_r2w[ptr] - del state.py_objects_r2w[ptr] - w_type = space.type(w_obj) - if not w_type.is_cpytype(): + +def as_pyobj(space, w_obj): + """ + Returns a 'PyObject *' representing the given intepreter object. + This doesn't give a new reference, but the returned 'PyObject *' + is valid at least as long as 'w_obj' is. **To be safe, you should + use keepalive_until_here(w_obj) some time later.** In case of + doubt, use the safer make_ref(). 
+ """ + if w_obj is not None: + assert not is_pyobj(w_obj) + py_obj = rawrefcount.from_obj(PyObject, w_obj) + if not py_obj: + py_obj = create_ref(space, w_obj) + return py_obj + else: + return lltype.nullptr(PyObject.TO) +as_pyobj._always_inline_ = 'try' +INTERPLEVEL_API['as_pyobj'] = as_pyobj + +def pyobj_has_w_obj(pyobj): + return rawrefcount.to_obj(W_Root, pyobj) is not None +INTERPLEVEL_API['pyobj_has_w_obj'] = staticmethod(pyobj_has_w_obj) + + +def is_pyobj(x): + if x is None or isinstance(x, W_Root): + return False + elif is_PyObject(lltype.typeOf(x)): + return True + else: + raise TypeError(repr(type(x))) +INTERPLEVEL_API['is_pyobj'] = staticmethod(is_pyobj) + +class Entry(ExtRegistryEntry): + _about_ = is_pyobj + def compute_result_annotation(self, s_x): + from rpython.rtyper.llannotation import SomePtr + return self.bookkeeper.immutablevalue(isinstance(s_x, SomePtr)) + def specialize_call(self, hop): + hop.exception_cannot_occur() + return hop.inputconst(lltype.Bool, hop.s_result.const) + + at specialize.ll() +def make_ref(space, obj): + """Increment the reference counter of the PyObject and return it. + Can be called with either a PyObject or a W_Root. + """ + if is_pyobj(obj): + pyobj = rffi.cast(PyObject, obj) + else: + pyobj = as_pyobj(space, obj) + if pyobj: + assert pyobj.c_ob_refcnt > 0 + pyobj.c_ob_refcnt += 1 + if not is_pyobj(obj): + keepalive_until_here(obj) + return pyobj +INTERPLEVEL_API['make_ref'] = make_ref + + + at specialize.ll() +def get_w_obj_and_decref(space, obj): + """Decrement the reference counter of the PyObject and return the + corresponding W_Root object (so the reference count is at least + REFCNT_FROM_PYPY and cannot be zero). Can be called with either + a PyObject or a W_Root. 
+ """ + if is_pyobj(obj): + pyobj = rffi.cast(PyObject, obj) + w_obj = from_ref(space, pyobj) + else: + w_obj = obj + pyobj = as_pyobj(space, w_obj) + if pyobj: + pyobj.c_ob_refcnt -= 1 + assert pyobj.c_ob_refcnt >= rawrefcount.REFCNT_FROM_PYPY + keepalive_until_here(w_obj) + return w_obj +INTERPLEVEL_API['get_w_obj_and_decref'] = get_w_obj_and_decref + + + at specialize.ll() +def incref(space, obj): + make_ref(space, obj) +INTERPLEVEL_API['incref'] = incref + + at specialize.ll() +def decref(space, obj): + if is_pyobj(obj): + obj = rffi.cast(PyObject, obj) + if obj: + assert obj.c_ob_refcnt > 0 + obj.c_ob_refcnt -= 1 + if obj.c_ob_refcnt == 0: _Py_Dealloc(space, obj) - del state.py_objects_w2r[w_obj] - # if the object was a container for borrowed references - state.delete_borrower(w_obj) else: - if not we_are_translated() and obj.c_ob_refcnt < 0: - message = "Negative refcount for obj %s with type %s" % ( - obj, rffi.charp2str(obj.c_ob_type.c_tp_name)) - print >>sys.stderr, message - assert False, message + get_w_obj_and_decref(space, obj) +INTERPLEVEL_API['decref'] = decref + @cpython_api([PyObject], lltype.Void) def Py_IncRef(space, obj): - if not obj: - return - obj.c_ob_refcnt += 1 - assert obj.c_ob_refcnt > 0 - if DEBUG_REFCOUNT: - debug_refcount("INCREF", obj, obj.c_ob_refcnt, frame_stackdepth=3) + incref(space, obj) + + at cpython_api([PyObject], lltype.Void) +def Py_DecRef(space, obj): + decref(space, obj) @cpython_api([PyObject], lltype.Void) def _Py_NewReference(space, obj): @@ -375,74 +329,13 @@ assert isinstance(w_type, W_TypeObject) get_typedescr(w_type.instancetypedef).realize(space, obj) + at cpython_api([PyObject], lltype.Void) def _Py_Dealloc(space, obj): - from pypy.module.cpyext.api import generic_cpy_call_dont_decref + from pypy.module.cpyext.api import generic_cpy_call pto = obj.c_ob_type #print >>sys.stderr, "Calling dealloc slot", pto.c_tp_dealloc, "of", obj, \ # "'s type which is", rffi.charp2str(pto.c_tp_name) - 
generic_cpy_call_dont_decref(space, pto.c_tp_dealloc, obj) - -#___________________________________________________________ -# Support for "lifelines" -# -# Object structure must stay alive even when not referenced -# by any C code. - -class PyOLifeline(object): - def __init__(self, space, pyo): - self.pyo = pyo - self.space = space - - def __del__(self): - if self.pyo: - assert self.pyo.c_ob_refcnt == 0 - _Py_Dealloc(self.space, self.pyo) - self.pyo = lltype.nullptr(PyObject.TO) - # XXX handle borrowed objects here - -#___________________________________________________________ -# Support for borrowed references - -def make_borrowed_ref(space, w_container, w_borrowed): - """ - Create a borrowed reference, which will live as long as the container - has a living reference (as a PyObject!) - """ - if w_borrowed is None: - return lltype.nullptr(PyObject.TO) - - state = space.fromcache(RefcountState) - return state.make_borrowed(w_container, w_borrowed) - -class Reference: - def __init__(self, pyobj): - assert not isinstance(pyobj, W_Root) - self.pyobj = pyobj - - def get_ref(self, space): - return self.pyobj - - def get_wrapped(self, space): - return from_ref(space, self.pyobj) - -class BorrowPair(Reference): - """ - Delays the creation of a borrowed reference. 
- """ - def __init__(self, w_container, w_borrowed): - self.w_container = w_container - self.w_borrowed = w_borrowed - - def get_ref(self, space): - return make_borrowed_ref(space, self.w_container, self.w_borrowed) - - def get_wrapped(self, space): - return self.w_borrowed - -def borrow_from(container, borrowed): - return BorrowPair(container, borrowed) - -#___________________________________________________________ + generic_cpy_call(space, pto.c_tp_dealloc, obj) @cpython_api([rffi.VOIDP], lltype.Signed, error=CANNOT_FAIL) def _Py_HashPointer(space, ptr): diff --git a/pypy/module/cpyext/pypyintf.py b/pypy/module/cpyext/pypyintf.py deleted file mode 100644 --- a/pypy/module/cpyext/pypyintf.py +++ /dev/null @@ -1,9 +0,0 @@ -from pypy.module.cpyext.api import cpython_api -from pypy.module.cpyext.pyobject import PyObject, borrow_from - - - at cpython_api([PyObject, PyObject], PyObject) -def PyPy_Borrow(space, w_parentobj, w_obj): - """Returns a borrowed reference to 'obj', borrowing from the 'parentobj'. 
- """ - return borrow_from(w_parentobj, w_obj) diff --git a/pypy/module/cpyext/pytraceback.py b/pypy/module/cpyext/pytraceback.py --- a/pypy/module/cpyext/pytraceback.py +++ b/pypy/module/cpyext/pytraceback.py @@ -3,7 +3,7 @@ PyObjectFields, generic_cpy_call, CONST_STRING, CANNOT_FAIL, Py_ssize_t, cpython_api, bootstrap_function, cpython_struct, build_type_checkers) from pypy.module.cpyext.pyobject import ( - PyObject, make_ref, from_ref, Py_DecRef, make_typedescr, borrow_from) + PyObject, make_ref, from_ref, Py_DecRef, make_typedescr) from pypy.module.cpyext.frameobject import PyFrameObject from rpython.rlib.unroll import unrolling_iterable from pypy.interpreter.error import OperationError diff --git a/pypy/module/cpyext/sequence.py b/pypy/module/cpyext/sequence.py --- a/pypy/module/cpyext/sequence.py +++ b/pypy/module/cpyext/sequence.py @@ -4,7 +4,7 @@ from pypy.objspace.std.listobject import ListStrategy from pypy.module.cpyext.api import ( cpython_api, CANNOT_FAIL, CONST_STRING, Py_ssize_t, PyObject, PyObjectP) -from pypy.module.cpyext.pyobject import borrow_from, make_ref, from_ref +from pypy.module.cpyext.pyobject import PyObject from rpython.rtyper.lltypesystem import rffi, lltype from pypy.objspace.std import listobject, tupleobject @@ -44,16 +44,18 @@ which case o is returned. Use PySequence_Fast_GET_ITEM() to access the members of the result. Returns NULL on failure. 
If the object is not a sequence, raises TypeError with m as the message text.""" - if isinstance(w_obj, listobject.W_ListObject): + # make sure we can return a borrowed obj from PySequence_Fast_GET_ITEM w_obj.convert_to_cpy_strategy(space) return w_obj + if isinstance(w_obj, tupleobject.W_TupleObject): + return w_obj try: return listobject.W_ListObject.newlist_cpyext(space, space.listview(w_obj)) except OperationError: raise OperationError(space.w_TypeError, space.wrap(rffi.charp2str(m))) - at cpython_api([PyObject, Py_ssize_t], PyObject) + at cpython_api([PyObject, Py_ssize_t], PyObject, result_borrowed=True) def PySequence_Fast_GET_ITEM(space, w_obj, index): """Return the ith element of o, assuming that o was returned by PySequence_Fast(), o is not NULL, and that i is within bounds. @@ -63,7 +65,7 @@ else: assert isinstance(w_obj, tupleobject.W_TupleObject) w_res = w_obj.wrappeditems[index] - return borrow_from(w_obj, w_res) + return w_res # borrowed ref @cpython_api([PyObject], Py_ssize_t, error=CANNOT_FAIL) def PySequence_Fast_GET_SIZE(space, w_obj): diff --git a/pypy/module/cpyext/setobject.py b/pypy/module/cpyext/setobject.py --- a/pypy/module/cpyext/setobject.py +++ b/pypy/module/cpyext/setobject.py @@ -3,7 +3,7 @@ from pypy.module.cpyext.api import (cpython_api, Py_ssize_t, CANNOT_FAIL, build_type_checkers) from pypy.module.cpyext.pyobject import (PyObject, PyObjectP, Py_DecRef, - borrow_from, make_ref, from_ref) + make_ref, from_ref) from pypy.module.cpyext.pyerrors import PyErr_BadInternalCall from pypy.objspace.std.setobject import W_SetObject, W_FrozensetObject, newset diff --git a/pypy/module/cpyext/slotdefs.py b/pypy/module/cpyext/slotdefs.py --- a/pypy/module/cpyext/slotdefs.py +++ b/pypy/module/cpyext/slotdefs.py @@ -32,17 +32,16 @@ Py_GE = 5 -def check_num_args(space, ob, n): - from pypy.module.cpyext.tupleobject import PyTuple_CheckExact, \ - PyTuple_GET_SIZE - if not PyTuple_CheckExact(space, ob): +def check_num_args(space, w_ob, n): + from 
pypy.module.cpyext.tupleobject import PyTuple_CheckExact + if not PyTuple_CheckExact(space, w_ob): raise OperationError(space.w_SystemError, space.wrap("PyArg_UnpackTuple() argument list is not a tuple")) - if n == PyTuple_GET_SIZE(space, ob): + if n == space.len_w(w_ob): return raise oefmt(space.w_TypeError, "expected %d arguments, got %d", - n, PyTuple_GET_SIZE(space, ob)) + n, space.len_w(w_ob)) def wrap_init(space, w_self, w_args, func, w_kwargs): func_init = rffi.cast(initproc, func) diff --git a/pypy/module/cpyext/src/getargs.c b/pypy/module/cpyext/src/getargs.c --- a/pypy/module/cpyext/src/getargs.c +++ b/pypy/module/cpyext/src/getargs.c @@ -442,7 +442,7 @@ strncpy(msgbuf, "is not retrievable", bufsize); return msgbuf; } - PyPy_Borrow(arg, item); + //PyPy_Borrow(arg, item); msg = convertitem(item, &format, p_va, flags, levels+1, msgbuf, bufsize, freelist); /* PySequence_GetItem calls tp->sq_item, which INCREFs */ diff --git a/pypy/module/cpyext/state.py b/pypy/module/cpyext/state.py From pypy.commits at gmail.com Sun Feb 21 08:28:46 2016 From: pypy.commits at gmail.com (mattip) Date: Sun, 21 Feb 2016 05:28:46 -0800 (PST) Subject: [pypy-commit] pypy cpyext-ext: fix order of operations Message-ID: <56c9bb8e.8a921c0a.c7b4.51b7@mx.google.com> Author: mattip Branch: cpyext-ext Changeset: r82363:46e8c7546570 Date: 2016-02-21 14:21 +0100 http://bitbucket.org/pypy/pypy/changeset/46e8c7546570/ Log: fix order of operations diff --git a/pypy/module/cpyext/api.py b/pypy/module/cpyext/api.py --- a/pypy/module/cpyext/api.py +++ b/pypy/module/cpyext/api.py @@ -1153,8 +1153,8 @@ def setup_library(space): "NOT_RPYTHON" + use_micronumpy = setup_micronumpy(space) export_symbols = sorted(FUNCTIONS) + sorted(SYMBOLS_C) + sorted(GLOBALS) - use_micronumpy = setup_micronumpy(space) from rpython.translator.c.database import LowLevelDatabase db = LowLevelDatabase() From pypy.commits at gmail.com Sun Feb 21 08:28:48 2016 From: pypy.commits at gmail.com (mattip) Date: Sun, 21 Feb 2016 
05:28:48 -0800 (PST) Subject: [pypy-commit] pypy cpyext-ext: merge cpyext-gc-support-2 into branch Message-ID: <56c9bb90.84c9c20a.40921.ffffdd1d@mx.google.com> Author: mattip Branch: cpyext-ext Changeset: r82364:4ccd599cd77a Date: 2016-02-21 14:21 +0100 http://bitbucket.org/pypy/pypy/changeset/4ccd599cd77a/ Log: merge cpyext-gc-support-2 into branch diff --git a/rpython/rlib/rawrefcount.py b/rpython/rlib/rawrefcount.py --- a/rpython/rlib/rawrefcount.py +++ b/rpython/rlib/rawrefcount.py @@ -11,8 +11,8 @@ from rpython.rlib import rgc -REFCNT_FROM_PYPY = 80 -REFCNT_FROM_PYPY_LIGHT = REFCNT_FROM_PYPY + (sys.maxint//2+1) +REFCNT_FROM_PYPY = sys.maxint // 4 + 1 +REFCNT_FROM_PYPY_LIGHT = REFCNT_FROM_PYPY + (sys.maxint // 2 + 1) RAWREFCOUNT_DEALLOC_TRIGGER = lltype.Ptr(lltype.FuncType([], lltype.Void)) From pypy.commits at gmail.com Sun Feb 21 09:15:06 2016 From: pypy.commits at gmail.com (cfbolz) Date: Sun, 21 Feb 2016 06:15:06 -0800 (PST) Subject: [pypy-commit] pypy default: (arigo, cfbolz): don't use __slots__ as storage for fixed sized arrays Message-ID: <56c9c66a.c13fc20a.65a74.ffffeb10@mx.google.com> Author: Carl Friedrich Bolz Branch: Changeset: r82365:4a97212b2ecf Date: 2016-02-21 15:07 +0100 http://bitbucket.org/pypy/pypy/changeset/4a97212b2ecf/ Log: (arigo, cfbolz): don't use __slots__ as storage for fixed sized arrays diff --git a/rpython/rtyper/lltypesystem/ll2ctypes.py b/rpython/rtyper/lltypesystem/ll2ctypes.py --- a/rpython/rtyper/lltypesystem/ll2ctypes.py +++ b/rpython/rtyper/lltypesystem/ll2ctypes.py @@ -463,6 +463,9 @@ def remove_regular_struct_content(container): STRUCT = container._TYPE + if isinstance(STRUCT, lltype.FixedSizeArray): + del container._items + return for field_name in STRUCT._names: FIELDTYPE = getattr(STRUCT, field_name) if not isinstance(FIELDTYPE, lltype.ContainerType): @@ -642,6 +645,12 @@ cobj = lltype2ctypes(value) setattr(self._storage.contents, field_name, cobj) + def getitem(self, index, uninitialized_ok=False): + return 
getattr(self, "item%s" % index) + + def setitem(self, index, value): + setattr(self, "item%s" % index, value) + class _array_mixin(_parentable_mixin): """Mixin added to _array containers when they become ctypes-based.""" __slots__ = () diff --git a/rpython/rtyper/lltypesystem/lltype.py b/rpython/rtyper/lltypesystem/lltype.py --- a/rpython/rtyper/lltypesystem/lltype.py +++ b/rpython/rtyper/lltypesystem/lltype.py @@ -1761,7 +1761,10 @@ def __new__(self, TYPE, n=None, initialization=None, parent=None, parentindex=None): - my_variety = _struct_variety(TYPE._names) + if isinstance(TYPE, FixedSizeArray): + my_variety = _fixedsizearray + else: + my_variety = _struct_variety(TYPE._names) return object.__new__(my_variety) def __init__(self, TYPE, n=None, initialization=None, parent=None, @@ -1814,23 +1817,48 @@ raise UninitializedMemoryAccess("%r.%s"%(self, field_name)) return r - # for FixedSizeArray kind of structs: + +class _fixedsizearray(_struct): + def __init__(self, TYPE, n=None, initialization=None, parent=None, + parentindex=None): + _parentable.__init__(self, TYPE) + if n is not None: + raise TypeError("%r is not variable-sized" % (TYPE,)) + typ = TYPE.OF + storage = [] + for i, fld in enumerate(TYPE._names): + value = typ._allocate(initialization=initialization, + parent=self, parentindex=fld) + storage.append(value) + self._items = storage + if parent is not None: + self._setparentstructure(parent, parentindex) def getlength(self): - assert isinstance(self._TYPE, FixedSizeArray) return self._TYPE.length def getbounds(self): return 0, self.getlength() def getitem(self, index, uninitialized_ok=False): - assert isinstance(self._TYPE, FixedSizeArray) - return self._getattr('item%d' % index, uninitialized_ok) + assert 0 <= index < self.getlength() + return self._items[index] def setitem(self, index, value): - assert isinstance(self._TYPE, FixedSizeArray) - setattr(self, 'item%d' % index, value) + assert 0 <= index < self.getlength() + self._items[index] = value + def 
__getattr__(self, name): + # obscure + if name.startswith("item"): + return self.getitem(int(name[len('item'):])) + return _struct.__getattr__(self, name) + + def __setattr__(self, name, value): + if name.startswith("item"): + self.setitem(int(name[len('item'):]), value) + return + _struct.__setattr__(self, name, value) class _array(_parentable): _kind = "array" From pypy.commits at gmail.com Sun Feb 21 09:15:09 2016 From: pypy.commits at gmail.com (cfbolz) Date: Sun, 21 Feb 2016 06:15:09 -0800 (PST) Subject: [pypy-commit] pypy default: (arigo, cfbolz, mattip around): kill a dead function Message-ID: <56c9c66d.4e8e1c0a.85f3f.ffffb210@mx.google.com> Author: Carl Friedrich Bolz Branch: Changeset: r82367:90ab95428105 Date: 2016-02-21 15:13 +0100 http://bitbucket.org/pypy/pypy/changeset/90ab95428105/ Log: (arigo, cfbolz, mattip around): kill a dead function diff --git a/pypy/interpreter/typedef.py b/pypy/interpreter/typedef.py --- a/pypy/interpreter/typedef.py +++ b/pypy/interpreter/typedef.py @@ -156,20 +156,6 @@ get_unique_interplevel_subclass._annspecialcase_ = "specialize:memo" _subclass_cache = {} -def enum_interplevel_subclasses(config, cls): - """Return a list of all the extra interp-level subclasses of 'cls' that - can be built by get_unique_interplevel_subclass().""" - result = [] - for flag1 in (False, True): - for flag2 in (False, True): - for flag3 in (False, True): - for flag4 in (False, True): - result.append(get_unique_interplevel_subclass( - config, cls, flag1, flag2, flag3, flag4)) - result = dict.fromkeys(result) - assert len(result) <= 6 - return result.keys() - def _getusercls(config, cls, wants_dict, wants_slots, wants_del, weakrefable): typedef = cls.typedef if wants_dict and typedef.hasdict: From pypy.commits at gmail.com Sun Feb 21 09:15:08 2016 From: pypy.commits at gmail.com (cfbolz) Date: Sun, 21 Feb 2016 06:15:08 -0800 (PST) Subject: [pypy-commit] pypy default: (cfbolz, arigo): kill unused line Message-ID: 
<56c9c66c.2aacc20a.b189d.ffffeb6e@mx.google.com> Author: Carl Friedrich Bolz Branch: Changeset: r82366:4cb232f5380b Date: 2016-02-21 15:12 +0100 http://bitbucket.org/pypy/pypy/changeset/4cb232f5380b/ Log: (cfbolz, arigo): kill unused line diff --git a/rpython/rtyper/lltypesystem/lltype.py b/rpython/rtyper/lltypesystem/lltype.py --- a/rpython/rtyper/lltypesystem/lltype.py +++ b/rpython/rtyper/lltypesystem/lltype.py @@ -1774,7 +1774,6 @@ raise TypeError("%r is not variable-sized" % (TYPE,)) if n is None and TYPE._arrayfld is not None: raise TypeError("%r is variable-sized" % (TYPE,)) - first, FIRSTTYPE = TYPE._first_struct() for fld, typ in TYPE._flds.items(): if fld == TYPE._arrayfld: value = _array(typ, n, initialization=initialization, From pypy.commits at gmail.com Sun Feb 21 10:02:54 2016 From: pypy.commits at gmail.com (mattip) Date: Sun, 21 Feb 2016 07:02:54 -0800 (PST) Subject: [pypy-commit] pypy default: test, fix boolean array indexing, extending d77888929462 Message-ID: <56c9d19e.418f1c0a.892d5.ffffba53@mx.google.com> Author: mattip Branch: Changeset: r82368:85f5eef31a2a Date: 2016-02-21 15:53 +0100 http://bitbucket.org/pypy/pypy/changeset/85f5eef31a2a/ Log: test, fix boolean array indexing, extending d77888929462 diff --git a/pypy/module/micronumpy/compile.py b/pypy/module/micronumpy/compile.py --- a/pypy/module/micronumpy/compile.py +++ b/pypy/module/micronumpy/compile.py @@ -196,6 +196,10 @@ def newfloat(self, f): return self.float(f) + def newslice(self, start, stop, step): + return SliceObject(self.int_w(start), self.int_w(stop), + self.int_w(step)) + def le(self, w_obj1, w_obj2): assert isinstance(w_obj1, boxes.W_GenericBox) assert isinstance(w_obj2, boxes.W_GenericBox) diff --git a/pypy/module/micronumpy/strides.py b/pypy/module/micronumpy/strides.py --- a/pypy/module/micronumpy/strides.py +++ b/pypy/module/micronumpy/strides.py @@ -94,11 +94,13 @@ dim = i break if dim >= 0: - # filter by axis r - filtr = chunks.pop(dim) + # filter by axis dim + 
filtr = chunks[dim] assert isinstance(filtr, BooleanChunk) w_arr = w_arr.getitem_filter(space, filtr.w_idx, axis=dim) arr = w_arr.implementation + chunks[dim] = SliceChunk(space.newslice(space.wrap(0), + space.wrap(-1), space.w_None)) r = calculate_slice_strides(space, arr.shape, arr.start, arr.get_strides(), arr.get_backstrides(), chunks) else: diff --git a/pypy/module/micronumpy/test/test_ndarray.py b/pypy/module/micronumpy/test/test_ndarray.py --- a/pypy/module/micronumpy/test/test_ndarray.py +++ b/pypy/module/micronumpy/test/test_ndarray.py @@ -2550,6 +2550,8 @@ assert b.base is None b = a[:, np.array([True, False, True])] assert b.base is not None + b = a[np.array([True, False]), 0] + assert (b ==[0]).all() def test_scalar_indexing(self): import numpy as np From pypy.commits at gmail.com Sun Feb 21 11:31:14 2016 From: pypy.commits at gmail.com (cfbolz) Date: Sun, 21 Feb 2016 08:31:14 -0800 (PST) Subject: [pypy-commit] pypy default: reduce all guards from int_floordiv_ovf if one of the arguments is constant Message-ID: <56c9e652.44e21c0a.52c2c.51fa@mx.google.com> Author: Carl Friedrich Bolz Branch: Changeset: r82369:4ee6b2b67a6b Date: 2016-02-21 17:27 +0100 http://bitbucket.org/pypy/pypy/changeset/4ee6b2b67a6b/ Log: reduce all guards from int_floordiv_ovf if one of the arguments is constant (completely random old diff I had lying around) diff --git a/rpython/jit/codewriter/support.py b/rpython/jit/codewriter/support.py --- a/rpython/jit/codewriter/support.py +++ b/rpython/jit/codewriter/support.py @@ -246,12 +246,12 @@ def _ll_2_int_floordiv_ovf_zer(x, y): if y == 0: raise ZeroDivisionError - if x == -sys.maxint - 1 and y == -1: - raise OverflowError - return llop.int_floordiv(lltype.Signed, x, y) + return _ll_2_int_floordiv_ovf(x, y) def _ll_2_int_floordiv_ovf(x, y): - if x == -sys.maxint - 1 and y == -1: + # intentionally not short-circuited to produce only one guard + # and to remove the check fully if one of the arguments is known + if (x == -sys.maxint - 
1) & (y == -1): raise OverflowError return llop.int_floordiv(lltype.Signed, x, y) @@ -263,12 +263,11 @@ def _ll_2_int_mod_ovf_zer(x, y): if y == 0: raise ZeroDivisionError - if x == -sys.maxint - 1 and y == -1: - raise OverflowError - return llop.int_mod(lltype.Signed, x, y) + return _ll_2_int_mod_ovf(x, y) def _ll_2_int_mod_ovf(x, y): - if x == -sys.maxint - 1 and y == -1: + #see comment in _ll_2_int_floordiv_ovf + if (x == -sys.maxint - 1) & (y == -1): raise OverflowError return llop.int_mod(lltype.Signed, x, y) diff --git a/rpython/jit/metainterp/test/test_ajit.py b/rpython/jit/metainterp/test/test_ajit.py --- a/rpython/jit/metainterp/test/test_ajit.py +++ b/rpython/jit/metainterp/test/test_ajit.py @@ -1199,6 +1199,31 @@ (-sys.maxint-1) // (-6) + 100 * 8) + def test_overflow_fold_if_divisor_constant(self): + import sys + from rpython.rtyper.lltypesystem.lloperation import llop + myjitdriver = JitDriver(greens = [], reds = ['x', 'y', 'res']) + def f(x, y): + res = 0 + while y > 0: + myjitdriver.can_enter_jit(x=x, y=y, res=res) + myjitdriver.jit_merge_point(x=x, y=y, res=res) + try: + res += llop.int_floordiv_ovf(lltype.Signed, + x, 2) + res += llop.int_mod_ovf(lltype.Signed, + x, 2) + x += 5 + except OverflowError: + res += 100 + y -= 1 + return res + res = self.meta_interp(f, [-41, 8]) + # the guard_true are for the loop condition + # the guard_false needed to check whether an overflow can occur have + # been folded away + self.check_resops(guard_true=2, guard_false=0) + def test_isinstance(self): class A: pass From pypy.commits at gmail.com Sun Feb 21 11:55:05 2016 From: pypy.commits at gmail.com (arigo) Date: Sun, 21 Feb 2016 08:55:05 -0800 (PST) Subject: [pypy-commit] pypy vlen-resume: (cfbolz, fijal, arigo) Message-ID: <56c9ebe9.046f1c0a.c7219.ffffd849@mx.google.com> Author: Armin Rigo Branch: vlen-resume Changeset: r82370:8122832414e9 Date: 2016-02-21 15:57 +0100 http://bitbucket.org/pypy/pypy/changeset/8122832414e9/ Log: (cfbolz, fijal, arigo) trying to 
look at and fix the variable-length encoding of resume data From pypy.commits at gmail.com Sun Feb 21 11:55:07 2016 From: pypy.commits at gmail.com (arigo) Date: Sun, 21 Feb 2016 08:55:07 -0800 (PST) Subject: [pypy-commit] pypy vlen-resume: (fijal, cfbolz, arigo) Message-ID: <56c9ebeb.8abb1c0a.92707.ffffd028@mx.google.com> Author: Armin Rigo Branch: vlen-resume Changeset: r82371:a3dc1d20dc0f Date: 2016-02-21 16:39 +0100 http://bitbucket.org/pypy/pypy/changeset/a3dc1d20dc0f/ Log: (fijal, cfbolz, arigo) work in progress diff --git a/rpython/jit/metainterp/resume.py b/rpython/jit/metainterp/resume.py --- a/rpython/jit/metainterp/resume.py +++ b/rpython/jit/metainterp/resume.py @@ -1147,7 +1147,7 @@ end = first_snapshot_size - len(virtualizable_boxes) elif ginfo is not None: item, self.cur_index = resumecode.numb_next_item(self.numb, - first_snapshot_size - 1) + first_snapshot_size - 1)xxxxxxxxxxxxx virtualizable_boxes = [self.decode_ref(item)] end = first_snapshot_size - 1 else: @@ -1483,7 +1483,7 @@ if vinfo is not None: end_vref = self.consume_vable_info(vinfo) if ginfo is not None: - end_vref -= 1 + end_vref -= 1 xxxxxxxxxxxxxxx self.consume_virtualref_info(vrefinfo, end_vref) self.cur_index = rffi.cast(lltype.Signed, self.numb.first_snapshot_size) diff --git a/rpython/jit/metainterp/resumecode.py b/rpython/jit/metainterp/resumecode.py --- a/rpython/jit/metainterp/resumecode.py +++ b/rpython/jit/metainterp/resumecode.py @@ -20,77 +20,64 @@ # ('prev', NUMBERINGP), # ('prev_index', rffi.USHORT), ('first_snapshot_size', rffi.USHORT), # ugh, ugly - ('code', lltype.Array(rffi.SHORT))) + ('code', lltype.Array(rffi.UCHAR))) NUMBERINGP.TO.become(NUMBERING) NULL_NUMBER = lltype.nullptr(NUMBERING) # this is the actually used version +## def create_numbering(lst, first_snapshot_size): +## numb = lltype.malloc(NUMBERING, len(lst)) +## for i in range(len(lst)): +## numb.code[i] = rffi.cast(rffi.SHORT, lst[i]) +## numb.first_snapshot_size = rffi.cast(rffi.USHORT, 
first_snapshot_size) +## return numb + +## def numb_next_item(numb, index): +## return rffi.cast(lltype.Signed, numb.code[index]), index + 1 + +# this is the version that can be potentially used + def create_numbering(lst, first_snapshot_size): - numb = lltype.malloc(NUMBERING, len(lst)) - for i in range(len(lst)): - numb.code[i] = rffi.cast(rffi.SHORT, lst[i]) + result = [] + for item in lst: + item *= 2 + if item < 0: + item = -1 - item + + assert item >= 0 + if item < 2**7: + result.append(rffi.cast(rffi.UCHAR, item)) + elif item < 2**14: + result.append(rffi.cast(rffi.UCHAR, item | 0x80)) + result.append(rffi.cast(rffi.UCHAR, item >> 7)) + else: + assert item < 2**16 + result.append(rffi.cast(rffi.UCHAR, item | 0x80)) + result.append(rffi.cast(rffi.UCHAR, (item >> 7) | 0x80)) + result.append(rffi.cast(rffi.UCHAR, item >> 14)) + + numb = lltype.malloc(NUMBERING, len(result)) numb.first_snapshot_size = rffi.cast(rffi.USHORT, first_snapshot_size) + for i in range(len(result)): + numb.code[i] = result[i] return numb def numb_next_item(numb, index): - return rffi.cast(lltype.Signed, numb.code[index]), index + 1 - -# this is the version that can be potentially used - -def _create_numbering(lst, prev, prev_index, first_snapshot_size): - count = 0 - for item in lst: - if item < 0: - if item < -63: - count += 1 - if item > 127: - count += 1 - count += 1 - numb = lltype.malloc(NUMBERING, count) - numb.prev = prev - numb.prev_index = rffi.cast(rffi.USHORT, prev_index) - numb.first_snapshot_size = rffi.cast(rffi.USHORT, first_snapshot_size) - index = 0 - for item in lst: - if 0 <= item <= 128: - numb.code[index] = rffi.cast(rffi.UCHAR, item) + value = rffi.cast(lltype.Signed, numb.code[index]) + index += 1 + if value & (2**7): + value &= 2**7 - 1 + value |= rffi.cast(lltype.Signed, numb.code[index]) << 7 + index += 1 + if value & (2**14): + value &= 2**14 - 1 + value |= rffi.cast(lltype.Signed, numb.code[index]) << 14 index += 1 - else: - assert (item >> 8) <= 63 - if item 
< 0: - item = -item - if item <= 63: - numb.code[index] = rffi.cast(rffi.UCHAR, item | 0x40) - index += 1 - else: - numb.code[index] = rffi.cast(rffi.UCHAR, (item >> 8) | 0x80 | 0x40) - numb.code[index + 1] = rffi.cast(rffi.UCHAR, item & 0xff) - index += 2 - else: - numb.code[index] = rffi.cast(rffi.UCHAR, (item >> 8) | 0x80) - numb.code[index + 1] = rffi.cast(rffi.UCHAR, item & 0xff) - index += 2 - return numb - -def copy_from_list_to_numb(lst, numb, index): - i = 0 - while i < len(lst): - numb.code[i + index] = lst[i] - i += 1 - -def _numb_next_item(numb, index): - one = rffi.cast(lltype.Signed, numb.code[index]) - if one & 0x40: - if one & 0x80: - two = rffi.cast(lltype.Signed, numb.code[index + 1]) - return -(((one & ~(0x80 | 0x40)) << 8) | two), index + 2 - else: - return -(one & (~0x40)), index + 1 - if one & 0x80: - two = rffi.cast(lltype.Signed, numb.code[index + 1]) - return ((one & 0x7f) << 8) | two, index + 2 - return one, index + 1 + if value & 1: + value = -1 - value + value >>= 1 + return value, index def unpack_numbering(numb): l = [] diff --git a/rpython/jit/metainterp/test/test_resumecode.py b/rpython/jit/metainterp/test/test_resumecode.py --- a/rpython/jit/metainterp/test/test_resumecode.py +++ b/rpython/jit/metainterp/test/test_resumecode.py @@ -1,9 +1,12 @@ from rpython.jit.metainterp.resumecode import NUMBERING, NULL_NUMBER from rpython.jit.metainterp.resumecode import create_numbering,\ - unpack_numbering, copy_from_list_to_numb + unpack_numbering from rpython.rtyper.lltypesystem import lltype +from hypothesis import strategies, given + + def test_pack_unpack(): examples = [ [1, 2, 3, 4, 257, 10000, 13, 15], @@ -14,3 +17,13 @@ for l in examples: n = create_numbering(l, 0) assert unpack_numbering(n) == l + + at given(strategies.lists(strategies.integers(-2**15, 2**15-1))) +def test_roundtrip(l): + n = create_numbering(l, 0) + assert unpack_numbering(n) == l + + at given(strategies.lists(strategies.integers(-2**15, 2**15-1))) +def 
test_compressing(l): + n = create_numbering(l, 0) + assert len(n.code) <= len(l) * 3 From pypy.commits at gmail.com Sun Feb 21 11:55:09 2016 From: pypy.commits at gmail.com (arigo) Date: Sun, 21 Feb 2016 08:55:09 -0800 (PST) Subject: [pypy-commit] pypy vlen-resume: (fijal, cfbolz, arigo) Message-ID: <56c9ebed.45941c0a.dbf35.ffffd6e6@mx.google.com> Author: Armin Rigo Branch: vlen-resume Changeset: r82372:d18f72a15f77 Date: 2016-02-21 17:54 +0100 http://bitbucket.org/pypy/pypy/changeset/d18f72a15f77/ Log: (fijal, cfbolz, arigo) in-progress diff --git a/rpython/jit/metainterp/resume.py b/rpython/jit/metainterp/resume.py --- a/rpython/jit/metainterp/resume.py +++ b/rpython/jit/metainterp/resume.py @@ -27,6 +27,13 @@ self.prev = prev self.boxes = boxes +class TopSnapshot(Snapshot): + __slots__ = ('vable_boxes',) + + def __init__(self, prev, boxes, vable_boxes): + Snapshot.__init__(self, prev, boxes) + self.vable_boxes = vable_boxes + def combine_uint(index1, index2): assert 0 <= index1 < 65536 assert 0 <= index2 < 65536 @@ -127,9 +134,8 @@ snapshot_storage): n = len(framestack) - 1 if virtualizable_boxes is not None: - boxes = virtualref_boxes + virtualizable_boxes - else: - boxes = virtualref_boxes[:] + virtualizable_boxes = virtualizable_boxes[:] + virtualref_boxes = virtualref_boxes[:] if n >= 0: top = framestack[n] _ensure_parent_resumedata(framestack, n) @@ -138,11 +144,12 @@ snapshot_storage.rd_frame_info_list = frame_info_list snapshot = Snapshot(top.parent_resumedata_snapshot, top.get_list_of_active_boxes(False)) - snapshot = Snapshot(snapshot, boxes) + snapshot = TopSnapshot(snapshot, virtualref_boxes, virtualizable_boxes) snapshot_storage.rd_snapshot = snapshot else: snapshot_storage.rd_frame_info_list = None - snapshot_storage.rd_snapshot = Snapshot(None, boxes) + snapshot_storage.rd_snapshot = TopSnapshot(None, virtualref_boxes, + virtualizable_boxes) PENDINGFIELDSTRUCT = lltype.Struct('PendingField', ('lldescr', OBJECTPTR), @@ -200,10 +207,12 @@ self.v = 0 
def count_boxes(self, lst): - c = 0 + snapshot = lst[0] + assert isinstance(snapshot, TopSnapshot) + c = len(snapshot.vable_boxes) for snapshot in lst: c += len(snapshot.boxes) - c += 2 * (len(lst) - 1) + c += 2 * (len(lst) - 1) + 1 return c def append(self, item): @@ -294,13 +303,11 @@ state.append(tagged) state.n = n state.v = v - state.position -= length + 2 - def number(self, optimizer, snapshot, frameinfo): + def number(self, optimizer, topsnapshot, frameinfo): # flatten the list - vref_snapshot = snapshot - cur = snapshot.prev - snapshot_list = [vref_snapshot] + cur = topsnapshot.prev + snapshot_list = [topsnapshot] framestack_list = [] while cur: framestack_list.append(frameinfo) @@ -311,19 +318,29 @@ # we want to number snapshots starting from the back, but ending # with a forward list - for i in range(len(snapshot_list) - 1, -1, -1): - state.position -= len(snapshot_list[i].boxes) - if i != 0: - frameinfo = framestack_list[i - 1] - jitcode_pos, pc = unpack_uint(frameinfo.packed_jitcode_pc) - state.position -= 2 - state.append(rffi.cast(rffi.SHORT, jitcode_pos)) - state.append(rffi.cast(rffi.SHORT, pc)) + for i in range(len(snapshot_list) - 1, 0, -1): + state.position -= len(snapshot_list[i].boxes) + 2 + frameinfo = framestack_list[i - 1] + jitcode_pos, pc = unpack_uint(frameinfo.packed_jitcode_pc) + state.append(rffi.cast(rffi.SHORT, jitcode_pos)) + state.append(rffi.cast(rffi.SHORT, pc)) self._number_boxes(snapshot_list[i].boxes, optimizer, state) + state.position -= len(snapshot_list[i].boxes) + 2 - numb = resumecode.create_numbering(state.current, - len(vref_snapshot.boxes)) + assert isinstance(topsnapshot, TopSnapshot) + special_boxes_size = (len(topsnapshot.vable_boxes) + + 1 + len(topsnapshot.boxes)) + assert state.position == special_boxes_size + state.position = 0 + self._number_boxes(topsnapshot.vable_boxes, optimizer, state) + n = len(topsnapshot.boxes) + assert not (n & 1) + state.append(rffi.cast(rffi.SHORT, n >> 1)) + 
self._number_boxes(topsnapshot.boxes, optimizer, state) + assert state.position == special_boxes_size + + numb = resumecode.create_numbering(state.current) return numb, state.liveboxes, state.v def forget_numberings(self): @@ -1146,8 +1163,9 @@ virtualizable_boxes = self.consume_virtualizable_boxes(vinfo) end = first_snapshot_size - len(virtualizable_boxes) elif ginfo is not None: + xxxxxx item, self.cur_index = resumecode.numb_next_item(self.numb, - first_snapshot_size - 1)xxxxxxxxxxxxx + first_snapshot_size - 1) virtualizable_boxes = [self.decode_ref(item)] end = first_snapshot_size - 1 else: @@ -1483,7 +1501,8 @@ if vinfo is not None: end_vref = self.consume_vable_info(vinfo) if ginfo is not None: - end_vref -= 1 xxxxxxxxxxxxxxx + end_vref -= 1 + xxxxxxxxxxxxxxx self.consume_virtualref_info(vrefinfo, end_vref) self.cur_index = rffi.cast(lltype.Signed, self.numb.first_snapshot_size) diff --git a/rpython/jit/metainterp/resumecode.py b/rpython/jit/metainterp/resumecode.py --- a/rpython/jit/metainterp/resumecode.py +++ b/rpython/jit/metainterp/resumecode.py @@ -1,44 +1,30 @@ """ Resume bytecode. It goes as following: - + [ ] if vinfo is not None + -OR- + [] if ginfo is not None + -OR- + [] if both are None -until the length of the array. + [ ] for virtualrefs -The interface is only create_numbering/numb_next_item, but! there is a trick -that uses first_snapshot_size + some knowledge about inside to decode -virtualref/virtualizable_fields/virtualizable in that order in resume.py. + [ ] the frames + [ ] + ... -If the algorithm changes, the part about how to find where virtualizable -and virtualrefs are to be found + until the length of the array. 
""" from rpython.rtyper.lltypesystem import rffi, lltype NUMBERINGP = lltype.Ptr(lltype.GcForwardReference()) NUMBERING = lltype.GcStruct('Numbering', -# ('prev', NUMBERINGP), -# ('prev_index', rffi.USHORT), - ('first_snapshot_size', rffi.USHORT), # ugh, ugly ('code', lltype.Array(rffi.UCHAR))) NUMBERINGP.TO.become(NUMBERING) NULL_NUMBER = lltype.nullptr(NUMBERING) -# this is the actually used version - -## def create_numbering(lst, first_snapshot_size): -## numb = lltype.malloc(NUMBERING, len(lst)) -## for i in range(len(lst)): -## numb.code[i] = rffi.cast(rffi.SHORT, lst[i]) -## numb.first_snapshot_size = rffi.cast(rffi.USHORT, first_snapshot_size) -## return numb - -## def numb_next_item(numb, index): -## return rffi.cast(lltype.Signed, numb.code[index]), index + 1 - -# this is the version that can be potentially used - -def create_numbering(lst, first_snapshot_size): +def create_numbering(lst): result = [] for item in lst: item *= 2 @@ -58,7 +44,6 @@ result.append(rffi.cast(rffi.UCHAR, item >> 14)) numb = lltype.malloc(NUMBERING, len(result)) - numb.first_snapshot_size = rffi.cast(rffi.USHORT, first_snapshot_size) for i in range(len(result)): numb.code[i] = result[i] return numb diff --git a/rpython/jit/metainterp/test/test_resume.py b/rpython/jit/metainterp/test/test_resume.py --- a/rpython/jit/metainterp/test/test_resume.py +++ b/rpython/jit/metainterp/test/test_resume.py @@ -10,7 +10,7 @@ VArrayInfoNotClear, VStrPlainInfo, VStrConcatInfo, VStrSliceInfo,\ VUniPlainInfo, VUniConcatInfo, VUniSliceInfo, Snapshot, FrameInfo,\ capture_resumedata, ResumeDataLoopMemo, UNASSIGNEDVIRTUAL, INT,\ - annlowlevel, PENDINGFIELDSP, unpack_uint, TAG_CONST_OFFSET + annlowlevel, PENDINGFIELDSP, unpack_uint, TAG_CONST_OFFSET, TopSnapshot from rpython.jit.metainterp.resumecode import unpack_numbering,\ create_numbering, NULL_NUMBER @@ -278,9 +278,7 @@ assert bh.written_f == expected_f -def Numbering(nums): - numb = create_numbering(nums, 0) - return numb +Numbering = 
create_numbering def tagconst(i): return tag(i + TAG_CONST_OFFSET, TAGCONST) @@ -610,7 +608,8 @@ assert unpack_uint(frame_info_list.packed_jitcode_pc) == (2, 15) snapshot = storage.rd_snapshot - assert snapshot.boxes == vrs + vbs # in the same list + assert snapshot.boxes == vrs + assert snapshot.vable_boxes == vbs snapshot = snapshot.prev assert snapshot.prev is fs[2].parent_resumedata_snapshot @@ -904,9 +903,9 @@ env = [b1, c1, b2, b1, c2] snap = Snapshot(None, env) env1 = [c3, b3, b1, c1] - snap1 = Snapshot(snap, env1) + snap1 = TopSnapshot(snap, env1, []) env2 = [c3, b3, b1, c3] - snap2 = Snapshot(snap, env2) + snap2 = TopSnapshot(snap, env2, []) memo = ResumeDataLoopMemo(FakeMetaInterpStaticData()) frameinfo = FrameInfo(None, FakeJitCode("jitcode", 0), 0) @@ -916,10 +915,11 @@ assert liveboxes == {b1: tag(0, TAGBOX), b2: tag(1, TAGBOX), b3: tag(2, TAGBOX)} - base = [tag(0, TAGBOX), tag(1, TAGINT), tag(1, TAGBOX), tag(0, TAGBOX), tag(2, TAGINT)] + base = [0, 0, tag(0, TAGBOX), tag(1, TAGINT), + tag(1, TAGBOX), tag(0, TAGBOX), tag(2, TAGINT)] - assert unpack_numbering(numb) == [ - tag(3, TAGINT), tag(2, TAGBOX), tag(0, TAGBOX), tag(1, TAGINT), 0, 0] + base + assert unpack_numbering(numb) == [2, tag(3, TAGINT), tag(2, TAGBOX), + tag(0, TAGBOX), tag(1, TAGINT)] + base numb2, liveboxes2, v = memo.number(FakeOptimizer(), snap2, frameinfo) assert v == 0 @@ -927,11 +927,11 @@ assert liveboxes2 == {b1: tag(0, TAGBOX), b2: tag(1, TAGBOX), b3: tag(2, TAGBOX)} assert liveboxes2 is not liveboxes - assert unpack_numbering(numb2) == [ - tag(3, TAGINT), tag(2, TAGBOX), tag(0, TAGBOX), tag(3, TAGINT), 0, 0] + base + assert unpack_numbering(numb2) == [2, tag(3, TAGINT), tag(2, TAGBOX), + tag(0, TAGBOX), tag(3, TAGINT)] + base env3 = [c3, b3, b1, c3] - snap3 = Snapshot(snap, env3) + snap3 = TopSnapshot(snap, env3, []) class FakeVirtualInfo(info.AbstractInfo): def __init__(self, virt): @@ -946,13 +946,12 @@ assert v == 0 assert liveboxes3 == {b1: tag(0, TAGBOX), b2: tag(1, 
TAGBOX)} - assert unpack_numbering(numb3) == [tag(3, TAGINT), tag(4, TAGINT), - tag(0, TAGBOX), - tag(3, TAGINT), 0, 0] + base + assert unpack_numbering(numb3) == [2, tag(3, TAGINT), tag(4, TAGINT), + tag(0, TAGBOX), tag(3, TAGINT)] + base # virtual env4 = [c3, b4, b1, c3] - snap4 = Snapshot(snap, env4) + snap4 = TopSnapshot(snap, env4, []) b4.set_forwarded(FakeVirtualInfo(True)) numb4, liveboxes4, v = memo.number(FakeOptimizer(), snap4, frameinfo) @@ -960,11 +959,11 @@ assert liveboxes4 == {b1: tag(0, TAGBOX), b2: tag(1, TAGBOX), b4: tag(0, TAGVIRTUAL)} - assert unpack_numbering(numb4) == [tag(3, TAGINT), tag(0, TAGVIRTUAL), - tag(0, TAGBOX), tag(3, TAGINT), 0, 0] + base + assert unpack_numbering(numb4) == [2, tag(3, TAGINT), tag(0, TAGVIRTUAL), + tag(0, TAGBOX), tag(3, TAGINT)] + base env5 = [b1, b4, b5] - snap5 = Snapshot(snap4, env5) + snap5 = TopSnapshot(snap4, [], env5) b4.set_forwarded(FakeVirtualInfo(True)) b5.set_forwarded(FakeVirtualInfo(True)) @@ -974,8 +973,11 @@ assert liveboxes5 == {b1: tag(0, TAGBOX), b2: tag(1, TAGBOX), b4: tag(0, TAGVIRTUAL), b5: tag(1, TAGVIRTUAL)} - assert unpack_numbering(numb5) == [tag(0, TAGBOX), tag(0, TAGVIRTUAL), - tag(1, TAGVIRTUAL), 2, 1] + unpack_numbering(numb4) + assert unpack_numbering(numb5) == [ + tag(0, TAGBOX), tag(0, TAGVIRTUAL), tag(1, TAGVIRTUAL), + 0, + 2, 1, tag(3, TAGINT), tag(0, TAGVIRTUAL), tag(0, TAGBOX), tag(3, TAGINT) + ] + base def test_ResumeDataLoopMemo_number_boxes(): memo = ResumeDataLoopMemo(FakeMetaInterpStaticData()) diff --git a/rpython/jit/metainterp/test/test_resumecode.py b/rpython/jit/metainterp/test/test_resumecode.py --- a/rpython/jit/metainterp/test/test_resumecode.py +++ b/rpython/jit/metainterp/test/test_resumecode.py @@ -15,15 +15,15 @@ [13000, 12000, 10000, 256, 255, 254, 257, -3, -1000] ] for l in examples: - n = create_numbering(l, 0) + n = create_numbering(l) assert unpack_numbering(n) == l @given(strategies.lists(strategies.integers(-2**15, 2**15-1))) def test_roundtrip(l): - n = 
create_numbering(l, 0) + n = create_numbering(l) assert unpack_numbering(n) == l @given(strategies.lists(strategies.integers(-2**15, 2**15-1))) def test_compressing(l): - n = create_numbering(l, 0) + n = create_numbering(l) assert len(n.code) <= len(l) * 3 From pypy.commits at gmail.com Sun Feb 21 12:50:07 2016 From: pypy.commits at gmail.com (fijal) Date: Sun, 21 Feb 2016 09:50:07 -0800 (PST) Subject: [pypy-commit] pypy jit-leaner-frontend: random progress Message-ID: <56c9f8cf.a3abc20a.adb05.3032@mx.google.com> Author: fijal Branch: jit-leaner-frontend Changeset: r82373:fcbf2f97bc45 Date: 2016-02-19 19:12 +0100 http://bitbucket.org/pypy/pypy/changeset/fcbf2f97bc45/ Log: random progress diff --git a/rpython/jit/metainterp/history.py b/rpython/jit/metainterp/history.py --- a/rpython/jit/metainterp/history.py +++ b/rpython/jit/metainterp/history.py @@ -652,9 +652,12 @@ self.trace = Trace(inpargs) + def any_operation(self): + return self.trace._count > 0 + @specialize.argtype(3) def record(self, opnum, argboxes, value, descr=None): - op = self.trace.record_op(self.operations, opnum, argboxes, value, descr) + op = self.trace.record_op(opnum, argboxes, descr) if value is None: assert op.type == 'v' elif isinstance(value, bool): @@ -751,7 +754,9 @@ self.jitcell_token_wrefs.append(weakref.ref(token)) def set_history(self, history): - self.operations = history.operations + # XXX think about something + pass + #self.operations = history.operations def aborted(self): self.aborted_count += 1 diff --git a/rpython/jit/metainterp/pyjitpl.py b/rpython/jit/metainterp/pyjitpl.py --- a/rpython/jit/metainterp/pyjitpl.py +++ b/rpython/jit/metainterp/pyjitpl.py @@ -1221,7 +1221,7 @@ @arguments("int", "boxes3", "jitcode_position", "boxes3", "orgpc") def opimpl_jit_merge_point(self, jdindex, greenboxes, jcposition, redboxes, orgpc): - any_operation = len(self.metainterp.history.operations) > 0 + any_operation = self.metainterp.history.any_operation() jitdriver_sd = 
self.metainterp.staticdata.jitdrivers_sd[jdindex] self.verify_green_args(jitdriver_sd, greenboxes) self.debug_merge_point(jitdriver_sd, jdindex, @@ -2050,13 +2050,13 @@ else: guard_op = self.history.record(opnum, moreargs, None) assert isinstance(guard_op, GuardResOp) - self.capture_resumedata(guard_op, resumepc) + self.capture_resumedata(resumepc) # <- records extra to history self.staticdata.profiler.count_ops(opnum, Counters.GUARDS) # count self.attach_debug_info(guard_op) return guard_op - def capture_resumedata(self, guard_op, resumepc=-1): + def capture_resumedata(self, resumepc=-1): virtualizable_boxes = None if (self.jitdriver_sd.virtualizable_info is not None or self.jitdriver_sd.greenfield_info is not None): @@ -2068,7 +2068,7 @@ if resumepc >= 0: frame.pc = resumepc resume.capture_resumedata(self.framestack, virtualizable_boxes, - self.virtualref_boxes, guard_op) + self.virtualref_boxes, self.history.trace) if self.framestack: self.framestack[-1].pc = saved_pc diff --git a/rpython/jit/metainterp/resoperation.py b/rpython/jit/metainterp/resoperation.py --- a/rpython/jit/metainterp/resoperation.py +++ b/rpython/jit/metainterp/resoperation.py @@ -1427,6 +1427,12 @@ return rop._ALWAYS_PURE_FIRST <= opnum <= rop._ALWAYS_PURE_LAST @staticmethod + def is_pure_with_descr(opnum, descr): + if rop.is_always_pure(opnum): + return True + xxxx + + @staticmethod def has_no_side_effect(opnum): return rop._NOSIDEEFFECT_FIRST <= opnum <= rop._NOSIDEEFFECT_LAST diff --git a/rpython/jit/metainterp/test/test_opencoder.py b/rpython/jit/metainterp/test/test_opencoder.py --- a/rpython/jit/metainterp/test/test_opencoder.py +++ b/rpython/jit/metainterp/test/test_opencoder.py @@ -3,6 +3,7 @@ from rpython.jit.metainterp.resoperation import rop, InputArgInt from rpython.jit.metainterp.history import ConstInt from rpython.jit.metainterp.optimizeopt.optimizer import Optimizer +from rpython.jit.metainterp import resume class TestOpencoder(object): def unpack(self, t): @@ -30,4 +31,14 
@@ i0, i1 = InputArgInt(), InputArgInt() t = Trace([i0, i1]) add = t.record_op(rop.INT_ADD, [i0, i1]) - guard_op = t.record_op(rop.GUARD_FALSE, [add]) \ No newline at end of file + t.record_op(rop.GUARD_FALSE, [add]) + # now we write rd_snapshot and friends + virtualizable_boxes = [] + virutalref_boxes = [] + framestack = [] + framestack.xxx + resume.capture_resumedata(framestack, virtualizable_boxes, + virutalref_boxes, t) + (i0, i1), l = self.unpack(t) + assert l[1].opnum == rop.GUARD_FALSE + assert l[1].rd_snapshot == [i0, i1] \ No newline at end of file From pypy.commits at gmail.com Sun Feb 21 12:50:09 2016 From: pypy.commits at gmail.com (fijal) Date: Sun, 21 Feb 2016 09:50:09 -0800 (PST) Subject: [pypy-commit] pypy vlen-resume: (arigo, fijal, cfbolz) random progress Message-ID: <56c9f8d1.512f1c0a.5bc2.ffffe5be@mx.google.com> Author: fijal Branch: vlen-resume Changeset: r82374:bf67fc4e3ab1 Date: 2016-02-21 18:49 +0100 http://bitbucket.org/pypy/pypy/changeset/bf67fc4e3ab1/ Log: (arigo, fijal, cfbolz) random progress diff --git a/rpython/jit/metainterp/resume.py b/rpython/jit/metainterp/resume.py --- a/rpython/jit/metainterp/resume.py +++ b/rpython/jit/metainterp/resume.py @@ -212,7 +212,7 @@ c = len(snapshot.vable_boxes) for snapshot in lst: c += len(snapshot.boxes) - c += 2 * (len(lst) - 1) + 1 + c += 2 * (len(lst) - 1) + 1 + 1 return c def append(self, item): @@ -328,11 +328,12 @@ state.position -= len(snapshot_list[i].boxes) + 2 assert isinstance(topsnapshot, TopSnapshot) - special_boxes_size = (len(topsnapshot.vable_boxes) + + special_boxes_size = (1 + len(topsnapshot.vable_boxes) + 1 + len(topsnapshot.boxes)) assert state.position == special_boxes_size state.position = 0 + state.append(rffi.cast(rffi.SHORT, len(topsnapshot.vable_boxes))) self._number_boxes(topsnapshot.vable_boxes, optimizer, state) n = len(topsnapshot.boxes) assert not (n & 1) @@ -1447,12 +1448,14 @@ info = blackholeinterp.get_current_position_info() self._prepare_next_section(info) - def 
consume_virtualref_info(self, vrefinfo, end): + def consume_virtualref_info(self, vrefinfo, index): # we have to decode a list of references containing pairs - # [..., virtual, vref, ...] stopping at 'end' + # [..., virtual, vref, ...] and returns the index at the end + size, index = resumecode.numb_next_item(self.numb, index) if vrefinfo is None: - assert end == 0 - return + assert size == 0 + return index + xxxx assert (end & 1) == 0 self.cur_index = 0 for i in range(0, end, 2): @@ -1496,15 +1499,18 @@ load_value_of_type._annspecialcase_ = 'specialize:arg(1)' def consume_vref_and_vable(self, vrefinfo, vinfo, ginfo): + vable_size, index = resumecode.numb_next_item(self.numb, 0) if self.resume_after_guard_not_forced != 2: - end_vref = rffi.cast(lltype.Signed, self.numb.first_snapshot_size) if vinfo is not None: - end_vref = self.consume_vable_info(vinfo) + index = self.consume_vable_info(vinfo, index) if ginfo is not None: end_vref -= 1 xxxxxxxxxxxxxxx - self.consume_virtualref_info(vrefinfo, end_vref) - self.cur_index = rffi.cast(lltype.Signed, self.numb.first_snapshot_size) + index = self.consume_virtualref_info(vrefinfo, index) + else: + index = resumecode.numb_next_n_items(self.numb, vable_size, index) + xxxx + self.cur_index = index def allocate_with_vtable(self, descr=None): from rpython.jit.metainterp.executor import exec_new_with_vtable diff --git a/rpython/jit/metainterp/resumecode.py b/rpython/jit/metainterp/resumecode.py --- a/rpython/jit/metainterp/resumecode.py +++ b/rpython/jit/metainterp/resumecode.py @@ -1,11 +1,11 @@ """ Resume bytecode. 
It goes as following: - [ ] if vinfo is not None + [ ] if vinfo is not None -OR- - [] if ginfo is not None + [1 ] if ginfo is not None -OR- - [] if both are None + [0] if both are None [ ] for virtualrefs @@ -27,6 +27,7 @@ def create_numbering(lst): result = [] for item in lst: + item = rffi.cast(lltype.Signed, item) item *= 2 if item < 0: item = -1 - item diff --git a/rpython/jit/metainterp/test/test_resume.py b/rpython/jit/metainterp/test/test_resume.py --- a/rpython/jit/metainterp/test/test_resume.py +++ b/rpython/jit/metainterp/test/test_resume.py @@ -22,9 +22,12 @@ from rpython.jit.codewriter import heaptracker, longlong from rpython.jit.metainterp.resoperation import ResOperation, InputArgInt,\ InputArgRef, rop +from rpython.jit.metainterp.test.strategies import boxlists from rpython.rlib.debug import debug_start, debug_stop, debug_print,\ have_debug_prints +from hypothesis import given + class Storage: rd_frame_info_list = None rd_numb = None @@ -918,7 +921,7 @@ base = [0, 0, tag(0, TAGBOX), tag(1, TAGINT), tag(1, TAGBOX), tag(0, TAGBOX), tag(2, TAGINT)] - assert unpack_numbering(numb) == [2, tag(3, TAGINT), tag(2, TAGBOX), + assert unpack_numbering(numb) == [0, 2, tag(3, TAGINT), tag(2, TAGBOX), tag(0, TAGBOX), tag(1, TAGINT)] + base numb2, liveboxes2, v = memo.number(FakeOptimizer(), snap2, frameinfo) @@ -927,7 +930,7 @@ assert liveboxes2 == {b1: tag(0, TAGBOX), b2: tag(1, TAGBOX), b3: tag(2, TAGBOX)} assert liveboxes2 is not liveboxes - assert unpack_numbering(numb2) == [2, tag(3, TAGINT), tag(2, TAGBOX), + assert unpack_numbering(numb2) == [0, 2, tag(3, TAGINT), tag(2, TAGBOX), tag(0, TAGBOX), tag(3, TAGINT)] + base env3 = [c3, b3, b1, c3] @@ -946,7 +949,7 @@ assert v == 0 assert liveboxes3 == {b1: tag(0, TAGBOX), b2: tag(1, TAGBOX)} - assert unpack_numbering(numb3) == [2, tag(3, TAGINT), tag(4, TAGINT), + assert unpack_numbering(numb3) == [0, 2, tag(3, TAGINT), tag(4, TAGINT), tag(0, TAGBOX), tag(3, TAGINT)] + base # virtual @@ -959,7 +962,7 @@ assert 
liveboxes4 == {b1: tag(0, TAGBOX), b2: tag(1, TAGBOX), b4: tag(0, TAGVIRTUAL)} - assert unpack_numbering(numb4) == [2, tag(3, TAGINT), tag(0, TAGVIRTUAL), + assert unpack_numbering(numb4) == [0, 2, tag(3, TAGINT), tag(0, TAGVIRTUAL), tag(0, TAGBOX), tag(3, TAGINT)] + base env5 = [b1, b4, b5] @@ -974,11 +977,29 @@ assert liveboxes5 == {b1: tag(0, TAGBOX), b2: tag(1, TAGBOX), b4: tag(0, TAGVIRTUAL), b5: tag(1, TAGVIRTUAL)} assert unpack_numbering(numb5) == [ - tag(0, TAGBOX), tag(0, TAGVIRTUAL), tag(1, TAGVIRTUAL), + 3, tag(0, TAGBOX), tag(0, TAGVIRTUAL), tag(1, TAGVIRTUAL), 0, 2, 1, tag(3, TAGINT), tag(0, TAGVIRTUAL), tag(0, TAGBOX), tag(3, TAGINT) ] + base + at given(boxlists) +def test_ResumeDataLoopMemo_random(lst): + s = TopSnapshot(None, [], lst) + frameinfo = FrameInfo(None, FakeJitCode("foo", 0), 0) + memo = ResumeDataLoopMemo(FakeMetaInterpStaticData()) + num, liveboxes, v = memo.number(FakeOptimizer(), s, frameinfo) + l = unpack_numbering(num) + assert l[-1] == 0 + assert l[0] == len(lst) + for i, item in enumerate(lst): + v, tag = untag(l[i + 1]) + if tag == TAGBOX: + assert l[i + 1] == liveboxes[item] + elif tag == TAGCONST: + assert memo.consts[v].getint() == item.getint() + elif tag == TAGINT: + assert v == item.getint() + def test_ResumeDataLoopMemo_number_boxes(): memo = ResumeDataLoopMemo(FakeMetaInterpStaticData()) b1, b2 = [InputArgInt(), InputArgInt()] @@ -1062,10 +1083,11 @@ storage = Storage() snapshot = Snapshot(None, [b1, ConstInt(1), b1, b2]) snapshot = Snapshot(snapshot, [ConstInt(2), ConstInt(3)]) - snapshot = Snapshot(snapshot, [b1, b2, b3]) - frameinfo = FrameInfo(FrameInfo(None, FakeJitCode("code1", 21), 22), - FakeJitCode("code2", 31), 32) - storage.rd_snapshot = snapshot + snapshot = Snapshot(snapshot, [b1, b2, b3]) + top_snapshot = TopSnapshot(snapshot, [], []) + frameinfo = FrameInfo(FrameInfo(FrameInfo(None, FakeJitCode("code1", 21), 22), + FakeJitCode("code2", 31), 32), FakeJitCode("code3", 41), 42) + storage.rd_snapshot = 
top_snapshot storage.rd_frame_info_list = frameinfo return storage @@ -1078,6 +1100,8 @@ assert storage.rd_snapshot is None cpu = MyCPU([]) reader = ResumeDataDirectReader(MyMetaInterp(cpu), storage, "deadframe") + reader.consume_vref_and_vable(None, None, None) + reader.cur_index += 2 # framestack _next_section(reader, sys.maxint, 2**16, -65) reader.cur_index += 2 # framestack _next_section(reader, 2, 3) From pypy.commits at gmail.com Sun Feb 21 16:37:39 2016 From: pypy.commits at gmail.com (arigo) Date: Sun, 21 Feb 2016 13:37:39 -0800 (PST) Subject: [pypy-commit] pypy.org extradoc: update the values Message-ID: <56ca2e23.463f1c0a.bfaac.2f66@mx.google.com> Author: Armin Rigo Branch: extradoc Changeset: r704:2f40d6406537 Date: 2016-02-21 22:37 +0100 http://bitbucket.org/pypy/pypy.org/changeset/2f40d6406537/ Log: update the values diff --git a/don1.html b/don1.html --- a/don1.html +++ b/don1.html @@ -15,7 +15,7 @@ - $62898 of $105000 (59.9%) + $62907 of $105000 (59.9%)
    @@ -23,7 +23,7 @@
  • diff --git a/don4.html b/don4.html --- a/don4.html +++ b/don4.html @@ -17,7 +17,7 @@ 2nd call: - $30383 of $80000 (38.0%) + $30393 of $80000 (38.0%)
    @@ -25,7 +25,7 @@
  • From pypy.commits at gmail.com Sun Feb 21 16:48:46 2016 From: pypy.commits at gmail.com (cfbolz) Date: Sun, 21 Feb 2016 13:48:46 -0800 (PST) Subject: [pypy-commit] pypy default: (fijal, cfbolz): kill the promote_string in typeobject, it leads to huge Message-ID: <56ca30be.e83cc20a.b6d39.ffff9fbe@mx.google.com> Author: Carl Friedrich Bolz Branch: Changeset: r82375:40f823984bbf Date: 2016-02-21 22:47 +0100 http://bitbucket.org/pypy/pypy/changeset/40f823984bbf/ Log: (fijal, cfbolz): kill the promote_string in typeobject, it leads to huge problems in code that uses getattr a lot (and doesn't improve a lot of cases either) diff --git a/pypy/objspace/std/typeobject.py b/pypy/objspace/std/typeobject.py --- a/pypy/objspace/std/typeobject.py +++ b/pypy/objspace/std/typeobject.py @@ -7,7 +7,7 @@ from pypy.interpreter.astcompiler.misc import mangle from rpython.rlib.jit import (promote, elidable_promote, we_are_jitted, - promote_string, elidable, dont_look_inside, unroll_safe) + elidable, dont_look_inside, unroll_safe) from rpython.rlib.objectmodel import current_object_addr_as_int, compute_hash from rpython.rlib.rarithmetic import intmask, r_uint @@ -402,7 +402,6 @@ if version_tag is None: tup = w_self._lookup_where(name) return tup - name = promote_string(name) tup_w = w_self._pure_lookup_where_with_method_cache(name, version_tag) w_class, w_value = tup_w if (space.config.objspace.std.withtypeversion and From pypy.commits at gmail.com Sun Feb 21 16:52:29 2016 From: pypy.commits at gmail.com (plan_rich) Date: Sun, 21 Feb 2016 13:52:29 -0800 (PST) Subject: [pypy-commit] pypy default: (mjacob, plan_rich) universal newlines enforced in the tokenizer. the compile builtin must convert crlf and cr to line feeds. 
py3.3 import mechanism relies on that Message-ID: <56ca319d.c1b3c20a.9ea7d.ffff8884@mx.google.com> Author: Richard Plangger Branch: Changeset: r82376:eff2a2c4481f Date: 2016-02-21 22:50 +0100 http://bitbucket.org/pypy/pypy/changeset/eff2a2c4481f/ Log: (mjacob, plan_rich) universal newlines enforced in the tokenizer. the compile builtin must convert crlf and cr to line feeds. py3.3 import mechanism relies on that diff --git a/pypy/interpreter/pyparser/pytokenizer.py b/pypy/interpreter/pyparser/pytokenizer.py --- a/pypy/interpreter/pyparser/pytokenizer.py +++ b/pypy/interpreter/pyparser/pytokenizer.py @@ -91,6 +91,7 @@ strstart = (0, 0, "") for line in lines: lnum = lnum + 1 + line = universal_newline(line) pos, max = 0, len(line) if contstr: @@ -259,3 +260,15 @@ token_list.append((tokens.ENDMARKER, '', lnum, pos, line)) return token_list + +def universal_newline(line): + if len(line) >= 2: + c0 = line[-2] + c1 = line[-1] + if c0 == '\r' and c1 == '\n': + return line[:-2] + '\n' + if len(line) >= 1: + c = line[-1] + if c == '\r': + return line[:-1] + '\n' + return line diff --git a/pypy/interpreter/pyparser/test/test_pyparse.py b/pypy/interpreter/pyparser/test/test_pyparse.py --- a/pypy/interpreter/pyparser/test/test_pyparse.py +++ b/pypy/interpreter/pyparser/test/test_pyparse.py @@ -158,3 +158,10 @@ def test_print_function(self): self.parse("from __future__ import print_function\nx = print\n") + + def test_universal_newlines(self): + fmt = 'stuff = """hello%sworld"""' + expected_tree = self.parse(fmt % '\n') + for linefeed in ["\r\n","\r"]: + tree = self.parse(fmt % linefeed) + assert expected_tree == tree From pypy.commits at gmail.com Sun Feb 21 17:06:04 2016 From: pypy.commits at gmail.com (plan_rich) Date: Sun, 21 Feb 2016 14:06:04 -0800 (PST) Subject: [pypy-commit] pypy py3k: (mjacob, plan_rich) merge default into py3k Message-ID: <56ca34cc.88c8c20a.c0cce.7e09@mx.google.com> Author: Richard Plangger Branch: py3k Changeset: r82377:c87ebc4db701 Date: 2016-02-21 
23:05 +0100 http://bitbucket.org/pypy/pypy/changeset/c87ebc4db701/ Log: (mjacob, plan_rich) merge default into py3k diff --git a/.hgignore b/.hgignore --- a/.hgignore +++ b/.hgignore @@ -22,6 +22,7 @@ ^pypy/module/cpyext/test/.+\.obj$ ^pypy/module/cpyext/test/.+\.manifest$ ^pypy/module/test_lib_pypy/ctypes_tests/.+\.o$ +^pypy/module/test_lib_pypy/ctypes_tests/_ctypes_test\.o$ ^pypy/module/cppyy/src/.+\.o$ ^pypy/module/cppyy/bench/.+\.so$ ^pypy/module/cppyy/bench/.+\.root$ @@ -35,7 +36,6 @@ ^pypy/module/test_lib_pypy/cffi_tests/__pycache__.+$ ^pypy/doc/.+\.html$ ^pypy/doc/config/.+\.rst$ -^pypy/doc/basicblock\.asc$ ^pypy/doc/.+\.svninfo$ ^rpython/translator/c/src/libffi_msvc/.+\.obj$ ^rpython/translator/c/src/libffi_msvc/.+\.dll$ @@ -45,53 +45,33 @@ ^rpython/translator/c/src/cjkcodecs/.+\.obj$ ^rpython/translator/c/src/stacklet/.+\.o$ ^rpython/translator/c/src/.+\.o$ -^rpython/translator/jvm/\.project$ -^rpython/translator/jvm/\.classpath$ -^rpython/translator/jvm/eclipse-bin$ -^rpython/translator/jvm/src/pypy/.+\.class$ -^rpython/translator/benchmark/docutils$ -^rpython/translator/benchmark/templess$ -^rpython/translator/benchmark/gadfly$ -^rpython/translator/benchmark/mako$ -^rpython/translator/benchmark/bench-custom\.benchmark_result$ -^rpython/translator/benchmark/shootout_benchmarks$ +^rpython/translator/llvm/.+\.so$ ^rpython/translator/goal/target.+-c$ ^rpython/translator/goal/.+\.exe$ ^rpython/translator/goal/.+\.dll$ ^pypy/goal/pypy-translation-snapshot$ ^pypy/goal/pypy-c -^pypy/goal/pypy-jvm -^pypy/goal/pypy-jvm.jar ^pypy/goal/.+\.exe$ ^pypy/goal/.+\.dll$ ^pypy/goal/.+\.lib$ ^pypy/_cache$ -^pypy/doc/statistic/.+\.html$ -^pypy/doc/statistic/.+\.eps$ -^pypy/doc/statistic/.+\.pdf$ -^rpython/translator/cli/src/pypylib\.dll$ -^rpython/translator/cli/src/query\.exe$ -^rpython/translator/cli/src/main\.exe$ +^lib-python/2.7/lib2to3/.+\.pickle$ ^lib_pypy/__pycache__$ ^lib_pypy/ctypes_config_cache/_.+_cache\.py$ ^lib_pypy/ctypes_config_cache/_.+_.+_\.py$ 
^lib_pypy/_libmpdec/.+.o$ -^rpython/translator/cli/query-descriptions$ ^pypy/doc/discussion/.+\.html$ ^include/.+\.h$ ^include/.+\.inl$ ^pypy/doc/_build/.*$ ^pypy/doc/config/.+\.html$ ^pypy/doc/config/style\.css$ -^pypy/doc/jit/.+\.html$ -^pypy/doc/jit/style\.css$ ^pypy/doc/image/lattice1\.png$ ^pypy/doc/image/lattice2\.png$ ^pypy/doc/image/lattice3\.png$ ^pypy/doc/image/stackless_informal\.png$ ^pypy/doc/image/parsing_example.+\.png$ ^rpython/doc/_build/.*$ -^pypy/module/test_lib_pypy/ctypes_tests/_ctypes_test\.o$ ^compiled ^.git/ ^release/ diff --git a/pypy/interpreter/pyparser/pytokenizer.py b/pypy/interpreter/pyparser/pytokenizer.py --- a/pypy/interpreter/pyparser/pytokenizer.py +++ b/pypy/interpreter/pyparser/pytokenizer.py @@ -106,6 +106,7 @@ strstart = (0, 0, "") for line in lines: lnum = lnum + 1 + line = universal_newline(line) pos, max = 0, len(line) if contstr: @@ -296,3 +297,15 @@ token_list.append((tokens.ENDMARKER, '', lnum, pos, line)) return token_list + +def universal_newline(line): + if len(line) >= 2: + c0 = line[-2] + c1 = line[-1] + if c0 == '\r' and c1 == '\n': + return line[:-2] + '\n' + if len(line) >= 1: + c = line[-1] + if c == '\r': + return line[:-1] + '\n' + return line diff --git a/pypy/interpreter/pyparser/test/test_pyparse.py b/pypy/interpreter/pyparser/test/test_pyparse.py --- a/pypy/interpreter/pyparser/test/test_pyparse.py +++ b/pypy/interpreter/pyparser/test/test_pyparse.py @@ -139,6 +139,13 @@ def test_print_function(self): self.parse("from __future__ import print_function\nx = print\n") + def test_universal_newlines(self): + fmt = 'stuff = """hello%sworld"""' + expected_tree = self.parse(fmt % '\n') + for linefeed in ["\r\n","\r"]: + tree = self.parse(fmt % linefeed) + assert expected_tree == tree + def test_py3k_reject_old_binary_literal(self): py.test.raises(SyntaxError, self.parse, '0777') diff --git a/pypy/interpreter/typedef.py b/pypy/interpreter/typedef.py --- a/pypy/interpreter/typedef.py +++ 
b/pypy/interpreter/typedef.py @@ -156,20 +156,6 @@ get_unique_interplevel_subclass._annspecialcase_ = "specialize:memo" _subclass_cache = {} -def enum_interplevel_subclasses(config, cls): - """Return a list of all the extra interp-level subclasses of 'cls' that - can be built by get_unique_interplevel_subclass().""" - result = [] - for flag1 in (False, True): - for flag2 in (False, True): - for flag3 in (False, True): - for flag4 in (False, True): - result.append(get_unique_interplevel_subclass( - config, cls, flag1, flag2, flag3, flag4)) - result = dict.fromkeys(result) - assert len(result) <= 6 - return result.keys() - def _getusercls(config, cls, wants_dict, wants_slots, wants_del, weakrefable): typedef = cls.typedef if wants_dict and typedef.hasdict: diff --git a/pypy/module/micronumpy/compile.py b/pypy/module/micronumpy/compile.py --- a/pypy/module/micronumpy/compile.py +++ b/pypy/module/micronumpy/compile.py @@ -196,6 +196,10 @@ def newfloat(self, f): return self.float(f) + def newslice(self, start, stop, step): + return SliceObject(self.int_w(start), self.int_w(stop), + self.int_w(step)) + def le(self, w_obj1, w_obj2): assert isinstance(w_obj1, boxes.W_GenericBox) assert isinstance(w_obj2, boxes.W_GenericBox) diff --git a/pypy/module/micronumpy/concrete.py b/pypy/module/micronumpy/concrete.py --- a/pypy/module/micronumpy/concrete.py +++ b/pypy/module/micronumpy/concrete.py @@ -12,8 +12,8 @@ ArrayArgumentException, W_NumpyObject from pypy.module.micronumpy.iterators import ArrayIter from pypy.module.micronumpy.strides import ( - IntegerChunk, SliceChunk, NewAxisChunk, EllipsisChunk, new_view, - calc_strides, calc_new_strides, shape_agreement, + IntegerChunk, SliceChunk, NewAxisChunk, EllipsisChunk, BooleanChunk, + new_view, calc_strides, calc_new_strides, shape_agreement, calculate_broadcast_strides, calc_backstrides, calc_start, is_c_contiguous, is_f_contiguous) from rpython.rlib.objectmodel import keepalive_until_here @@ -236,6 +236,7 @@ @jit.unroll_safe 
def _prepare_slice_args(self, space, w_idx): + from pypy.module.micronumpy import boxes if space.isinstance_w(w_idx, space.w_str): raise oefmt(space.w_IndexError, "only integers, slices (`:`), " "ellipsis (`...`), numpy.newaxis (`None`) and integer or " @@ -258,6 +259,7 @@ result = [] i = 0 has_ellipsis = False + has_filter = False for w_item in space.fixedview(w_idx): if space.is_w(w_item, space.w_Ellipsis): if has_ellipsis: @@ -272,6 +274,16 @@ elif space.isinstance_w(w_item, space.w_slice): result.append(SliceChunk(w_item)) i += 1 + elif isinstance(w_item, W_NDimArray) and w_item.get_dtype().is_bool(): + if has_filter: + # in CNumPy, the support for this is incomplete + raise oefmt(space.w_ValueError, + "an index can only have a single boolean mask; " + "use np.take or create a sinlge mask array") + has_filter = True + result.append(BooleanChunk(w_item)) + elif isinstance(w_item, boxes.W_GenericBox): + result.append(IntegerChunk(w_item.descr_int(space))) else: result.append(IntegerChunk(w_item)) i += 1 diff --git a/pypy/module/micronumpy/ndarray.py b/pypy/module/micronumpy/ndarray.py --- a/pypy/module/micronumpy/ndarray.py +++ b/pypy/module/micronumpy/ndarray.py @@ -107,8 +107,9 @@ arr = W_NDimArray(self.implementation.transpose(self, None)) return space.wrap(loop.tostring(space, arr)) - def getitem_filter(self, space, arr): - if arr.ndims() > 1 and arr.get_shape() != self.get_shape(): + def getitem_filter(self, space, arr, axis=0): + shape = self.get_shape() + if arr.ndims() > 1 and arr.get_shape() != shape: raise OperationError(space.w_IndexError, space.wrap( "boolean index array should have 1 dimension")) if arr.get_size() > self.get_size(): @@ -116,14 +117,14 @@ "index out of range for array")) size = loop.count_all_true(arr) if arr.ndims() == 1: - if self.ndims() > 1 and arr.get_shape()[0] != self.get_shape()[0]: + if self.ndims() > 1 and arr.get_shape()[0] != shape[axis]: msg = ("boolean index did not match indexed array along" - " dimension 0; dimension 
is %d but corresponding" - " boolean dimension is %d" % (self.get_shape()[0], + " dimension %d; dimension is %d but corresponding" + " boolean dimension is %d" % (axis, shape[axis], arr.get_shape()[0])) #warning = space.gettypefor(support.W_VisibleDeprecationWarning) space.warn(space.wrap(msg), space.w_VisibleDeprecationWarning) - res_shape = [size] + self.get_shape()[1:] + res_shape = shape[:axis] + [size] + shape[axis+1:] else: res_shape = [size] w_res = W_NDimArray.from_shape(space, res_shape, self.get_dtype(), @@ -149,6 +150,8 @@ def _prepare_array_index(self, space, w_index): if isinstance(w_index, W_NDimArray): return [], w_index.get_shape(), w_index.get_shape(), [w_index] + if isinstance(w_index, boxes.W_GenericBox): + return [], [1], [1], [w_index] w_lst = space.listview(w_index) for w_item in w_lst: if not (space.isinstance_w(w_item, space.w_int) or space.isinstance_w(w_item, space.w_float)): @@ -162,7 +165,14 @@ arr_index_in_shape = False prefix = [] for i, w_item in enumerate(w_lst): - if (isinstance(w_item, W_NDimArray) or + if isinstance(w_item, W_NDimArray) and w_item.get_dtype().is_bool(): + if w_item.ndims() > 0: + indexes_w[i] = w_item + else: + raise oefmt(space.w_IndexError, + "in the future, 0-d boolean arrays will be " + "interpreted as a valid boolean index") + elif (isinstance(w_item, W_NDimArray) or space.isinstance_w(w_item, space.w_list)): w_item = convert_to_array(space, w_item) if shape is None: @@ -232,6 +242,8 @@ raise oefmt(space.w_IndexError, "in the future, 0-d boolean arrays will be " "interpreted as a valid boolean index") + elif isinstance(w_idx, boxes.W_GenericBox): + w_ret = self.getitem_array_int(space, w_idx) else: try: w_ret = self.implementation.descr_getitem(space, self, w_idx) diff --git a/pypy/module/micronumpy/strides.py b/pypy/module/micronumpy/strides.py --- a/pypy/module/micronumpy/strides.py +++ b/pypy/module/micronumpy/strides.py @@ -77,14 +77,42 @@ backstride = base_stride * max(0, base_length - 1) return 0, 
base_length, base_stride, backstride +class BooleanChunk(BaseChunk): + input_dim = 1 + out_dim = 1 + def __init__(self, w_idx): + self.w_idx = w_idx + + def compute(self, space, base_length, base_stride): + raise oefmt(space.w_NotImplementedError, 'cannot reach') def new_view(space, w_arr, chunks): arr = w_arr.implementation - r = calculate_slice_strides(space, arr.shape, arr.start, arr.get_strides(), - arr.get_backstrides(), chunks) + dim = -1 + for i, c in enumerate(chunks): + if isinstance(c, BooleanChunk): + dim = i + break + if dim >= 0: + # filter by axis dim + filtr = chunks[dim] + assert isinstance(filtr, BooleanChunk) + w_arr = w_arr.getitem_filter(space, filtr.w_idx, axis=dim) + arr = w_arr.implementation + chunks[dim] = SliceChunk(space.newslice(space.wrap(0), + space.wrap(-1), space.w_None)) + r = calculate_slice_strides(space, arr.shape, arr.start, + arr.get_strides(), arr.get_backstrides(), chunks) + else: + r = calculate_slice_strides(space, arr.shape, arr.start, + arr.get_strides(), arr.get_backstrides(), chunks) shape, start, strides, backstrides = r - return W_NDimArray.new_slice(space, start, strides[:], backstrides[:], + w_ret = W_NDimArray.new_slice(space, start, strides[:], backstrides[:], shape[:], arr, w_arr) + if dim == 0: + # Do not return a view + return w_ret.descr_copy(space, space.wrap(w_ret.get_order())) + return w_ret @jit.unroll_safe def _extend_shape(old_shape, chunks): @@ -127,7 +155,7 @@ jit.isconstant(len(chunks))) def calculate_slice_strides(space, shape, start, strides, backstrides, chunks): """ - Note: `chunks` must contain exactly one EllipsisChunk object. + Note: `chunks` can contain at most one EllipsisChunk object. 
""" size = 0 used_dims = 0 diff --git a/pypy/module/micronumpy/test/test_deprecations.py b/pypy/module/micronumpy/test/test_deprecations.py --- a/pypy/module/micronumpy/test/test_deprecations.py +++ b/pypy/module/micronumpy/test/test_deprecations.py @@ -24,7 +24,7 @@ # boolean indexing matches the dims in index # to the first index.ndims in arr, not implemented in pypy yet raises(IndexError, arr.__getitem__, index) - raises(TypeError, arr.__getitem__, (slice(None), index)) + raises(IndexError, arr.__getitem__, (slice(None), index)) else: raises(np.VisibleDeprecationWarning, arr.__getitem__, index) raises(np.VisibleDeprecationWarning, arr.__getitem__, (slice(None), index)) diff --git a/pypy/module/micronumpy/test/test_ndarray.py b/pypy/module/micronumpy/test/test_ndarray.py --- a/pypy/module/micronumpy/test/test_ndarray.py +++ b/pypy/module/micronumpy/test/test_ndarray.py @@ -2532,6 +2532,25 @@ a[b] = np.array([[4.]]) assert (a == [[4., 4., 4.]]).all() + def test_indexing_by_boolean(self): + import numpy as np + a = np.arange(6).reshape(2,3) + assert (a[[True, False], :] == [[3, 4, 5], [0, 1, 2]]).all() + b = a[np.array([True, False]), :] + assert (b == [[0, 1, 2]]).all() + assert b.base is None + b = a[:, np.array([True, False, True])] + assert b.base is not None + b = a[np.array([True, False]), 0] + assert (b ==[0]).all() + + def test_scalar_indexing(self): + import numpy as np + a = np.arange(6).reshape(2,3) + i = np.dtype('int32').type(0) + assert (a[0] == a[i]).all() + + def test_ellipsis_indexing(self): import numpy as np import sys diff --git a/pypy/objspace/std/typeobject.py b/pypy/objspace/std/typeobject.py --- a/pypy/objspace/std/typeobject.py +++ b/pypy/objspace/std/typeobject.py @@ -7,7 +7,7 @@ from pypy.interpreter.astcompiler.misc import mangle from rpython.rlib.jit import (promote, elidable_promote, we_are_jitted, - promote_string, elidable, dont_look_inside, unroll_safe) + elidable, dont_look_inside, unroll_safe) from rpython.rlib.objectmodel import 
current_object_addr_as_int, compute_hash from rpython.rlib.rarithmetic import intmask, r_uint @@ -400,7 +400,6 @@ if version_tag is None: tup = w_self._lookup_where(name) return tup - name = promote_string(name) tup_w = w_self._pure_lookup_where_with_method_cache(name, version_tag) w_class, w_value = tup_w if (space.config.objspace.std.withtypeversion and diff --git a/rpython/jit/codewriter/support.py b/rpython/jit/codewriter/support.py --- a/rpython/jit/codewriter/support.py +++ b/rpython/jit/codewriter/support.py @@ -246,12 +246,12 @@ def _ll_2_int_floordiv_ovf_zer(x, y): if y == 0: raise ZeroDivisionError - if x == -sys.maxint - 1 and y == -1: - raise OverflowError - return llop.int_floordiv(lltype.Signed, x, y) + return _ll_2_int_floordiv_ovf(x, y) def _ll_2_int_floordiv_ovf(x, y): - if x == -sys.maxint - 1 and y == -1: + # intentionally not short-circuited to produce only one guard + # and to remove the check fully if one of the arguments is known + if (x == -sys.maxint - 1) & (y == -1): raise OverflowError return llop.int_floordiv(lltype.Signed, x, y) @@ -263,12 +263,11 @@ def _ll_2_int_mod_ovf_zer(x, y): if y == 0: raise ZeroDivisionError - if x == -sys.maxint - 1 and y == -1: - raise OverflowError - return llop.int_mod(lltype.Signed, x, y) + return _ll_2_int_mod_ovf(x, y) def _ll_2_int_mod_ovf(x, y): - if x == -sys.maxint - 1 and y == -1: + #see comment in _ll_2_int_floordiv_ovf + if (x == -sys.maxint - 1) & (y == -1): raise OverflowError return llop.int_mod(lltype.Signed, x, y) diff --git a/rpython/jit/metainterp/test/test_ajit.py b/rpython/jit/metainterp/test/test_ajit.py --- a/rpython/jit/metainterp/test/test_ajit.py +++ b/rpython/jit/metainterp/test/test_ajit.py @@ -1199,6 +1199,31 @@ (-sys.maxint-1) // (-6) + 100 * 8) + def test_overflow_fold_if_divisor_constant(self): + import sys + from rpython.rtyper.lltypesystem.lloperation import llop + myjitdriver = JitDriver(greens = [], reds = ['x', 'y', 'res']) + def f(x, y): + res = 0 + while y > 0: + 
myjitdriver.can_enter_jit(x=x, y=y, res=res) + myjitdriver.jit_merge_point(x=x, y=y, res=res) + try: + res += llop.int_floordiv_ovf(lltype.Signed, + x, 2) + res += llop.int_mod_ovf(lltype.Signed, + x, 2) + x += 5 + except OverflowError: + res += 100 + y -= 1 + return res + res = self.meta_interp(f, [-41, 8]) + # the guard_true are for the loop condition + # the guard_false needed to check whether an overflow can occur have + # been folded away + self.check_resops(guard_true=2, guard_false=0) + def test_isinstance(self): class A: pass diff --git a/rpython/jit/metainterp/test/test_tlc.py b/rpython/jit/metainterp/test/test_tlc.py --- a/rpython/jit/metainterp/test/test_tlc.py +++ b/rpython/jit/metainterp/test/test_tlc.py @@ -1,5 +1,4 @@ import py -from rpython.rtyper.module.support import LLSupport from rpython.jit.tl import tlc diff --git a/rpython/rlib/rposix.py b/rpython/rlib/rposix.py --- a/rpython/rlib/rposix.py +++ b/rpython/rlib/rposix.py @@ -3,7 +3,6 @@ import errno from rpython.rtyper.lltypesystem.rffi import CConstant, CExternVariable, INT from rpython.rtyper.lltypesystem import lltype, ll2ctypes, rffi -from rpython.rtyper.module.support import StringTraits, UnicodeTraits from rpython.rtyper.tool import rffi_platform from rpython.tool.sourcetools import func_renamer from rpython.translator.tool.cbuild import ExternalCompilationInfo @@ -12,7 +11,7 @@ specialize, enforceargs, register_replacement_for, NOT_CONSTANT) from rpython.rlib.signature import signature from rpython.rlib import types -from rpython.annotator.model import s_Str0 +from rpython.annotator.model import s_Str0, s_Unicode0 from rpython.rlib import jit from rpython.translator.platform import platform from rpython.rlib import rstring @@ -342,6 +341,87 @@ rstring.check_str0(res) return res + +class StringTraits: + str = str + str0 = s_Str0 + CHAR = rffi.CHAR + CCHARP = rffi.CCHARP + charp2str = staticmethod(rffi.charp2str) + charpsize2str = staticmethod(rffi.charpsize2str) + scoped_str2charp = 
staticmethod(rffi.scoped_str2charp) + str2charp = staticmethod(rffi.str2charp) + free_charp = staticmethod(rffi.free_charp) + scoped_alloc_buffer = staticmethod(rffi.scoped_alloc_buffer) + + @staticmethod + def posix_function_name(name): + return UNDERSCORE_ON_WIN32 + name + + @staticmethod + def ll_os_name(name): + return 'll_os.ll_os_' + name + + @staticmethod + @specialize.argtype(0) + def as_str(path): + assert path is not None + if isinstance(path, str): + return path + elif isinstance(path, unicode): + # This never happens in PyPy's Python interpreter! + # Only in raw RPython code that uses unicode strings. + # We implement python2 behavior: silently convert to ascii. + return path.encode('ascii') + else: + return path.as_bytes() + + @staticmethod + @specialize.argtype(0) + def as_str0(path): + res = StringTraits.as_str(path) + rstring.check_str0(res) + return res + + +class UnicodeTraits: + str = unicode + str0 = s_Unicode0 + CHAR = rffi.WCHAR_T + CCHARP = rffi.CWCHARP + charp2str = staticmethod(rffi.wcharp2unicode) + charpsize2str = staticmethod(rffi.wcharpsize2unicode) + str2charp = staticmethod(rffi.unicode2wcharp) + scoped_str2charp = staticmethod(rffi.scoped_unicode2wcharp) + free_charp = staticmethod(rffi.free_wcharp) + scoped_alloc_buffer = staticmethod(rffi.scoped_alloc_unicodebuffer) + + @staticmethod + def posix_function_name(name): + return UNDERSCORE_ON_WIN32 + 'w' + name + + @staticmethod + @specialize.argtype(0) + def ll_os_name(name): + return 'll_os.ll_os_w' + name + + @staticmethod + @specialize.argtype(0) + def as_str(path): + assert path is not None + if isinstance(path, unicode): + return path + else: + return path.as_unicode() + + @staticmethod + @specialize.argtype(0) + def as_str0(path): + res = UnicodeTraits.as_str(path) + rstring.check_str0(res) + return res + + # Returns True when the unicode function should be called: # - on Windows # - if the path is Unicode. 
diff --git a/rpython/rlib/rposix_environ.py b/rpython/rlib/rposix_environ.py --- a/rpython/rlib/rposix_environ.py +++ b/rpython/rlib/rposix_environ.py @@ -2,10 +2,10 @@ import sys from rpython.annotator import model as annmodel from rpython.rlib.objectmodel import enforceargs +from rpython.rlib.rposix import _WIN32, StringTraits, UnicodeTraits from rpython.rtyper.controllerentry import Controller from rpython.rtyper.extfunc import register_external from rpython.rtyper.lltypesystem import rffi, lltype -from rpython.rtyper.module.support import _WIN32, StringTraits, UnicodeTraits from rpython.translator.tool.cbuild import ExternalCompilationInfo str0 = annmodel.s_Str0 diff --git a/rpython/rtyper/module/test/test_posix.py b/rpython/rlib/test/test_posix.py rename from rpython/rtyper/module/test/test_posix.py rename to rpython/rlib/test/test_posix.py diff --git a/rpython/rlib/test/test_rerased.py b/rpython/rlib/test/test_rerased.py --- a/rpython/rlib/test/test_rerased.py +++ b/rpython/rlib/test/test_rerased.py @@ -192,7 +192,7 @@ def interpret(self, *args, **kwargs): kwargs["taggedpointers"] = True - return BaseRtypingTest.interpret(self, *args, **kwargs) + return BaseRtypingTest.interpret(*args, **kwargs) def test_rtype_1(self): def f(): return eraseX(X()) diff --git a/rpython/rtyper/lltypesystem/ll2ctypes.py b/rpython/rtyper/lltypesystem/ll2ctypes.py --- a/rpython/rtyper/lltypesystem/ll2ctypes.py +++ b/rpython/rtyper/lltypesystem/ll2ctypes.py @@ -463,6 +463,9 @@ def remove_regular_struct_content(container): STRUCT = container._TYPE + if isinstance(STRUCT, lltype.FixedSizeArray): + del container._items + return for field_name in STRUCT._names: FIELDTYPE = getattr(STRUCT, field_name) if not isinstance(FIELDTYPE, lltype.ContainerType): @@ -642,6 +645,12 @@ cobj = lltype2ctypes(value) setattr(self._storage.contents, field_name, cobj) + def getitem(self, index, uninitialized_ok=False): + return getattr(self, "item%s" % index) + + def setitem(self, index, value): + 
setattr(self, "item%s" % index, value) + class _array_mixin(_parentable_mixin): """Mixin added to _array containers when they become ctypes-based.""" __slots__ = () diff --git a/rpython/rtyper/lltypesystem/lltype.py b/rpython/rtyper/lltypesystem/lltype.py --- a/rpython/rtyper/lltypesystem/lltype.py +++ b/rpython/rtyper/lltypesystem/lltype.py @@ -1761,7 +1761,10 @@ def __new__(self, TYPE, n=None, initialization=None, parent=None, parentindex=None): - my_variety = _struct_variety(TYPE._names) + if isinstance(TYPE, FixedSizeArray): + my_variety = _fixedsizearray + else: + my_variety = _struct_variety(TYPE._names) return object.__new__(my_variety) def __init__(self, TYPE, n=None, initialization=None, parent=None, @@ -1771,7 +1774,6 @@ raise TypeError("%r is not variable-sized" % (TYPE,)) if n is None and TYPE._arrayfld is not None: raise TypeError("%r is variable-sized" % (TYPE,)) - first, FIRSTTYPE = TYPE._first_struct() for fld, typ in TYPE._flds.items(): if fld == TYPE._arrayfld: value = _array(typ, n, initialization=initialization, @@ -1814,23 +1816,48 @@ raise UninitializedMemoryAccess("%r.%s"%(self, field_name)) return r - # for FixedSizeArray kind of structs: + +class _fixedsizearray(_struct): + def __init__(self, TYPE, n=None, initialization=None, parent=None, + parentindex=None): + _parentable.__init__(self, TYPE) + if n is not None: + raise TypeError("%r is not variable-sized" % (TYPE,)) + typ = TYPE.OF + storage = [] + for i, fld in enumerate(TYPE._names): + value = typ._allocate(initialization=initialization, + parent=self, parentindex=fld) + storage.append(value) + self._items = storage + if parent is not None: + self._setparentstructure(parent, parentindex) def getlength(self): - assert isinstance(self._TYPE, FixedSizeArray) return self._TYPE.length def getbounds(self): return 0, self.getlength() def getitem(self, index, uninitialized_ok=False): - assert isinstance(self._TYPE, FixedSizeArray) - return self._getattr('item%d' % index, uninitialized_ok) + 
assert 0 <= index < self.getlength() + return self._items[index] def setitem(self, index, value): - assert isinstance(self._TYPE, FixedSizeArray) - setattr(self, 'item%d' % index, value) + assert 0 <= index < self.getlength() + self._items[index] = value + def __getattr__(self, name): + # obscure + if name.startswith("item"): + return self.getitem(int(name[len('item'):])) + return _struct.__getattr__(self, name) + + def __setattr__(self, name, value): + if name.startswith("item"): + self.setitem(int(name[len('item'):]), value) + return + _struct.__setattr__(self, name, value) class _array(_parentable): _kind = "array" diff --git a/rpython/rtyper/lltypesystem/module/ll_math.py b/rpython/rtyper/lltypesystem/module/ll_math.py --- a/rpython/rtyper/lltypesystem/module/ll_math.py +++ b/rpython/rtyper/lltypesystem/module/ll_math.py @@ -6,8 +6,8 @@ from rpython.translator import cdir from rpython.rlib import jit, rposix from rpython.rlib.rfloat import INFINITY, NAN, isfinite, isinf, isnan +from rpython.rlib.rposix import UNDERSCORE_ON_WIN32 from rpython.rtyper.lltypesystem import lltype, rffi -from rpython.rtyper.module.support import UNDERSCORE_ON_WIN32 from rpython.tool.sourcetools import func_with_new_name from rpython.translator.tool.cbuild import ExternalCompilationInfo from rpython.translator.platform import platform diff --git a/rpython/rtyper/lltypesystem/test/test_ll2ctypes.py b/rpython/rtyper/lltypesystem/test/test_ll2ctypes.py --- a/rpython/rtyper/lltypesystem/test/test_ll2ctypes.py +++ b/rpython/rtyper/lltypesystem/test/test_ll2ctypes.py @@ -11,12 +11,12 @@ from rpython.rtyper.lltypesystem.ll2ctypes import _llgcopaque from rpython.rtyper.annlowlevel import llhelper from rpython.rlib import rposix +from rpython.rlib.rposix import UNDERSCORE_ON_WIN32 from rpython.translator.tool.cbuild import ExternalCompilationInfo from rpython.translator import cdir from rpython.tool.udir import udir from rpython.rtyper.test.test_llinterp import interpret from 
rpython.annotator.annrpython import RPythonAnnotator -from rpython.rtyper.module.support import UNDERSCORE_ON_WIN32 from rpython.rtyper.rtyper import RPythonTyper from rpython.rlib.rarithmetic import r_uint, get_long_pattern, is_emulated_long from rpython.rlib.rarithmetic import is_valid_int diff --git a/rpython/rtyper/module/__init__.py b/rpython/rtyper/module/__init__.py deleted file mode 100644 --- a/rpython/rtyper/module/__init__.py +++ /dev/null @@ -1,1 +0,0 @@ -# diff --git a/rpython/rtyper/module/support.py b/rpython/rtyper/module/support.py deleted file mode 100644 --- a/rpython/rtyper/module/support.py +++ /dev/null @@ -1,139 +0,0 @@ -import sys - -from rpython.annotator import model as annmodel -from rpython.rtyper.lltypesystem import lltype, rffi -from rpython.rlib.objectmodel import specialize -from rpython.rlib import rstring - -_WIN32 = sys.platform.startswith('win') -UNDERSCORE_ON_WIN32 = '_' if _WIN32 else '' - -# utility conversion functions -class LLSupport: - _mixin_ = True - - def to_rstr(s): - from rpython.rtyper.lltypesystem.rstr import STR, mallocstr - if s is None: - return lltype.nullptr(STR) - p = mallocstr(len(s)) - for i in range(len(s)): - p.chars[i] = s[i] - return p - to_rstr = staticmethod(to_rstr) - - def to_runicode(s): - from rpython.rtyper.lltypesystem.rstr import UNICODE, mallocunicode - if s is None: - return lltype.nullptr(UNICODE) - p = mallocunicode(len(s)) - for i in range(len(s)): - p.chars[i] = s[i] - return p - to_runicode = staticmethod(to_runicode) - - def from_rstr(rs): - if not rs: # null pointer - return None - else: - return ''.join([rs.chars[i] for i in range(len(rs.chars))]) - from_rstr = staticmethod(from_rstr) - - def from_rstr_nonnull(rs): - assert rs - return ''.join([rs.chars[i] for i in range(len(rs.chars))]) - from_rstr_nonnull = staticmethod(from_rstr_nonnull) - - -class StringTraits: - str = str - str0 = annmodel.s_Str0 - CHAR = rffi.CHAR - CCHARP = rffi.CCHARP - charp2str = staticmethod(rffi.charp2str) 
- charpsize2str = staticmethod(rffi.charpsize2str) - scoped_str2charp = staticmethod(rffi.scoped_str2charp) - str2charp = staticmethod(rffi.str2charp) - free_charp = staticmethod(rffi.free_charp) - scoped_alloc_buffer = staticmethod(rffi.scoped_alloc_buffer) - - @staticmethod - def posix_function_name(name): - return UNDERSCORE_ON_WIN32 + name - - @staticmethod - def ll_os_name(name): - return 'll_os.ll_os_' + name - - @staticmethod - @specialize.argtype(0) - def as_str(path): - assert path is not None - if isinstance(path, str): - return path - elif isinstance(path, unicode): - # This never happens in PyPy's Python interpreter! - # Only in raw RPython code that uses unicode strings. - # We implement python2 behavior: silently convert to ascii. - return path.encode('ascii') - else: - return path.as_bytes() - - @staticmethod - @specialize.argtype(0) - def as_str0(path): - res = StringTraits.as_str(path) - rstring.check_str0(res) - return res - -class UnicodeTraits: - str = unicode - str0 = annmodel.s_Unicode0 - CHAR = rffi.WCHAR_T - CCHARP = rffi.CWCHARP - charp2str = staticmethod(rffi.wcharp2unicode) - charpsize2str = staticmethod(rffi.wcharpsize2unicode) - str2charp = staticmethod(rffi.unicode2wcharp) - scoped_str2charp = staticmethod(rffi.scoped_unicode2wcharp) - free_charp = staticmethod(rffi.free_wcharp) - scoped_alloc_buffer = staticmethod(rffi.scoped_alloc_unicodebuffer) - - @staticmethod - def posix_function_name(name): - return UNDERSCORE_ON_WIN32 + 'w' + name - - @staticmethod - @specialize.argtype(0) - def ll_os_name(name): - return 'll_os.ll_os_w' + name - - @staticmethod - @specialize.argtype(0) - def as_str(path): - assert path is not None - if isinstance(path, unicode): - return path - else: - return path.as_unicode() - - @staticmethod - @specialize.argtype(0) - def as_str0(path): - res = UnicodeTraits.as_str(path) - rstring.check_str0(res) - return res - -def ll_strcpy(dst_s, src_s, n): - dstchars = dst_s.chars - srcchars = src_s.chars - i = 0 - 
while i < n: - dstchars[i] = srcchars[i] - i += 1 - -def _ll_strfill(dst_s, srcchars, n): - dstchars = dst_s.chars - i = 0 - while i < n: - dstchars[i] = srcchars[i] - i += 1 diff --git a/rpython/rtyper/module/test/__init__.py b/rpython/rtyper/module/test/__init__.py deleted file mode 100644 --- a/rpython/rtyper/module/test/__init__.py +++ /dev/null @@ -1,1 +0,0 @@ -# diff --git a/rpython/rtyper/module/test/test_ll_strtod.py b/rpython/rtyper/module/test/test_ll_strtod.py deleted file mode 100644 --- a/rpython/rtyper/module/test/test_ll_strtod.py +++ /dev/null @@ -1,13 +0,0 @@ -import py - -from rpython.rtyper.test.tool import BaseRtypingTest -from rpython.rlib import rfloat - -class TestStrtod(BaseRtypingTest): - def test_formatd(self): - for flags in [0, - rfloat.DTSF_ADD_DOT_0]: - def f(y): - return rfloat.formatd(y, 'g', 2, flags) - - assert self.ll_to_string(self.interpret(f, [3.0])) == f(3.0) diff --git a/rpython/rtyper/test/test_rfloat.py b/rpython/rtyper/test/test_rfloat.py --- a/rpython/rtyper/test/test_rfloat.py +++ b/rpython/rtyper/test/test_rfloat.py @@ -204,6 +204,14 @@ res = self.ll_to_string(self.interpret(f, [10/3.0])) assert res == '3.33' + def test_formatd_g(self): + from rpython.rlib import rfloat + for flags in [0, rfloat.DTSF_ADD_DOT_0]: + def f(y): + return rfloat.formatd(y, 'g', 2, flags) + + assert self.ll_to_string(self.interpret(f, [3.0])) == f(3.0) + def test_formatd_repr(self): from rpython.rlib.rfloat import formatd def f(x): diff --git a/rpython/rtyper/test/test_rpbc.py b/rpython/rtyper/test/test_rpbc.py --- a/rpython/rtyper/test/test_rpbc.py +++ b/rpython/rtyper/test/test_rpbc.py @@ -1945,7 +1945,7 @@ def interpret(self, fn, args, **kwds): kwds['config'] = self.config - return TestRPBC.interpret(self, fn, args, **kwds) + return TestRPBC.interpret(fn, args, **kwds) def test_smallfuncsets_basic(): from rpython.translator.translator import TranslationContext, graphof diff --git a/rpython/rtyper/test/tool.py b/rpython/rtyper/test/tool.py 
--- a/rpython/rtyper/test/tool.py +++ b/rpython/rtyper/test/tool.py @@ -5,22 +5,27 @@ class BaseRtypingTest(object): FLOAT_PRECISION = 8 - def gengraph(self, func, argtypes=[], viewbefore='auto', policy=None, + @staticmethod + def gengraph(func, argtypes=[], viewbefore='auto', policy=None, backendopt=False, config=None): return gengraph(func, argtypes, viewbefore, policy, backendopt=backendopt, config=config) - def interpret(self, fn, args, **kwds): + @staticmethod + def interpret(fn, args, **kwds): return interpret(fn, args, **kwds) - def interpret_raises(self, exc, fn, args, **kwds): + @staticmethod + def interpret_raises(exc, fn, args, **kwds): return interpret_raises(exc, fn, args, **kwds) - def float_eq(self, x, y): + @staticmethod + def float_eq(x, y): return x == y - def float_eq_approx(self, x, y): - maxError = 10**-self.FLOAT_PRECISION + @classmethod + def float_eq_approx(cls, x, y): + maxError = 10**-cls.FLOAT_PRECISION if abs(x-y) < maxError: return True @@ -31,45 +36,66 @@ return relativeError < maxError - def is_of_type(self, x, type_): + @staticmethod + def is_of_type(x, type_): return type(x) is type_ - def _skip_llinterpreter(self, reason): + @staticmethod + def _skip_llinterpreter(reason): py.test.skip("lltypesystem doesn't support %s, yet" % reason) - def ll_to_string(self, s): + @staticmethod + def ll_to_string(s): if not s: return None return ''.join(s.chars) - def ll_to_unicode(self, s): + @staticmethod + def ll_to_unicode(s): return u''.join(s.chars) - def string_to_ll(self, s): - from rpython.rtyper.module.support import LLSupport - return LLSupport.to_rstr(s) + @staticmethod + def string_to_ll(s): + from rpython.rtyper.lltypesystem.rstr import STR, mallocstr + if s is None: + return lltype.nullptr(STR) + p = mallocstr(len(s)) + for i in range(len(s)): + p.chars[i] = s[i] + return p - def unicode_to_ll(self, s): - from rpython.rtyper.module.support import LLSupport - return LLSupport.to_runicode(s) + @staticmethod + def unicode_to_ll(s): + 
from rpython.rtyper.lltypesystem.rstr import UNICODE, mallocunicode + if s is None: + return lltype.nullptr(UNICODE) + p = mallocunicode(len(s)) + for i in range(len(s)): + p.chars[i] = s[i] + return p - def ll_to_list(self, l): + @staticmethod + def ll_to_list(l): r = [] items = l.ll_items() for i in range(l.ll_length()): r.append(items[i]) return r - def ll_unpack_tuple(self, t, length): + @staticmethod + def ll_unpack_tuple(t, length): return tuple([getattr(t, 'item%d' % i) for i in range(length)]) - def get_callable(self, fnptr): + @staticmethod + def get_callable(fnptr): return fnptr._obj._callable - def class_name(self, value): + @staticmethod + def class_name(value): return ''.join(value.super.typeptr.name.chars) - def read_attr(self, value, attr_name): + @staticmethod + def read_attr(value, attr_name): value = value._obj while value is not None: attr = getattr(value, "inst_" + attr_name, None) @@ -79,6 +105,7 @@ return attr raise AttributeError() - def is_of_instance_type(self, val): + @staticmethod + def is_of_instance_type(val): T = lltype.typeOf(val) return isinstance(T, lltype.Ptr) and isinstance(T.TO, lltype.GcStruct) From pypy.commits at gmail.com Sun Feb 21 17:07:29 2016 From: pypy.commits at gmail.com (plan_rich) Date: Sun, 21 Feb 2016 14:07:29 -0800 (PST) Subject: [pypy-commit] pypy py3.3: (mjacob, plan_rich) merge py3k into py3.3 Message-ID: <56ca3521.034cc20a.d605e.ffff889c@mx.google.com> Author: Richard Plangger Branch: py3.3 Changeset: r82378:d5aed0c8694d Date: 2016-02-21 23:06 +0100 http://bitbucket.org/pypy/pypy/changeset/d5aed0c8694d/ Log: (mjacob, plan_rich) merge py3k into py3.3 diff --git a/.hgignore b/.hgignore --- a/.hgignore +++ b/.hgignore @@ -22,6 +22,7 @@ ^pypy/module/cpyext/test/.+\.obj$ ^pypy/module/cpyext/test/.+\.manifest$ ^pypy/module/test_lib_pypy/ctypes_tests/.+\.o$ +^pypy/module/test_lib_pypy/ctypes_tests/_ctypes_test\.o$ ^pypy/module/cppyy/src/.+\.o$ ^pypy/module/cppyy/bench/.+\.so$ ^pypy/module/cppyy/bench/.+\.root$ @@ 
-35,7 +36,6 @@ ^pypy/module/test_lib_pypy/cffi_tests/__pycache__.+$ ^pypy/doc/.+\.html$ ^pypy/doc/config/.+\.rst$ -^pypy/doc/basicblock\.asc$ ^pypy/doc/.+\.svninfo$ ^rpython/translator/c/src/libffi_msvc/.+\.obj$ ^rpython/translator/c/src/libffi_msvc/.+\.dll$ @@ -45,53 +45,33 @@ ^rpython/translator/c/src/cjkcodecs/.+\.obj$ ^rpython/translator/c/src/stacklet/.+\.o$ ^rpython/translator/c/src/.+\.o$ -^rpython/translator/jvm/\.project$ -^rpython/translator/jvm/\.classpath$ -^rpython/translator/jvm/eclipse-bin$ -^rpython/translator/jvm/src/pypy/.+\.class$ -^rpython/translator/benchmark/docutils$ -^rpython/translator/benchmark/templess$ -^rpython/translator/benchmark/gadfly$ -^rpython/translator/benchmark/mako$ -^rpython/translator/benchmark/bench-custom\.benchmark_result$ -^rpython/translator/benchmark/shootout_benchmarks$ +^rpython/translator/llvm/.+\.so$ ^rpython/translator/goal/target.+-c$ ^rpython/translator/goal/.+\.exe$ ^rpython/translator/goal/.+\.dll$ ^pypy/goal/pypy-translation-snapshot$ ^pypy/goal/pypy-c -^pypy/goal/pypy-jvm -^pypy/goal/pypy-jvm.jar ^pypy/goal/.+\.exe$ ^pypy/goal/.+\.dll$ ^pypy/goal/.+\.lib$ ^pypy/_cache$ -^pypy/doc/statistic/.+\.html$ -^pypy/doc/statistic/.+\.eps$ -^pypy/doc/statistic/.+\.pdf$ -^rpython/translator/cli/src/pypylib\.dll$ -^rpython/translator/cli/src/query\.exe$ -^rpython/translator/cli/src/main\.exe$ +^lib-python/2.7/lib2to3/.+\.pickle$ ^lib_pypy/__pycache__$ ^lib_pypy/ctypes_config_cache/_.+_cache\.py$ ^lib_pypy/ctypes_config_cache/_.+_.+_\.py$ ^lib_pypy/_libmpdec/.+.o$ -^rpython/translator/cli/query-descriptions$ ^pypy/doc/discussion/.+\.html$ ^include/.+\.h$ ^include/.+\.inl$ ^pypy/doc/_build/.*$ ^pypy/doc/config/.+\.html$ ^pypy/doc/config/style\.css$ -^pypy/doc/jit/.+\.html$ -^pypy/doc/jit/style\.css$ ^pypy/doc/image/lattice1\.png$ ^pypy/doc/image/lattice2\.png$ ^pypy/doc/image/lattice3\.png$ ^pypy/doc/image/stackless_informal\.png$ ^pypy/doc/image/parsing_example.+\.png$ ^rpython/doc/_build/.*$ 
-^pypy/module/test_lib_pypy/ctypes_tests/_ctypes_test\.o$ ^compiled ^.git/ ^release/ diff --git a/pypy/interpreter/pyparser/pytokenizer.py b/pypy/interpreter/pyparser/pytokenizer.py --- a/pypy/interpreter/pyparser/pytokenizer.py +++ b/pypy/interpreter/pyparser/pytokenizer.py @@ -106,6 +106,7 @@ strstart = (0, 0, "") for line in lines: lnum = lnum + 1 + line = universal_newline(line) pos, max = 0, len(line) if contstr: @@ -296,3 +297,15 @@ token_list.append((tokens.ENDMARKER, '', lnum, pos, line)) return token_list + +def universal_newline(line): + if len(line) >= 2: + c0 = line[-2] + c1 = line[-1] + if c0 == '\r' and c1 == '\n': + return line[:-2] + '\n' + if len(line) >= 1: + c = line[-1] + if c == '\r': + return line[:-1] + '\n' + return line diff --git a/pypy/interpreter/pyparser/test/test_pyparse.py b/pypy/interpreter/pyparser/test/test_pyparse.py --- a/pypy/interpreter/pyparser/test/test_pyparse.py +++ b/pypy/interpreter/pyparser/test/test_pyparse.py @@ -139,6 +139,13 @@ def test_print_function(self): self.parse("from __future__ import print_function\nx = print\n") + def test_universal_newlines(self): + fmt = 'stuff = """hello%sworld"""' + expected_tree = self.parse(fmt % '\n') + for linefeed in ["\r\n","\r"]: + tree = self.parse(fmt % linefeed) + assert expected_tree == tree + def test_py3k_reject_old_binary_literal(self): py.test.raises(SyntaxError, self.parse, '0777') diff --git a/pypy/interpreter/typedef.py b/pypy/interpreter/typedef.py --- a/pypy/interpreter/typedef.py +++ b/pypy/interpreter/typedef.py @@ -156,20 +156,6 @@ get_unique_interplevel_subclass._annspecialcase_ = "specialize:memo" _subclass_cache = {} -def enum_interplevel_subclasses(config, cls): - """Return a list of all the extra interp-level subclasses of 'cls' that - can be built by get_unique_interplevel_subclass().""" - result = [] - for flag1 in (False, True): - for flag2 in (False, True): - for flag3 in (False, True): - for flag4 in (False, True): - 
result.append(get_unique_interplevel_subclass( - config, cls, flag1, flag2, flag3, flag4)) - result = dict.fromkeys(result) - assert len(result) <= 6 - return result.keys() - def _getusercls(config, cls, wants_dict, wants_slots, wants_del, weakrefable): typedef = cls.typedef if wants_dict and typedef.hasdict: diff --git a/pypy/module/micronumpy/compile.py b/pypy/module/micronumpy/compile.py --- a/pypy/module/micronumpy/compile.py +++ b/pypy/module/micronumpy/compile.py @@ -196,6 +196,10 @@ def newfloat(self, f): return self.float(f) + def newslice(self, start, stop, step): + return SliceObject(self.int_w(start), self.int_w(stop), + self.int_w(step)) + def le(self, w_obj1, w_obj2): assert isinstance(w_obj1, boxes.W_GenericBox) assert isinstance(w_obj2, boxes.W_GenericBox) diff --git a/pypy/module/micronumpy/concrete.py b/pypy/module/micronumpy/concrete.py --- a/pypy/module/micronumpy/concrete.py +++ b/pypy/module/micronumpy/concrete.py @@ -12,8 +12,8 @@ ArrayArgumentException, W_NumpyObject from pypy.module.micronumpy.iterators import ArrayIter from pypy.module.micronumpy.strides import ( - IntegerChunk, SliceChunk, NewAxisChunk, EllipsisChunk, new_view, - calc_strides, calc_new_strides, shape_agreement, + IntegerChunk, SliceChunk, NewAxisChunk, EllipsisChunk, BooleanChunk, + new_view, calc_strides, calc_new_strides, shape_agreement, calculate_broadcast_strides, calc_backstrides, calc_start, is_c_contiguous, is_f_contiguous) from rpython.rlib.objectmodel import keepalive_until_here @@ -236,6 +236,7 @@ @jit.unroll_safe def _prepare_slice_args(self, space, w_idx): + from pypy.module.micronumpy import boxes if space.isinstance_w(w_idx, space.w_str): raise oefmt(space.w_IndexError, "only integers, slices (`:`), " "ellipsis (`...`), numpy.newaxis (`None`) and integer or " @@ -258,6 +259,7 @@ result = [] i = 0 has_ellipsis = False + has_filter = False for w_item in space.fixedview(w_idx): if space.is_w(w_item, space.w_Ellipsis): if has_ellipsis: @@ -272,6 +274,16 @@ elif 
space.isinstance_w(w_item, space.w_slice): result.append(SliceChunk(w_item)) i += 1 + elif isinstance(w_item, W_NDimArray) and w_item.get_dtype().is_bool(): + if has_filter: + # in CNumPy, the support for this is incomplete + raise oefmt(space.w_ValueError, + "an index can only have a single boolean mask; " + "use np.take or create a sinlge mask array") + has_filter = True + result.append(BooleanChunk(w_item)) + elif isinstance(w_item, boxes.W_GenericBox): + result.append(IntegerChunk(w_item.descr_int(space))) else: result.append(IntegerChunk(w_item)) i += 1 diff --git a/pypy/module/micronumpy/ndarray.py b/pypy/module/micronumpy/ndarray.py --- a/pypy/module/micronumpy/ndarray.py +++ b/pypy/module/micronumpy/ndarray.py @@ -107,8 +107,9 @@ arr = W_NDimArray(self.implementation.transpose(self, None)) return space.wrap(loop.tostring(space, arr)) - def getitem_filter(self, space, arr): - if arr.ndims() > 1 and arr.get_shape() != self.get_shape(): + def getitem_filter(self, space, arr, axis=0): + shape = self.get_shape() + if arr.ndims() > 1 and arr.get_shape() != shape: raise OperationError(space.w_IndexError, space.wrap( "boolean index array should have 1 dimension")) if arr.get_size() > self.get_size(): @@ -116,14 +117,14 @@ "index out of range for array")) size = loop.count_all_true(arr) if arr.ndims() == 1: - if self.ndims() > 1 and arr.get_shape()[0] != self.get_shape()[0]: + if self.ndims() > 1 and arr.get_shape()[0] != shape[axis]: msg = ("boolean index did not match indexed array along" - " dimension 0; dimension is %d but corresponding" - " boolean dimension is %d" % (self.get_shape()[0], + " dimension %d; dimension is %d but corresponding" + " boolean dimension is %d" % (axis, shape[axis], arr.get_shape()[0])) #warning = space.gettypefor(support.W_VisibleDeprecationWarning) space.warn(space.wrap(msg), space.w_VisibleDeprecationWarning) - res_shape = [size] + self.get_shape()[1:] + res_shape = shape[:axis] + [size] + shape[axis+1:] else: res_shape = [size] 
w_res = W_NDimArray.from_shape(space, res_shape, self.get_dtype(), @@ -149,6 +150,8 @@ def _prepare_array_index(self, space, w_index): if isinstance(w_index, W_NDimArray): return [], w_index.get_shape(), w_index.get_shape(), [w_index] + if isinstance(w_index, boxes.W_GenericBox): + return [], [1], [1], [w_index] w_lst = space.listview(w_index) for w_item in w_lst: if not (space.isinstance_w(w_item, space.w_int) or space.isinstance_w(w_item, space.w_float)): @@ -162,7 +165,14 @@ arr_index_in_shape = False prefix = [] for i, w_item in enumerate(w_lst): - if (isinstance(w_item, W_NDimArray) or + if isinstance(w_item, W_NDimArray) and w_item.get_dtype().is_bool(): + if w_item.ndims() > 0: + indexes_w[i] = w_item + else: + raise oefmt(space.w_IndexError, + "in the future, 0-d boolean arrays will be " + "interpreted as a valid boolean index") + elif (isinstance(w_item, W_NDimArray) or space.isinstance_w(w_item, space.w_list)): w_item = convert_to_array(space, w_item) if shape is None: @@ -232,6 +242,8 @@ raise oefmt(space.w_IndexError, "in the future, 0-d boolean arrays will be " "interpreted as a valid boolean index") + elif isinstance(w_idx, boxes.W_GenericBox): + w_ret = self.getitem_array_int(space, w_idx) else: try: w_ret = self.implementation.descr_getitem(space, self, w_idx) diff --git a/pypy/module/micronumpy/strides.py b/pypy/module/micronumpy/strides.py --- a/pypy/module/micronumpy/strides.py +++ b/pypy/module/micronumpy/strides.py @@ -77,14 +77,42 @@ backstride = base_stride * max(0, base_length - 1) return 0, base_length, base_stride, backstride +class BooleanChunk(BaseChunk): + input_dim = 1 + out_dim = 1 + def __init__(self, w_idx): + self.w_idx = w_idx + + def compute(self, space, base_length, base_stride): + raise oefmt(space.w_NotImplementedError, 'cannot reach') def new_view(space, w_arr, chunks): arr = w_arr.implementation - r = calculate_slice_strides(space, arr.shape, arr.start, arr.get_strides(), - arr.get_backstrides(), chunks) + dim = -1 + for i, 
c in enumerate(chunks): + if isinstance(c, BooleanChunk): + dim = i + break + if dim >= 0: + # filter by axis dim + filtr = chunks[dim] + assert isinstance(filtr, BooleanChunk) + w_arr = w_arr.getitem_filter(space, filtr.w_idx, axis=dim) + arr = w_arr.implementation + chunks[dim] = SliceChunk(space.newslice(space.wrap(0), + space.wrap(-1), space.w_None)) + r = calculate_slice_strides(space, arr.shape, arr.start, + arr.get_strides(), arr.get_backstrides(), chunks) + else: + r = calculate_slice_strides(space, arr.shape, arr.start, + arr.get_strides(), arr.get_backstrides(), chunks) shape, start, strides, backstrides = r - return W_NDimArray.new_slice(space, start, strides[:], backstrides[:], + w_ret = W_NDimArray.new_slice(space, start, strides[:], backstrides[:], shape[:], arr, w_arr) + if dim == 0: + # Do not return a view + return w_ret.descr_copy(space, space.wrap(w_ret.get_order())) + return w_ret @jit.unroll_safe def _extend_shape(old_shape, chunks): @@ -127,7 +155,7 @@ jit.isconstant(len(chunks))) def calculate_slice_strides(space, shape, start, strides, backstrides, chunks): """ - Note: `chunks` must contain exactly one EllipsisChunk object. + Note: `chunks` can contain at most one EllipsisChunk object. 
""" size = 0 used_dims = 0 diff --git a/pypy/module/micronumpy/test/test_deprecations.py b/pypy/module/micronumpy/test/test_deprecations.py --- a/pypy/module/micronumpy/test/test_deprecations.py +++ b/pypy/module/micronumpy/test/test_deprecations.py @@ -24,7 +24,7 @@ # boolean indexing matches the dims in index # to the first index.ndims in arr, not implemented in pypy yet raises(IndexError, arr.__getitem__, index) - raises(TypeError, arr.__getitem__, (slice(None), index)) + raises(IndexError, arr.__getitem__, (slice(None), index)) else: raises(np.VisibleDeprecationWarning, arr.__getitem__, index) raises(np.VisibleDeprecationWarning, arr.__getitem__, (slice(None), index)) diff --git a/pypy/module/micronumpy/test/test_ndarray.py b/pypy/module/micronumpy/test/test_ndarray.py --- a/pypy/module/micronumpy/test/test_ndarray.py +++ b/pypy/module/micronumpy/test/test_ndarray.py @@ -2532,6 +2532,25 @@ a[b] = np.array([[4.]]) assert (a == [[4., 4., 4.]]).all() + def test_indexing_by_boolean(self): + import numpy as np + a = np.arange(6).reshape(2,3) + assert (a[[True, False], :] == [[3, 4, 5], [0, 1, 2]]).all() + b = a[np.array([True, False]), :] + assert (b == [[0, 1, 2]]).all() + assert b.base is None + b = a[:, np.array([True, False, True])] + assert b.base is not None + b = a[np.array([True, False]), 0] + assert (b ==[0]).all() + + def test_scalar_indexing(self): + import numpy as np + a = np.arange(6).reshape(2,3) + i = np.dtype('int32').type(0) + assert (a[0] == a[i]).all() + + def test_ellipsis_indexing(self): import numpy as np import sys diff --git a/pypy/objspace/std/typeobject.py b/pypy/objspace/std/typeobject.py --- a/pypy/objspace/std/typeobject.py +++ b/pypy/objspace/std/typeobject.py @@ -8,7 +8,7 @@ from pypy.interpreter.astcompiler.misc import mangle from rpython.rlib.jit import (promote, elidable_promote, we_are_jitted, - promote_string, elidable, dont_look_inside, unroll_safe) + elidable, dont_look_inside, unroll_safe) from rpython.rlib.objectmodel import 
current_object_addr_as_int, compute_hash from rpython.rlib.rarithmetic import intmask, r_uint @@ -402,7 +402,6 @@ if version_tag is None: tup = w_self._lookup_where(name) return tup - name = promote_string(name) tup_w = w_self._pure_lookup_where_with_method_cache(name, version_tag) w_class, w_value = tup_w if (space.config.objspace.std.withtypeversion and diff --git a/rpython/jit/codewriter/support.py b/rpython/jit/codewriter/support.py --- a/rpython/jit/codewriter/support.py +++ b/rpython/jit/codewriter/support.py @@ -246,12 +246,12 @@ def _ll_2_int_floordiv_ovf_zer(x, y): if y == 0: raise ZeroDivisionError - if x == -sys.maxint - 1 and y == -1: - raise OverflowError - return llop.int_floordiv(lltype.Signed, x, y) + return _ll_2_int_floordiv_ovf(x, y) def _ll_2_int_floordiv_ovf(x, y): - if x == -sys.maxint - 1 and y == -1: + # intentionally not short-circuited to produce only one guard + # and to remove the check fully if one of the arguments is known + if (x == -sys.maxint - 1) & (y == -1): raise OverflowError return llop.int_floordiv(lltype.Signed, x, y) @@ -263,12 +263,11 @@ def _ll_2_int_mod_ovf_zer(x, y): if y == 0: raise ZeroDivisionError - if x == -sys.maxint - 1 and y == -1: - raise OverflowError - return llop.int_mod(lltype.Signed, x, y) + return _ll_2_int_mod_ovf(x, y) def _ll_2_int_mod_ovf(x, y): - if x == -sys.maxint - 1 and y == -1: + #see comment in _ll_2_int_floordiv_ovf + if (x == -sys.maxint - 1) & (y == -1): raise OverflowError return llop.int_mod(lltype.Signed, x, y) diff --git a/rpython/jit/metainterp/test/test_ajit.py b/rpython/jit/metainterp/test/test_ajit.py --- a/rpython/jit/metainterp/test/test_ajit.py +++ b/rpython/jit/metainterp/test/test_ajit.py @@ -1199,6 +1199,31 @@ (-sys.maxint-1) // (-6) + 100 * 8) + def test_overflow_fold_if_divisor_constant(self): + import sys + from rpython.rtyper.lltypesystem.lloperation import llop + myjitdriver = JitDriver(greens = [], reds = ['x', 'y', 'res']) + def f(x, y): + res = 0 + while y > 0: + 
myjitdriver.can_enter_jit(x=x, y=y, res=res) + myjitdriver.jit_merge_point(x=x, y=y, res=res) + try: + res += llop.int_floordiv_ovf(lltype.Signed, + x, 2) + res += llop.int_mod_ovf(lltype.Signed, + x, 2) + x += 5 + except OverflowError: + res += 100 + y -= 1 + return res + res = self.meta_interp(f, [-41, 8]) + # the guard_true are for the loop condition + # the guard_false needed to check whether an overflow can occur have + # been folded away + self.check_resops(guard_true=2, guard_false=0) + def test_isinstance(self): class A: pass diff --git a/rpython/jit/metainterp/test/test_tlc.py b/rpython/jit/metainterp/test/test_tlc.py --- a/rpython/jit/metainterp/test/test_tlc.py +++ b/rpython/jit/metainterp/test/test_tlc.py @@ -1,5 +1,4 @@ import py -from rpython.rtyper.module.support import LLSupport from rpython.jit.tl import tlc diff --git a/rpython/rlib/rposix.py b/rpython/rlib/rposix.py --- a/rpython/rlib/rposix.py +++ b/rpython/rlib/rposix.py @@ -3,7 +3,6 @@ import errno from rpython.rtyper.lltypesystem.rffi import CConstant, CExternVariable, INT from rpython.rtyper.lltypesystem import lltype, ll2ctypes, rffi -from rpython.rtyper.module.support import StringTraits, UnicodeTraits from rpython.rtyper.tool import rffi_platform from rpython.tool.sourcetools import func_renamer from rpython.translator.tool.cbuild import ExternalCompilationInfo @@ -12,7 +11,7 @@ specialize, enforceargs, register_replacement_for, NOT_CONSTANT) from rpython.rlib.signature import signature from rpython.rlib import types -from rpython.annotator.model import s_Str0 +from rpython.annotator.model import s_Str0, s_Unicode0 from rpython.rlib import jit from rpython.translator.platform import platform from rpython.rlib import rstring @@ -358,6 +357,87 @@ rstring.check_str0(res) return res + +class StringTraits: + str = str + str0 = s_Str0 + CHAR = rffi.CHAR + CCHARP = rffi.CCHARP + charp2str = staticmethod(rffi.charp2str) + charpsize2str = staticmethod(rffi.charpsize2str) + scoped_str2charp = 
staticmethod(rffi.scoped_str2charp) + str2charp = staticmethod(rffi.str2charp) + free_charp = staticmethod(rffi.free_charp) + scoped_alloc_buffer = staticmethod(rffi.scoped_alloc_buffer) + + @staticmethod + def posix_function_name(name): + return UNDERSCORE_ON_WIN32 + name + + @staticmethod + def ll_os_name(name): + return 'll_os.ll_os_' + name + + @staticmethod + @specialize.argtype(0) + def as_str(path): + assert path is not None + if isinstance(path, str): + return path + elif isinstance(path, unicode): + # This never happens in PyPy's Python interpreter! + # Only in raw RPython code that uses unicode strings. + # We implement python2 behavior: silently convert to ascii. + return path.encode('ascii') + else: + return path.as_bytes() + + @staticmethod + @specialize.argtype(0) + def as_str0(path): + res = StringTraits.as_str(path) + rstring.check_str0(res) + return res + + +class UnicodeTraits: + str = unicode + str0 = s_Unicode0 + CHAR = rffi.WCHAR_T + CCHARP = rffi.CWCHARP + charp2str = staticmethod(rffi.wcharp2unicode) + charpsize2str = staticmethod(rffi.wcharpsize2unicode) + str2charp = staticmethod(rffi.unicode2wcharp) + scoped_str2charp = staticmethod(rffi.scoped_unicode2wcharp) + free_charp = staticmethod(rffi.free_wcharp) + scoped_alloc_buffer = staticmethod(rffi.scoped_alloc_unicodebuffer) + + @staticmethod + def posix_function_name(name): + return UNDERSCORE_ON_WIN32 + 'w' + name + + @staticmethod + @specialize.argtype(0) + def ll_os_name(name): + return 'll_os.ll_os_w' + name + + @staticmethod + @specialize.argtype(0) + def as_str(path): + assert path is not None + if isinstance(path, unicode): + return path + else: + return path.as_unicode() + + @staticmethod + @specialize.argtype(0) + def as_str0(path): + res = UnicodeTraits.as_str(path) + rstring.check_str0(res) + return res + + # Returns True when the unicode function should be called: # - on Windows # - if the path is Unicode. 
diff --git a/rpython/rlib/rposix_environ.py b/rpython/rlib/rposix_environ.py --- a/rpython/rlib/rposix_environ.py +++ b/rpython/rlib/rposix_environ.py @@ -2,10 +2,10 @@ import sys from rpython.annotator import model as annmodel from rpython.rlib.objectmodel import enforceargs +from rpython.rlib.rposix import _WIN32, StringTraits, UnicodeTraits from rpython.rtyper.controllerentry import Controller from rpython.rtyper.extfunc import register_external from rpython.rtyper.lltypesystem import rffi, lltype -from rpython.rtyper.module.support import _WIN32, StringTraits, UnicodeTraits from rpython.translator.tool.cbuild import ExternalCompilationInfo str0 = annmodel.s_Str0 diff --git a/rpython/rtyper/module/test/test_posix.py b/rpython/rlib/test/test_posix.py rename from rpython/rtyper/module/test/test_posix.py rename to rpython/rlib/test/test_posix.py diff --git a/rpython/rlib/test/test_rerased.py b/rpython/rlib/test/test_rerased.py --- a/rpython/rlib/test/test_rerased.py +++ b/rpython/rlib/test/test_rerased.py @@ -192,7 +192,7 @@ def interpret(self, *args, **kwargs): kwargs["taggedpointers"] = True - return BaseRtypingTest.interpret(self, *args, **kwargs) + return BaseRtypingTest.interpret(*args, **kwargs) def test_rtype_1(self): def f(): return eraseX(X()) diff --git a/rpython/rtyper/lltypesystem/ll2ctypes.py b/rpython/rtyper/lltypesystem/ll2ctypes.py --- a/rpython/rtyper/lltypesystem/ll2ctypes.py +++ b/rpython/rtyper/lltypesystem/ll2ctypes.py @@ -463,6 +463,9 @@ def remove_regular_struct_content(container): STRUCT = container._TYPE + if isinstance(STRUCT, lltype.FixedSizeArray): + del container._items + return for field_name in STRUCT._names: FIELDTYPE = getattr(STRUCT, field_name) if not isinstance(FIELDTYPE, lltype.ContainerType): @@ -642,6 +645,12 @@ cobj = lltype2ctypes(value) setattr(self._storage.contents, field_name, cobj) + def getitem(self, index, uninitialized_ok=False): + return getattr(self, "item%s" % index) + + def setitem(self, index, value): + 
setattr(self, "item%s" % index, value) + class _array_mixin(_parentable_mixin): """Mixin added to _array containers when they become ctypes-based.""" __slots__ = () diff --git a/rpython/rtyper/lltypesystem/lltype.py b/rpython/rtyper/lltypesystem/lltype.py --- a/rpython/rtyper/lltypesystem/lltype.py +++ b/rpython/rtyper/lltypesystem/lltype.py @@ -1761,7 +1761,10 @@ def __new__(self, TYPE, n=None, initialization=None, parent=None, parentindex=None): - my_variety = _struct_variety(TYPE._names) + if isinstance(TYPE, FixedSizeArray): + my_variety = _fixedsizearray + else: + my_variety = _struct_variety(TYPE._names) return object.__new__(my_variety) def __init__(self, TYPE, n=None, initialization=None, parent=None, @@ -1771,7 +1774,6 @@ raise TypeError("%r is not variable-sized" % (TYPE,)) if n is None and TYPE._arrayfld is not None: raise TypeError("%r is variable-sized" % (TYPE,)) - first, FIRSTTYPE = TYPE._first_struct() for fld, typ in TYPE._flds.items(): if fld == TYPE._arrayfld: value = _array(typ, n, initialization=initialization, @@ -1814,23 +1816,48 @@ raise UninitializedMemoryAccess("%r.%s"%(self, field_name)) return r - # for FixedSizeArray kind of structs: + +class _fixedsizearray(_struct): + def __init__(self, TYPE, n=None, initialization=None, parent=None, + parentindex=None): + _parentable.__init__(self, TYPE) + if n is not None: + raise TypeError("%r is not variable-sized" % (TYPE,)) + typ = TYPE.OF + storage = [] + for i, fld in enumerate(TYPE._names): + value = typ._allocate(initialization=initialization, + parent=self, parentindex=fld) + storage.append(value) + self._items = storage + if parent is not None: + self._setparentstructure(parent, parentindex) def getlength(self): - assert isinstance(self._TYPE, FixedSizeArray) return self._TYPE.length def getbounds(self): return 0, self.getlength() def getitem(self, index, uninitialized_ok=False): - assert isinstance(self._TYPE, FixedSizeArray) - return self._getattr('item%d' % index, uninitialized_ok) + 
assert 0 <= index < self.getlength() + return self._items[index] def setitem(self, index, value): - assert isinstance(self._TYPE, FixedSizeArray) - setattr(self, 'item%d' % index, value) + assert 0 <= index < self.getlength() + self._items[index] = value + def __getattr__(self, name): + # obscure + if name.startswith("item"): + return self.getitem(int(name[len('item'):])) + return _struct.__getattr__(self, name) + + def __setattr__(self, name, value): + if name.startswith("item"): + self.setitem(int(name[len('item'):]), value) + return + _struct.__setattr__(self, name, value) class _array(_parentable): _kind = "array" diff --git a/rpython/rtyper/lltypesystem/module/ll_math.py b/rpython/rtyper/lltypesystem/module/ll_math.py --- a/rpython/rtyper/lltypesystem/module/ll_math.py +++ b/rpython/rtyper/lltypesystem/module/ll_math.py @@ -6,8 +6,8 @@ from rpython.translator import cdir from rpython.rlib import jit, rposix from rpython.rlib.rfloat import INFINITY, NAN, isfinite, isinf, isnan +from rpython.rlib.rposix import UNDERSCORE_ON_WIN32 from rpython.rtyper.lltypesystem import lltype, rffi -from rpython.rtyper.module.support import UNDERSCORE_ON_WIN32 from rpython.tool.sourcetools import func_with_new_name from rpython.translator.tool.cbuild import ExternalCompilationInfo from rpython.translator.platform import platform diff --git a/rpython/rtyper/lltypesystem/test/test_ll2ctypes.py b/rpython/rtyper/lltypesystem/test/test_ll2ctypes.py --- a/rpython/rtyper/lltypesystem/test/test_ll2ctypes.py +++ b/rpython/rtyper/lltypesystem/test/test_ll2ctypes.py @@ -11,12 +11,12 @@ from rpython.rtyper.lltypesystem.ll2ctypes import _llgcopaque from rpython.rtyper.annlowlevel import llhelper from rpython.rlib import rposix +from rpython.rlib.rposix import UNDERSCORE_ON_WIN32 from rpython.translator.tool.cbuild import ExternalCompilationInfo from rpython.translator import cdir from rpython.tool.udir import udir from rpython.rtyper.test.test_llinterp import interpret from 
rpython.annotator.annrpython import RPythonAnnotator -from rpython.rtyper.module.support import UNDERSCORE_ON_WIN32 from rpython.rtyper.rtyper import RPythonTyper from rpython.rlib.rarithmetic import r_uint, get_long_pattern, is_emulated_long from rpython.rlib.rarithmetic import is_valid_int diff --git a/rpython/rtyper/module/__init__.py b/rpython/rtyper/module/__init__.py deleted file mode 100644 --- a/rpython/rtyper/module/__init__.py +++ /dev/null @@ -1,1 +0,0 @@ -# diff --git a/rpython/rtyper/module/support.py b/rpython/rtyper/module/support.py deleted file mode 100644 --- a/rpython/rtyper/module/support.py +++ /dev/null @@ -1,139 +0,0 @@ -import sys - -from rpython.annotator import model as annmodel -from rpython.rtyper.lltypesystem import lltype, rffi -from rpython.rlib.objectmodel import specialize -from rpython.rlib import rstring - -_WIN32 = sys.platform.startswith('win') -UNDERSCORE_ON_WIN32 = '_' if _WIN32 else '' - -# utility conversion functions -class LLSupport: - _mixin_ = True - - def to_rstr(s): - from rpython.rtyper.lltypesystem.rstr import STR, mallocstr - if s is None: - return lltype.nullptr(STR) - p = mallocstr(len(s)) - for i in range(len(s)): - p.chars[i] = s[i] - return p - to_rstr = staticmethod(to_rstr) - - def to_runicode(s): - from rpython.rtyper.lltypesystem.rstr import UNICODE, mallocunicode - if s is None: - return lltype.nullptr(UNICODE) - p = mallocunicode(len(s)) - for i in range(len(s)): - p.chars[i] = s[i] - return p - to_runicode = staticmethod(to_runicode) - - def from_rstr(rs): - if not rs: # null pointer - return None - else: - return ''.join([rs.chars[i] for i in range(len(rs.chars))]) - from_rstr = staticmethod(from_rstr) - - def from_rstr_nonnull(rs): - assert rs - return ''.join([rs.chars[i] for i in range(len(rs.chars))]) - from_rstr_nonnull = staticmethod(from_rstr_nonnull) - - -class StringTraits: - str = str - str0 = annmodel.s_Str0 - CHAR = rffi.CHAR - CCHARP = rffi.CCHARP - charp2str = staticmethod(rffi.charp2str) 
- charpsize2str = staticmethod(rffi.charpsize2str) - scoped_str2charp = staticmethod(rffi.scoped_str2charp) - str2charp = staticmethod(rffi.str2charp) - free_charp = staticmethod(rffi.free_charp) - scoped_alloc_buffer = staticmethod(rffi.scoped_alloc_buffer) - - @staticmethod - def posix_function_name(name): - return UNDERSCORE_ON_WIN32 + name - - @staticmethod - def ll_os_name(name): - return 'll_os.ll_os_' + name - - @staticmethod - @specialize.argtype(0) - def as_str(path): - assert path is not None - if isinstance(path, str): - return path - elif isinstance(path, unicode): - # This never happens in PyPy's Python interpreter! - # Only in raw RPython code that uses unicode strings. - # We implement python2 behavior: silently convert to ascii. - return path.encode('ascii') - else: - return path.as_bytes() - - @staticmethod - @specialize.argtype(0) - def as_str0(path): - res = StringTraits.as_str(path) - rstring.check_str0(res) - return res - -class UnicodeTraits: - str = unicode - str0 = annmodel.s_Unicode0 - CHAR = rffi.WCHAR_T - CCHARP = rffi.CWCHARP - charp2str = staticmethod(rffi.wcharp2unicode) - charpsize2str = staticmethod(rffi.wcharpsize2unicode) - str2charp = staticmethod(rffi.unicode2wcharp) - scoped_str2charp = staticmethod(rffi.scoped_unicode2wcharp) - free_charp = staticmethod(rffi.free_wcharp) - scoped_alloc_buffer = staticmethod(rffi.scoped_alloc_unicodebuffer) - - @staticmethod - def posix_function_name(name): - return UNDERSCORE_ON_WIN32 + 'w' + name - - @staticmethod - @specialize.argtype(0) - def ll_os_name(name): - return 'll_os.ll_os_w' + name - - @staticmethod - @specialize.argtype(0) - def as_str(path): - assert path is not None - if isinstance(path, unicode): - return path - else: - return path.as_unicode() - - @staticmethod - @specialize.argtype(0) - def as_str0(path): - res = UnicodeTraits.as_str(path) - rstring.check_str0(res) - return res - -def ll_strcpy(dst_s, src_s, n): - dstchars = dst_s.chars - srcchars = src_s.chars - i = 0 - 
while i < n: - dstchars[i] = srcchars[i] - i += 1 - -def _ll_strfill(dst_s, srcchars, n): - dstchars = dst_s.chars - i = 0 - while i < n: - dstchars[i] = srcchars[i] - i += 1 diff --git a/rpython/rtyper/module/test/__init__.py b/rpython/rtyper/module/test/__init__.py deleted file mode 100644 --- a/rpython/rtyper/module/test/__init__.py +++ /dev/null @@ -1,1 +0,0 @@ -# diff --git a/rpython/rtyper/module/test/test_ll_strtod.py b/rpython/rtyper/module/test/test_ll_strtod.py deleted file mode 100644 --- a/rpython/rtyper/module/test/test_ll_strtod.py +++ /dev/null @@ -1,13 +0,0 @@ -import py - -from rpython.rtyper.test.tool import BaseRtypingTest -from rpython.rlib import rfloat - -class TestStrtod(BaseRtypingTest): - def test_formatd(self): - for flags in [0, - rfloat.DTSF_ADD_DOT_0]: - def f(y): - return rfloat.formatd(y, 'g', 2, flags) - - assert self.ll_to_string(self.interpret(f, [3.0])) == f(3.0) diff --git a/rpython/rtyper/test/test_rfloat.py b/rpython/rtyper/test/test_rfloat.py --- a/rpython/rtyper/test/test_rfloat.py +++ b/rpython/rtyper/test/test_rfloat.py @@ -204,6 +204,14 @@ res = self.ll_to_string(self.interpret(f, [10/3.0])) assert res == '3.33' + def test_formatd_g(self): + from rpython.rlib import rfloat + for flags in [0, rfloat.DTSF_ADD_DOT_0]: + def f(y): + return rfloat.formatd(y, 'g', 2, flags) + + assert self.ll_to_string(self.interpret(f, [3.0])) == f(3.0) + def test_formatd_repr(self): from rpython.rlib.rfloat import formatd def f(x): diff --git a/rpython/rtyper/test/test_rpbc.py b/rpython/rtyper/test/test_rpbc.py --- a/rpython/rtyper/test/test_rpbc.py +++ b/rpython/rtyper/test/test_rpbc.py @@ -1945,7 +1945,7 @@ def interpret(self, fn, args, **kwds): kwds['config'] = self.config - return TestRPBC.interpret(self, fn, args, **kwds) + return TestRPBC.interpret(fn, args, **kwds) def test_smallfuncsets_basic(): from rpython.translator.translator import TranslationContext, graphof diff --git a/rpython/rtyper/test/tool.py b/rpython/rtyper/test/tool.py 
--- a/rpython/rtyper/test/tool.py +++ b/rpython/rtyper/test/tool.py @@ -5,22 +5,27 @@ class BaseRtypingTest(object): FLOAT_PRECISION = 8 - def gengraph(self, func, argtypes=[], viewbefore='auto', policy=None, + @staticmethod + def gengraph(func, argtypes=[], viewbefore='auto', policy=None, backendopt=False, config=None): return gengraph(func, argtypes, viewbefore, policy, backendopt=backendopt, config=config) - def interpret(self, fn, args, **kwds): + @staticmethod + def interpret(fn, args, **kwds): return interpret(fn, args, **kwds) - def interpret_raises(self, exc, fn, args, **kwds): + @staticmethod + def interpret_raises(exc, fn, args, **kwds): return interpret_raises(exc, fn, args, **kwds) - def float_eq(self, x, y): + @staticmethod + def float_eq(x, y): return x == y - def float_eq_approx(self, x, y): - maxError = 10**-self.FLOAT_PRECISION + @classmethod + def float_eq_approx(cls, x, y): + maxError = 10**-cls.FLOAT_PRECISION if abs(x-y) < maxError: return True @@ -31,45 +36,66 @@ return relativeError < maxError - def is_of_type(self, x, type_): + @staticmethod + def is_of_type(x, type_): return type(x) is type_ - def _skip_llinterpreter(self, reason): + @staticmethod + def _skip_llinterpreter(reason): py.test.skip("lltypesystem doesn't support %s, yet" % reason) - def ll_to_string(self, s): + @staticmethod + def ll_to_string(s): if not s: return None return ''.join(s.chars) - def ll_to_unicode(self, s): + @staticmethod + def ll_to_unicode(s): return u''.join(s.chars) - def string_to_ll(self, s): - from rpython.rtyper.module.support import LLSupport - return LLSupport.to_rstr(s) + @staticmethod + def string_to_ll(s): + from rpython.rtyper.lltypesystem.rstr import STR, mallocstr + if s is None: + return lltype.nullptr(STR) + p = mallocstr(len(s)) + for i in range(len(s)): + p.chars[i] = s[i] + return p - def unicode_to_ll(self, s): - from rpython.rtyper.module.support import LLSupport - return LLSupport.to_runicode(s) + @staticmethod + def unicode_to_ll(s): + 
from rpython.rtyper.lltypesystem.rstr import UNICODE, mallocunicode + if s is None: + return lltype.nullptr(UNICODE) + p = mallocunicode(len(s)) + for i in range(len(s)): + p.chars[i] = s[i] + return p - def ll_to_list(self, l): + @staticmethod + def ll_to_list(l): r = [] items = l.ll_items() for i in range(l.ll_length()): r.append(items[i]) return r - def ll_unpack_tuple(self, t, length): + @staticmethod + def ll_unpack_tuple(t, length): return tuple([getattr(t, 'item%d' % i) for i in range(length)]) - def get_callable(self, fnptr): + @staticmethod + def get_callable(fnptr): return fnptr._obj._callable - def class_name(self, value): + @staticmethod + def class_name(value): return ''.join(value.super.typeptr.name.chars) - def read_attr(self, value, attr_name): + @staticmethod + def read_attr(value, attr_name): value = value._obj while value is not None: attr = getattr(value, "inst_" + attr_name, None) @@ -79,6 +105,7 @@ return attr raise AttributeError() - def is_of_instance_type(self, val): + @staticmethod + def is_of_instance_type(val): T = lltype.typeOf(val) return isinstance(T, lltype.Ptr) and isinstance(T.TO, lltype.GcStruct) From pypy.commits at gmail.com Sun Feb 21 20:10:10 2016 From: pypy.commits at gmail.com (mjacob) Date: Sun, 21 Feb 2016 17:10:10 -0800 (PST) Subject: [pypy-commit] pypy default: Fix translation. Message-ID: <56ca5ff2.8e811c0a.91fcf.60f6@mx.google.com> Author: Manuel Jacob Branch: Changeset: r82379:337c536c54c1 Date: 2016-02-22 02:04 +0100 http://bitbucket.org/pypy/pypy/changeset/337c536c54c1/ Log: Fix translation. 
diff --git a/pypy/interpreter/pyparser/pytokenizer.py b/pypy/interpreter/pyparser/pytokenizer.py --- a/pypy/interpreter/pyparser/pytokenizer.py +++ b/pypy/interpreter/pyparser/pytokenizer.py @@ -261,14 +261,13 @@ token_list.append((tokens.ENDMARKER, '', lnum, pos, line)) return token_list + def universal_newline(line): - if len(line) >= 2: - c0 = line[-2] - c1 = line[-1] - if c0 == '\r' and c1 == '\n': - return line[:-2] + '\n' - if len(line) >= 1: - c = line[-1] - if c == '\r': - return line[:-1] + '\n' + # show annotator that indexes below are non-negative + line_len_m2 = len(line) - 2 + if line_len_m2 >= 0 and line[-2] == '\r' and line[-1] == '\n': + return line[:line_len_m2] + '\n' + line_len_m1 = len(line) - 1 + if line_len_m1 >= 0 and line[-1] == '\r': + return line[:line_len_m1] + '\n' return line From pypy.commits at gmail.com Sun Feb 21 20:10:12 2016 From: pypy.commits at gmail.com (mjacob) Date: Sun, 21 Feb 2016 17:10:12 -0800 (PST) Subject: [pypy-commit] pypy py3k: hg merge default Message-ID: <56ca5ff4.0357c20a.b1898.ffffa163@mx.google.com> Author: Manuel Jacob Branch: py3k Changeset: r82380:688a132b06b6 Date: 2016-02-22 02:04 +0100 http://bitbucket.org/pypy/pypy/changeset/688a132b06b6/ Log: hg merge default diff --git a/pypy/interpreter/pyparser/pytokenizer.py b/pypy/interpreter/pyparser/pytokenizer.py --- a/pypy/interpreter/pyparser/pytokenizer.py +++ b/pypy/interpreter/pyparser/pytokenizer.py @@ -298,14 +298,13 @@ token_list.append((tokens.ENDMARKER, '', lnum, pos, line)) return token_list + def universal_newline(line): - if len(line) >= 2: - c0 = line[-2] - c1 = line[-1] - if c0 == '\r' and c1 == '\n': - return line[:-2] + '\n' - if len(line) >= 1: - c = line[-1] - if c == '\r': - return line[:-1] + '\n' + # show annotator that indexes below are non-negative + line_len_m2 = len(line) - 2 + if line_len_m2 >= 0 and line[-2] == '\r' and line[-1] == '\n': + return line[:line_len_m2] + '\n' + line_len_m1 = len(line) - 1 + if line_len_m1 >= 0 and line[-1] 
== '\r': + return line[:line_len_m1] + '\n' return line From pypy.commits at gmail.com Sun Feb 21 20:10:14 2016 From: pypy.commits at gmail.com (mjacob) Date: Sun, 21 Feb 2016 17:10:14 -0800 (PST) Subject: [pypy-commit] pypy py3.3: hg merge py3k Message-ID: <56ca5ff6.45611c0a.ad355.57c7@mx.google.com> Author: Manuel Jacob Branch: py3.3 Changeset: r82381:9234b0c20972 Date: 2016-02-22 02:05 +0100 http://bitbucket.org/pypy/pypy/changeset/9234b0c20972/ Log: hg merge py3k diff --git a/pypy/interpreter/pyparser/pytokenizer.py b/pypy/interpreter/pyparser/pytokenizer.py --- a/pypy/interpreter/pyparser/pytokenizer.py +++ b/pypy/interpreter/pyparser/pytokenizer.py @@ -298,14 +298,13 @@ token_list.append((tokens.ENDMARKER, '', lnum, pos, line)) return token_list + def universal_newline(line): - if len(line) >= 2: - c0 = line[-2] - c1 = line[-1] - if c0 == '\r' and c1 == '\n': - return line[:-2] + '\n' - if len(line) >= 1: - c = line[-1] - if c == '\r': - return line[:-1] + '\n' + # show annotator that indexes below are non-negative + line_len_m2 = len(line) - 2 + if line_len_m2 >= 0 and line[-2] == '\r' and line[-1] == '\n': + return line[:line_len_m2] + '\n' + line_len_m1 = len(line) - 1 + if line_len_m1 >= 0 and line[-1] == '\r': + return line[:line_len_m1] + '\n' return line From pypy.commits at gmail.com Mon Feb 22 05:10:06 2016 From: pypy.commits at gmail.com (cfbolz) Date: Mon, 22 Feb 2016 02:10:06 -0800 (PST) Subject: [pypy-commit] pypy default: fix fallout from 4ee6b2b67a6b Message-ID: <56cade7e.46fac20a.90d99.353c@mx.google.com> Author: Carl Friedrich Bolz Branch: Changeset: r82382:47e7d8d28c74 Date: 2016-02-22 11:00 +0100 http://bitbucket.org/pypy/pypy/changeset/47e7d8d28c74/ Log: fix fallout from 4ee6b2b67a6b diff --git a/pypy/module/pypyjit/test_pypy_c/test_string.py b/pypy/module/pypyjit/test_pypy_c/test_string.py --- a/pypy/module/pypyjit/test_pypy_c/test_string.py +++ b/pypy/module/pypyjit/test_pypy_c/test_string.py @@ -28,7 +28,6 @@ guard_true(i14, descr=...) 
guard_not_invalidated(descr=...) i16 = int_eq(i6, %d) - guard_false(i16, descr=...) i15 = int_mod(i6, i10) i17 = int_rshift(i15, %d) i18 = int_and(i10, i17) @@ -68,7 +67,6 @@ guard_true(i11, descr=...) guard_not_invalidated(descr=...) i13 = int_eq(i6, %d) # value provided below - guard_false(i13, descr=...) i15 = int_mod(i6, 10) i17 = int_rshift(i15, %d) # value provided below i18 = int_and(10, i17) From pypy.commits at gmail.com Mon Feb 22 05:10:08 2016 From: pypy.commits at gmail.com (cfbolz) Date: Mon, 22 Feb 2016 02:10:08 -0800 (PST) Subject: [pypy-commit] pypy default: (cfbolz, fijal around) kill test belonging to 40f823984bbf Message-ID: <56cade80.2aacc20a.b189d.351f@mx.google.com> Author: Carl Friedrich Bolz Branch: Changeset: r82383:d883e5c610d6 Date: 2016-02-22 11:03 +0100 http://bitbucket.org/pypy/pypy/changeset/d883e5c610d6/ Log: (cfbolz, fijal around) kill test belonging to 40f823984bbf diff --git a/pypy/module/pypyjit/test_pypy_c/test_string.py b/pypy/module/pypyjit/test_pypy_c/test_string.py --- a/pypy/module/pypyjit/test_pypy_c/test_string.py +++ b/pypy/module/pypyjit/test_pypy_c/test_string.py @@ -142,43 +142,6 @@ jump(..., descr=...) """) - def test_getattr_promote(self): - def main(n): - class A(object): - def meth_a(self): - return 1 - def meth_b(self): - return 2 - a = A() - - l = ['a', 'b'] - s = 0 - for i in range(n): - name = 'meth_' + l[i & 1] - meth = getattr(a, name) # ID: getattr - s += meth() - return s - - log = self.run(main, [1000]) - assert log.result == main(1000) - loops = log.loops_by_filename(self.filepath) - assert len(loops) == 1 - for loop in loops: - assert loop.match_by_id('getattr',''' - guard_not_invalidated? 
- i32 = strlen(p31) - i34 = int_add(5, i32) - p35 = newstr(i34) - strsetitem(p35, 0, 109) - strsetitem(p35, 1, 101) - strsetitem(p35, 2, 116) - strsetitem(p35, 3, 104) - strsetitem(p35, 4, 95) - copystrcontent(p31, p35, 0, 5, i32) - i49 = call_i(ConstClass(_ll_2_str_eq_nonnull__rpy_stringPtr_rpy_stringPtr), p35, ConstPtr(ptr48), descr=) - guard_value(i49, 1, descr=...) - ''') - def test_remove_duplicate_method_calls(self): def main(n): lst = [] From pypy.commits at gmail.com Mon Feb 22 05:45:15 2016 From: pypy.commits at gmail.com (mattip) Date: Mon, 22 Feb 2016 02:45:15 -0800 (PST) Subject: [pypy-commit] pypy.org extradoc: close numpy donation campaign Message-ID: <56cae6bb.e906c20a.22696.44e8@mx.google.com> Author: mattip Branch: extradoc Changeset: r705:62fbad1ffe3d Date: 2016-02-22 11:44 +0100 http://bitbucket.org/pypy/pypy.org/changeset/62fbad1ffe3d/ Log: close numpy donation campaign diff --git a/don1.html b/don1.html --- a/don1.html +++ b/don1.html @@ -3,7 +3,7 @@ Donate towards STM in pypy
    Donate towards py3k in pypy
    Donate towards general pypy progress
    - Donate towards NumPy in pypy
    +
  • - $62907 of $105000 (59.9%) + $62926 of $105000 (59.9%)
    @@ -23,7 +23,7 @@
  • From pypy.commits at gmail.com Tue Feb 23 12:02:13 2016 From: pypy.commits at gmail.com (cfbolz) Date: Tue, 23 Feb 2016 09:02:13 -0800 (PST) Subject: [pypy-commit] pypy default: (fijal, cfbolz): make it possible to insert enter_portal_frame and Message-ID: <56cc9095.85b01c0a.2f328.5c54@mx.google.com> Author: Carl Friedrich Bolz Branch: Changeset: r82453:97c9937d38ad Date: 2016-02-23 18:00 +0100 http://bitbucket.org/pypy/pypy/changeset/97c9937d38ad/ Log: (fijal, cfbolz): make it possible to insert enter_portal_frame and leave_portal_frame explicitly, for the weird interpreters that need that. diff --git a/rpython/jit/codewriter/jtransform.py b/rpython/jit/codewriter/jtransform.py --- a/rpython/jit/codewriter/jtransform.py +++ b/rpython/jit/codewriter/jtransform.py @@ -2042,6 +2042,11 @@ self.vable_flags[op.args[0]] = op.args[2].value return [] + def rewrite_op_jit_enter_portal_frame(self, op): + return [op] + def rewrite_op_jit_leave_portal_frame(self, op): + return [op] + # --------- # ll_math.sqrt_nonneg() diff --git a/rpython/jit/metainterp/blackhole.py b/rpython/jit/metainterp/blackhole.py --- a/rpython/jit/metainterp/blackhole.py +++ b/rpython/jit/metainterp/blackhole.py @@ -944,6 +944,14 @@ pass @arguments("i") + def bhimpl_jit_enter_portal_frame(x): + pass + + @arguments() + def bhimpl_jit_leave_portal_frame(): + pass + + @arguments("i") def bhimpl_int_assert_green(x): pass @arguments("r") diff --git a/rpython/jit/metainterp/pyjitpl.py b/rpython/jit/metainterp/pyjitpl.py --- a/rpython/jit/metainterp/pyjitpl.py +++ b/rpython/jit/metainterp/pyjitpl.py @@ -1358,6 +1358,17 @@ self.metainterp.attach_debug_info(op) @arguments("box") + def opimpl_jit_enter_portal_frame(self, uniqueidbox): + unique_id = uniqueidbox.getint() + jd_no = self.metainterp.jitdriver_sd.mainjitcode.index # fish + self.metainterp.enter_portal_frame(jd_no, unique_id) + + @arguments() + def opimpl_jit_leave_portal_frame(self): + jd_no = self.metainterp.jitdriver_sd.mainjitcode.index # fish 
+ self.metainterp.leave_portal_frame(jd_no) + + @arguments("box") def _opimpl_assert_green(self, box): if not isinstance(box, Const): msg = "assert_green failed at %s:%d" % ( diff --git a/rpython/jit/metainterp/test/test_jitdriver.py b/rpython/jit/metainterp/test/test_jitdriver.py --- a/rpython/jit/metainterp/test/test_jitdriver.py +++ b/rpython/jit/metainterp/test/test_jitdriver.py @@ -213,6 +213,21 @@ if op.getopname() == 'enter_portal_frame': assert op.getarg(0).getint() == 0 assert op.getarg(1).getint() == 1 - + + def test_manual_leave_enter_portal_frame(self): + from rpython.rlib import jit + driver = JitDriver(greens=[], reds='auto', is_recursive=True) + + def f(arg): + i = 0 + while i < 100: + driver.jit_merge_point() + jit.enter_portal_frame(42) + jit.leave_portal_frame() + i += 1 + + self.meta_interp(f, [0]) + self.check_resops(enter_portal_frame=1, leave_portal_frame=1) + class TestLLtype(MultipleJitDriversTests, LLJitMixin): pass diff --git a/rpython/rlib/jit.py b/rpython/rlib/jit.py --- a/rpython/rlib/jit.py +++ b/rpython/rlib/jit.py @@ -1168,6 +1168,24 @@ hop.exception_is_here() return hop.genop('jit_conditional_call', args_v) +def enter_portal_frame(unique_id): + """call this when starting to interpret a function. calling this is not + necessary for almost all interpreters. The only exception is stackless + interpreters where the portal never calls itself. + """ + from rpython.rtyper.lltypesystem import lltype + from rpython.rtyper.lltypesystem.lloperation import llop + llop.jit_enter_portal_frame(lltype.Void, unique_id) + +def leave_portal_frame(): + """call this after the end of executing a function. calling this is not + necessary for almost all interpreters. The only exception is stackless + interpreters where the portal never calls itself. 
+ """ + from rpython.rtyper.lltypesystem import lltype + from rpython.rtyper.lltypesystem.lloperation import llop + llop.jit_leave_portal_frame(lltype.Void) + class Counters(object): counters=""" TRACING diff --git a/rpython/rlib/test/test_jit.py b/rpython/rlib/test/test_jit.py --- a/rpython/rlib/test/test_jit.py +++ b/rpython/rlib/test/test_jit.py @@ -4,7 +4,8 @@ from rpython.annotator.model import UnionError from rpython.rlib.jit import (hint, we_are_jitted, JitDriver, elidable_promote, JitHintError, oopspec, isconstant, conditional_call, - elidable, unroll_safe, dont_look_inside) + elidable, unroll_safe, dont_look_inside, + enter_portal_frame, leave_portal_frame) from rpython.rlib.rarithmetic import r_uint from rpython.rtyper.test.tool import BaseRtypingTest from rpython.rtyper.lltypesystem import lltype @@ -300,3 +301,11 @@ mix = MixLevelHelperAnnotator(t.rtyper) mix.getgraph(later, [annmodel.s_Bool], annmodel.s_None) mix.finish() + + def test_enter_leave_portal_frame(self): + from rpython.translator.interactive import Translation + def g(): + enter_portal_frame(1) + leave_portal_frame() + t = Translation(g, []) + t.compile_c() # does not crash diff --git a/rpython/rtyper/lltypesystem/lloperation.py b/rpython/rtyper/lltypesystem/lloperation.py --- a/rpython/rtyper/lltypesystem/lloperation.py +++ b/rpython/rtyper/lltypesystem/lloperation.py @@ -453,6 +453,8 @@ 'jit_record_exact_class' : LLOp(canrun=True), 'jit_ffi_save_result': LLOp(canrun=True), 'jit_conditional_call': LLOp(), + 'jit_enter_portal_frame': LLOp(canrun=True), + 'jit_leave_portal_frame': LLOp(canrun=True), 'get_exception_addr': LLOp(), 'get_exc_value_addr': LLOp(), 'do_malloc_fixedsize':LLOp(canmallocgc=True), diff --git a/rpython/rtyper/lltypesystem/opimpl.py b/rpython/rtyper/lltypesystem/opimpl.py --- a/rpython/rtyper/lltypesystem/opimpl.py +++ b/rpython/rtyper/lltypesystem/opimpl.py @@ -624,6 +624,12 @@ def op_jit_ffi_save_result(*args): pass +def op_jit_enter_portal_frame(x): + pass + +def 
op_jit_leave_portal_frame(): + pass + def op_get_group_member(TYPE, grpptr, memberoffset): from rpython.rtyper.lltypesystem import llgroup assert isinstance(memberoffset, llgroup.GroupMemberOffset) diff --git a/rpython/translator/c/funcgen.py b/rpython/translator/c/funcgen.py --- a/rpython/translator/c/funcgen.py +++ b/rpython/translator/c/funcgen.py @@ -842,6 +842,12 @@ def OP_JIT_FFI_SAVE_RESULT(self, op): return '/* JIT_FFI_SAVE_RESULT %s */' % op + def OP_JIT_ENTER_PORTAL_FRAME(self, op): + return '/* JIT_ENTER_PORTAL_FRAME %s */' % op + + def OP_JIT_LEAVE_PORTAL_FRAME(self, op): + return '/* JIT_LEAVE_PORTAL_FRAME %s */' % op + def OP_GET_GROUP_MEMBER(self, op): typename = self.db.gettype(op.result.concretetype) return '%s = (%s)_OP_GET_GROUP_MEMBER(%s, %s);' % ( From pypy.commits at gmail.com Tue Feb 23 12:02:19 2016 From: pypy.commits at gmail.com (arigo) Date: Tue, 23 Feb 2016 09:02:19 -0800 (PST) Subject: [pypy-commit] pypy.org extradoc: update the values Message-ID: <56cc909b.865a1c0a.de9bd.0821@mx.google.com> Author: Armin Rigo Branch: extradoc Changeset: r708:944dcc74ad0a Date: 2016-02-23 18:02 +0100 http://bitbucket.org/pypy/pypy.org/changeset/944dcc74ad0a/ Log: update the values diff --git a/don1.html b/don1.html --- a/don1.html +++ b/don1.html @@ -15,7 +15,7 @@ - $62907 of $105000 (59.9%) + $62926 of $105000 (59.9%)
    @@ -23,7 +23,7 @@
  • From pypy.commits at gmail.com Tue Feb 23 12:04:27 2016 From: pypy.commits at gmail.com (arigo) Date: Tue, 23 Feb 2016 09:04:27 -0800 (PST) Subject: [pypy-commit] pypy.org pypy3-update: Backed out changeset ab7abb3e75bb Message-ID: <56cc911b.046f1c0a.c7219.4e11@mx.google.com> Author: Armin Rigo Branch: pypy3-update Changeset: r709:3041468d1ea7 Date: 2016-02-23 18:04 +0100 http://bitbucket.org/pypy/pypy.org/changeset/3041468d1ea7/ Log: Backed out changeset ab7abb3e75bb diff --git a/don1.html b/don1.html --- a/don1.html +++ b/don1.html @@ -15,7 +15,7 @@ - $62926 of $105000 (59.9%) + $62907 of $105000 (59.9%)
    @@ -23,7 +23,7 @@
  • From pypy.commits at gmail.com Tue Feb 23 12:20:19 2016 From: pypy.commits at gmail.com (rlamy) Date: Tue, 23 Feb 2016 09:20:19 -0800 (PST) Subject: [pypy-commit] pypy cpyext-ext: Manually expand obscure macro Message-ID: <56cc94d3.02931c0a.1a676.50f0@mx.google.com> Author: Ronan Lamy Branch: cpyext-ext Changeset: r82454:58459ae8908d Date: 2016-02-23 18:17 +0100 http://bitbucket.org/pypy/pypy/changeset/58459ae8908d/ Log: Manually expand obscure macro diff --git a/pypy/module/cpyext/test/foo.c b/pypy/module/cpyext/test/foo.c --- a/pypy/module/cpyext/test/foo.c +++ b/pypy/module/cpyext/test/foo.c @@ -182,7 +182,7 @@ {"float_member", T_FLOAT, offsetof(fooobject, foo_float), 0, NULL}, {"double_member", T_DOUBLE, offsetof(fooobject, foo_double), 0, NULL}, {"longlong_member", T_LONGLONG, offsetof(fooobject, foo_longlong), 0, NULL}, - {"ulonglong_member", T_ULONGLONG, offsetof(fooobject, foo_ulonglong), 0, NULL}, + {"ulonglong_member", T_ULONGLONG, offsetof(fooobject, foo_ulonglong), 0, NULL}, {"ssizet_member", T_PYSSIZET, offsetof(fooobject, foo_ssizet), 0, NULL}, {NULL} /* Sentinel */ }; @@ -450,7 +450,7 @@ if ((foop = newfooobject()) == NULL) { return NULL; } - + return (PyObject *)foop; } @@ -666,7 +666,7 @@ PyMethodDescr_TypePtr == PyGetSetDescr_TypePtr || PyMemberDescr_TypePtr == PyGetSetDescr_TypePtr) { - PyErr_Format(PyExc_RuntimeError, + PyErr_Format(PyExc_RuntimeError, "at least two of the 'Py{Method,Member,GetSet}Descr_Type's are the same\n" "(in cmp_docstring %s %d)", __FILE__, __LINE__); return NULL; @@ -695,7 +695,20 @@ _CMPDOC(CFunction, new->m_ml->ml_doc, new->m_ml->ml_name); } else if (_TESTDOC1(Type)) { - _CMPDOC(Type, new->tp_doc, new->tp_name); + PyTypeObject *new = (PyTypeObject *)obj; + if (!(new->tp_doc)) { + PyErr_Format(PyExc_RuntimeError, "Type '%s' %s", new->tp_name, msg); + return NULL; + } + else { + if (strcmp(new->tp_doc, docstr) != 0) + { + PyErr_Format(PyExc_RuntimeError, + "%s's docstring '%s' is not '%s'", + new->tp_name, 
new->tp_doc, docstr); + return NULL; + } + } } else if (_TESTDOC2(MemberDescr)) { _CMPDOC(MemberDescr, new->d_member->doc, new->d_member->name); @@ -718,13 +731,13 @@ attr_as_str = PyString_AS_STRING(doc_attr); if (strcmp(attr_as_str, docstr) != 0) - { - PyErr_Format(PyExc_RuntimeError, - "objects's docstring '%s' is not '%s'", - attr_as_str, docstr); + { + PyErr_Format(PyExc_RuntimeError, + "objects's docstring '%s' is not '%s'", + attr_as_str, docstr); Py_XDECREF(doc_attr); - return NULL; - } + return NULL; + } Py_XDECREF(doc_attr); Py_RETURN_NONE; } @@ -782,7 +795,7 @@ return; if (PyType_Ready(&SimplePropertyType) < 0) return; - + SimplePropertyType.tp_new = PyType_GenericNew; InitErrType.tp_new = PyType_GenericNew; From pypy.commits at gmail.com Tue Feb 23 12:21:02 2016 From: pypy.commits at gmail.com (fijal) Date: Tue, 23 Feb 2016 09:21:02 -0800 (PST) Subject: [pypy-commit] pypy default: look inside tuple hash, improves mdp Message-ID: <56cc94fe.455e1c0a.e1938.ffff842f@mx.google.com> Author: fijal Branch: Changeset: r82456:3c94bed8d07e Date: 2016-02-23 18:20 +0100 http://bitbucket.org/pypy/pypy/changeset/3c94bed8d07e/ Log: look inside tuple hash, improves mdp diff --git a/pypy/objspace/std/tupleobject.py b/pypy/objspace/std/tupleobject.py --- a/pypy/objspace/std/tupleobject.py +++ b/pypy/objspace/std/tupleobject.py @@ -30,6 +30,11 @@ contains_jmp = jit.JitDriver(greens = ['tp'], reds = 'auto', name = 'tuple.contains') +hash_driver = jit.JitDriver( + name='tuple.hash', + greens=['w_type'], + reds='auto') + class W_AbstractTupleObject(W_Root): __slots__ = () @@ -262,8 +267,14 @@ def length(self): return len(self.wrappeditems) - @jit.look_inside_iff(lambda self, _1: _unroll_condition(self)) def descr_hash(self, space): + if _unroll_condition(self): + return self._descr_hash_unroll(space) + else: + return self._descr_hash_jitdriver(space) + + @jit.unroll_safe + def _descr_hash_unroll(self, space): mult = 1000003 x = 0x345678 z = len(self.wrappeditems) @@ -275,6 
+286,20 @@ x += 97531 return space.wrap(intmask(x)) + def _descr_hash_jitdriver(self, space): + mult = 1000003 + x = 0x345678 + z = len(self.wrappeditems) + w_type = space.type(self.wrappeditems[0]) + for w_item in self.wrappeditems: + hash_driver.jit_merge_point(w_type=w_type) + y = space.hash_w(w_item) + x = (x ^ y) * mult + z -= 1 + mult += 82520 + z + z + x += 97531 + return space.wrap(intmask(x)) + def descr_eq(self, space, w_other): if not isinstance(w_other, W_AbstractTupleObject): return space.w_NotImplemented diff --git a/rpython/rlib/jit.py b/rpython/rlib/jit.py --- a/rpython/rlib/jit.py +++ b/rpython/rlib/jit.py @@ -284,7 +284,7 @@ def loop_unrolling_heuristic(lst, size, cutoff=2): """ In which cases iterating over items of lst can be unrolled """ - return isvirtual(lst) or (isconstant(size) and size <= cutoff) + return size == 0 or isvirtual(lst) or (isconstant(size) and size <= cutoff) class Entry(ExtRegistryEntry): _about_ = hint From pypy.commits at gmail.com Tue Feb 23 12:21:01 2016 From: pypy.commits at gmail.com (fijal) Date: Tue, 23 Feb 2016 09:21:01 -0800 (PST) Subject: [pypy-commit] pypy look-inside-tuple-hash: close to be merged branch Message-ID: <56cc94fd.49f9c20a.6cc7.6a07@mx.google.com> Author: fijal Branch: look-inside-tuple-hash Changeset: r82455:6b597b2e2fae Date: 2016-02-23 18:19 +0100 http://bitbucket.org/pypy/pypy/changeset/6b597b2e2fae/ Log: close to be merged branch From pypy.commits at gmail.com Tue Feb 23 12:23:20 2016 From: pypy.commits at gmail.com (fijal) Date: Tue, 23 Feb 2016 09:23:20 -0800 (PST) Subject: [pypy-commit] pypy default: bump the years Message-ID: <56cc9588.6bb8c20a.27bc4.ffffa250@mx.google.com> Author: fijal Branch: Changeset: r82457:39e6f53a3270 Date: 2016-02-23 18:22 +0100 http://bitbucket.org/pypy/pypy/changeset/39e6f53a3270/ Log: bump the years diff --git a/pypy/module/sys/app.py b/pypy/module/sys/app.py --- a/pypy/module/sys/app.py +++ b/pypy/module/sys/app.py @@ -70,11 +70,11 @@ return None copyright_str = 
""" -Copyright 2003-2014 PyPy development team. +Copyright 2003-2016 PyPy development team. All Rights Reserved. For further information, see -Portions Copyright (c) 2001-2014 Python Software Foundation. +Portions Copyright (c) 2001-2016 Python Software Foundation. All Rights Reserved. Portions Copyright (c) 2000 BeOpen.com. From pypy.commits at gmail.com Tue Feb 23 12:33:33 2016 From: pypy.commits at gmail.com (cfbolz) Date: Tue, 23 Feb 2016 09:33:33 -0800 (PST) Subject: [pypy-commit] pypy statistics-maps: update contributor list Message-ID: <56cc97ed.d4e41c0a.15ec3.5210@mx.google.com> Author: Carl Friedrich Bolz Branch: statistics-maps Changeset: r82458:6ed007073e26 Date: 2016-02-23 18:32 +0100 http://bitbucket.org/pypy/pypy/changeset/6ed007073e26/ Log: update contributor list diff --git a/LICENSE b/LICENSE --- a/LICENSE +++ b/LICENSE @@ -41,29 +41,29 @@ Amaury Forgeot d'Arc Antonio Cuni Samuele Pedroni + Matti Picus Alex Gaynor Brian Kearns - Matti Picus Philip Jenvey Michael Hudson + Ronan Lamy David Schneider + Manuel Jacob Holger Krekel Christian Tismer Hakan Ardo - Manuel Jacob - Ronan Lamy Benjamin Peterson + Richard Plangger Anders Chrigstrom Eric van Riet Paap Wim Lavrijsen - Richard Plangger Richard Emslie Alexander Schremmer Dan Villiom Podlaski Christiansen + Remi Meier Lukas Diekmann Sven Hager Anders Lehmann - Remi Meier Aurelien Campeas Niklaus Haldimann Camillo Bruni @@ -72,8 +72,8 @@ Romain Guillebert Leonardo Santagada Seo Sanghyeon + Ronny Pfannschmidt Justin Peel - Ronny Pfannschmidt David Edelsohn Anders Hammarquist Jakub Gustak @@ -95,6 +95,7 @@ Tyler Wade Michael Foord Stephan Diehl + Vincent Legoll Stefan Schwarzer Valentino Volonghi Tomek Meka @@ -105,9 +106,9 @@ Jean-Paul Calderone Timo Paulssen Squeaky + Marius Gedminas Alexandre Fayolle Simon Burton - Marius Gedminas Martin Matusiak Konstantin Lopuhin Wenzhu Man @@ -116,16 +117,20 @@ Ivan Sichmann Freitas Greg Price Dario Bertini + Stefano Rivera Mark Pearse Simon Cross Andreas Stührk - 
Stefano Rivera + Edd Barrett Jean-Philippe St. Pierre Guido van Rossum Pavel Vinogradov + Jeremy Thurgood Paweł Piotr Przeradowski + Spenser Bauman Paul deGrandis Ilya Osadchiy + marky1991 Tobias Oberstein Adrian Kuhn Boris Feigin @@ -134,14 +139,12 @@ Georg Brandl Bert Freudenberg Stian Andreassen - Edd Barrett + Tobias Pape Wanja Saatkamp Gerald Klix Mike Blume - Tobias Pape Oscar Nierstrasz Stefan H. Muller - Jeremy Thurgood Rami Chowdhury Eugene Oden Henry Mason @@ -153,6 +156,8 @@ Lukas Renggli Guenter Jantzen Ned Batchelder + Tim Felgentreff + Anton Gulenko Amit Regmi Ben Young Nicolas Chauvat @@ -162,12 +167,12 @@ Nicholas Riley Jason Chu Igor Trindade Oliveira - Tim Felgentreff + Yichao Yu Rocco Moretti Gintautas Miliauskas Michael Twomey Lucian Branescu Mihaila - Yichao Yu + Devin Jeanpierre Gabriel Lavoie Olivier Dormond Jared Grubb @@ -191,33 +196,33 @@ Stanislaw Halik Mikael Schönenberg Berkin Ilbeyi - Elmo M?ntynen + Elmo Mäntynen + Faye Zhao Jonathan David Riehl Anders Qvist Corbin Simpson Chirag Jadwani Beatrice During Alex Perry - Vincent Legoll + Vaibhav Sood Alan McIntyre - Spenser Bauman + William Leslie Alexander Sedov Attila Gobi + Jasper.Schulz Christopher Pope - Devin Jeanpierre - Vaibhav Sood Christian Tismer Marc Abramowitz Dan Stromberg Arjun Naik Valentina Mukhamedzhanova Stefano Parmesan + Mark Young Alexis Daboville Jens-Uwe Mager Carl Meyer Karl Ramm Pieter Zieschang - Anton Gulenko Gabriel Lukas Vacek Andrew Dalke @@ -225,6 +230,7 @@ Jakub Stasiak Nathan Taylor Vladimir Kryachko + Omer Katz Jacek Generowicz Alejandro J. Cura Jacob Oscarson @@ -239,6 +245,7 @@ Lars Wassermann Philipp Rustemeuer Henrik Vendelbo + Richard Lancaster Dan Buch Miguel de Val Borro Artur Lisiecki @@ -250,18 +257,18 @@ Tomo Cocoa Kim Jin Su Toni Mattis + Amber Brown Lucas Stadler Julian Berman Markus Holtermann roberto at goyle Yury V. 
Zaytsev Anna Katrina Dominguez - William Leslie Bobby Impollonia - Faye Zhao timo at eistee.fritz.box Andrew Thompson Yusei Tahara + Aaron Tubbs Ben Darnell Roberto De Ioris Juan Francisco Cantero Hurtado @@ -273,6 +280,7 @@ Christopher Armstrong Michael Hudson-Doyle Anders Sigfridsson + Nikolay Zinov Yasir Suhail Jason Michalski rafalgalczynski at gmail.com @@ -282,6 +290,7 @@ Gustavo Niemeyer Stephan Busemann Rafał Gałczyński + Matt Bogosian Christian Muirhead Berker Peksag James Lan @@ -316,9 +325,9 @@ Stefan Marr jiaaro Mads Kiilerich - Richard Lancaster opassembler.py Antony Lee + Jason Madden Yaroslav Fedevych Jim Hunziker Markus Unterwaditzer @@ -327,6 +336,7 @@ squeaky Zearin soareschen + Jonas Pfannschmidt Kurt Griffiths Mike Bayer Matthew Miller diff --git a/pypy/doc/tool/makecontributor.py b/pypy/doc/tool/makecontributor.py --- a/pypy/doc/tool/makecontributor.py +++ b/pypy/doc/tool/makecontributor.py @@ -72,6 +72,7 @@ 'Anton Gulenko':['anton gulenko', 'anton_gulenko'], 'Richard Lancaster':['richardlancaster'], 'William Leslie':['William ML Leslie'], + 'Spenser Bauman':['Spenser Andrew Bauman'], } alias_map = {} diff --git a/pypy/objspace/std/mapdict.py b/pypy/objspace/std/mapdict.py --- a/pypy/objspace/std/mapdict.py +++ b/pypy/objspace/std/mapdict.py @@ -159,12 +159,12 @@ if cls is W_IntObject: # this means that the class stored in the storage is an # IntMutableCell - jit.jit_debug("map: known class int", objectmodel.compute_unique_id(attr)) + jit.jit_debug("map: read known class int", objectmodel.compute_unique_id(attr)) assert isinstance(result, IntMutableCell) return W_IntObject(result.intvalue) if cls is W_FloatObject: # ditto - jit.jit_debug("map: known class float", objectmodel.compute_unique_id(attr)) + jit.jit_debug("map: read known class float", objectmodel.compute_unique_id(attr)) assert isinstance(result, FloatMutableCell) return W_FloatObject(result.floatvalue) jit.jit_debug("map: recorded exact class", objectmodel.compute_unique_id(attr)) 
diff --git a/pypy/tool/mapstatsdot.py b/pypy/tool/mapstatsdot.py --- a/pypy/tool/mapstatsdot.py +++ b/pypy/tool/mapstatsdot.py @@ -1,5 +1,6 @@ #! /usr/bin/env python import sys +sys.setrecursionlimit(100000) class Getattrwrap(object): def __init__(self, obj): @@ -61,7 +62,7 @@ seen.add(self) if hasattr(self, 'back'): if self not in self.back.transitions: - output.edge(self.back.id, self.id, dir="none") + output.edge(self.back.id, self.id, dir="back") self.back.dot(output, seen) if not self.instances: return @@ -70,7 +71,10 @@ fillcolor=self.getfillcolor()) for next, count in self.transitions.iteritems(): next.dot(output, seen) - output.edge(self.id, next.id, label=str(count)) + args = {} + if getattr(next, 'back', None) is not self: + args = dict(style="dotted") + output.edge(self.id, next.id, label=str(count), **args) return node def getfillcolor(self): @@ -80,13 +84,22 @@ class Terminator(Map): + def __repr__(self): + return "Terminator(%s)" % (self.w_cls) def fill(self, content): Map.fill(self, content) self.w_cls = content.w_cls + self.w_cls_module = content.w_cls_module def getlabel(self): + if self.w_cls_module is not None: + return self.w_cls + "\\l" + self.w_cls_module return self.w_cls + def get_chain(self): + return [self] + + class Attribute(Map): def fill(self, content): Map.fill(self, content) @@ -128,6 +141,14 @@ assert int(index) == self.nametype self.reads = count + def get_chain(self): + l = [] + while isinstance(self, Attribute): + l.append((self.name, self.nametype)) + self = self.back + l.reverse() + return self.get_chain() + l + def getlabel(self): if self.nametype == 0: name = self.name @@ -143,7 +164,7 @@ for write, count in self.writes.items(): label.append(" %s: %s" % (write, count)) if self.number_unnecessary_writes and self.constant: - assert len(self.writes) == 1 + assert len(self.writes) <= 1 label[-1] += " (%s unnecessary)" % (self.number_unnecessary_writes, ) if not self.ever_mutated: label.append('immutable') @@ -193,7 +214,21 @@ 
goodattrs = 0 unnecessary = 0 + seen_sorted_chains = set() + duplicate_orders = 0 + duplicate_order_reads = 0 + all_instances = 0 + for mp in allmaps: + chain = mp.get_chain() + chain.sort() + if tuple(chain) in seen_sorted_chains: + duplicate_orders += 1 + duplicate_order_reads += mp.reads + print >> sys.stderr, chain, mp.instances + else: + seen_sorted_chains.add(tuple(chain)) + if not isinstance(mp, Attribute): continue totalwrites += sum(mp.writes.values()) @@ -210,6 +245,8 @@ print >> sys.stderr, "reads:", totalreads, goodreads, float(goodreads) / totalreads print >> sys.stderr, "writes:", totalwrites, goodwrites, float(goodwrites) / totalwrites print >> sys.stderr, "unnecessary writes:", unnecessary, totalwrites, float(unnecessary) / totalwrites + print >> sys.stderr, "wrongly ordered:", duplicate_orders, totalattrs, float(duplicate_orders) / totalattrs + print >> sys.stderr, "wrongly ordered reads:", duplicate_order_reads, totalreads, float(duplicate_order_reads) / totalreads print >> sys.stderr, "attrs:", totalattrs, goodattrs, float(goodattrs) / totalattrs print >> sys.stderr, "reads / writes", float(totalreads) / totalwrites diff --git a/rpython/jit/metainterp/logger.py b/rpython/jit/metainterp/logger.py --- a/rpython/jit/metainterp/logger.py +++ b/rpython/jit/metainterp/logger.py @@ -6,6 +6,7 @@ from rpython.rlib.rarithmetic import r_uint from rpython.rtyper.lltypesystem import lltype, llmemory, rffi +JIT_LOG_VERSION = 1.0 class Logger(object): def __init__(self, metainterp_sd, guard_number=False): diff --git a/rpython/jit/metainterp/warmspot.py b/rpython/jit/metainterp/warmspot.py --- a/rpython/jit/metainterp/warmspot.py +++ b/rpython/jit/metainterp/warmspot.py @@ -1030,7 +1030,11 @@ checkgraph(origportalgraph) def add_finish(self): + from rpython.jit.metainterp.logger import JIT_LOG_VERSION def finish(): + debug_start("jit-log-opt-version") + debug_print(JIT_LOG_VERSION) + debug_stop("jit-log-opt-version") if self.metainterp_sd.profiler.initialized: 
self.metainterp_sd.profiler.finish() self.metainterp_sd.cpu.finish_once() From pypy.commits at gmail.com Tue Feb 23 12:40:37 2016 From: pypy.commits at gmail.com (fijal) Date: Tue, 23 Feb 2016 09:40:37 -0800 (PST) Subject: [pypy-commit] pypy default: transplant sensible parts of 6ed007073e26; Message-ID: <56cc9995.8205c20a.16a79.ffffae7f@mx.google.com> Author: fijal Branch: Changeset: r82459:7d17d8205bfd Date: 2016-02-23 18:39 +0100 http://bitbucket.org/pypy/pypy/changeset/7d17d8205bfd/ Log: transplant sensible parts of 6ed007073e26; diff --git a/LICENSE b/LICENSE --- a/LICENSE +++ b/LICENSE @@ -41,29 +41,29 @@ Amaury Forgeot d'Arc Antonio Cuni Samuele Pedroni + Matti Picus Alex Gaynor Brian Kearns - Matti Picus Philip Jenvey Michael Hudson + Ronan Lamy David Schneider + Manuel Jacob Holger Krekel Christian Tismer Hakan Ardo - Manuel Jacob - Ronan Lamy Benjamin Peterson + Richard Plangger Anders Chrigstrom Eric van Riet Paap Wim Lavrijsen - Richard Plangger Richard Emslie Alexander Schremmer Dan Villiom Podlaski Christiansen + Remi Meier Lukas Diekmann Sven Hager Anders Lehmann - Remi Meier Aurelien Campeas Niklaus Haldimann Camillo Bruni @@ -72,8 +72,8 @@ Romain Guillebert Leonardo Santagada Seo Sanghyeon + Ronny Pfannschmidt Justin Peel - Ronny Pfannschmidt David Edelsohn Anders Hammarquist Jakub Gustak @@ -95,6 +95,7 @@ Tyler Wade Michael Foord Stephan Diehl + Vincent Legoll Stefan Schwarzer Valentino Volonghi Tomek Meka @@ -105,9 +106,9 @@ Jean-Paul Calderone Timo Paulssen Squeaky + Marius Gedminas Alexandre Fayolle Simon Burton - Marius Gedminas Martin Matusiak Konstantin Lopuhin Wenzhu Man @@ -116,16 +117,20 @@ Ivan Sichmann Freitas Greg Price Dario Bertini + Stefano Rivera Mark Pearse Simon Cross Andreas Stührk - Stefano Rivera + Edd Barrett Jean-Philippe St. 
Pierre Guido van Rossum Pavel Vinogradov + Jeremy Thurgood Paweł Piotr Przeradowski + Spenser Bauman Paul deGrandis Ilya Osadchiy + marky1991 Tobias Oberstein Adrian Kuhn Boris Feigin @@ -134,14 +139,12 @@ Georg Brandl Bert Freudenberg Stian Andreassen - Edd Barrett + Tobias Pape Wanja Saatkamp Gerald Klix Mike Blume - Tobias Pape Oscar Nierstrasz Stefan H. Muller - Jeremy Thurgood Rami Chowdhury Eugene Oden Henry Mason @@ -153,6 +156,8 @@ Lukas Renggli Guenter Jantzen Ned Batchelder + Tim Felgentreff + Anton Gulenko Amit Regmi Ben Young Nicolas Chauvat @@ -162,12 +167,12 @@ Nicholas Riley Jason Chu Igor Trindade Oliveira - Tim Felgentreff + Yichao Yu Rocco Moretti Gintautas Miliauskas Michael Twomey Lucian Branescu Mihaila - Yichao Yu + Devin Jeanpierre Gabriel Lavoie Olivier Dormond Jared Grubb @@ -191,33 +196,33 @@ Stanislaw Halik Mikael Schönenberg Berkin Ilbeyi - Elmo M?ntynen + Elmo Mäntynen + Faye Zhao Jonathan David Riehl Anders Qvist Corbin Simpson Chirag Jadwani Beatrice During Alex Perry - Vincent Legoll + Vaibhav Sood Alan McIntyre - Spenser Bauman + William Leslie Alexander Sedov Attila Gobi + Jasper.Schulz Christopher Pope - Devin Jeanpierre - Vaibhav Sood Christian Tismer Marc Abramowitz Dan Stromberg Arjun Naik Valentina Mukhamedzhanova Stefano Parmesan + Mark Young Alexis Daboville Jens-Uwe Mager Carl Meyer Karl Ramm Pieter Zieschang - Anton Gulenko Gabriel Lukas Vacek Andrew Dalke @@ -225,6 +230,7 @@ Jakub Stasiak Nathan Taylor Vladimir Kryachko + Omer Katz Jacek Generowicz Alejandro J. Cura Jacob Oscarson @@ -239,6 +245,7 @@ Lars Wassermann Philipp Rustemeuer Henrik Vendelbo + Richard Lancaster Dan Buch Miguel de Val Borro Artur Lisiecki @@ -250,18 +257,18 @@ Tomo Cocoa Kim Jin Su Toni Mattis + Amber Brown Lucas Stadler Julian Berman Markus Holtermann roberto at goyle Yury V. 
Zaytsev Anna Katrina Dominguez - William Leslie Bobby Impollonia - Faye Zhao timo at eistee.fritz.box Andrew Thompson Yusei Tahara + Aaron Tubbs Ben Darnell Roberto De Ioris Juan Francisco Cantero Hurtado @@ -273,6 +280,7 @@ Christopher Armstrong Michael Hudson-Doyle Anders Sigfridsson + Nikolay Zinov Yasir Suhail Jason Michalski rafalgalczynski at gmail.com @@ -282,6 +290,7 @@ Gustavo Niemeyer Stephan Busemann Rafał Gałczyński + Matt Bogosian Christian Muirhead Berker Peksag James Lan @@ -316,9 +325,9 @@ Stefan Marr jiaaro Mads Kiilerich - Richard Lancaster opassembler.py Antony Lee + Jason Madden Yaroslav Fedevych Jim Hunziker Markus Unterwaditzer @@ -327,6 +336,7 @@ squeaky Zearin soareschen + Jonas Pfannschmidt Kurt Griffiths Mike Bayer Matthew Miller diff --git a/pypy/doc/tool/makecontributor.py b/pypy/doc/tool/makecontributor.py --- a/pypy/doc/tool/makecontributor.py +++ b/pypy/doc/tool/makecontributor.py @@ -72,6 +72,7 @@ 'Anton Gulenko':['anton gulenko', 'anton_gulenko'], 'Richard Lancaster':['richardlancaster'], 'William Leslie':['William ML Leslie'], + 'Spenser Bauman':['Spenser Andrew Bauman'], } alias_map = {} From pypy.commits at gmail.com Tue Feb 23 16:10:20 2016 From: pypy.commits at gmail.com (plan_rich) Date: Tue, 23 Feb 2016 13:10:20 -0800 (PST) Subject: [pypy-commit] pypy fix-longevity: (remi, plan_rich) refactored free_reg into two lists (caller saved and callee saved list) Message-ID: <56cccabc.e6bbc20a.d2a3a.ffffa5c7@mx.google.com> Author: Richard Plangger Branch: fix-longevity Changeset: r82460:98153a101dda Date: 2016-02-23 19:09 +0100 http://bitbucket.org/pypy/pypy/changeset/98153a101dda/ Log: (remi, plan_rich) refactored free_reg into two lists (caller saved and callee saved list) diff --git a/rpython/jit/backend/llsupport/regalloc.py b/rpython/jit/backend/llsupport/regalloc.py --- a/rpython/jit/backend/llsupport/regalloc.py +++ b/rpython/jit/backend/llsupport/regalloc.py @@ -1,7 +1,7 @@ import os -from rpython.jit.metainterp.history import 
Const, REF, JitCellToken +from rpython.jit.metainterp.history import Const, Box, REF, JitCellToken from rpython.rlib.objectmodel import we_are_translated, specialize -from rpython.jit.metainterp.resoperation import rop, AbstractValue +from rpython.jit.metainterp.resoperation import rop from rpython.rtyper.lltypesystem import lltype from rpython.rtyper.lltypesystem.lloperation import llop @@ -10,7 +10,7 @@ except ImportError: OrderedDict = dict # too bad -class TempVar(AbstractValue): +class TempBox(Box): def __init__(self): pass @@ -267,7 +267,6 @@ raise NotImplementedError("Purely abstract") class RegisterManager(object): - """ Class that keeps track of register allocations """ box_types = None # or a list of acceptable types @@ -276,10 +275,10 @@ save_around_call_regs = [] frame_reg = None - def __init__(self, live_ranges, frame_manager=None, assembler=None): + def __init__(self, longevity, frame_manager=None, assembler=None): self.free_regs = self.all_regs[:] self.free_regs.reverse() - self.live_ranges = live_ranges + self.longevity = longevity self.temp_boxes = [] if not we_are_translated(): self.reg_bindings = OrderedDict() @@ -293,19 +292,19 @@ def is_still_alive(self, v): # Check if 'v' is alive at the current position. # Return False if the last usage is strictly before. - return self.live_ranges.last_use(v) >= self.position + return self.longevity[v][1] >= self.position def stays_alive(self, v): # Check if 'v' stays alive after the current position. # Return False if the last usage is before or at position. 
- return self.live_ranges.last_use(v) > self.position + return self.longevity[v][1] > self.position def next_instruction(self, incr=1): self.position += incr def _check_type(self, v): if not we_are_translated() and self.box_types is not None: - assert isinstance(v, TempVar) or v.type in self.box_types + assert isinstance(v, TempBox) or v.type in self.box_types def possibly_free_var(self, v): """ If v is stored in a register and v is not used beyond the @@ -315,7 +314,7 @@ self._check_type(v) if isinstance(v, Const): return - if not self.live_ranges.exists(v) or self.live_ranges.last_use(v) <= self.position: + if v not in self.longevity or self.longevity[v][1] <= self.position: if v in self.reg_bindings: self.free_regs.append(self.reg_bindings[v]) del self.reg_bindings[v] @@ -347,9 +346,9 @@ else: assert len(self.reg_bindings) + len(self.free_regs) == len(self.all_regs) assert len(self.temp_boxes) == 0 - if self.live_ranges.longevity: + if self.longevity: for v in self.reg_bindings: - assert self.live_ranges.last_use(v) > self.position + assert self.longevity[v][1] > self.position def try_allocate_reg(self, v, selected_reg=None, need_lower_byte=False): """ Try to allocate a register, if we have one free. @@ -425,7 +424,7 @@ continue if need_lower_byte and reg in self.no_lower_byte_regs: continue - max_age = self.live_ranges.last_use(next) + max_age = self.longevity[next][1] if cur_max_age < max_age: cur_max_age = max_age candidate = next @@ -443,8 +442,8 @@ Will not spill a variable from 'forbidden_vars'. 
""" self._check_type(v) - if isinstance(v, TempVar): - self.live_ranges.new_live_range(v, self.position, self.position) + if isinstance(v, TempBox): + self.longevity[v] = (self.position, self.position) loc = self.try_allocate_reg(v, selected_reg, need_lower_byte=need_lower_byte) if loc: @@ -554,14 +553,13 @@ loc = self.force_allocate_reg(v, forbidden_vars) self.assembler.regalloc_mov(prev_loc, loc) assert v in self.reg_bindings - if self.live_ranges.last_use(v) > self.position: + if self.longevity[v][1] > self.position: # we need to find a new place for variable v and # store result in the same place loc = self.reg_bindings[v] del self.reg_bindings[v] - if self.frame_manager.get(v) is None or self.free_regs: + if self.frame_manager.get(v) is None: self._move_variable_away(v, loc) - self.reg_bindings[result_v] = loc else: self._reallocate_from_to(v, result_v) @@ -583,7 +581,7 @@ 1 (save all), or 2 (save default+PTRs). """ for v, reg in self.reg_bindings.items(): - if v not in force_store and self.live_ranges.last_use(v) <= self.position: + if v not in force_store and self.longevity[v][1] <= self.position: # variable dies del self.reg_bindings[v] self.free_regs.append(reg) @@ -635,78 +633,69 @@ locs = [] base_ofs = self.assembler.cpu.get_baseofs_of_frame_field() for box in inputargs: - assert not isinstance(box, Const) + assert isinstance(box, Box) loc = self.fm.get_new_loc(box) locs.append(loc.value - base_ofs) - if looptoken.compiled_loop_token is not None: # <- for tests + if looptoken.compiled_loop_token is not None: + # for tests looptoken.compiled_loop_token._ll_initial_locs = locs - def next_op_can_accept_cc(self, operations, i): - op = operations[i] - next_op = operations[i + 1] - opnum = next_op.getopnum() - if (opnum != rop.GUARD_TRUE and opnum != rop.GUARD_FALSE - and opnum != rop.COND_CALL): + def can_merge_with_next_guard(self, op, i, operations): + if (op.getopnum() == rop.CALL_MAY_FORCE or + op.getopnum() == rop.CALL_ASSEMBLER or + op.getopnum() == 
rop.CALL_RELEASE_GIL): + assert operations[i + 1].getopnum() == rop.GUARD_NOT_FORCED + return True + if not op.is_comparison(): + if op.is_ovf(): + if (operations[i + 1].getopnum() != rop.GUARD_NO_OVERFLOW and + operations[i + 1].getopnum() != rop.GUARD_OVERFLOW): + not_implemented("int_xxx_ovf not followed by " + "guard_(no)_overflow") + return True return False - if next_op.getarg(0) is not op: + if (operations[i + 1].getopnum() != rop.GUARD_TRUE and + operations[i + 1].getopnum() != rop.GUARD_FALSE): return False - if self.longevity[op][1] > i + 1: + if operations[i + 1].getarg(0) is not op.result: return False - if opnum != rop.COND_CALL: - if op in operations[i + 1].getfailargs(): - return False - else: - if op in operations[i + 1].getarglist()[1:]: - return False + if (self.longevity[op.result][1] > i + 1 or + op.result in operations[i + 1].getfailargs()): + return False return True - def locs_for_call_assembler(self, op): + def locs_for_call_assembler(self, op, guard_op): descr = op.getdescr() assert isinstance(descr, JitCellToken) if op.numargs() == 2: self.rm._sync_var(op.getarg(1)) return [self.loc(op.getarg(0)), self.fm.loc(op.getarg(1))] else: - assert op.numargs() == 1 return [self.loc(op.getarg(0))] -class LiveRanges(object): - def __init__(self, longevity, last_real_usage, dist_to_next_call): - self.longevity = longevity - self.last_real_usage = last_real_usage - self.dist_to_next_call = dist_to_next_call - - def exists(self, var): - return var in self.longevity - - def last_use(self, var): - return self.longevity[var][1] - - def new_live_range(self, var, start, end): - self.longevity[var] = (start, end) - -def compute_var_live_ranges(inputargs, operations): +def compute_vars_longevity(inputargs, operations): # compute a dictionary that maps variables to index in # operations that is a "last-time-seen" - # returns a Longevity object with longevity/useful. Non-useful variables are ones that + # returns a pair longevity/useful. 
Non-useful variables are ones that # never appear in the assembler or it does not matter if they appear on # stack or in registers. Main example is loop arguments that go # only to guard operations or to jump or to finish + produced = {} last_used = {} last_real_usage = {} - dist_to_next_call = [0] * len(operations) - last_call_pos = -1 for i in range(len(operations)-1, -1, -1): op = operations[i] - if op.type != 'v': - if op not in last_used and op.has_no_side_effect(): + if op.result: + if op.result not in last_used and op.has_no_side_effect(): continue + assert op.result not in produced + produced[op.result] = i opnum = op.getopnum() for j in range(op.numargs()): arg = op.getarg(j) - if isinstance(arg, Const): + if not isinstance(arg, Box): continue if arg not in last_used: last_used[arg] = i @@ -717,40 +706,26 @@ for arg in op.getfailargs(): if arg is None: # hole continue - assert not isinstance(arg, Const) + assert isinstance(arg, Box) if arg not in last_used: last_used[arg] = i - if op.is_call(): - last_call_pos = i - dist_to_next_call[i] = last_call_pos - i # longevity = {} - for i, arg in enumerate(operations): - if arg.type != 'v' and arg in last_used: - assert not isinstance(arg, Const) - assert i < last_used[arg] - longevity[arg] = (i, last_used[arg]) + for arg in produced: + if arg in last_used: + assert isinstance(arg, Box) + assert produced[arg] < last_used[arg] + longevity[arg] = (produced[arg], last_used[arg]) del last_used[arg] for arg in inputargs: - assert not isinstance(arg, Const) + assert isinstance(arg, Box) if arg not in last_used: longevity[arg] = (-1, -1) else: longevity[arg] = (0, last_used[arg]) del last_used[arg] assert len(last_used) == 0 - - if not we_are_translated(): - produced = {} - for arg in inputargs: - produced[arg] = None - for op in operations: - for arg in op.getarglist(): - if not isinstance(arg, Const): - assert arg in produced - produced[op] = None - - return LiveRanges(longevity, last_real_usage, dist_to_next_call) + 
return longevity, last_real_usage def is_comparison_or_ovf_op(opnum): from rpython.jit.metainterp.resoperation import opclasses @@ -759,7 +734,7 @@ # any instance field, we can use a fake object class Fake(cls): pass - op = Fake() + op = Fake(None) return op.is_comparison() or op.is_ovf() def valid_addressing_size(size): From pypy.commits at gmail.com Tue Feb 23 17:06:49 2016 From: pypy.commits at gmail.com (mjacob) Date: Tue, 23 Feb 2016 14:06:49 -0800 (PST) Subject: [pypy-commit] pypy py3k: hg merge 1df21a900a84 Message-ID: <56ccd7f9.aa0ac20a.a89ce.366c@mx.google.com> Author: Manuel Jacob Branch: py3k Changeset: r82461:fa1b4e83847a Date: 2016-02-23 16:59 +0100 http://bitbucket.org/pypy/pypy/changeset/fa1b4e83847a/ Log: hg merge 1df21a900a84 diff --git a/pypy/module/cpyext/bytesobject.py b/pypy/module/cpyext/bytesobject.py --- a/pypy/module/cpyext/bytesobject.py +++ b/pypy/module/cpyext/bytesobject.py @@ -8,7 +8,6 @@ PyObject, PyObjectP, Py_DecRef, make_ref, from_ref, track_reference, make_typedescr, get_typedescr) - ## ## Implementation of PyBytesObject ## ================================ @@ -144,8 +143,6 @@ ref_str.c_buffer = rffi.str2charp(s) return ref_str.c_buffer -#_______________________________________________________________________ - @cpython_api([PyObject, rffi.CCHARPP, rffi.CArrayPtr(Py_ssize_t)], rffi.INT_real, error=-1) def PyBytes_AsStringAndSize(space, ref, buffer, length): if not PyBytes_Check(space, ref): @@ -228,9 +225,9 @@ if w_newpart is None or not PyBytes_Check(space, ref[0]) or \ not PyBytes_Check(space, w_newpart): - Py_DecRef(space, ref[0]) - ref[0] = lltype.nullptr(PyObject.TO) - return + Py_DecRef(space, ref[0]) + ref[0] = lltype.nullptr(PyObject.TO) + return w_str = from_ref(space, ref[0]) w_newstr = space.add(w_str, w_newpart) Py_DecRef(space, ref[0]) From pypy.commits at gmail.com Tue Feb 23 17:06:51 2016 From: pypy.commits at gmail.com (mjacob) Date: Tue, 23 Feb 2016 14:06:51 -0800 (PST) Subject: [pypy-commit] pypy py3k: hg merge 
default Message-ID: <56ccd7fb.2aacc20a.b189d.fffff93c@mx.google.com> Author: Manuel Jacob Branch: py3k Changeset: r82462:842942cabb34 Date: 2016-02-23 22:26 +0100 http://bitbucket.org/pypy/pypy/changeset/842942cabb34/ Log: hg merge default diff too long, truncating to 2000 out of 5375 lines diff --git a/lib_pypy/_pypy_testcapi.py b/lib_pypy/_pypy_testcapi.py --- a/lib_pypy/_pypy_testcapi.py +++ b/lib_pypy/_pypy_testcapi.py @@ -7,6 +7,7 @@ content = fid.read() # from cffi's Verifier() key = '\x00'.join([sys.version[:3], content]) + key += 'cpyext-gc-support-2' # this branch requires recompilation! if sys.version_info >= (3,): key = key.encode('utf-8') k1 = hex(binascii.crc32(key[0::2]) & 0xffffffff) diff --git a/pypy/doc/discussion/rawrefcount.rst b/pypy/doc/discussion/rawrefcount.rst new file mode 100644 --- /dev/null +++ b/pypy/doc/discussion/rawrefcount.rst @@ -0,0 +1,158 @@ +====================== +Rawrefcount and the GC +====================== + + +GC Interface +------------ + +"PyObject" is a raw structure with at least two fields, ob_refcnt and +ob_pypy_link. The ob_refcnt is the reference counter as used on +CPython. If the PyObject structure is linked to a live PyPy object, +its current address is stored in ob_pypy_link and ob_refcnt is bumped +by either the constant REFCNT_FROM_PYPY, or the constant +REFCNT_FROM_PYPY_LIGHT (== REFCNT_FROM_PYPY + SOME_HUGE_VALUE) +(to mean "light finalizer"). + +Most PyPy objects exist outside cpyext, and conversely in cpyext it is +possible that a lot of PyObjects exist without being seen by the rest +of PyPy. At the interface, however, we can "link" a PyPy object and a +PyObject. There are two kinds of link: + +rawrefcount.create_link_pypy(p, ob) + + Makes a link between an exising object gcref 'p' and a newly + allocated PyObject structure 'ob'. ob->ob_refcnt must be + initialized to either REFCNT_FROM_PYPY, or + REFCNT_FROM_PYPY_LIGHT. 
(The second case is an optimization: + when the GC finds the PyPy object and PyObject no longer + referenced, it can just free() the PyObject.) + +rawrefcount.create_link_pyobj(p, ob) + + Makes a link from an existing PyObject structure 'ob' to a newly + allocated W_CPyExtPlaceHolderObject 'p'. You must also add + REFCNT_FROM_PYPY to ob->ob_refcnt. For cases where the PyObject + contains all the data, and the PyPy object is just a proxy. The + W_CPyExtPlaceHolderObject should have only a field that contains + the address of the PyObject, but that's outside the scope of the + GC. + +rawrefcount.from_obj(p) + + If there is a link from object 'p' made with create_link_pypy(), + returns the corresponding 'ob'. Otherwise, returns NULL. + +rawrefcount.to_obj(Class, ob) + + Returns ob->ob_pypy_link, cast to an instance of 'Class'. + + +Collection logic +---------------- + +Objects existing purely on the C side have ob->ob_pypy_link == 0; +these are purely reference counted. On the other hand, if +ob->ob_pypy_link != 0, then ob->ob_refcnt is at least REFCNT_FROM_PYPY +and the object is part of a "link". + +The idea is that links whose 'p' is not reachable from other PyPy +objects *and* whose 'ob->ob_refcnt' is REFCNT_FROM_PYPY or +REFCNT_FROM_PYPY_LIGHT are the ones who die. But it is more messy +because PyObjects still (usually) need to have a tp_dealloc called, +and this cannot occur immediately (and can do random things like +accessing other references this object points to, or resurrecting the +object). + +Let P = list of links created with rawrefcount.create_link_pypy() +and O = list of links created with rawrefcount.create_link_pyobj(). +The PyPy objects in the list O are all W_CPyExtPlaceHolderObject: all +the data is in the PyObjects, and all outsite references (if any) are +in C, as "PyObject *" fields. 
+ +So, during the collection we do this about P links: + + for (p, ob) in P: + if ob->ob_refcnt != REFCNT_FROM_PYPY + and ob->ob_refcnt != REFCNT_FROM_PYPY_LIGHT: + mark 'p' as surviving, as well as all its dependencies + +At the end of the collection, the P and O links are both handled like +this: + + for (p, ob) in P + O: + if p is not surviving: # even if 'ob' might be surviving + unlink p and ob + if ob->ob_refcnt == REFCNT_FROM_PYPY_LIGHT: + free(ob) + elif ob->ob_refcnt > REFCNT_FROM_PYPY_LIGHT: + ob->ob_refcnt -= REFCNT_FROM_PYPY_LIGHT + else: + ob->ob_refcnt -= REFCNT_FROM_PYPY + if ob->ob_refcnt == 0: + invoke _Py_Dealloc(ob) later, outside the GC + + +GC Implementation +----------------- + +We need two copies of both the P list and O list, for young or old +objects. All four lists can be regular AddressLists of 'ob' objects. + +We also need an AddressDict mapping 'p' to 'ob' for all links in the P +list, and update it when PyPy objects move. + + +Further notes +------------- + +XXX +XXX the rest is the ideal world, but as a first step, we'll look +XXX for the minimal tweaks needed to adapt the existing cpyext +XXX + +For objects that are opaque in CPython, like , we always create +a PyPy object, and then when needed we make an empty PyObject and +attach it with create_link_pypy()/REFCNT_FROM_PYPY_LIGHT. + +For and objects, the corresponding PyObjects contain a +"long" or "double" field too. We link them with create_link_pypy() +and we can use REFCNT_FROM_PYPY_LIGHT too: 'tp_dealloc' doesn't +need to be called, and instead just calling free() is fine. + +For objects, we need both a PyPy and a PyObject side. These +are made with create_link_pypy()/REFCNT_FROM_PYPY. + +For custom PyXxxObjects allocated from the C extension module, we +need create_link_pyobj(). + +For or objects coming from PyPy, we use +create_link_pypy()/REFCNT_FROM_PYPY_LIGHT with a PyObject +preallocated with the size of the string. 
We copy the string +lazily into that area if PyString_AS_STRING() is called. + +For , , or objects in the C extension +module, we first allocate it as only a PyObject, which supports +mutation of the data from C, like CPython. When it is exported to +PyPy we could make a W_CPyExtPlaceHolderObject with +create_link_pyobj(). + +For objects coming from PyPy, if they are not specialized, +then the PyPy side holds a regular reference to the items. Then we +can allocate a PyTupleObject and store in it borrowed PyObject +pointers to the items. Such a case is created with +create_link_pypy()/REFCNT_FROM_PYPY_LIGHT. If it is specialized, +then it doesn't work because the items are created just-in-time on the +PyPy side. In this case, the PyTupleObject needs to hold real +references to the PyObject items, and we use create_link_pypy()/ +REFCNT_FROM_PYPY. In all cases, we have a C array of PyObjects +that we can directly return from PySequence_Fast_ITEMS, PyTuple_ITEMS, +PyTuple_GetItem, and so on. + +For objects coming from PyPy, we can use a cpyext list +strategy. The list turns into a PyListObject, as if it had been +allocated from C in the first place. The special strategy can hold +(only) a direct reference to the PyListObject, and we can use either +create_link_pyobj() or create_link_pypy() (to be decided). +PySequence_Fast_ITEMS then works for lists too, and PyList_GetItem +can return a borrowed reference, and so on. 
diff --git a/pypy/interpreter/baseobjspace.py b/pypy/interpreter/baseobjspace.py --- a/pypy/interpreter/baseobjspace.py +++ b/pypy/interpreter/baseobjspace.py @@ -51,7 +51,7 @@ class W_Root(object): """This is the abstract root class of all wrapped objects that live in a 'normal' object space like StdObjSpace.""" - __slots__ = () + __slots__ = ('__weakref__',) user_overridden_class = False def getdict(self, space): diff --git a/pypy/interpreter/typedef.py b/pypy/interpreter/typedef.py --- a/pypy/interpreter/typedef.py +++ b/pypy/interpreter/typedef.py @@ -248,7 +248,7 @@ def user_setup(self, space, w_subtype): self.space = space self.w__class__ = w_subtype - self.user_setup_slots(w_subtype.nslots) + self.user_setup_slots(w_subtype.layout.nslots) def user_setup_slots(self, nslots): assert nslots == 0 diff --git a/pypy/module/cpyext/__init__.py b/pypy/module/cpyext/__init__.py --- a/pypy/module/cpyext/__init__.py +++ b/pypy/module/cpyext/__init__.py @@ -58,7 +58,6 @@ import pypy.module.cpyext.funcobject import pypy.module.cpyext.frameobject import pypy.module.cpyext.classobject -import pypy.module.cpyext.pypyintf import pypy.module.cpyext.exception import pypy.module.cpyext.memoryobject import pypy.module.cpyext.codecs diff --git a/pypy/module/cpyext/api.py b/pypy/module/cpyext/api.py --- a/pypy/module/cpyext/api.py +++ b/pypy/module/cpyext/api.py @@ -9,7 +9,7 @@ from rpython.rtyper.tool import rffi_platform from rpython.rtyper.lltypesystem import ll2ctypes from rpython.rtyper.annlowlevel import llhelper -from rpython.rlib.objectmodel import we_are_translated +from rpython.rlib.objectmodel import we_are_translated, keepalive_until_here from rpython.translator import cdir from rpython.translator.tool.cbuild import ExternalCompilationInfo from rpython.translator.gensupp import NameManager @@ -29,13 +29,13 @@ from rpython.rlib.rposix import is_valid_fd, validate_fd from rpython.rlib.unroll import unrolling_iterable from rpython.rlib.objectmodel import specialize -from 
rpython.rlib.exports import export_struct from pypy.module import exceptions from pypy.module.exceptions import interp_exceptions # CPython 2.4 compatibility from py.builtin import BaseException from rpython.tool.sourcetools import func_with_new_name from rpython.rtyper.lltypesystem.lloperation import llop +from rpython.rlib import rawrefcount DEBUG_WRAPPER = True @@ -194,7 +194,7 @@ class ApiFunction: def __init__(self, argtypes, restype, callable, error=_NOT_SPECIFIED, - c_name=None, gil=None): + c_name=None, gil=None, result_borrowed=False): self.argtypes = argtypes self.restype = restype self.functype = lltype.Ptr(lltype.FuncType(argtypes, restype)) @@ -211,6 +211,11 @@ self.argnames = argnames[1:] assert len(self.argnames) == len(self.argtypes) self.gil = gil + self.result_borrowed = result_borrowed + # + def get_llhelper(space): + return llhelper(self.functype, self.get_wrapper(space)) + self.get_llhelper = get_llhelper def __repr__(self): return "" % (self.callable.__name__,) @@ -218,13 +223,6 @@ def _freeze_(self): return True - def get_llhelper(self, space): - llh = getattr(self, '_llhelper', None) - if llh is None: - llh = llhelper(self.functype, self.get_wrapper(space)) - self._llhelper = llh - return llh - @specialize.memo() def get_wrapper(self, space): wrapper = getattr(self, '_wrapper', None) @@ -237,7 +235,7 @@ return wrapper def cpython_api(argtypes, restype, error=_NOT_SPECIFIED, header='pypy_decl.h', - gil=None): + gil=None, result_borrowed=False): """ Declares a function to be exported. - `argtypes`, `restype` are lltypes and describe the function signature. 
@@ -266,13 +264,15 @@ rffi.cast(restype, 0) == 0) def decorate(func): + func._always_inline_ = 'try' func_name = func.func_name if header is not None: c_name = None else: c_name = func_name api_function = ApiFunction(argtypes, restype, func, error, - c_name=c_name, gil=gil) + c_name=c_name, gil=gil, + result_borrowed=result_borrowed) func.api_func = api_function if header is not None: @@ -283,6 +283,10 @@ raise ValueError("function %s has no return value for exceptions" % func) def make_unwrapper(catch_exception): + # ZZZ is this whole logic really needed??? It seems to be only + # for RPython code calling PyXxx() functions directly. I would + # think that usually directly calling the function is clean + # enough now names = api_function.argnames types_names_enum_ui = unrolling_iterable(enumerate( zip(api_function.argtypes, @@ -290,56 +294,58 @@ @specialize.ll() def unwrapper(space, *args): - from pypy.module.cpyext.pyobject import Py_DecRef - from pypy.module.cpyext.pyobject import make_ref, from_ref - from pypy.module.cpyext.pyobject import Reference + from pypy.module.cpyext.pyobject import Py_DecRef, is_pyobj + from pypy.module.cpyext.pyobject import from_ref, as_pyobj newargs = () - to_decref = [] + keepalives = () assert len(args) == len(api_function.argtypes) for i, (ARG, is_wrapped) in types_names_enum_ui: input_arg = args[i] if is_PyObject(ARG) and not is_wrapped: - # build a reference - if input_arg is None: - arg = lltype.nullptr(PyObject.TO) - elif isinstance(input_arg, W_Root): - ref = make_ref(space, input_arg) - to_decref.append(ref) - arg = rffi.cast(ARG, ref) + # build a 'PyObject *' (not holding a reference) + if not is_pyobj(input_arg): + keepalives += (input_arg,) + arg = rffi.cast(ARG, as_pyobj(space, input_arg)) + else: + arg = rffi.cast(ARG, input_arg) + elif is_PyObject(ARG) and is_wrapped: + # build a W_Root, possibly from a 'PyObject *' + if is_pyobj(input_arg): + arg = from_ref(space, input_arg) else: arg = input_arg - elif 
is_PyObject(ARG) and is_wrapped: - # convert to a wrapped object - if input_arg is None: - arg = input_arg - elif isinstance(input_arg, W_Root): - arg = input_arg - else: - try: - arg = from_ref(space, - rffi.cast(PyObject, input_arg)) - except TypeError, e: - err = OperationError(space.w_TypeError, - space.wrap( - "could not cast arg to PyObject")) - if not catch_exception: - raise err - state = space.fromcache(State) - state.set_exception(err) - if is_PyObject(restype): - return None - else: - return api_function.error_value + + ## ZZZ: for is_pyobj: + ## try: + ## arg = from_ref(space, + ## rffi.cast(PyObject, input_arg)) + ## except TypeError, e: + ## err = OperationError(space.w_TypeError, + ## space.wrap( + ## "could not cast arg to PyObject")) + ## if not catch_exception: + ## raise err + ## state = space.fromcache(State) + ## state.set_exception(err) + ## if is_PyObject(restype): + ## return None + ## else: + ## return api_function.error_value else: - # convert to a wrapped object + # arg is not declared as PyObject, no magic arg = input_arg newargs += (arg, ) - try: + if not catch_exception: + try: + res = func(space, *newargs) + finally: + keepalive_until_here(*keepalives) + else: + # non-rpython variant + assert not we_are_translated() try: res = func(space, *newargs) except OperationError, e: - if not catch_exception: - raise if not hasattr(api_function, "error_value"): raise state = space.fromcache(State) @@ -348,21 +354,13 @@ return None else: return api_function.error_value - if not we_are_translated(): - got_integer = isinstance(res, (int, long, float)) - assert got_integer == expect_integer,'got %r not integer' % res - if res is None: - return None - elif isinstance(res, Reference): - return res.get_wrapped(space) - else: - return res - finally: - for arg in to_decref: - Py_DecRef(space, arg) + # 'keepalives' is alive here (it's not rpython) + got_integer = isinstance(res, (int, long, float)) + assert got_integer == expect_integer, ( + 'got %r not 
integer' % (res,)) + return res unwrapper.func = func unwrapper.api_func = api_function - unwrapper._always_inline_ = 'try' return unwrapper unwrapper_catch = make_unwrapper(True) @@ -501,7 +499,7 @@ GLOBALS['%s#' % (cpyname, )] = ('PyTypeObject*', pypyexpr) for cpyname in '''PyMethodObject PyListObject PyLongObject - PyDictObject PyTupleObject'''.split(): + PyDictObject'''.split(): FORWARD_DECLS.append('typedef struct { PyObject_HEAD } %s' % (cpyname, )) build_exported_objects() @@ -512,14 +510,16 @@ return {"PyObject*": PyObject, "PyTypeObject*": PyTypeObjectPtr, "PyDateTime_CAPI*": lltype.Ptr(PyDateTime_CAPI)}[ctype] +# Note: as a special case, "PyObject" is the pointer type in RPython, +# corresponding to "PyObject *" in C. We do that only for PyObject. +# For example, "PyTypeObject" is the struct type even in RPython. PyTypeObject = lltype.ForwardReference() PyTypeObjectPtr = lltype.Ptr(PyTypeObject) -# It is important that these PyObjects are allocated in a raw fashion -# Thus we cannot save a forward pointer to the wrapped object -# So we need a forward and backward mapping in our State instance PyObjectStruct = lltype.ForwardReference() PyObject = lltype.Ptr(PyObjectStruct) -PyObjectFields = (("ob_refcnt", lltype.Signed), ("ob_type", PyTypeObjectPtr)) +PyObjectFields = (("ob_refcnt", lltype.Signed), + ("ob_pypy_link", lltype.Signed), + ("ob_type", PyTypeObjectPtr)) PyVarObjectFields = PyObjectFields + (("ob_size", Py_ssize_t), ) cpython_struct('PyObject', PyObjectFields, PyObjectStruct) PyVarObjectStruct = cpython_struct("PyVarObject", PyVarObjectFields) @@ -616,8 +616,8 @@ @specialize.ll() def wrapper(*args): - from pypy.module.cpyext.pyobject import make_ref, from_ref - from pypy.module.cpyext.pyobject import Reference + from pypy.module.cpyext.pyobject import make_ref, from_ref, is_pyobj + from pypy.module.cpyext.pyobject import as_pyobj # we hope that malloc removal removes the newtuple() that is # inserted exactly here by the varargs specializer if 
gil_acquire: @@ -626,6 +626,7 @@ llop.gc_stack_bottom(lltype.Void) # marker for trackgcroot.py retval = fatal_value boxed_args = () + tb = None try: if not we_are_translated() and DEBUG_WRAPPER: print >>sys.stderr, callable, @@ -633,10 +634,8 @@ for i, (typ, is_wrapped) in argtypes_enum_ui: arg = args[i] if is_PyObject(typ) and is_wrapped: - if arg: - arg_conv = from_ref(space, rffi.cast(PyObject, arg)) - else: - arg_conv = None + assert is_pyobj(arg) + arg_conv = from_ref(space, rffi.cast(PyObject, arg)) else: arg_conv = arg boxed_args += (arg_conv, ) @@ -651,6 +650,7 @@ except BaseException, e: failed = True if not we_are_translated(): + tb = sys.exc_info()[2] message = repr(e) import traceback traceback.print_exc() @@ -669,29 +669,34 @@ retval = error_value elif is_PyObject(callable.api_func.restype): - if result is None: - retval = rffi.cast(callable.api_func.restype, - make_ref(space, None)) - elif isinstance(result, Reference): - retval = result.get_ref(space) - elif not rffi._isllptr(result): - retval = rffi.cast(callable.api_func.restype, - make_ref(space, result)) + if is_pyobj(result): + retval = result else: - retval = result + if result is not None: + if callable.api_func.result_borrowed: + retval = as_pyobj(space, result) + else: + retval = make_ref(space, result) + retval = rffi.cast(callable.api_func.restype, retval) + else: + retval = lltype.nullptr(PyObject.TO) elif callable.api_func.restype is not lltype.Void: retval = rffi.cast(callable.api_func.restype, result) except Exception, e: print 'Fatal error in cpyext, CPython compatibility layer, calling', callable.__name__ print 'Either report a bug or consider not using this particular extension' if not we_are_translated(): + if tb is None: + tb = sys.exc_info()[2] import traceback traceback.print_exc() - print str(e) + if sys.stdout == sys.__stdout__: + import pdb; pdb.post_mortem(tb) # we can't do much here, since we're in ctypes, swallow else: print str(e) pypy_debug_catch_fatal_exception() + 
assert False rffi.stackcounter.stacks_counter -= 1 if gil_release: rgil.release() @@ -825,6 +830,19 @@ outputfilename=str(udir / "module_cache" / "pypyapi")) modulename = py.path.local(eci.libraries[-1]) + def dealloc_trigger(): + from pypy.module.cpyext.pyobject import _Py_Dealloc + print 'dealloc_trigger...' + while True: + ob = rawrefcount.next_dead(PyObject) + if not ob: + break + print ob + _Py_Dealloc(space, ob) + print 'dealloc_trigger DONE' + return "RETRY" + rawrefcount.init(dealloc_trigger) + run_bootstrap_functions(space) # load the bridge, and init structure @@ -834,9 +852,9 @@ space.fromcache(State).install_dll(eci) # populate static data - builder = StaticObjectBuilder(space) + builder = space.fromcache(StaticObjectBuilder) for name, (typ, expr) in GLOBALS.iteritems(): - from pypy.module import cpyext + from pypy.module import cpyext # for the eval() below w_obj = eval(expr) if name.endswith('#'): name = name[:-1] @@ -892,27 +910,44 @@ class StaticObjectBuilder: def __init__(self, space): self.space = space - self.to_attach = [] + self.static_pyobjs = [] + self.static_objs_w = [] + self.cpyext_type_init = None + # + # add a "method" that is overridden in setup_library() + # ('self.static_pyobjs' is completely ignored in that case) + self.get_static_pyobjs = lambda: self.static_pyobjs def prepare(self, py_obj, w_obj): - from pypy.module.cpyext.pyobject import track_reference - py_obj.c_ob_refcnt = 1 - track_reference(self.space, py_obj, w_obj) - self.to_attach.append((py_obj, w_obj)) + "NOT_RPYTHON" + if py_obj: + py_obj.c_ob_refcnt = 1 # 1 for kept immortal + self.static_pyobjs.append(py_obj) + self.static_objs_w.append(w_obj) def attach_all(self): + # this is RPython, called once in pypy-c when it imports cpyext from pypy.module.cpyext.pyobject import get_typedescr, make_ref from pypy.module.cpyext.typeobject import finish_type_1, finish_type_2 + from pypy.module.cpyext.pyobject import track_reference + # space = self.space - space._cpyext_type_init 
= [] - for py_obj, w_obj in self.to_attach: + static_pyobjs = self.get_static_pyobjs() + static_objs_w = self.static_objs_w + for i in range(len(static_objs_w)): + track_reference(space, static_pyobjs[i], static_objs_w[i]) + # + self.cpyext_type_init = [] + for i in range(len(static_objs_w)): + py_obj = static_pyobjs[i] + w_obj = static_objs_w[i] w_type = space.type(w_obj) - typedescr = get_typedescr(w_type.instancetypedef) + typedescr = get_typedescr(w_type.layout.typedef) py_obj.c_ob_type = rffi.cast(PyTypeObjectPtr, make_ref(space, w_type)) typedescr.attach(space, py_obj, w_obj) - cpyext_type_init = space._cpyext_type_init - del space._cpyext_type_init + cpyext_type_init = self.cpyext_type_init + self.cpyext_type_init = None for pto, w_type in cpyext_type_init: finish_type_1(space, pto) finish_type_2(space, pto, w_type) @@ -1064,7 +1099,7 @@ if name.endswith('#'): structs.append('%s %s;' % (typ[:-1], name[:-1])) elif name.startswith('PyExc_'): - structs.append('extern PyTypeObject _%s;' % (name,)) + structs.append('PyTypeObject _%s;' % (name,)) structs.append('PyObject* %s = (PyObject*)&_%s;' % (name, name)) elif typ == 'PyDateTime_CAPI*': structs.append('%s %s = NULL;' % (typ, name)) @@ -1116,10 +1151,8 @@ def setup_library(space): "NOT_RPYTHON" - from pypy.module.cpyext.pyobject import make_ref use_micronumpy = setup_micronumpy(space) - - export_symbols = list(FUNCTIONS) + SYMBOLS_C + list(GLOBALS) + export_symbols = sorted(FUNCTIONS) + sorted(SYMBOLS_C) + sorted(GLOBALS) from rpython.translator.c.database import LowLevelDatabase db = LowLevelDatabase() @@ -1135,41 +1168,37 @@ run_bootstrap_functions(space) setup_va_functions(eci) - from pypy.module import cpyext # for eval() below - - # Set up the types. Needs a special case, because of the - # immediate cycle involving 'c_ob_type', and because we don't - # want these types to be Py_TPFLAGS_HEAPTYPE. 
- static_types = {} - for name, (typ, expr) in GLOBALS.items(): - if typ == 'PyTypeObject*': - pto = lltype.malloc(PyTypeObject, immortal=True, - zero=True, flavor='raw') - pto.c_ob_refcnt = 1 - pto.c_tp_basicsize = -1 - static_types[name] = pto - builder = StaticObjectBuilder(space) - for name, pto in static_types.items(): - pto.c_ob_type = static_types['PyType_Type#'] - w_type = eval(GLOBALS[name][1]) - builder.prepare(rffi.cast(PyObject, pto), w_type) - builder.attach_all() - - # populate static data - for name, (typ, expr) in GLOBALS.iteritems(): - name = name.replace("#", "") - if name.startswith('PyExc_'): + # emit uninitialized static data + builder = space.fromcache(StaticObjectBuilder) + lines = ['PyObject *pypy_static_pyobjs[] = {\n'] + include_lines = ['RPY_EXTERN PyObject *pypy_static_pyobjs[];\n'] + for name, (typ, expr) in sorted(GLOBALS.items()): + if name.endswith('#'): + assert typ in ('PyObject*', 'PyTypeObject*', 'PyIntObject*') + typ, name = typ[:-1], name[:-1] + elif name.startswith('PyExc_'): + typ = 'PyTypeObject' name = '_' + name - w_obj = eval(expr) - if typ in ('PyObject*', 'PyTypeObject*'): - struct_ptr = make_ref(space, w_obj) elif typ == 'PyDateTime_CAPI*': continue else: assert False, "Unknown static data: %s %s" % (typ, name) - struct = rffi.cast(get_structtype_for_ctype(typ), struct_ptr)._obj - struct._compilation_info = eci - export_struct(name, struct) + + from pypy.module import cpyext # for the eval() below + w_obj = eval(expr) + builder.prepare(None, w_obj) + lines.append('\t(PyObject *)&%s,\n' % (name,)) + include_lines.append('RPY_EXPORTED %s %s;\n' % (typ, name)) + + lines.append('};\n') + eci2 = CConfig._compilation_info_.merge(ExternalCompilationInfo( + separate_module_sources = [''.join(lines)], + post_include_bits = [''.join(include_lines)], + )) + # override this method to return a pointer to this C array directly + builder.get_static_pyobjs = rffi.CExternVariable( + PyObjectP, 'pypy_static_pyobjs', eci2, 
c_type='PyObject **', + getter_only=True, declare_as_extern=False) for name, func in FUNCTIONS.iteritems(): newname = mangle_name('PyPy', name) or name @@ -1180,6 +1209,10 @@ trunk_include = pypydir.dirpath() / 'include' copy_header_files(trunk_include, use_micronumpy) +def init_static_data_translated(space): + builder = space.fromcache(StaticObjectBuilder) + builder.attach_all() + def _load_from_cffi(space, name, path, initptr): from pypy.module._cffi_backend import cffi1_module cffi1_module.load_cffi1_module(space, name, path, initptr) @@ -1262,22 +1295,18 @@ @specialize.ll() def generic_cpy_call(space, func, *args): FT = lltype.typeOf(func).TO - return make_generic_cpy_call(FT, True, False)(space, func, *args) - - at specialize.ll() -def generic_cpy_call_dont_decref(space, func, *args): - FT = lltype.typeOf(func).TO - return make_generic_cpy_call(FT, False, False)(space, func, *args) + return make_generic_cpy_call(FT, False)(space, func, *args) @specialize.ll() def generic_cpy_call_expect_null(space, func, *args): FT = lltype.typeOf(func).TO - return make_generic_cpy_call(FT, True, True)(space, func, *args) + return make_generic_cpy_call(FT, True)(space, func, *args) @specialize.memo() -def make_generic_cpy_call(FT, decref_args, expect_null): +def make_generic_cpy_call(FT, expect_null): from pypy.module.cpyext.pyobject import make_ref, from_ref, Py_DecRef - from pypy.module.cpyext.pyobject import RefcountState + from pypy.module.cpyext.pyobject import is_pyobj, as_pyobj + from pypy.module.cpyext.pyobject import get_w_obj_and_decref from pypy.module.cpyext.pyerrors import PyErr_Occurred unrolling_arg_types = unrolling_iterable(enumerate(FT.ARGS)) RESULT_TYPE = FT.RESULT @@ -1305,65 +1334,49 @@ @specialize.ll() def generic_cpy_call(space, func, *args): boxed_args = () - to_decref = [] + keepalives = () assert len(args) == len(FT.ARGS) for i, ARG in unrolling_arg_types: arg = args[i] if is_PyObject(ARG): - if arg is None: - boxed_args += 
(lltype.nullptr(PyObject.TO),) - elif isinstance(arg, W_Root): - ref = make_ref(space, arg) - boxed_args += (ref,) - if decref_args: - to_decref.append(ref) - else: - boxed_args += (arg,) - else: - boxed_args += (arg,) + if not is_pyobj(arg): + keepalives += (arg,) + arg = as_pyobj(space, arg) + boxed_args += (arg,) try: - # create a new container for borrowed references - state = space.fromcache(RefcountState) - old_container = state.swap_borrow_container(None) - try: - # Call the function - result = call_external_function(func, *boxed_args) - finally: - state.swap_borrow_container(old_container) + # Call the function + result = call_external_function(func, *boxed_args) + finally: + keepalive_until_here(*keepalives) - if is_PyObject(RESULT_TYPE): - if result is None: - ret = result - elif isinstance(result, W_Root): - ret = result + if is_PyObject(RESULT_TYPE): + if not is_pyobj(result): + ret = result + else: + # The object reference returned from a C function + # that is called from Python must be an owned reference + # - ownership is transferred from the function to its caller. + if result: + ret = get_w_obj_and_decref(space, result) else: - ret = from_ref(space, result) - # The object reference returned from a C function - # that is called from Python must be an owned reference - # - ownership is transferred from the function to its caller. 
- if result: - Py_DecRef(space, result) + ret = None - # Check for exception consistency - has_error = PyErr_Occurred(space) is not None - has_result = ret is not None - if has_error and has_result: - raise OperationError(space.w_SystemError, space.wrap( - "An exception was set, but function returned a value")) - elif not expect_null and not has_error and not has_result: - raise OperationError(space.w_SystemError, space.wrap( - "Function returned a NULL result without setting an exception")) + # Check for exception consistency + has_error = PyErr_Occurred(space) is not None + has_result = ret is not None + if has_error and has_result: + raise OperationError(space.w_SystemError, space.wrap( + "An exception was set, but function returned a value")) + elif not expect_null and not has_error and not has_result: + raise OperationError(space.w_SystemError, space.wrap( + "Function returned a NULL result without setting an exception")) - if has_error: - state = space.fromcache(State) - state.check_and_raise_exception() + if has_error: + state = space.fromcache(State) + state.check_and_raise_exception() - return ret - return result - finally: - if decref_args: - for ref in to_decref: - Py_DecRef(space, ref) + return ret + return result + return generic_cpy_call - diff --git a/pypy/module/cpyext/bytesobject.py b/pypy/module/cpyext/bytesobject.py --- a/pypy/module/cpyext/bytesobject.py +++ b/pypy/module/cpyext/bytesobject.py @@ -59,7 +59,7 @@ @bootstrap_function def init_bytesobject(space): "Type description of PyBytesObject" - make_typedescr(space.w_str.instancetypedef, + make_typedescr(space.w_str.layout.typedef, basestruct=PyBytesObject.TO, attach=bytes_attach, dealloc=bytes_dealloc, @@ -69,11 +69,11 @@ def new_empty_str(space, length): """ - Allocates a PyBytesObject and its buffer, but without a corresponding + Allocate a PyBytesObject and its buffer, but without a corresponding interpreter object. The buffer may be mutated, until bytes_realize() is - called. + called. 
Refcount of the result is 1. """ - typedescr = get_typedescr(space.w_bytes.instancetypedef) + typedescr = get_typedescr(space.w_bytes.layout.typedef) py_obj = typedescr.allocate(space, space.w_bytes) py_str = rffi.cast(PyBytesObject, py_obj) diff --git a/pypy/module/cpyext/complexobject.py b/pypy/module/cpyext/complexobject.py --- a/pypy/module/cpyext/complexobject.py +++ b/pypy/module/cpyext/complexobject.py @@ -43,7 +43,7 @@ # lltype does not handle functions returning a structure. This implements a # helper function, which takes as argument a reference to the return value. - at cpython_api([PyObject, Py_complex_ptr], lltype.Void) + at cpython_api([PyObject, Py_complex_ptr], rffi.INT_real, error=-1) def _PyComplex_AsCComplex(space, w_obj, result): """Return the Py_complex value of the complex number op. @@ -60,7 +60,7 @@ # if the above did not work, interpret obj as a float giving the # real part of the result, and fill in the imaginary part as 0. result.c_real = PyFloat_AsDouble(space, w_obj) # -1 on failure - return + return 0 if not PyComplex_Check(space, w_obj): raise OperationError(space.w_TypeError, space.wrap( @@ -69,3 +69,4 @@ assert isinstance(w_obj, W_ComplexObject) result.c_real = w_obj.realval result.c_imag = w_obj.imagval + return 0 diff --git a/pypy/module/cpyext/dictobject.py b/pypy/module/cpyext/dictobject.py --- a/pypy/module/cpyext/dictobject.py +++ b/pypy/module/cpyext/dictobject.py @@ -2,8 +2,7 @@ from pypy.module.cpyext.api import ( cpython_api, CANNOT_FAIL, build_type_checkers, Py_ssize_t, Py_ssize_tP, CONST_STRING) -from pypy.module.cpyext.pyobject import PyObject, PyObjectP, borrow_from -from pypy.module.cpyext.pyobject import RefcountState +from pypy.module.cpyext.pyobject import PyObject, PyObjectP, as_pyobj from pypy.module.cpyext.pyerrors import PyErr_BadInternalCall from pypy.interpreter.error import OperationError from rpython.rlib.objectmodel import specialize @@ -14,13 +13,17 @@ PyDict_Check, PyDict_CheckExact = 
build_type_checkers("Dict") - at cpython_api([PyObject, PyObject], PyObject, error=CANNOT_FAIL) + at cpython_api([PyObject, PyObject], PyObject, error=CANNOT_FAIL, + result_borrowed=True) def PyDict_GetItem(space, w_dict, w_key): try: w_res = space.getitem(w_dict, w_key) except: return None - return borrow_from(w_dict, w_res) + # NOTE: this works so far because all our dict strategies store + # *values* as full objects, which stay alive as long as the dict is + # alive and not modified. So we can return a borrowed ref. + return w_res @cpython_api([PyObject, PyObject, PyObject], rffi.INT_real, error=-1) def PyDict_SetItem(space, w_dict, w_key, w_obj): @@ -47,7 +50,8 @@ else: PyErr_BadInternalCall(space) - at cpython_api([PyObject, CONST_STRING], PyObject, error=CANNOT_FAIL) + at cpython_api([PyObject, CONST_STRING], PyObject, error=CANNOT_FAIL, + result_borrowed=True) def PyDict_GetItemString(space, w_dict, key): """This is the same as PyDict_GetItem(), but key is specified as a char*, rather than a PyObject*.""" @@ -55,9 +59,10 @@ w_res = space.finditem_str(w_dict, rffi.charp2str(key)) except: w_res = None - if w_res is None: - return None - return borrow_from(w_dict, w_res) + # NOTE: this works so far because all our dict strategies store + # *values* as full objects, which stay alive as long as the dict is + # alive and not modified. So we can return a borrowed ref. + return w_res @cpython_api([PyObject, CONST_STRING], rffi.INT_real, error=-1) def PyDict_DelItemString(space, w_dict, key_ptr): @@ -170,10 +175,13 @@ if w_dict is None: return 0 - # Note: this is not efficient. Storing an iterator would probably + # XXX XXX PyDict_Next is not efficient. Storing an iterator would probably # work, but we can't work out how to not leak it if iteration does - # not complete. + # not complete. Alternatively, we could add some RPython-only + # dict-iterator method to move forward by N steps. 
+ w_dict.ensure_object_strategy() # make sure both keys and values can + # be borrwed try: w_iter = space.iter(space.call_method(space.w_dict, "items", w_dict)) pos = ppos[0] @@ -183,11 +191,10 @@ w_item = space.next(w_iter) w_key, w_value = space.fixedview(w_item, 2) - state = space.fromcache(RefcountState) if pkey: - pkey[0] = state.make_borrowed(w_dict, w_key) + pkey[0] = as_pyobj(space, w_key) if pvalue: - pvalue[0] = state.make_borrowed(w_dict, w_value) + pvalue[0] = as_pyobj(space, w_value) ppos[0] += 1 except OperationError, e: if not e.match(space, space.w_StopIteration): diff --git a/pypy/module/cpyext/eval.py b/pypy/module/cpyext/eval.py --- a/pypy/module/cpyext/eval.py +++ b/pypy/module/cpyext/eval.py @@ -4,7 +4,7 @@ from pypy.module.cpyext.api import ( cpython_api, CANNOT_FAIL, CONST_STRING, FILEP, fread, feof, Py_ssize_tP, cpython_struct, is_valid_fp) -from pypy.module.cpyext.pyobject import PyObject, borrow_from +from pypy.module.cpyext.pyobject import PyObject from pypy.module.cpyext.pyerrors import PyErr_SetFromErrno from pypy.module.cpyext.funcobject import PyCodeObject from pypy.module.__builtin__ import compiling @@ -23,7 +23,7 @@ def PyEval_CallObjectWithKeywords(space, w_obj, w_arg, w_kwds): return space.call(w_obj, w_arg, w_kwds) - at cpython_api([], PyObject) + at cpython_api([], PyObject, result_borrowed=True) def PyEval_GetBuiltins(space): """Return a dictionary of the builtins in the current execution frame, or the interpreter of the thread state if no frame is @@ -36,25 +36,25 @@ w_builtins = w_builtins.getdict(space) else: w_builtins = space.builtin.getdict(space) - return borrow_from(None, w_builtins) + return w_builtins # borrowed ref in all cases - at cpython_api([], PyObject, error=CANNOT_FAIL) + at cpython_api([], PyObject, error=CANNOT_FAIL, result_borrowed=True) def PyEval_GetLocals(space): """Return a dictionary of the local variables in the current execution frame, or NULL if no frame is currently executing.""" caller = 
space.getexecutioncontext().gettopframe_nohidden() if caller is None: return None - return borrow_from(None, caller.getdictscope()) + return caller.getdictscope() # borrowed ref - at cpython_api([], PyObject, error=CANNOT_FAIL) + at cpython_api([], PyObject, error=CANNOT_FAIL, result_borrowed=True) def PyEval_GetGlobals(space): """Return a dictionary of the global variables in the current execution frame, or NULL if no frame is currently executing.""" caller = space.getexecutioncontext().gettopframe_nohidden() if caller is None: return None - return borrow_from(None, caller.get_w_globals()) + return caller.get_w_globals() # borrowed ref @cpython_api([PyCodeObject, PyObject, PyObject], PyObject) def PyEval_EvalCode(space, w_code, w_globals, w_locals): diff --git a/pypy/module/cpyext/funcobject.py b/pypy/module/cpyext/funcobject.py --- a/pypy/module/cpyext/funcobject.py +++ b/pypy/module/cpyext/funcobject.py @@ -3,7 +3,7 @@ PyObjectFields, generic_cpy_call, CONST_STRING, CANNOT_FAIL, Py_ssize_t, cpython_api, bootstrap_function, cpython_struct, build_type_checkers) from pypy.module.cpyext.pyobject import ( - PyObject, make_ref, from_ref, Py_DecRef, make_typedescr, borrow_from) + PyObject, make_ref, from_ref, Py_DecRef, make_typedescr) from rpython.rlib.unroll import unrolling_iterable from pypy.interpreter.error import OperationError from pypy.interpreter.function import Function, Method @@ -83,12 +83,12 @@ from pypy.module.cpyext.object import PyObject_dealloc PyObject_dealloc(space, py_obj) - at cpython_api([PyObject], PyObject) + at cpython_api([PyObject], PyObject, result_borrowed=True) def PyFunction_GetCode(space, w_func): """Return the code object associated with the function object op.""" func = space.interp_w(Function, w_func) w_code = space.wrap(func.code) - return borrow_from(w_func, w_code) + return w_code # borrowed ref @cpython_api([PyObject, PyObject], PyObject) def PyMethod_New(space, w_func, w_self): @@ -98,18 +98,18 @@ not be NULL.""" return 
Method(space, w_func, w_self) - at cpython_api([PyObject], PyObject) + at cpython_api([PyObject], PyObject, result_borrowed=True) def PyMethod_Function(space, w_method): """Return the function object associated with the method meth.""" assert isinstance(w_method, Method) - return borrow_from(w_method, w_method.w_function) + return w_method.w_function # borrowed ref - at cpython_api([PyObject], PyObject) + at cpython_api([PyObject], PyObject, result_borrowed=True) def PyMethod_Self(space, w_method): """Return the instance associated with the method meth if it is bound, otherwise return NULL.""" assert isinstance(w_method, Method) - return borrow_from(w_method, w_method.w_instance) + return w_method.w_instance # borrowed ref def unwrap_list_of_strings(space, w_list): return [space.str_w(w_item) for w_item in space.fixedview(w_list)] diff --git a/pypy/module/cpyext/import_.py b/pypy/module/cpyext/import_.py --- a/pypy/module/cpyext/import_.py +++ b/pypy/module/cpyext/import_.py @@ -1,7 +1,6 @@ from pypy.interpreter import module from pypy.module.cpyext.api import ( generic_cpy_call, cpython_api, PyObject, CONST_STRING) -from pypy.module.cpyext.pyobject import borrow_from from rpython.rtyper.lltypesystem import lltype, rffi from pypy.interpreter.error import OperationError from pypy.interpreter.module import Module @@ -56,7 +55,7 @@ from pypy.module.imp.importing import reload return reload(space, w_mod) - at cpython_api([CONST_STRING], PyObject) + at cpython_api([CONST_STRING], PyObject, result_borrowed=True) def PyImport_AddModule(space, name): """Return the module object corresponding to a module name. The name argument may be of the form package.module. 
First check the modules @@ -71,19 +70,19 @@ not already present.""" from pypy.module.imp.importing import check_sys_modules_w modulename = rffi.charp2str(name) - w_modulename = space.wrap(modulename) w_mod = check_sys_modules_w(space, modulename) if not w_mod or space.is_w(w_mod, space.w_None): - w_mod = Module(space, w_modulename) - space.setitem(space.sys.get('modules'), w_modulename, w_mod) - return borrow_from(None, w_mod) + w_mod = Module(space, space.wrap(modulename)) + space.setitem(space.sys.get('modules'), space.wrap(modulename), w_mod) + # return a borrowed ref --- assumes one copy in sys.modules + return w_mod - at cpython_api([], PyObject) + at cpython_api([], PyObject, result_borrowed=True) def PyImport_GetModuleDict(space): """Return the dictionary used for the module administration (a.k.a. sys.modules). Note that this is a per-interpreter variable.""" w_modulesDict = space.sys.get('modules') - return borrow_from(None, w_modulesDict) + return w_modulesDict # borrowed ref @cpython_api([rffi.CCHARP, PyObject], PyObject) def PyImport_ExecCodeModule(space, name, w_code): diff --git a/pypy/module/cpyext/include/complexobject.h b/pypy/module/cpyext/include/complexobject.h --- a/pypy/module/cpyext/include/complexobject.h +++ b/pypy/module/cpyext/include/complexobject.h @@ -15,7 +15,7 @@ } Py_complex; /* generated function */ -PyAPI_FUNC(void) _PyComplex_AsCComplex(PyObject *, Py_complex *); +PyAPI_FUNC(int) _PyComplex_AsCComplex(PyObject *, Py_complex *); PyAPI_FUNC(PyObject *) _PyComplex_FromCComplex(Py_complex *); Py_LOCAL_INLINE(Py_complex) PyComplex_AsCComplex(PyObject *obj) diff --git a/pypy/module/cpyext/include/object.h b/pypy/module/cpyext/include/object.h --- a/pypy/module/cpyext/include/object.h +++ b/pypy/module/cpyext/include/object.h @@ -17,7 +17,8 @@ #define staticforward static #define PyObject_HEAD \ - long ob_refcnt; \ + Py_ssize_t ob_refcnt; \ + Py_ssize_t ob_pypy_link; \ struct _typeobject *ob_type; #define PyObject_VAR_HEAD \ @@ -25,7 
+26,7 @@ Py_ssize_t ob_size; /* Number of items in variable part */ #define PyObject_HEAD_INIT(type) \ - 1, type, + 1, 0, type, #define PyVarObject_HEAD_INIT(type, size) \ PyObject_HEAD_INIT(type) size, @@ -40,19 +41,19 @@ #ifdef PYPY_DEBUG_REFCOUNT /* Slow version, but useful for debugging */ -#define Py_INCREF(ob) (Py_IncRef((PyObject *)ob)) -#define Py_DECREF(ob) (Py_DecRef((PyObject *)ob)) -#define Py_XINCREF(ob) (Py_IncRef((PyObject *)ob)) -#define Py_XDECREF(ob) (Py_DecRef((PyObject *)ob)) +#define Py_INCREF(ob) (Py_IncRef((PyObject *)(ob))) +#define Py_DECREF(ob) (Py_DecRef((PyObject *)(ob))) +#define Py_XINCREF(ob) (Py_IncRef((PyObject *)(ob))) +#define Py_XDECREF(ob) (Py_DecRef((PyObject *)(ob))) #else /* Fast version */ -#define Py_INCREF(ob) (((PyObject *)ob)->ob_refcnt++) -#define Py_DECREF(ob) \ +#define Py_INCREF(ob) (((PyObject *)(ob))->ob_refcnt++) +#define Py_DECREF(op) \ do { \ - if (((PyObject *)ob)->ob_refcnt > 1) \ - ((PyObject *)ob)->ob_refcnt--; \ + if (--((PyObject *)(op))->ob_refcnt != 0) \ + ; \ else \ - Py_DecRef((PyObject *)ob); \ + _Py_Dealloc((PyObject *)(op)); \ } while (0) #define Py_XINCREF(op) do { if ((op) == NULL) ; else Py_INCREF(op); } while (0) diff --git a/pypy/module/cpyext/include/patchlevel.h b/pypy/module/cpyext/include/patchlevel.h --- a/pypy/module/cpyext/include/patchlevel.h +++ b/pypy/module/cpyext/include/patchlevel.h @@ -30,6 +30,13 @@ /* PyPy version as a string */ #define PYPY_VERSION "4.1.0-alpha0" +#define PYPY_VERSION_NUM 0x04010000 + +/* Defined to mean a PyPy where cpyext holds more regular references + to PyObjects, e.g. staying alive as long as the internal PyPy object + stays alive. */ +#define PYPY_CPYEXT_GC 1 +#define PyPy_Borrow(a, b) ((void) 0) /* Subversion Revision number of this file (not of the repository). * Empty since Mercurial migration. 
*/ diff --git a/pypy/module/cpyext/include/tupleobject.h b/pypy/module/cpyext/include/tupleobject.h --- a/pypy/module/cpyext/include/tupleobject.h +++ b/pypy/module/cpyext/include/tupleobject.h @@ -7,11 +7,21 @@ extern "C" { #endif +typedef struct { + PyObject_HEAD + Py_ssize_t ob_size; + PyObject **ob_item; /* XXX optimize to ob_item[] */ +} PyTupleObject; + /* defined in varargswrapper.c */ PyAPI_FUNC(PyObject *) PyTuple_Pack(Py_ssize_t, ...); -#define PyTuple_SET_ITEM PyTuple_SetItem -#define PyTuple_GET_ITEM PyTuple_GetItem +/* Macro, trading safety for speed */ +#define PyTuple_GET_ITEM(op, i) (((PyTupleObject *)(op))->ob_item[i]) +#define PyTuple_GET_SIZE(op) Py_SIZE(op) + +/* Macro, *only* to be used to fill in brand new tuples */ +#define PyTuple_SET_ITEM(op, i, v) (((PyTupleObject *)(op))->ob_item[i] = v) #ifdef __cplusplus diff --git a/pypy/module/cpyext/listobject.py b/pypy/module/cpyext/listobject.py --- a/pypy/module/cpyext/listobject.py +++ b/pypy/module/cpyext/listobject.py @@ -3,7 +3,7 @@ from pypy.module.cpyext.api import (cpython_api, CANNOT_FAIL, Py_ssize_t, build_type_checkers) from pypy.module.cpyext.pyerrors import PyErr_BadInternalCall -from pypy.module.cpyext.pyobject import Py_DecRef, PyObject, borrow_from +from pypy.module.cpyext.pyobject import Py_DecRef, PyObject from pypy.objspace.std.listobject import W_ListObject from pypy.interpreter.error import OperationError @@ -38,7 +38,7 @@ w_list.setitem(index, w_item) return 0 - at cpython_api([PyObject, Py_ssize_t], PyObject) + at cpython_api([PyObject, Py_ssize_t], PyObject, result_borrowed=True) def PyList_GetItem(space, w_list, index): """Return the object at position pos in the list pointed to by p. 
The position must be positive, indexing from the end of the list is not @@ -49,8 +49,10 @@ if index < 0 or index >= w_list.length(): raise OperationError(space.w_IndexError, space.wrap( "list index out of range")) - w_item = w_list.getitem(index) - return borrow_from(w_list, w_item) + w_list.ensure_object_strategy() # make sure we can return a borrowed obj + # XXX ^^^ how does this interact with CPyListStrategy? + w_res = w_list.getitem(index) + return w_res # borrowed ref @cpython_api([PyObject, PyObject], rffi.INT_real, error=-1) diff --git a/pypy/module/cpyext/modsupport.py b/pypy/module/cpyext/modsupport.py --- a/pypy/module/cpyext/modsupport.py +++ b/pypy/module/cpyext/modsupport.py @@ -1,7 +1,7 @@ from rpython.rtyper.lltypesystem import rffi, lltype from pypy.module.cpyext.api import cpython_api, cpython_struct, \ METH_STATIC, METH_CLASS, METH_COEXIST, CANNOT_FAIL, CONST_STRING -from pypy.module.cpyext.pyobject import PyObject, borrow_from +from pypy.module.cpyext.pyobject import PyObject from pypy.interpreter.module import Module from pypy.module.cpyext.methodobject import ( W_PyCFunctionObject, PyCFunction_NewEx, PyDescr_NewMethod, @@ -101,12 +101,12 @@ return int(space.is_w(w_type, w_obj_type) or space.is_true(space.issubtype(w_obj_type, w_type))) - at cpython_api([PyObject], PyObject) + at cpython_api([PyObject], PyObject, result_borrowed=True) def PyModule_GetDict(space, w_mod): if PyModule_Check(space, w_mod): assert isinstance(w_mod, Module) w_dict = w_mod.getdict(space) - return borrow_from(w_mod, w_dict) + return w_dict # borrowed reference, likely from w_mod.w_dict else: PyErr_BadInternalCall(space) diff --git a/pypy/module/cpyext/object.py b/pypy/module/cpyext/object.py --- a/pypy/module/cpyext/object.py +++ b/pypy/module/cpyext/object.py @@ -6,7 +6,7 @@ Py_GE, CONST_STRING, FILEP, fwrite) from pypy.module.cpyext.pyobject import ( PyObject, PyObjectP, create_ref, from_ref, Py_IncRef, Py_DecRef, - track_reference, get_typedescr, _Py_NewReference, 
RefcountState) + get_typedescr, _Py_NewReference) from pypy.module.cpyext.typeobject import PyTypeObjectPtr from pypy.module.cpyext.pyerrors import PyErr_NoMemory, PyErr_BadInternalCall from pypy.objspace.std.typeobject import W_TypeObject @@ -35,9 +35,9 @@ def _PyObject_NewVar(space, type, itemcount): w_type = from_ref(space, rffi.cast(PyObject, type)) assert isinstance(w_type, W_TypeObject) - typedescr = get_typedescr(w_type.instancetypedef) + typedescr = get_typedescr(w_type.layout.typedef) py_obj = typedescr.allocate(space, w_type, itemcount=itemcount) - py_obj.c_ob_refcnt = 0 + #py_obj.c_ob_refcnt = 0 --- will be set to 1 again by PyObject_Init{Var} if type.c_tp_itemsize == 0: w_obj = PyObject_Init(space, py_obj, type) else: diff --git a/pypy/module/cpyext/pyerrors.py b/pypy/module/cpyext/pyerrors.py --- a/pypy/module/cpyext/pyerrors.py +++ b/pypy/module/cpyext/pyerrors.py @@ -6,7 +6,7 @@ from pypy.module.cpyext.api import cpython_api, CANNOT_FAIL, CONST_STRING from pypy.module.exceptions.interp_exceptions import W_RuntimeWarning from pypy.module.cpyext.pyobject import ( - PyObject, PyObjectP, make_ref, from_ref, Py_DecRef, borrow_from) + PyObject, PyObjectP, make_ref, from_ref, Py_DecRef) from pypy.module.cpyext.state import State from pypy.module.cpyext.import_ import PyImport_Import from rpython.rlib import rposix, jit @@ -28,12 +28,12 @@ """This is a shorthand for PyErr_SetObject(type, Py_None).""" PyErr_SetObject(space, w_type, space.w_None) - at cpython_api([], PyObject) + at cpython_api([], PyObject, result_borrowed=True) def PyErr_Occurred(space): state = space.fromcache(State) if state.operror is None: return None - return borrow_from(None, state.operror.w_type) + return state.operror.w_type # borrowed ref @cpython_api([], lltype.Void) def PyErr_Clear(space): diff --git a/pypy/module/cpyext/pyfile.py b/pypy/module/cpyext/pyfile.py --- a/pypy/module/cpyext/pyfile.py +++ b/pypy/module/cpyext/pyfile.py @@ -1,7 +1,7 @@ from rpython.rtyper.lltypesystem 
import rffi, lltype from pypy.module.cpyext.api import ( - cpython_api, CANNOT_FAIL, CONST_STRING, FILEP) -from pypy.module.cpyext.pyobject import PyObject, borrow_from + cpython_api, CONST_STRING, FILEP) +from pypy.module.cpyext.pyobject import PyObject from pypy.module.cpyext.object import Py_PRINT_RAW from pypy.interpreter.error import OperationError @@ -80,4 +80,5 @@ @cpython_api([PyObject], PyObject) def PyFile_Name(space, w_p): """Return the name of the file specified by p as a string object.""" - return borrow_from(w_p, space.getattr(w_p, space.wrap("name"))) + w_name = space.getattr(w_p, space.wrap("name")) + return w_name # borrowed ref, should be a W_StringObject from the file diff --git a/pypy/module/cpyext/pyobject.py b/pypy/module/cpyext/pyobject.py --- a/pypy/module/cpyext/pyobject.py +++ b/pypy/module/cpyext/pyobject.py @@ -2,15 +2,19 @@ from pypy.interpreter.baseobjspace import W_Root, SpaceCache from rpython.rtyper.lltypesystem import rffi, lltype +from rpython.rtyper.extregistry import ExtRegistryEntry from pypy.module.cpyext.api import ( cpython_api, bootstrap_function, PyObject, PyObjectP, ADDR, - CANNOT_FAIL, Py_TPFLAGS_HEAPTYPE, PyTypeObjectPtr) + CANNOT_FAIL, Py_TPFLAGS_HEAPTYPE, PyTypeObjectPtr, is_PyObject, + INTERPLEVEL_API) from pypy.module.cpyext.state import State from pypy.objspace.std.typeobject import W_TypeObject from pypy.objspace.std.objectobject import W_ObjectObject from rpython.rlib.objectmodel import specialize, we_are_translated -from rpython.rlib.rweakref import RWeakKeyDictionary +from rpython.rlib.objectmodel import keepalive_until_here from rpython.rtyper.annlowlevel import llhelper +from rpython.rlib import rawrefcount + #________________________________________________________ # type description @@ -28,13 +32,15 @@ def allocate(self, space, w_type, itemcount=0): # similar to PyType_GenericAlloc? # except that it's not related to any pypy object. + # this returns a PyObject with ob_refcnt == 1. 
- pytype = rffi.cast(PyTypeObjectPtr, make_ref(space, w_type)) + pytype = as_pyobj(space, w_type) + pytype = rffi.cast(PyTypeObjectPtr, pytype) + assert pytype # Don't increase refcount for non-heaptypes - if pytype: - flags = rffi.cast(lltype.Signed, pytype.c_tp_flags) - if not flags & Py_TPFLAGS_HEAPTYPE: - Py_DecRef(space, w_type) + flags = rffi.cast(lltype.Signed, pytype.c_tp_flags) + if flags & Py_TPFLAGS_HEAPTYPE: + Py_IncRef(space, w_type) if pytype: size = pytype.c_tp_basicsize @@ -42,6 +48,7 @@ size = rffi.sizeof(self.basestruct) if itemcount: size += itemcount * pytype.c_tp_itemsize + assert size >= rffi.sizeof(PyObject.TO) buf = lltype.malloc(rffi.VOIDP.TO, size, flavor='raw', zero=True) pyobj = rffi.cast(PyObject, buf) @@ -56,9 +63,6 @@ w_type = from_ref(space, rffi.cast(PyObject, obj.c_ob_type)) w_obj = space.allocate_instance(self.W_BaseObject, w_type) track_reference(space, obj, w_obj) - if w_type is not space.gettypefor(self.W_BaseObject): - state = space.fromcache(RefcountState) - state.set_lifeline(w_obj, obj) return w_obj typedescr_cache = {} @@ -111,7 +115,7 @@ def init_pyobject(space): from pypy.module.cpyext.object import PyObject_dealloc # typedescr for the 'object' type - make_typedescr(space.w_object.instancetypedef, + make_typedescr(space.w_object.layout.typedef, dealloc=PyObject_dealloc) # almost all types, which should better inherit from object. 
make_typedescr(None) @@ -134,104 +138,6 @@ #________________________________________________________ # refcounted object support -class RefcountState: - def __init__(self, space): - self.space = space - self.py_objects_w2r = {} # { w_obj -> raw PyObject } - self.py_objects_r2w = {} # { addr of raw PyObject -> w_obj } - - self.lifeline_dict = RWeakKeyDictionary(W_Root, PyOLifeline) - - self.borrow_mapping = {None: {}} - # { w_container -> { w_containee -> None } } - # the None entry manages references borrowed during a call to - # generic_cpy_call() - - # For tests - self.non_heaptypes_w = [] - - def _cleanup_(self): - assert self.borrow_mapping == {None: {}} - self.py_objects_r2w.clear() # is not valid anymore after translation - - def init_r2w_from_w2r(self): - """Rebuilds the dict py_objects_r2w on startup""" - for w_obj, obj in self.py_objects_w2r.items(): - ptr = rffi.cast(ADDR, obj) - self.py_objects_r2w[ptr] = w_obj - - def print_refcounts(self): - print "REFCOUNTS" - for w_obj, obj in self.py_objects_w2r.items(): - print "%r: %i" % (w_obj, obj.c_ob_refcnt) - - def get_from_lifeline(self, w_obj): - lifeline = self.lifeline_dict.get(w_obj) - if lifeline is not None: # make old PyObject ready for use in C code - py_obj = lifeline.pyo - assert py_obj.c_ob_refcnt == 0 - return py_obj - else: - return lltype.nullptr(PyObject.TO) - - def set_lifeline(self, w_obj, py_obj): - self.lifeline_dict.set(w_obj, - PyOLifeline(self.space, py_obj)) - - def make_borrowed(self, w_container, w_borrowed): - """ - Create a borrowed reference, which will live as long as the container - has a living reference (as a PyObject!) 
- """ - ref = make_ref(self.space, w_borrowed) - obj_ptr = rffi.cast(ADDR, ref) - - borrowees = self.borrow_mapping.setdefault(w_container, {}) - if w_borrowed in borrowees: - Py_DecRef(self.space, w_borrowed) # cancel incref from make_ref() - else: - borrowees[w_borrowed] = None - - return ref - - def reset_borrowed_references(self): - "Used in tests" - for w_container, w_borrowed in self.borrow_mapping.items(): - Py_DecRef(self.space, w_borrowed) - self.borrow_mapping = {None: {}} - - def delete_borrower(self, w_obj): - """ - Called when a potential container for borrowed references has lost its - last reference. Removes the borrowed references it contains. - """ - if w_obj in self.borrow_mapping: # move to lifeline __del__ - for w_containee in self.borrow_mapping[w_obj]: - self.forget_borrowee(w_containee) - del self.borrow_mapping[w_obj] - - def swap_borrow_container(self, container): - """switch the current default contained with the given one.""" - if container is None: - old_container = self.borrow_mapping[None] - self.borrow_mapping[None] = {} - return old_container - else: - old_container = self.borrow_mapping[None] - self.borrow_mapping[None] = container - for w_containee in old_container: - self.forget_borrowee(w_containee) - - def forget_borrowee(self, w_obj): - "De-register an object from the list of borrowed references" - ref = self.py_objects_w2r.get(w_obj, lltype.nullptr(PyObject.TO)) - if not ref: - if DEBUG_REFCOUNT: - print >>sys.stderr, "Borrowed object is already gone!" - return - - Py_DecRef(self.space, ref) - class InvalidPointerException(Exception): pass @@ -249,55 +155,37 @@ def create_ref(space, w_obj, itemcount=0): """ Allocates a PyObject, and fills its fields with info from the given - intepreter object. + interpreter object. 
""" - state = space.fromcache(RefcountState) w_type = space.type(w_obj) - if w_type.is_cpytype(): - py_obj = state.get_from_lifeline(w_obj) - if py_obj: - Py_IncRef(space, py_obj) - return py_obj - typedescr = get_typedescr(w_obj.typedef) py_obj = typedescr.allocate(space, w_type, itemcount=itemcount) - if w_type.is_cpytype(): - state.set_lifeline(w_obj, py_obj) + track_reference(space, py_obj, w_obj) + # + # py_obj.c_ob_refcnt should be exactly REFCNT_FROM_PYPY + 1 here, + # and we want only REFCNT_FROM_PYPY, i.e. only count as attached + # to the W_Root but not with any reference from the py_obj side. + assert py_obj.c_ob_refcnt > rawrefcount.REFCNT_FROM_PYPY + py_obj.c_ob_refcnt -= 1 + # typedescr.attach(space, py_obj, w_obj) return py_obj -def track_reference(space, py_obj, w_obj, replace=False): +def track_reference(space, py_obj, w_obj): """ Ties together a PyObject and an interpreter object. + The PyObject's refcnt is increased by REFCNT_FROM_PYPY. + The reference in 'py_obj' is not stolen! Remember to Py_DecRef() + it is you need to. """ # XXX looks like a PyObject_GC_TRACK - ptr = rffi.cast(ADDR, py_obj) - state = space.fromcache(RefcountState) + assert py_obj.c_ob_refcnt < rawrefcount.REFCNT_FROM_PYPY + py_obj.c_ob_refcnt += rawrefcount.REFCNT_FROM_PYPY if DEBUG_REFCOUNT: debug_refcount("MAKREF", py_obj, w_obj) - if not replace: - assert w_obj not in state.py_objects_w2r - assert ptr not in state.py_objects_r2w - state.py_objects_w2r[w_obj] = py_obj - if ptr: # init_typeobject() bootstraps with NULL references - state.py_objects_r2w[ptr] = w_obj - -def make_ref(space, w_obj): - """ - Returns a new reference to an intepreter object. 
- """ - if w_obj is None: - return lltype.nullptr(PyObject.TO) - assert isinstance(w_obj, W_Root) - state = space.fromcache(RefcountState) - try: - py_obj = state.py_objects_w2r[w_obj] - except KeyError: - py_obj = create_ref(space, w_obj) - track_reference(space, py_obj, w_obj) - else: - Py_IncRef(space, py_obj) - return py_obj + assert w_obj + assert py_obj + rawrefcount.create_link_pypy(w_obj, py_obj) def from_ref(space, ref): @@ -305,16 +193,12 @@ Finds the interpreter object corresponding to the given reference. If the object is not yet realized (see bytesobject.py), creates it. """ - assert lltype.typeOf(ref) == PyObject + assert is_pyobj(ref) if not ref: return None - state = space.fromcache(RefcountState) - ptr = rffi.cast(ADDR, ref) - - try: - return state.py_objects_r2w[ptr] - except KeyError: - pass + w_obj = rawrefcount.to_obj(W_Root, ref) + if w_obj is not None: + return w_obj # This reference is not yet a real interpreter object. # Realize it. @@ -323,126 +207,135 @@ raise InvalidPointerException(str(ref)) w_type = from_ref(space, ref_type) assert isinstance(w_type, W_TypeObject) - return get_typedescr(w_type.instancetypedef).realize(space, ref) + return get_typedescr(w_type.layout.typedef).realize(space, ref) -# XXX Optimize these functions and put them into macro definitions - at cpython_api([PyObject], lltype.Void) -def Py_DecRef(space, obj): - if not obj: - return - assert lltype.typeOf(obj) == PyObject +def debug_collect(): + rawrefcount._collect() - obj.c_ob_refcnt -= 1 - if DEBUG_REFCOUNT: - debug_refcount("DECREF", obj, obj.c_ob_refcnt, frame_stackdepth=3) - if obj.c_ob_refcnt == 0: - state = space.fromcache(RefcountState) - ptr = rffi.cast(ADDR, obj) - if ptr not in state.py_objects_r2w: - # this is a half-allocated object, lets call the deallocator - # without modifying the r2w/w2r dicts - _Py_Dealloc(space, obj) - else: - w_obj = state.py_objects_r2w[ptr] - del state.py_objects_r2w[ptr] - w_type = space.type(w_obj) - if not 
w_type.is_cpytype(): + +def as_pyobj(space, w_obj): + """ + Returns a 'PyObject *' representing the given intepreter object. + This doesn't give a new reference, but the returned 'PyObject *' + is valid at least as long as 'w_obj' is. **To be safe, you should + use keepalive_until_here(w_obj) some time later.** In case of + doubt, use the safer make_ref(). + """ + if w_obj is not None: + assert not is_pyobj(w_obj) + py_obj = rawrefcount.from_obj(PyObject, w_obj) + if not py_obj: + py_obj = create_ref(space, w_obj) + return py_obj + else: + return lltype.nullptr(PyObject.TO) +as_pyobj._always_inline_ = 'try' +INTERPLEVEL_API['as_pyobj'] = as_pyobj + +def pyobj_has_w_obj(pyobj): + return rawrefcount.to_obj(W_Root, pyobj) is not None +INTERPLEVEL_API['pyobj_has_w_obj'] = staticmethod(pyobj_has_w_obj) + + +def is_pyobj(x): + if x is None or isinstance(x, W_Root): + return False + elif is_PyObject(lltype.typeOf(x)): + return True + else: + raise TypeError(repr(type(x))) +INTERPLEVEL_API['is_pyobj'] = staticmethod(is_pyobj) + +class Entry(ExtRegistryEntry): + _about_ = is_pyobj + def compute_result_annotation(self, s_x): + from rpython.rtyper.llannotation import SomePtr + return self.bookkeeper.immutablevalue(isinstance(s_x, SomePtr)) + def specialize_call(self, hop): + hop.exception_cannot_occur() + return hop.inputconst(lltype.Bool, hop.s_result.const) + + at specialize.ll() +def make_ref(space, obj): + """Increment the reference counter of the PyObject and return it. + Can be called with either a PyObject or a W_Root. 
+ """ + if is_pyobj(obj): + pyobj = rffi.cast(PyObject, obj) + else: + pyobj = as_pyobj(space, obj) + if pyobj: + assert pyobj.c_ob_refcnt > 0 + pyobj.c_ob_refcnt += 1 + if not is_pyobj(obj): + keepalive_until_here(obj) + return pyobj +INTERPLEVEL_API['make_ref'] = make_ref + + + at specialize.ll() +def get_w_obj_and_decref(space, obj): + """Decrement the reference counter of the PyObject and return the + corresponding W_Root object (so the reference count is at least + REFCNT_FROM_PYPY and cannot be zero). Can be called with either + a PyObject or a W_Root. + """ + if is_pyobj(obj): + pyobj = rffi.cast(PyObject, obj) + w_obj = from_ref(space, pyobj) + else: + w_obj = obj + pyobj = as_pyobj(space, w_obj) + if pyobj: + pyobj.c_ob_refcnt -= 1 + assert pyobj.c_ob_refcnt >= rawrefcount.REFCNT_FROM_PYPY + keepalive_until_here(w_obj) + return w_obj +INTERPLEVEL_API['get_w_obj_and_decref'] = get_w_obj_and_decref + + + at specialize.ll() +def incref(space, obj): + make_ref(space, obj) +INTERPLEVEL_API['incref'] = incref + + at specialize.ll() +def decref(space, obj): + if is_pyobj(obj): + obj = rffi.cast(PyObject, obj) + if obj: + assert obj.c_ob_refcnt > 0 + obj.c_ob_refcnt -= 1 + if obj.c_ob_refcnt == 0: _Py_Dealloc(space, obj) - del state.py_objects_w2r[w_obj] - # if the object was a container for borrowed references - state.delete_borrower(w_obj) else: - if not we_are_translated() and obj.c_ob_refcnt < 0: - message = "Negative refcount for obj %s with type %s" % ( - obj, rffi.charp2str(obj.c_ob_type.c_tp_name)) - print >>sys.stderr, message - assert False, message + get_w_obj_and_decref(space, obj) +INTERPLEVEL_API['decref'] = decref + @cpython_api([PyObject], lltype.Void) def Py_IncRef(space, obj): - if not obj: - return - obj.c_ob_refcnt += 1 - assert obj.c_ob_refcnt > 0 - if DEBUG_REFCOUNT: - debug_refcount("INCREF", obj, obj.c_ob_refcnt, frame_stackdepth=3) + incref(space, obj) + + at cpython_api([PyObject], lltype.Void) +def Py_DecRef(space, obj): + decref(space, 
obj) @cpython_api([PyObject], lltype.Void) def _Py_NewReference(space, obj): obj.c_ob_refcnt = 1 w_type = from_ref(space, rffi.cast(PyObject, obj.c_ob_type)) assert isinstance(w_type, W_TypeObject) - get_typedescr(w_type.instancetypedef).realize(space, obj) + get_typedescr(w_type.layout.typedef).realize(space, obj) + at cpython_api([PyObject], lltype.Void) def _Py_Dealloc(space, obj): - from pypy.module.cpyext.api import generic_cpy_call_dont_decref + from pypy.module.cpyext.api import generic_cpy_call pto = obj.c_ob_type #print >>sys.stderr, "Calling dealloc slot", pto.c_tp_dealloc, "of", obj, \ # "'s type which is", rffi.charp2str(pto.c_tp_name) - generic_cpy_call_dont_decref(space, pto.c_tp_dealloc, obj) - -#___________________________________________________________ -# Support for "lifelines" -# -# Object structure must stay alive even when not referenced -# by any C code. - -class PyOLifeline(object): - def __init__(self, space, pyo): - self.pyo = pyo - self.space = space - - def __del__(self): - if self.pyo: - assert self.pyo.c_ob_refcnt == 0 - _Py_Dealloc(self.space, self.pyo) - self.pyo = lltype.nullptr(PyObject.TO) - # XXX handle borrowed objects here - -#___________________________________________________________ -# Support for borrowed references - -def make_borrowed_ref(space, w_container, w_borrowed): - """ - Create a borrowed reference, which will live as long as the container - has a living reference (as a PyObject!) - """ - if w_borrowed is None: - return lltype.nullptr(PyObject.TO) - - state = space.fromcache(RefcountState) - return state.make_borrowed(w_container, w_borrowed) - -class Reference: - def __init__(self, pyobj): - assert not isinstance(pyobj, W_Root) - self.pyobj = pyobj - - def get_ref(self, space): - return self.pyobj - - def get_wrapped(self, space): - return from_ref(space, self.pyobj) - -class BorrowPair(Reference): - """ - Delays the creation of a borrowed reference. 
- """ - def __init__(self, w_container, w_borrowed): - self.w_container = w_container - self.w_borrowed = w_borrowed - - def get_ref(self, space): - return make_borrowed_ref(space, self.w_container, self.w_borrowed) - - def get_wrapped(self, space): - return self.w_borrowed - -def borrow_from(container, borrowed): - return BorrowPair(container, borrowed) - -#___________________________________________________________ + generic_cpy_call(space, pto.c_tp_dealloc, obj) @cpython_api([rffi.VOIDP], lltype.Signed, error=CANNOT_FAIL) def _Py_HashPointer(space, ptr): diff --git a/pypy/module/cpyext/pypyintf.py b/pypy/module/cpyext/pypyintf.py deleted file mode 100644 --- a/pypy/module/cpyext/pypyintf.py +++ /dev/null @@ -1,9 +0,0 @@ -from pypy.module.cpyext.api import cpython_api -from pypy.module.cpyext.pyobject import PyObject, borrow_from - - - at cpython_api([PyObject, PyObject], PyObject) -def PyPy_Borrow(space, w_parentobj, w_obj): - """Returns a borrowed reference to 'obj', borrowing from the 'parentobj'. 
- """ - return borrow_from(w_parentobj, w_obj) diff --git a/pypy/module/cpyext/pytraceback.py b/pypy/module/cpyext/pytraceback.py --- a/pypy/module/cpyext/pytraceback.py +++ b/pypy/module/cpyext/pytraceback.py @@ -3,7 +3,7 @@ PyObjectFields, generic_cpy_call, CONST_STRING, CANNOT_FAIL, Py_ssize_t, cpython_api, bootstrap_function, cpython_struct, build_type_checkers) from pypy.module.cpyext.pyobject import ( - PyObject, make_ref, from_ref, Py_DecRef, make_typedescr, borrow_from) + PyObject, make_ref, from_ref, Py_DecRef, make_typedescr) from pypy.module.cpyext.frameobject import PyFrameObject from rpython.rlib.unroll import unrolling_iterable from pypy.interpreter.error import OperationError diff --git a/pypy/module/cpyext/sequence.py b/pypy/module/cpyext/sequence.py --- a/pypy/module/cpyext/sequence.py +++ b/pypy/module/cpyext/sequence.py @@ -2,7 +2,7 @@ from pypy.interpreter.error import OperationError, oefmt from pypy.module.cpyext.api import ( cpython_api, CANNOT_FAIL, CONST_STRING, Py_ssize_t) -from pypy.module.cpyext.pyobject import PyObject, borrow_from +from pypy.module.cpyext.pyobject import PyObject from rpython.rtyper.lltypesystem import rffi, lltype from pypy.objspace.std import listobject, tupleobject @@ -42,15 +42,19 @@ which case o is returned. Use PySequence_Fast_GET_ITEM() to access the members of the result. Returns NULL on failure. If the object is not a sequence, raises TypeError with m as the message text.""" - if (isinstance(w_obj, listobject.W_ListObject) or - isinstance(w_obj, tupleobject.W_TupleObject)): + if isinstance(w_obj, listobject.W_ListObject): + # make sure we can return a borrowed obj from PySequence_Fast_GET_ITEM + # XXX how does this interact with CPyListStrategy? 
+ w_obj.ensure_object_strategy() + return w_obj + if isinstance(w_obj, tupleobject.W_TupleObject): return w_obj try: return tupleobject.W_TupleObject(space.fixedview(w_obj)) except OperationError: raise OperationError(space.w_TypeError, space.wrap(rffi.charp2str(m))) - at cpython_api([PyObject, Py_ssize_t], PyObject) + at cpython_api([PyObject, Py_ssize_t], PyObject, result_borrowed=True) def PySequence_Fast_GET_ITEM(space, w_obj, index): """Return the ith element of o, assuming that o was returned by PySequence_Fast(), o is not NULL, and that i is within bounds. @@ -60,7 +64,7 @@ else: assert isinstance(w_obj, tupleobject.W_TupleObject) w_res = w_obj.wrappeditems[index] - return borrow_from(w_obj, w_res) + return w_res # borrowed ref @cpython_api([PyObject], Py_ssize_t, error=CANNOT_FAIL) def PySequence_Fast_GET_SIZE(space, w_obj): diff --git a/pypy/module/cpyext/setobject.py b/pypy/module/cpyext/setobject.py --- a/pypy/module/cpyext/setobject.py +++ b/pypy/module/cpyext/setobject.py @@ -3,7 +3,7 @@ from pypy.module.cpyext.api import (cpython_api, Py_ssize_t, CANNOT_FAIL, build_type_checkers) from pypy.module.cpyext.pyobject import (PyObject, PyObjectP, Py_DecRef, - borrow_from, make_ref, from_ref) + make_ref, from_ref) from pypy.module.cpyext.pyerrors import PyErr_BadInternalCall from pypy.objspace.std.setobject import W_SetObject, newset diff --git a/pypy/module/cpyext/slotdefs.py b/pypy/module/cpyext/slotdefs.py --- a/pypy/module/cpyext/slotdefs.py +++ b/pypy/module/cpyext/slotdefs.py @@ -31,17 +31,16 @@ Py_GE = 5 -def check_num_args(space, ob, n): - from pypy.module.cpyext.tupleobject import PyTuple_CheckExact, \ - PyTuple_GET_SIZE - if not PyTuple_CheckExact(space, ob): +def check_num_args(space, w_ob, n): + from pypy.module.cpyext.tupleobject import PyTuple_CheckExact + if not PyTuple_CheckExact(space, w_ob): raise OperationError(space.w_SystemError, space.wrap("PyArg_UnpackTuple() argument list is not a tuple")) - if n == PyTuple_GET_SIZE(space, ob): + if n 
== space.len_w(w_ob): return raise oefmt(space.w_TypeError, From pypy.commits at gmail.com Tue Feb 23 17:06:53 2016 From: pypy.commits at gmail.com (mjacob) Date: Tue, 23 Feb 2016 14:06:53 -0800 (PST) Subject: [pypy-commit] pypy py3.3: hg merge py3k Message-ID: <56ccd7fd.42711c0a.cdca3.2047@mx.google.com> Author: Manuel Jacob Branch: py3.3 Changeset: r82463:a21adf0cd8fd Date: 2016-02-23 22:43 +0100 http://bitbucket.org/pypy/pypy/changeset/a21adf0cd8fd/ Log: hg merge py3k diff too long, truncating to 2000 out of 5520 lines diff --git a/lib_pypy/_pypy_testcapi.py b/lib_pypy/_pypy_testcapi.py --- a/lib_pypy/_pypy_testcapi.py +++ b/lib_pypy/_pypy_testcapi.py @@ -8,6 +8,7 @@ content = fid.read() # from cffi's Verifier() key = '\x00'.join([sys.version[:3], content]) + key += 'cpyext-gc-support-2' # this branch requires recompilation! if sys.version_info >= (3,): key = key.encode('utf-8') k1 = hex(binascii.crc32(key[0::2]) & 0xffffffff) diff --git a/pypy/doc/discussion/rawrefcount.rst b/pypy/doc/discussion/rawrefcount.rst new file mode 100644 --- /dev/null +++ b/pypy/doc/discussion/rawrefcount.rst @@ -0,0 +1,158 @@ +====================== +Rawrefcount and the GC +====================== + + +GC Interface +------------ + +"PyObject" is a raw structure with at least two fields, ob_refcnt and +ob_pypy_link. The ob_refcnt is the reference counter as used on +CPython. If the PyObject structure is linked to a live PyPy object, +its current address is stored in ob_pypy_link and ob_refcnt is bumped +by either the constant REFCNT_FROM_PYPY, or the constant +REFCNT_FROM_PYPY_LIGHT (== REFCNT_FROM_PYPY + SOME_HUGE_VALUE) +(to mean "light finalizer"). + +Most PyPy objects exist outside cpyext, and conversely in cpyext it is +possible that a lot of PyObjects exist without being seen by the rest +of PyPy. At the interface, however, we can "link" a PyPy object and a +PyObject. 
There are two kinds of link: + +rawrefcount.create_link_pypy(p, ob) + + Makes a link between an exising object gcref 'p' and a newly + allocated PyObject structure 'ob'. ob->ob_refcnt must be + initialized to either REFCNT_FROM_PYPY, or + REFCNT_FROM_PYPY_LIGHT. (The second case is an optimization: + when the GC finds the PyPy object and PyObject no longer + referenced, it can just free() the PyObject.) + +rawrefcount.create_link_pyobj(p, ob) + + Makes a link from an existing PyObject structure 'ob' to a newly + allocated W_CPyExtPlaceHolderObject 'p'. You must also add + REFCNT_FROM_PYPY to ob->ob_refcnt. For cases where the PyObject + contains all the data, and the PyPy object is just a proxy. The + W_CPyExtPlaceHolderObject should have only a field that contains + the address of the PyObject, but that's outside the scope of the + GC. + +rawrefcount.from_obj(p) + + If there is a link from object 'p' made with create_link_pypy(), + returns the corresponding 'ob'. Otherwise, returns NULL. + +rawrefcount.to_obj(Class, ob) + + Returns ob->ob_pypy_link, cast to an instance of 'Class'. + + +Collection logic +---------------- + +Objects existing purely on the C side have ob->ob_pypy_link == 0; +these are purely reference counted. On the other hand, if +ob->ob_pypy_link != 0, then ob->ob_refcnt is at least REFCNT_FROM_PYPY +and the object is part of a "link". + +The idea is that links whose 'p' is not reachable from other PyPy +objects *and* whose 'ob->ob_refcnt' is REFCNT_FROM_PYPY or +REFCNT_FROM_PYPY_LIGHT are the ones who die. But it is more messy +because PyObjects still (usually) need to have a tp_dealloc called, +and this cannot occur immediately (and can do random things like +accessing other references this object points to, or resurrecting the +object). + +Let P = list of links created with rawrefcount.create_link_pypy() +and O = list of links created with rawrefcount.create_link_pyobj(). 
+The PyPy objects in the list O are all W_CPyExtPlaceHolderObject: all +the data is in the PyObjects, and all outsite references (if any) are +in C, as "PyObject *" fields. + +So, during the collection we do this about P links: + + for (p, ob) in P: + if ob->ob_refcnt != REFCNT_FROM_PYPY + and ob->ob_refcnt != REFCNT_FROM_PYPY_LIGHT: + mark 'p' as surviving, as well as all its dependencies + +At the end of the collection, the P and O links are both handled like +this: + + for (p, ob) in P + O: + if p is not surviving: # even if 'ob' might be surviving + unlink p and ob + if ob->ob_refcnt == REFCNT_FROM_PYPY_LIGHT: + free(ob) + elif ob->ob_refcnt > REFCNT_FROM_PYPY_LIGHT: + ob->ob_refcnt -= REFCNT_FROM_PYPY_LIGHT + else: + ob->ob_refcnt -= REFCNT_FROM_PYPY + if ob->ob_refcnt == 0: + invoke _Py_Dealloc(ob) later, outside the GC + + +GC Implementation +----------------- + +We need two copies of both the P list and O list, for young or old +objects. All four lists can be regular AddressLists of 'ob' objects. + +We also need an AddressDict mapping 'p' to 'ob' for all links in the P +list, and update it when PyPy objects move. + + +Further notes +------------- + +XXX +XXX the rest is the ideal world, but as a first step, we'll look +XXX for the minimal tweaks needed to adapt the existing cpyext +XXX + +For objects that are opaque in CPython, like , we always create +a PyPy object, and then when needed we make an empty PyObject and +attach it with create_link_pypy()/REFCNT_FROM_PYPY_LIGHT. + +For and objects, the corresponding PyObjects contain a +"long" or "double" field too. We link them with create_link_pypy() +and we can use REFCNT_FROM_PYPY_LIGHT too: 'tp_dealloc' doesn't +need to be called, and instead just calling free() is fine. + +For objects, we need both a PyPy and a PyObject side. These +are made with create_link_pypy()/REFCNT_FROM_PYPY. + +For custom PyXxxObjects allocated from the C extension module, we +need create_link_pyobj(). 
+ +For or objects coming from PyPy, we use +create_link_pypy()/REFCNT_FROM_PYPY_LIGHT with a PyObject +preallocated with the size of the string. We copy the string +lazily into that area if PyString_AS_STRING() is called. + +For , , or objects in the C extension +module, we first allocate it as only a PyObject, which supports +mutation of the data from C, like CPython. When it is exported to +PyPy we could make a W_CPyExtPlaceHolderObject with +create_link_pyobj(). + +For objects coming from PyPy, if they are not specialized, +then the PyPy side holds a regular reference to the items. Then we +can allocate a PyTupleObject and store in it borrowed PyObject +pointers to the items. Such a case is created with +create_link_pypy()/REFCNT_FROM_PYPY_LIGHT. If it is specialized, +then it doesn't work because the items are created just-in-time on the +PyPy side. In this case, the PyTupleObject needs to hold real +references to the PyObject items, and we use create_link_pypy()/ +REFCNT_FROM_PYPY. In all cases, we have a C array of PyObjects +that we can directly return from PySequence_Fast_ITEMS, PyTuple_ITEMS, +PyTuple_GetItem, and so on. + +For objects coming from PyPy, we can use a cpyext list +strategy. The list turns into a PyListObject, as if it had been +allocated from C in the first place. The special strategy can hold +(only) a direct reference to the PyListObject, and we can use either +create_link_pyobj() or create_link_pypy() (to be decided). +PySequence_Fast_ITEMS then works for lists too, and PyList_GetItem +can return a borrowed reference, and so on. 
diff --git a/pypy/interpreter/baseobjspace.py b/pypy/interpreter/baseobjspace.py --- a/pypy/interpreter/baseobjspace.py +++ b/pypy/interpreter/baseobjspace.py @@ -51,7 +51,7 @@ class W_Root(object): """This is the abstract root class of all wrapped objects that live in a 'normal' object space like StdObjSpace.""" - __slots__ = () + __slots__ = ('__weakref__',) user_overridden_class = False def getdict(self, space): diff --git a/pypy/interpreter/typedef.py b/pypy/interpreter/typedef.py --- a/pypy/interpreter/typedef.py +++ b/pypy/interpreter/typedef.py @@ -248,7 +248,7 @@ def user_setup(self, space, w_subtype): self.space = space self.w__class__ = w_subtype - self.user_setup_slots(w_subtype.nslots) + self.user_setup_slots(w_subtype.layout.nslots) def user_setup_slots(self, nslots): assert nslots == 0 diff --git a/pypy/module/cpyext/__init__.py b/pypy/module/cpyext/__init__.py --- a/pypy/module/cpyext/__init__.py +++ b/pypy/module/cpyext/__init__.py @@ -58,7 +58,6 @@ import pypy.module.cpyext.funcobject import pypy.module.cpyext.frameobject import pypy.module.cpyext.classobject -import pypy.module.cpyext.pypyintf import pypy.module.cpyext.exception import pypy.module.cpyext.memoryobject import pypy.module.cpyext.codecs diff --git a/pypy/module/cpyext/api.py b/pypy/module/cpyext/api.py --- a/pypy/module/cpyext/api.py +++ b/pypy/module/cpyext/api.py @@ -9,7 +9,7 @@ from rpython.rtyper.tool import rffi_platform from rpython.rtyper.lltypesystem import ll2ctypes from rpython.rtyper.annlowlevel import llhelper -from rpython.rlib.objectmodel import we_are_translated +from rpython.rlib.objectmodel import we_are_translated, keepalive_until_here from rpython.translator import cdir from rpython.translator.tool.cbuild import ExternalCompilationInfo from rpython.translator.gensupp import NameManager @@ -29,13 +29,13 @@ from rpython.rlib.rposix import is_valid_fd, validate_fd from rpython.rlib.unroll import unrolling_iterable from rpython.rlib.objectmodel import specialize -from 
rpython.rlib.exports import export_struct from pypy.module import exceptions from pypy.module.exceptions import interp_exceptions # CPython 2.4 compatibility from py.builtin import BaseException from rpython.tool.sourcetools import func_with_new_name from rpython.rtyper.lltypesystem.lloperation import llop +from rpython.rlib import rawrefcount DEBUG_WRAPPER = True @@ -194,7 +194,7 @@ class ApiFunction: def __init__(self, argtypes, restype, callable, error=_NOT_SPECIFIED, - c_name=None, gil=None): + c_name=None, gil=None, result_borrowed=False): self.argtypes = argtypes self.restype = restype self.functype = lltype.Ptr(lltype.FuncType(argtypes, restype)) @@ -211,6 +211,11 @@ self.argnames = argnames[1:] assert len(self.argnames) == len(self.argtypes) self.gil = gil + self.result_borrowed = result_borrowed + # + def get_llhelper(space): + return llhelper(self.functype, self.get_wrapper(space)) + self.get_llhelper = get_llhelper def __repr__(self): return "" % (self.callable.__name__,) @@ -218,13 +223,6 @@ def _freeze_(self): return True - def get_llhelper(self, space): - llh = getattr(self, '_llhelper', None) - if llh is None: - llh = llhelper(self.functype, self.get_wrapper(space)) - self._llhelper = llh - return llh - @specialize.memo() def get_wrapper(self, space): wrapper = getattr(self, '_wrapper', None) @@ -237,7 +235,7 @@ return wrapper def cpython_api(argtypes, restype, error=_NOT_SPECIFIED, header='pypy_decl.h', - gil=None): + gil=None, result_borrowed=False): """ Declares a function to be exported. - `argtypes`, `restype` are lltypes and describe the function signature. 
@@ -266,13 +264,15 @@ rffi.cast(restype, 0) == 0) def decorate(func): + func._always_inline_ = 'try' func_name = func.func_name if header is not None: c_name = None else: c_name = func_name api_function = ApiFunction(argtypes, restype, func, error, - c_name=c_name, gil=gil) + c_name=c_name, gil=gil, + result_borrowed=result_borrowed) func.api_func = api_function if header is not None: @@ -283,6 +283,10 @@ raise ValueError("function %s has no return value for exceptions" % func) def make_unwrapper(catch_exception): + # ZZZ is this whole logic really needed??? It seems to be only + # for RPython code calling PyXxx() functions directly. I would + # think that usually directly calling the function is clean + # enough now names = api_function.argnames types_names_enum_ui = unrolling_iterable(enumerate( zip(api_function.argtypes, @@ -290,56 +294,58 @@ @specialize.ll() def unwrapper(space, *args): - from pypy.module.cpyext.pyobject import Py_DecRef - from pypy.module.cpyext.pyobject import make_ref, from_ref - from pypy.module.cpyext.pyobject import Reference + from pypy.module.cpyext.pyobject import Py_DecRef, is_pyobj + from pypy.module.cpyext.pyobject import from_ref, as_pyobj newargs = () - to_decref = [] + keepalives = () assert len(args) == len(api_function.argtypes) for i, (ARG, is_wrapped) in types_names_enum_ui: input_arg = args[i] if is_PyObject(ARG) and not is_wrapped: - # build a reference - if input_arg is None: - arg = lltype.nullptr(PyObject.TO) - elif isinstance(input_arg, W_Root): - ref = make_ref(space, input_arg) - to_decref.append(ref) - arg = rffi.cast(ARG, ref) + # build a 'PyObject *' (not holding a reference) + if not is_pyobj(input_arg): + keepalives += (input_arg,) + arg = rffi.cast(ARG, as_pyobj(space, input_arg)) + else: + arg = rffi.cast(ARG, input_arg) + elif is_PyObject(ARG) and is_wrapped: + # build a W_Root, possibly from a 'PyObject *' + if is_pyobj(input_arg): + arg = from_ref(space, input_arg) else: arg = input_arg - elif 
is_PyObject(ARG) and is_wrapped: - # convert to a wrapped object - if input_arg is None: - arg = input_arg - elif isinstance(input_arg, W_Root): - arg = input_arg - else: - try: - arg = from_ref(space, - rffi.cast(PyObject, input_arg)) - except TypeError, e: - err = OperationError(space.w_TypeError, - space.wrap( - "could not cast arg to PyObject")) - if not catch_exception: - raise err - state = space.fromcache(State) - state.set_exception(err) - if is_PyObject(restype): - return None - else: - return api_function.error_value + + ## ZZZ: for is_pyobj: + ## try: + ## arg = from_ref(space, + ## rffi.cast(PyObject, input_arg)) + ## except TypeError, e: + ## err = OperationError(space.w_TypeError, + ## space.wrap( + ## "could not cast arg to PyObject")) + ## if not catch_exception: + ## raise err + ## state = space.fromcache(State) + ## state.set_exception(err) + ## if is_PyObject(restype): + ## return None + ## else: + ## return api_function.error_value else: - # convert to a wrapped object + # arg is not declared as PyObject, no magic arg = input_arg newargs += (arg, ) - try: + if not catch_exception: + try: + res = func(space, *newargs) + finally: + keepalive_until_here(*keepalives) + else: + # non-rpython variant + assert not we_are_translated() try: res = func(space, *newargs) except OperationError, e: - if not catch_exception: - raise if not hasattr(api_function, "error_value"): raise state = space.fromcache(State) @@ -348,21 +354,13 @@ return None else: return api_function.error_value - if not we_are_translated(): - got_integer = isinstance(res, (int, long, float)) - assert got_integer == expect_integer,'got %r not integer' % res - if res is None: - return None - elif isinstance(res, Reference): - return res.get_wrapped(space) - else: - return res - finally: - for arg in to_decref: - Py_DecRef(space, arg) + # 'keepalives' is alive here (it's not rpython) + got_integer = isinstance(res, (int, long, float)) + assert got_integer == expect_integer, ( + 'got %r not 
integer' % (res,)) + return res unwrapper.func = func unwrapper.api_func = api_function - unwrapper._always_inline_ = 'try' return unwrapper unwrapper_catch = make_unwrapper(True) @@ -505,7 +503,7 @@ GLOBALS['%s#' % (cpyname, )] = ('PyTypeObject*', pypyexpr) for cpyname in '''PyMethodObject PyListObject PyLongObject - PyDictObject PyTupleObject'''.split(): + PyDictObject'''.split(): FORWARD_DECLS.append('typedef struct { PyObject_HEAD } %s' % (cpyname, )) build_exported_objects() @@ -516,14 +514,16 @@ return {"PyObject*": PyObject, "PyTypeObject*": PyTypeObjectPtr, "PyDateTime_CAPI*": lltype.Ptr(PyDateTime_CAPI)}[ctype] +# Note: as a special case, "PyObject" is the pointer type in RPython, +# corresponding to "PyObject *" in C. We do that only for PyObject. +# For example, "PyTypeObject" is the struct type even in RPython. PyTypeObject = lltype.ForwardReference() PyTypeObjectPtr = lltype.Ptr(PyTypeObject) -# It is important that these PyObjects are allocated in a raw fashion -# Thus we cannot save a forward pointer to the wrapped object -# So we need a forward and backward mapping in our State instance PyObjectStruct = lltype.ForwardReference() PyObject = lltype.Ptr(PyObjectStruct) -PyObjectFields = (("ob_refcnt", lltype.Signed), ("ob_type", PyTypeObjectPtr)) +PyObjectFields = (("ob_refcnt", lltype.Signed), + ("ob_pypy_link", lltype.Signed), + ("ob_type", PyTypeObjectPtr)) PyVarObjectFields = PyObjectFields + (("ob_size", Py_ssize_t), ) cpython_struct('PyObject', PyObjectFields, PyObjectStruct) PyVarObjectStruct = cpython_struct("PyVarObject", PyVarObjectFields) @@ -620,8 +620,8 @@ @specialize.ll() def wrapper(*args): - from pypy.module.cpyext.pyobject import make_ref, from_ref - from pypy.module.cpyext.pyobject import Reference + from pypy.module.cpyext.pyobject import make_ref, from_ref, is_pyobj + from pypy.module.cpyext.pyobject import as_pyobj # we hope that malloc removal removes the newtuple() that is # inserted exactly here by the varargs specializer if 
gil_acquire: @@ -630,6 +630,7 @@ llop.gc_stack_bottom(lltype.Void) # marker for trackgcroot.py retval = fatal_value boxed_args = () + tb = None try: if not we_are_translated() and DEBUG_WRAPPER: print >>sys.stderr, callable, @@ -637,10 +638,8 @@ for i, (typ, is_wrapped) in argtypes_enum_ui: arg = args[i] if is_PyObject(typ) and is_wrapped: - if arg: - arg_conv = from_ref(space, rffi.cast(PyObject, arg)) - else: - arg_conv = None + assert is_pyobj(arg) + arg_conv = from_ref(space, rffi.cast(PyObject, arg)) else: arg_conv = arg boxed_args += (arg_conv, ) @@ -655,6 +654,7 @@ except BaseException, e: failed = True if not we_are_translated(): + tb = sys.exc_info()[2] message = repr(e) import traceback traceback.print_exc() @@ -673,29 +673,34 @@ retval = error_value elif is_PyObject(callable.api_func.restype): - if result is None: - retval = rffi.cast(callable.api_func.restype, - make_ref(space, None)) - elif isinstance(result, Reference): - retval = result.get_ref(space) - elif not rffi._isllptr(result): - retval = rffi.cast(callable.api_func.restype, - make_ref(space, result)) + if is_pyobj(result): + retval = result else: - retval = result + if result is not None: + if callable.api_func.result_borrowed: + retval = as_pyobj(space, result) + else: + retval = make_ref(space, result) + retval = rffi.cast(callable.api_func.restype, retval) + else: + retval = lltype.nullptr(PyObject.TO) elif callable.api_func.restype is not lltype.Void: retval = rffi.cast(callable.api_func.restype, result) except Exception, e: print 'Fatal error in cpyext, CPython compatibility layer, calling', callable.__name__ print 'Either report a bug or consider not using this particular extension' if not we_are_translated(): + if tb is None: + tb = sys.exc_info()[2] import traceback traceback.print_exc() - print str(e) + if sys.stdout == sys.__stdout__: + import pdb; pdb.post_mortem(tb) # we can't do much here, since we're in ctypes, swallow else: print str(e) pypy_debug_catch_fatal_exception() + 
assert False rffi.stackcounter.stacks_counter -= 1 if gil_release: rgil.release() @@ -829,6 +834,19 @@ outputfilename=str(udir / "module_cache" / "pypyapi")) modulename = py.path.local(eci.libraries[-1]) + def dealloc_trigger(): + from pypy.module.cpyext.pyobject import _Py_Dealloc + print 'dealloc_trigger...' + while True: + ob = rawrefcount.next_dead(PyObject) + if not ob: + break + print ob + _Py_Dealloc(space, ob) + print 'dealloc_trigger DONE' + return "RETRY" + rawrefcount.init(dealloc_trigger) + run_bootstrap_functions(space) # load the bridge, and init structure @@ -838,9 +856,9 @@ space.fromcache(State).install_dll(eci) # populate static data - builder = StaticObjectBuilder(space) + builder = space.fromcache(StaticObjectBuilder) for name, (typ, expr) in GLOBALS.iteritems(): - from pypy.module import cpyext + from pypy.module import cpyext # for the eval() below w_obj = eval(expr) if name.endswith('#'): name = name[:-1] @@ -896,27 +914,44 @@ class StaticObjectBuilder: def __init__(self, space): self.space = space - self.to_attach = [] + self.static_pyobjs = [] + self.static_objs_w = [] + self.cpyext_type_init = None + # + # add a "method" that is overridden in setup_library() + # ('self.static_pyobjs' is completely ignored in that case) + self.get_static_pyobjs = lambda: self.static_pyobjs def prepare(self, py_obj, w_obj): - from pypy.module.cpyext.pyobject import track_reference - py_obj.c_ob_refcnt = 1 - track_reference(self.space, py_obj, w_obj) - self.to_attach.append((py_obj, w_obj)) + "NOT_RPYTHON" + if py_obj: + py_obj.c_ob_refcnt = 1 # 1 for kept immortal + self.static_pyobjs.append(py_obj) + self.static_objs_w.append(w_obj) def attach_all(self): + # this is RPython, called once in pypy-c when it imports cpyext from pypy.module.cpyext.pyobject import get_typedescr, make_ref from pypy.module.cpyext.typeobject import finish_type_1, finish_type_2 + from pypy.module.cpyext.pyobject import track_reference + # space = self.space - space._cpyext_type_init 
= [] - for py_obj, w_obj in self.to_attach: + static_pyobjs = self.get_static_pyobjs() + static_objs_w = self.static_objs_w + for i in range(len(static_objs_w)): + track_reference(space, static_pyobjs[i], static_objs_w[i]) + # + self.cpyext_type_init = [] + for i in range(len(static_objs_w)): + py_obj = static_pyobjs[i] + w_obj = static_objs_w[i] w_type = space.type(w_obj) - typedescr = get_typedescr(w_type.instancetypedef) + typedescr = get_typedescr(w_type.layout.typedef) py_obj.c_ob_type = rffi.cast(PyTypeObjectPtr, make_ref(space, w_type)) typedescr.attach(space, py_obj, w_obj) - cpyext_type_init = space._cpyext_type_init - del space._cpyext_type_init + cpyext_type_init = self.cpyext_type_init + self.cpyext_type_init = None for pto, w_type in cpyext_type_init: finish_type_1(space, pto) finish_type_2(space, pto, w_type) @@ -1068,7 +1103,7 @@ if name.endswith('#'): structs.append('%s %s;' % (typ[:-1], name[:-1])) elif name.startswith('PyExc_'): - structs.append('extern PyTypeObject _%s;' % (name,)) + structs.append('PyTypeObject _%s;' % (name,)) structs.append('PyObject* %s = (PyObject*)&_%s;' % (name, name)) elif typ == 'PyDateTime_CAPI*': structs.append('%s %s = NULL;' % (typ, name)) @@ -1120,10 +1155,8 @@ def setup_library(space): "NOT_RPYTHON" - from pypy.module.cpyext.pyobject import make_ref use_micronumpy = setup_micronumpy(space) - - export_symbols = list(FUNCTIONS) + SYMBOLS_C + list(GLOBALS) + export_symbols = sorted(FUNCTIONS) + sorted(SYMBOLS_C) + sorted(GLOBALS) from rpython.translator.c.database import LowLevelDatabase db = LowLevelDatabase() @@ -1139,41 +1172,37 @@ run_bootstrap_functions(space) setup_va_functions(eci) - from pypy.module import cpyext # for eval() below - - # Set up the types. Needs a special case, because of the - # immediate cycle involving 'c_ob_type', and because we don't - # want these types to be Py_TPFLAGS_HEAPTYPE. 
- static_types = {} - for name, (typ, expr) in GLOBALS.items(): - if typ == 'PyTypeObject*': - pto = lltype.malloc(PyTypeObject, immortal=True, - zero=True, flavor='raw') - pto.c_ob_refcnt = 1 - pto.c_tp_basicsize = -1 - static_types[name] = pto - builder = StaticObjectBuilder(space) - for name, pto in static_types.items(): - pto.c_ob_type = static_types['PyType_Type#'] - w_type = eval(GLOBALS[name][1]) - builder.prepare(rffi.cast(PyObject, pto), w_type) - builder.attach_all() - - # populate static data - for name, (typ, expr) in GLOBALS.iteritems(): - name = name.replace("#", "") - if name.startswith('PyExc_'): + # emit uninitialized static data + builder = space.fromcache(StaticObjectBuilder) + lines = ['PyObject *pypy_static_pyobjs[] = {\n'] + include_lines = ['RPY_EXTERN PyObject *pypy_static_pyobjs[];\n'] + for name, (typ, expr) in sorted(GLOBALS.items()): + if name.endswith('#'): + assert typ in ('PyObject*', 'PyTypeObject*', 'PyIntObject*') + typ, name = typ[:-1], name[:-1] + elif name.startswith('PyExc_'): + typ = 'PyTypeObject' name = '_' + name - w_obj = eval(expr) - if typ in ('PyObject*', 'PyTypeObject*'): - struct_ptr = make_ref(space, w_obj) elif typ == 'PyDateTime_CAPI*': continue else: assert False, "Unknown static data: %s %s" % (typ, name) - struct = rffi.cast(get_structtype_for_ctype(typ), struct_ptr)._obj - struct._compilation_info = eci - export_struct(name, struct) + + from pypy.module import cpyext # for the eval() below + w_obj = eval(expr) + builder.prepare(None, w_obj) + lines.append('\t(PyObject *)&%s,\n' % (name,)) + include_lines.append('RPY_EXPORTED %s %s;\n' % (typ, name)) + + lines.append('};\n') + eci2 = CConfig._compilation_info_.merge(ExternalCompilationInfo( + separate_module_sources = [''.join(lines)], + post_include_bits = [''.join(include_lines)], + )) + # override this method to return a pointer to this C array directly + builder.get_static_pyobjs = rffi.CExternVariable( + PyObjectP, 'pypy_static_pyobjs', eci2, 
c_type='PyObject **', + getter_only=True, declare_as_extern=False) for name, func in FUNCTIONS.iteritems(): newname = mangle_name('PyPy', name) or name @@ -1184,6 +1213,10 @@ trunk_include = pypydir.dirpath() / 'include' copy_header_files(trunk_include, use_micronumpy) +def init_static_data_translated(space): + builder = space.fromcache(StaticObjectBuilder) + builder.attach_all() + def _load_from_cffi(space, name, path, initptr): from pypy.module._cffi_backend import cffi1_module cffi1_module.load_cffi1_module(space, name, path, initptr) @@ -1266,22 +1299,18 @@ @specialize.ll() def generic_cpy_call(space, func, *args): FT = lltype.typeOf(func).TO - return make_generic_cpy_call(FT, True, False)(space, func, *args) - - at specialize.ll() -def generic_cpy_call_dont_decref(space, func, *args): - FT = lltype.typeOf(func).TO - return make_generic_cpy_call(FT, False, False)(space, func, *args) + return make_generic_cpy_call(FT, False)(space, func, *args) @specialize.ll() def generic_cpy_call_expect_null(space, func, *args): FT = lltype.typeOf(func).TO - return make_generic_cpy_call(FT, True, True)(space, func, *args) + return make_generic_cpy_call(FT, True)(space, func, *args) @specialize.memo() -def make_generic_cpy_call(FT, decref_args, expect_null): +def make_generic_cpy_call(FT, expect_null): from pypy.module.cpyext.pyobject import make_ref, from_ref, Py_DecRef - from pypy.module.cpyext.pyobject import RefcountState + from pypy.module.cpyext.pyobject import is_pyobj, as_pyobj + from pypy.module.cpyext.pyobject import get_w_obj_and_decref from pypy.module.cpyext.pyerrors import PyErr_Occurred unrolling_arg_types = unrolling_iterable(enumerate(FT.ARGS)) RESULT_TYPE = FT.RESULT @@ -1309,65 +1338,49 @@ @specialize.ll() def generic_cpy_call(space, func, *args): boxed_args = () - to_decref = [] + keepalives = () assert len(args) == len(FT.ARGS) for i, ARG in unrolling_arg_types: arg = args[i] if is_PyObject(ARG): - if arg is None: - boxed_args += 
(lltype.nullptr(PyObject.TO),) - elif isinstance(arg, W_Root): - ref = make_ref(space, arg) - boxed_args += (ref,) - if decref_args: - to_decref.append(ref) - else: - boxed_args += (arg,) - else: - boxed_args += (arg,) + if not is_pyobj(arg): + keepalives += (arg,) + arg = as_pyobj(space, arg) + boxed_args += (arg,) try: - # create a new container for borrowed references - state = space.fromcache(RefcountState) - old_container = state.swap_borrow_container(None) - try: - # Call the function - result = call_external_function(func, *boxed_args) - finally: - state.swap_borrow_container(old_container) + # Call the function + result = call_external_function(func, *boxed_args) + finally: + keepalive_until_here(*keepalives) - if is_PyObject(RESULT_TYPE): - if result is None: - ret = result - elif isinstance(result, W_Root): - ret = result + if is_PyObject(RESULT_TYPE): + if not is_pyobj(result): + ret = result + else: + # The object reference returned from a C function + # that is called from Python must be an owned reference + # - ownership is transferred from the function to its caller. + if result: + ret = get_w_obj_and_decref(space, result) else: - ret = from_ref(space, result) - # The object reference returned from a C function - # that is called from Python must be an owned reference - # - ownership is transferred from the function to its caller. 
- if result: - Py_DecRef(space, result) + ret = None - # Check for exception consistency - has_error = PyErr_Occurred(space) is not None - has_result = ret is not None - if has_error and has_result: - raise OperationError(space.w_SystemError, space.wrap( - "An exception was set, but function returned a value")) - elif not expect_null and not has_error and not has_result: - raise OperationError(space.w_SystemError, space.wrap( - "Function returned a NULL result without setting an exception")) + # Check for exception consistency + has_error = PyErr_Occurred(space) is not None + has_result = ret is not None + if has_error and has_result: + raise OperationError(space.w_SystemError, space.wrap( + "An exception was set, but function returned a value")) + elif not expect_null and not has_error and not has_result: + raise OperationError(space.w_SystemError, space.wrap( + "Function returned a NULL result without setting an exception")) - if has_error: - state = space.fromcache(State) - state.check_and_raise_exception() + if has_error: + state = space.fromcache(State) + state.check_and_raise_exception() - return ret - return result - finally: - if decref_args: - for ref in to_decref: - Py_DecRef(space, ref) + return ret + return result + return generic_cpy_call - diff --git a/pypy/module/cpyext/bytesobject.py b/pypy/module/cpyext/bytesobject.py --- a/pypy/module/cpyext/bytesobject.py +++ b/pypy/module/cpyext/bytesobject.py @@ -8,7 +8,6 @@ PyObject, PyObjectP, Py_DecRef, make_ref, from_ref, track_reference, make_typedescr, get_typedescr) - ## ## Implementation of PyBytesObject ## ================================ @@ -60,7 +59,7 @@ @bootstrap_function def init_bytesobject(space): "Type description of PyBytesObject" - make_typedescr(space.w_str.instancetypedef, + make_typedescr(space.w_str.layout.typedef, basestruct=PyBytesObject.TO, attach=bytes_attach, dealloc=bytes_dealloc, @@ -70,11 +69,11 @@ def new_empty_str(space, length): """ - Allocates a PyBytesObject and its buffer, 
but without a corresponding + Allocate a PyBytesObject and its buffer, but without a corresponding interpreter object. The buffer may be mutated, until bytes_realize() is - called. + called. Refcount of the result is 1. """ - typedescr = get_typedescr(space.w_bytes.instancetypedef) + typedescr = get_typedescr(space.w_bytes.layout.typedef) py_obj = typedescr.allocate(space, space.w_bytes) py_str = rffi.cast(PyBytesObject, py_obj) @@ -144,8 +143,6 @@ ref_str.c_buffer = rffi.str2charp(s) return ref_str.c_buffer -#_______________________________________________________________________ - @cpython_api([PyObject, rffi.CCHARPP, rffi.CArrayPtr(Py_ssize_t)], rffi.INT_real, error=-1) def PyBytes_AsStringAndSize(space, ref, buffer, length): if not PyBytes_Check(space, ref): @@ -228,9 +225,9 @@ if w_newpart is None or not PyBytes_Check(space, ref[0]) or \ not PyBytes_Check(space, w_newpart): - Py_DecRef(space, ref[0]) - ref[0] = lltype.nullptr(PyObject.TO) - return + Py_DecRef(space, ref[0]) + ref[0] = lltype.nullptr(PyObject.TO) + return w_str = from_ref(space, ref[0]) w_newstr = space.add(w_str, w_newpart) Py_DecRef(space, ref[0]) diff --git a/pypy/module/cpyext/complexobject.py b/pypy/module/cpyext/complexobject.py --- a/pypy/module/cpyext/complexobject.py +++ b/pypy/module/cpyext/complexobject.py @@ -43,7 +43,7 @@ # lltype does not handle functions returning a structure. This implements a # helper function, which takes as argument a reference to the return value. - at cpython_api([PyObject, Py_complex_ptr], lltype.Void) + at cpython_api([PyObject, Py_complex_ptr], rffi.INT_real, error=-1) def _PyComplex_AsCComplex(space, w_obj, result): """Return the Py_complex value of the complex number op. @@ -60,7 +60,7 @@ # if the above did not work, interpret obj as a float giving the # real part of the result, and fill in the imaginary part as 0. 
result.c_real = PyFloat_AsDouble(space, w_obj) # -1 on failure - return + return 0 if not PyComplex_Check(space, w_obj): raise OperationError(space.w_TypeError, space.wrap( @@ -69,3 +69,4 @@ assert isinstance(w_obj, W_ComplexObject) result.c_real = w_obj.realval result.c_imag = w_obj.imagval + return 0 diff --git a/pypy/module/cpyext/dictobject.py b/pypy/module/cpyext/dictobject.py --- a/pypy/module/cpyext/dictobject.py +++ b/pypy/module/cpyext/dictobject.py @@ -2,8 +2,7 @@ from pypy.module.cpyext.api import ( cpython_api, CANNOT_FAIL, build_type_checkers, Py_ssize_t, Py_ssize_tP, CONST_STRING) -from pypy.module.cpyext.pyobject import PyObject, PyObjectP, borrow_from -from pypy.module.cpyext.pyobject import RefcountState +from pypy.module.cpyext.pyobject import PyObject, PyObjectP, as_pyobj from pypy.module.cpyext.pyerrors import PyErr_BadInternalCall from pypy.module.cpyext.dictproxyobject import W_DictProxyObject from pypy.interpreter.error import OperationError @@ -14,13 +13,17 @@ PyDict_Check, PyDict_CheckExact = build_type_checkers("Dict") - at cpython_api([PyObject, PyObject], PyObject, error=CANNOT_FAIL) + at cpython_api([PyObject, PyObject], PyObject, error=CANNOT_FAIL, + result_borrowed=True) def PyDict_GetItem(space, w_dict, w_key): try: w_res = space.getitem(w_dict, w_key) except: return None - return borrow_from(w_dict, w_res) + # NOTE: this works so far because all our dict strategies store + # *values* as full objects, which stay alive as long as the dict is + # alive and not modified. So we can return a borrowed ref. 
+ return w_res @cpython_api([PyObject, PyObject, PyObject], rffi.INT_real, error=-1) def PyDict_SetItem(space, w_dict, w_key, w_obj): @@ -47,7 +50,8 @@ else: PyErr_BadInternalCall(space) - at cpython_api([PyObject, CONST_STRING], PyObject, error=CANNOT_FAIL) + at cpython_api([PyObject, CONST_STRING], PyObject, error=CANNOT_FAIL, + result_borrowed=True) def PyDict_GetItemString(space, w_dict, key): """This is the same as PyDict_GetItem(), but key is specified as a char*, rather than a PyObject*.""" @@ -55,9 +59,10 @@ w_res = space.finditem_str(w_dict, rffi.charp2str(key)) except: w_res = None - if w_res is None: - return None - return borrow_from(w_dict, w_res) + # NOTE: this works so far because all our dict strategies store + # *values* as full objects, which stay alive as long as the dict is + # alive and not modified. So we can return a borrowed ref. + return w_res @cpython_api([PyObject, CONST_STRING], rffi.INT_real, error=-1) def PyDict_DelItemString(space, w_dict, key_ptr): @@ -170,10 +175,13 @@ if w_dict is None: return 0 - # Note: this is not efficient. Storing an iterator would probably + # XXX XXX PyDict_Next is not efficient. Storing an iterator would probably # work, but we can't work out how to not leak it if iteration does - # not complete. + # not complete. Alternatively, we could add some RPython-only + # dict-iterator method to move forward by N steps. 
+ w_dict.ensure_object_strategy() # make sure both keys and values can + # be borrwed try: w_iter = space.iter(space.call_method(space.w_dict, "items", w_dict)) pos = ppos[0] @@ -183,11 +191,10 @@ w_item = space.next(w_iter) w_key, w_value = space.fixedview(w_item, 2) - state = space.fromcache(RefcountState) if pkey: - pkey[0] = state.make_borrowed(w_dict, w_key) + pkey[0] = as_pyobj(space, w_key) if pvalue: - pvalue[0] = state.make_borrowed(w_dict, w_value) + pvalue[0] = as_pyobj(space, w_value) ppos[0] += 1 except OperationError, e: if not e.match(space, space.w_StopIteration): diff --git a/pypy/module/cpyext/eval.py b/pypy/module/cpyext/eval.py --- a/pypy/module/cpyext/eval.py +++ b/pypy/module/cpyext/eval.py @@ -4,7 +4,7 @@ from pypy.module.cpyext.api import ( cpython_api, CANNOT_FAIL, CONST_STRING, FILEP, fread, feof, Py_ssize_tP, cpython_struct, is_valid_fp) -from pypy.module.cpyext.pyobject import PyObject, borrow_from +from pypy.module.cpyext.pyobject import PyObject from pypy.module.cpyext.pyerrors import PyErr_SetFromErrno from pypy.module.cpyext.funcobject import PyCodeObject from pypy.module.__builtin__ import compiling @@ -23,7 +23,7 @@ def PyEval_CallObjectWithKeywords(space, w_obj, w_arg, w_kwds): return space.call(w_obj, w_arg, w_kwds) - at cpython_api([], PyObject) + at cpython_api([], PyObject, result_borrowed=True) def PyEval_GetBuiltins(space): """Return a dictionary of the builtins in the current execution frame, or the interpreter of the thread state if no frame is @@ -36,25 +36,25 @@ w_builtins = w_builtins.getdict(space) else: w_builtins = space.builtin.getdict(space) - return borrow_from(None, w_builtins) + return w_builtins # borrowed ref in all cases - at cpython_api([], PyObject, error=CANNOT_FAIL) + at cpython_api([], PyObject, error=CANNOT_FAIL, result_borrowed=True) def PyEval_GetLocals(space): """Return a dictionary of the local variables in the current execution frame, or NULL if no frame is currently executing.""" caller = 
space.getexecutioncontext().gettopframe_nohidden() if caller is None: return None - return borrow_from(None, caller.getdictscope()) + return caller.getdictscope() # borrowed ref - at cpython_api([], PyObject, error=CANNOT_FAIL) + at cpython_api([], PyObject, error=CANNOT_FAIL, result_borrowed=True) def PyEval_GetGlobals(space): """Return a dictionary of the global variables in the current execution frame, or NULL if no frame is currently executing.""" caller = space.getexecutioncontext().gettopframe_nohidden() if caller is None: return None - return borrow_from(None, caller.get_w_globals()) + return caller.get_w_globals() # borrowed ref @cpython_api([PyCodeObject, PyObject, PyObject], PyObject) def PyEval_EvalCode(space, w_code, w_globals, w_locals): diff --git a/pypy/module/cpyext/funcobject.py b/pypy/module/cpyext/funcobject.py --- a/pypy/module/cpyext/funcobject.py +++ b/pypy/module/cpyext/funcobject.py @@ -3,7 +3,7 @@ PyObjectFields, generic_cpy_call, CONST_STRING, CANNOT_FAIL, Py_ssize_t, cpython_api, bootstrap_function, cpython_struct, build_type_checkers) from pypy.module.cpyext.pyobject import ( - PyObject, make_ref, from_ref, Py_DecRef, make_typedescr, borrow_from) + PyObject, make_ref, from_ref, Py_DecRef, make_typedescr) from rpython.rlib.unroll import unrolling_iterable from pypy.interpreter.error import OperationError from pypy.interpreter.function import Function, Method @@ -83,12 +83,12 @@ from pypy.module.cpyext.object import PyObject_dealloc PyObject_dealloc(space, py_obj) - at cpython_api([PyObject], PyObject) + at cpython_api([PyObject], PyObject, result_borrowed=True) def PyFunction_GetCode(space, w_func): """Return the code object associated with the function object op.""" func = space.interp_w(Function, w_func) w_code = space.wrap(func.code) - return borrow_from(w_func, w_code) + return w_code # borrowed ref @cpython_api([PyObject, PyObject], PyObject) def PyMethod_New(space, w_func, w_self): @@ -98,18 +98,18 @@ not be NULL.""" return 
Method(space, w_func, w_self) - at cpython_api([PyObject], PyObject) + at cpython_api([PyObject], PyObject, result_borrowed=True) def PyMethod_Function(space, w_method): """Return the function object associated with the method meth.""" assert isinstance(w_method, Method) - return borrow_from(w_method, w_method.w_function) + return w_method.w_function # borrowed ref - at cpython_api([PyObject], PyObject) + at cpython_api([PyObject], PyObject, result_borrowed=True) def PyMethod_Self(space, w_method): """Return the instance associated with the method meth if it is bound, otherwise return NULL.""" assert isinstance(w_method, Method) - return borrow_from(w_method, w_method.w_instance) + return w_method.w_instance # borrowed ref def unwrap_list_of_strings(space, w_list): return [space.str_w(w_item) for w_item in space.fixedview(w_list)] diff --git a/pypy/module/cpyext/import_.py b/pypy/module/cpyext/import_.py --- a/pypy/module/cpyext/import_.py +++ b/pypy/module/cpyext/import_.py @@ -1,7 +1,6 @@ from pypy.interpreter import module from pypy.module.cpyext.api import ( generic_cpy_call, cpython_api, PyObject, CONST_STRING) -from pypy.module.cpyext.pyobject import borrow_from from rpython.rtyper.lltypesystem import lltype, rffi from pypy.interpreter.error import OperationError from pypy.interpreter.module import Module @@ -57,7 +56,7 @@ w_imp = space.call_function(w_import, space.wrap('imp')) return space.call_method(w_imp, 'reload', w_mod) - at cpython_api([CONST_STRING], PyObject) + at cpython_api([CONST_STRING], PyObject, result_borrowed=True) def PyImport_AddModule(space, name): """Return the module object corresponding to a module name. The name argument may be of the form package.module. 
First check the modules @@ -72,19 +71,19 @@ not already present.""" from pypy.module.imp.importing import check_sys_modules_w modulename = rffi.charp2str(name) - w_modulename = space.wrap(modulename) w_mod = check_sys_modules_w(space, modulename) if not w_mod or space.is_w(w_mod, space.w_None): - w_mod = Module(space, w_modulename) - space.setitem(space.sys.get('modules'), w_modulename, w_mod) - return borrow_from(None, w_mod) + w_mod = Module(space, space.wrap(modulename)) + space.setitem(space.sys.get('modules'), space.wrap(modulename), w_mod) + # return a borrowed ref --- assumes one copy in sys.modules + return w_mod - at cpython_api([], PyObject) + at cpython_api([], PyObject, result_borrowed=True) def PyImport_GetModuleDict(space): """Return the dictionary used for the module administration (a.k.a. sys.modules). Note that this is a per-interpreter variable.""" w_modulesDict = space.sys.get('modules') - return borrow_from(None, w_modulesDict) + return w_modulesDict # borrowed ref @cpython_api([rffi.CCHARP, PyObject], PyObject) def PyImport_ExecCodeModule(space, name, w_code): diff --git a/pypy/module/cpyext/include/complexobject.h b/pypy/module/cpyext/include/complexobject.h --- a/pypy/module/cpyext/include/complexobject.h +++ b/pypy/module/cpyext/include/complexobject.h @@ -15,7 +15,7 @@ } Py_complex; /* generated function */ -PyAPI_FUNC(void) _PyComplex_AsCComplex(PyObject *, Py_complex *); +PyAPI_FUNC(int) _PyComplex_AsCComplex(PyObject *, Py_complex *); PyAPI_FUNC(PyObject *) _PyComplex_FromCComplex(Py_complex *); Py_LOCAL_INLINE(Py_complex) PyComplex_AsCComplex(PyObject *obj) diff --git a/pypy/module/cpyext/include/object.h b/pypy/module/cpyext/include/object.h --- a/pypy/module/cpyext/include/object.h +++ b/pypy/module/cpyext/include/object.h @@ -17,7 +17,8 @@ #define staticforward static #define PyObject_HEAD \ - long ob_refcnt; \ + Py_ssize_t ob_refcnt; \ + Py_ssize_t ob_pypy_link; \ struct _typeobject *ob_type; #define PyObject_VAR_HEAD \ @@ -25,7 
+26,7 @@ Py_ssize_t ob_size; /* Number of items in variable part */ #define PyObject_HEAD_INIT(type) \ - 1, type, + 1, 0, type, #define PyVarObject_HEAD_INIT(type, size) \ PyObject_HEAD_INIT(type) size, @@ -40,19 +41,19 @@ #ifdef PYPY_DEBUG_REFCOUNT /* Slow version, but useful for debugging */ -#define Py_INCREF(ob) (Py_IncRef((PyObject *)ob)) -#define Py_DECREF(ob) (Py_DecRef((PyObject *)ob)) -#define Py_XINCREF(ob) (Py_IncRef((PyObject *)ob)) -#define Py_XDECREF(ob) (Py_DecRef((PyObject *)ob)) +#define Py_INCREF(ob) (Py_IncRef((PyObject *)(ob))) +#define Py_DECREF(ob) (Py_DecRef((PyObject *)(ob))) +#define Py_XINCREF(ob) (Py_IncRef((PyObject *)(ob))) +#define Py_XDECREF(ob) (Py_DecRef((PyObject *)(ob))) #else /* Fast version */ -#define Py_INCREF(ob) (((PyObject *)ob)->ob_refcnt++) -#define Py_DECREF(ob) \ +#define Py_INCREF(ob) (((PyObject *)(ob))->ob_refcnt++) +#define Py_DECREF(op) \ do { \ - if (((PyObject *)ob)->ob_refcnt > 1) \ - ((PyObject *)ob)->ob_refcnt--; \ + if (--((PyObject *)(op))->ob_refcnt != 0) \ + ; \ else \ - Py_DecRef((PyObject *)ob); \ + _Py_Dealloc((PyObject *)(op)); \ } while (0) #define Py_XINCREF(op) do { if ((op) == NULL) ; else Py_INCREF(op); } while (0) diff --git a/pypy/module/cpyext/include/patchlevel.h b/pypy/module/cpyext/include/patchlevel.h --- a/pypy/module/cpyext/include/patchlevel.h +++ b/pypy/module/cpyext/include/patchlevel.h @@ -30,6 +30,13 @@ /* PyPy version as a string */ #define PYPY_VERSION "4.1.0-alpha0" +#define PYPY_VERSION_NUM 0x04010000 + +/* Defined to mean a PyPy where cpyext holds more regular references + to PyObjects, e.g. staying alive as long as the internal PyPy object + stays alive. */ +#define PYPY_CPYEXT_GC 1 +#define PyPy_Borrow(a, b) ((void) 0) /* Subversion Revision number of this file (not of the repository). * Empty since Mercurial migration. 
*/ diff --git a/pypy/module/cpyext/include/tupleobject.h b/pypy/module/cpyext/include/tupleobject.h --- a/pypy/module/cpyext/include/tupleobject.h +++ b/pypy/module/cpyext/include/tupleobject.h @@ -7,11 +7,21 @@ extern "C" { #endif +typedef struct { + PyObject_HEAD + Py_ssize_t ob_size; + PyObject **ob_item; /* XXX optimize to ob_item[] */ +} PyTupleObject; + /* defined in varargswrapper.c */ PyAPI_FUNC(PyObject *) PyTuple_Pack(Py_ssize_t, ...); -#define PyTuple_SET_ITEM PyTuple_SetItem -#define PyTuple_GET_ITEM PyTuple_GetItem +/* Macro, trading safety for speed */ +#define PyTuple_GET_ITEM(op, i) (((PyTupleObject *)(op))->ob_item[i]) +#define PyTuple_GET_SIZE(op) Py_SIZE(op) + +/* Macro, *only* to be used to fill in brand new tuples */ +#define PyTuple_SET_ITEM(op, i, v) (((PyTupleObject *)(op))->ob_item[i] = v) #ifdef __cplusplus diff --git a/pypy/module/cpyext/listobject.py b/pypy/module/cpyext/listobject.py --- a/pypy/module/cpyext/listobject.py +++ b/pypy/module/cpyext/listobject.py @@ -3,7 +3,7 @@ from pypy.module.cpyext.api import (cpython_api, CANNOT_FAIL, Py_ssize_t, build_type_checkers) from pypy.module.cpyext.pyerrors import PyErr_BadInternalCall -from pypy.module.cpyext.pyobject import Py_DecRef, PyObject, borrow_from +from pypy.module.cpyext.pyobject import Py_DecRef, PyObject from pypy.objspace.std.listobject import W_ListObject from pypy.interpreter.error import OperationError @@ -38,7 +38,7 @@ w_list.setitem(index, w_item) return 0 - at cpython_api([PyObject, Py_ssize_t], PyObject) + at cpython_api([PyObject, Py_ssize_t], PyObject, result_borrowed=True) def PyList_GetItem(space, w_list, index): """Return the object at position pos in the list pointed to by p. 
The position must be positive, indexing from the end of the list is not @@ -49,8 +49,10 @@ if index < 0 or index >= w_list.length(): raise OperationError(space.w_IndexError, space.wrap( "list index out of range")) - w_item = w_list.getitem(index) - return borrow_from(w_list, w_item) + w_list.ensure_object_strategy() # make sure we can return a borrowed obj + # XXX ^^^ how does this interact with CPyListStrategy? + w_res = w_list.getitem(index) + return w_res # borrowed ref @cpython_api([PyObject, PyObject], rffi.INT_real, error=-1) diff --git a/pypy/module/cpyext/modsupport.py b/pypy/module/cpyext/modsupport.py --- a/pypy/module/cpyext/modsupport.py +++ b/pypy/module/cpyext/modsupport.py @@ -1,7 +1,7 @@ from rpython.rtyper.lltypesystem import rffi, lltype from pypy.module.cpyext.api import cpython_api, cpython_struct, \ METH_STATIC, METH_CLASS, METH_COEXIST, CANNOT_FAIL, CONST_STRING -from pypy.module.cpyext.pyobject import PyObject, borrow_from +from pypy.module.cpyext.pyobject import PyObject from pypy.interpreter.module import Module from pypy.module.cpyext.methodobject import ( W_PyCFunctionObject, PyCFunction_NewEx, PyDescr_NewMethod, @@ -101,12 +101,12 @@ return int(space.is_w(w_type, w_obj_type) or space.is_true(space.issubtype(w_obj_type, w_type))) - at cpython_api([PyObject], PyObject) + at cpython_api([PyObject], PyObject, result_borrowed=True) def PyModule_GetDict(space, w_mod): if PyModule_Check(space, w_mod): assert isinstance(w_mod, Module) w_dict = w_mod.getdict(space) - return borrow_from(w_mod, w_dict) + return w_dict # borrowed reference, likely from w_mod.w_dict else: PyErr_BadInternalCall(space) diff --git a/pypy/module/cpyext/object.py b/pypy/module/cpyext/object.py --- a/pypy/module/cpyext/object.py +++ b/pypy/module/cpyext/object.py @@ -6,7 +6,7 @@ Py_GE, CONST_STRING, FILEP, fwrite) from pypy.module.cpyext.pyobject import ( PyObject, PyObjectP, create_ref, from_ref, Py_IncRef, Py_DecRef, - track_reference, get_typedescr, _Py_NewReference, 
RefcountState) + get_typedescr, _Py_NewReference) from pypy.module.cpyext.typeobject import PyTypeObjectPtr from pypy.module.cpyext.pyerrors import PyErr_NoMemory, PyErr_BadInternalCall from pypy.objspace.std.typeobject import W_TypeObject @@ -35,9 +35,9 @@ def _PyObject_NewVar(space, type, itemcount): w_type = from_ref(space, rffi.cast(PyObject, type)) assert isinstance(w_type, W_TypeObject) - typedescr = get_typedescr(w_type.instancetypedef) + typedescr = get_typedescr(w_type.layout.typedef) py_obj = typedescr.allocate(space, w_type, itemcount=itemcount) - py_obj.c_ob_refcnt = 0 + #py_obj.c_ob_refcnt = 0 --- will be set to 1 again by PyObject_Init{Var} if type.c_tp_itemsize == 0: w_obj = PyObject_Init(space, py_obj, type) else: diff --git a/pypy/module/cpyext/pyerrors.py b/pypy/module/cpyext/pyerrors.py --- a/pypy/module/cpyext/pyerrors.py +++ b/pypy/module/cpyext/pyerrors.py @@ -6,7 +6,7 @@ from pypy.module.cpyext.api import cpython_api, CANNOT_FAIL, CONST_STRING from pypy.module.exceptions.interp_exceptions import W_RuntimeWarning from pypy.module.cpyext.pyobject import ( - PyObject, PyObjectP, make_ref, from_ref, Py_DecRef, borrow_from) + PyObject, PyObjectP, make_ref, from_ref, Py_DecRef) from pypy.module.cpyext.state import State from pypy.module.cpyext.import_ import PyImport_Import from rpython.rlib import rposix, jit @@ -28,12 +28,12 @@ """This is a shorthand for PyErr_SetObject(type, Py_None).""" PyErr_SetObject(space, w_type, space.w_None) - at cpython_api([], PyObject) + at cpython_api([], PyObject, result_borrowed=True) def PyErr_Occurred(space): state = space.fromcache(State) if state.operror is None: return None - return borrow_from(None, state.operror.w_type) + return state.operror.w_type # borrowed ref @cpython_api([], lltype.Void) def PyErr_Clear(space): diff --git a/pypy/module/cpyext/pyfile.py b/pypy/module/cpyext/pyfile.py --- a/pypy/module/cpyext/pyfile.py +++ b/pypy/module/cpyext/pyfile.py @@ -1,7 +1,7 @@ from rpython.rtyper.lltypesystem 
import rffi, lltype from pypy.module.cpyext.api import ( - cpython_api, CANNOT_FAIL, CONST_STRING, FILEP) -from pypy.module.cpyext.pyobject import PyObject, borrow_from + cpython_api, CONST_STRING, FILEP) +from pypy.module.cpyext.pyobject import PyObject from pypy.module.cpyext.object import Py_PRINT_RAW from pypy.interpreter.error import OperationError @@ -80,4 +80,5 @@ @cpython_api([PyObject], PyObject) def PyFile_Name(space, w_p): """Return the name of the file specified by p as a string object.""" - return borrow_from(w_p, space.getattr(w_p, space.wrap("name"))) + w_name = space.getattr(w_p, space.wrap("name")) + return w_name # borrowed ref, should be a W_StringObject from the file diff --git a/pypy/module/cpyext/pyobject.py b/pypy/module/cpyext/pyobject.py --- a/pypy/module/cpyext/pyobject.py +++ b/pypy/module/cpyext/pyobject.py @@ -2,15 +2,19 @@ from pypy.interpreter.baseobjspace import W_Root, SpaceCache from rpython.rtyper.lltypesystem import rffi, lltype +from rpython.rtyper.extregistry import ExtRegistryEntry from pypy.module.cpyext.api import ( cpython_api, bootstrap_function, PyObject, PyObjectP, ADDR, - CANNOT_FAIL, Py_TPFLAGS_HEAPTYPE, PyTypeObjectPtr) + CANNOT_FAIL, Py_TPFLAGS_HEAPTYPE, PyTypeObjectPtr, is_PyObject, + INTERPLEVEL_API) from pypy.module.cpyext.state import State from pypy.objspace.std.typeobject import W_TypeObject from pypy.objspace.std.objectobject import W_ObjectObject from rpython.rlib.objectmodel import specialize, we_are_translated -from rpython.rlib.rweakref import RWeakKeyDictionary +from rpython.rlib.objectmodel import keepalive_until_here from rpython.rtyper.annlowlevel import llhelper +from rpython.rlib import rawrefcount + #________________________________________________________ # type description @@ -28,13 +32,15 @@ def allocate(self, space, w_type, itemcount=0): # similar to PyType_GenericAlloc? # except that it's not related to any pypy object. + # this returns a PyObject with ob_refcnt == 1. 
- pytype = rffi.cast(PyTypeObjectPtr, make_ref(space, w_type)) + pytype = as_pyobj(space, w_type) + pytype = rffi.cast(PyTypeObjectPtr, pytype) + assert pytype # Don't increase refcount for non-heaptypes - if pytype: - flags = rffi.cast(lltype.Signed, pytype.c_tp_flags) - if not flags & Py_TPFLAGS_HEAPTYPE: - Py_DecRef(space, w_type) + flags = rffi.cast(lltype.Signed, pytype.c_tp_flags) + if flags & Py_TPFLAGS_HEAPTYPE: + Py_IncRef(space, w_type) if pytype: size = pytype.c_tp_basicsize @@ -42,6 +48,7 @@ size = rffi.sizeof(self.basestruct) if itemcount: size += itemcount * pytype.c_tp_itemsize + assert size >= rffi.sizeof(PyObject.TO) buf = lltype.malloc(rffi.VOIDP.TO, size, flavor='raw', zero=True) pyobj = rffi.cast(PyObject, buf) @@ -56,9 +63,6 @@ w_type = from_ref(space, rffi.cast(PyObject, obj.c_ob_type)) w_obj = space.allocate_instance(self.W_BaseObject, w_type) track_reference(space, obj, w_obj) - if w_type is not space.gettypefor(self.W_BaseObject): - state = space.fromcache(RefcountState) - state.set_lifeline(w_obj, obj) return w_obj typedescr_cache = {} @@ -111,7 +115,7 @@ def init_pyobject(space): from pypy.module.cpyext.object import PyObject_dealloc # typedescr for the 'object' type - make_typedescr(space.w_object.instancetypedef, + make_typedescr(space.w_object.layout.typedef, dealloc=PyObject_dealloc) # almost all types, which should better inherit from object. 
make_typedescr(None) @@ -134,104 +138,6 @@ #________________________________________________________ # refcounted object support -class RefcountState: - def __init__(self, space): - self.space = space - self.py_objects_w2r = {} # { w_obj -> raw PyObject } - self.py_objects_r2w = {} # { addr of raw PyObject -> w_obj } - - self.lifeline_dict = RWeakKeyDictionary(W_Root, PyOLifeline) - - self.borrow_mapping = {None: {}} - # { w_container -> { w_containee -> None } } - # the None entry manages references borrowed during a call to - # generic_cpy_call() - - # For tests - self.non_heaptypes_w = [] - - def _cleanup_(self): - assert self.borrow_mapping == {None: {}} - self.py_objects_r2w.clear() # is not valid anymore after translation - - def init_r2w_from_w2r(self): - """Rebuilds the dict py_objects_r2w on startup""" - for w_obj, obj in self.py_objects_w2r.items(): - ptr = rffi.cast(ADDR, obj) - self.py_objects_r2w[ptr] = w_obj - - def print_refcounts(self): - print "REFCOUNTS" - for w_obj, obj in self.py_objects_w2r.items(): - print "%r: %i" % (w_obj, obj.c_ob_refcnt) - - def get_from_lifeline(self, w_obj): - lifeline = self.lifeline_dict.get(w_obj) - if lifeline is not None: # make old PyObject ready for use in C code - py_obj = lifeline.pyo - assert py_obj.c_ob_refcnt == 0 - return py_obj - else: - return lltype.nullptr(PyObject.TO) - - def set_lifeline(self, w_obj, py_obj): - self.lifeline_dict.set(w_obj, - PyOLifeline(self.space, py_obj)) - - def make_borrowed(self, w_container, w_borrowed): - """ - Create a borrowed reference, which will live as long as the container - has a living reference (as a PyObject!) 
- """ - ref = make_ref(self.space, w_borrowed) - obj_ptr = rffi.cast(ADDR, ref) - - borrowees = self.borrow_mapping.setdefault(w_container, {}) - if w_borrowed in borrowees: - Py_DecRef(self.space, w_borrowed) # cancel incref from make_ref() - else: - borrowees[w_borrowed] = None - - return ref - - def reset_borrowed_references(self): - "Used in tests" - for w_container, w_borrowed in self.borrow_mapping.items(): - Py_DecRef(self.space, w_borrowed) - self.borrow_mapping = {None: {}} - - def delete_borrower(self, w_obj): - """ - Called when a potential container for borrowed references has lost its - last reference. Removes the borrowed references it contains. - """ - if w_obj in self.borrow_mapping: # move to lifeline __del__ - for w_containee in self.borrow_mapping[w_obj]: - self.forget_borrowee(w_containee) - del self.borrow_mapping[w_obj] - - def swap_borrow_container(self, container): - """switch the current default contained with the given one.""" - if container is None: - old_container = self.borrow_mapping[None] - self.borrow_mapping[None] = {} - return old_container - else: - old_container = self.borrow_mapping[None] - self.borrow_mapping[None] = container - for w_containee in old_container: - self.forget_borrowee(w_containee) - - def forget_borrowee(self, w_obj): - "De-register an object from the list of borrowed references" - ref = self.py_objects_w2r.get(w_obj, lltype.nullptr(PyObject.TO)) - if not ref: - if DEBUG_REFCOUNT: - print >>sys.stderr, "Borrowed object is already gone!" - return - - Py_DecRef(self.space, ref) - class InvalidPointerException(Exception): pass @@ -249,55 +155,37 @@ def create_ref(space, w_obj, itemcount=0): """ Allocates a PyObject, and fills its fields with info from the given - intepreter object. + interpreter object. 
""" - state = space.fromcache(RefcountState) w_type = space.type(w_obj) - if w_type.is_cpytype(): - py_obj = state.get_from_lifeline(w_obj) - if py_obj: - Py_IncRef(space, py_obj) - return py_obj - typedescr = get_typedescr(w_obj.typedef) py_obj = typedescr.allocate(space, w_type, itemcount=itemcount) - if w_type.is_cpytype(): - state.set_lifeline(w_obj, py_obj) + track_reference(space, py_obj, w_obj) + # + # py_obj.c_ob_refcnt should be exactly REFCNT_FROM_PYPY + 1 here, + # and we want only REFCNT_FROM_PYPY, i.e. only count as attached + # to the W_Root but not with any reference from the py_obj side. + assert py_obj.c_ob_refcnt > rawrefcount.REFCNT_FROM_PYPY + py_obj.c_ob_refcnt -= 1 + # typedescr.attach(space, py_obj, w_obj) return py_obj -def track_reference(space, py_obj, w_obj, replace=False): +def track_reference(space, py_obj, w_obj): """ Ties together a PyObject and an interpreter object. + The PyObject's refcnt is increased by REFCNT_FROM_PYPY. + The reference in 'py_obj' is not stolen! Remember to Py_DecRef() + it is you need to. """ # XXX looks like a PyObject_GC_TRACK - ptr = rffi.cast(ADDR, py_obj) - state = space.fromcache(RefcountState) + assert py_obj.c_ob_refcnt < rawrefcount.REFCNT_FROM_PYPY + py_obj.c_ob_refcnt += rawrefcount.REFCNT_FROM_PYPY if DEBUG_REFCOUNT: debug_refcount("MAKREF", py_obj, w_obj) - if not replace: - assert w_obj not in state.py_objects_w2r - assert ptr not in state.py_objects_r2w - state.py_objects_w2r[w_obj] = py_obj - if ptr: # init_typeobject() bootstraps with NULL references - state.py_objects_r2w[ptr] = w_obj - -def make_ref(space, w_obj): - """ - Returns a new reference to an intepreter object. 
- """ - if w_obj is None: - return lltype.nullptr(PyObject.TO) - assert isinstance(w_obj, W_Root) - state = space.fromcache(RefcountState) - try: - py_obj = state.py_objects_w2r[w_obj] - except KeyError: - py_obj = create_ref(space, w_obj) - track_reference(space, py_obj, w_obj) - else: - Py_IncRef(space, py_obj) - return py_obj + assert w_obj + assert py_obj + rawrefcount.create_link_pypy(w_obj, py_obj) def from_ref(space, ref): @@ -305,16 +193,12 @@ Finds the interpreter object corresponding to the given reference. If the object is not yet realized (see bytesobject.py), creates it. """ - assert lltype.typeOf(ref) == PyObject + assert is_pyobj(ref) if not ref: return None - state = space.fromcache(RefcountState) - ptr = rffi.cast(ADDR, ref) - - try: - return state.py_objects_r2w[ptr] - except KeyError: - pass + w_obj = rawrefcount.to_obj(W_Root, ref) + if w_obj is not None: + return w_obj # This reference is not yet a real interpreter object. # Realize it. @@ -323,126 +207,135 @@ raise InvalidPointerException(str(ref)) w_type = from_ref(space, ref_type) assert isinstance(w_type, W_TypeObject) - return get_typedescr(w_type.instancetypedef).realize(space, ref) + return get_typedescr(w_type.layout.typedef).realize(space, ref) -# XXX Optimize these functions and put them into macro definitions - at cpython_api([PyObject], lltype.Void) -def Py_DecRef(space, obj): - if not obj: - return - assert lltype.typeOf(obj) == PyObject +def debug_collect(): + rawrefcount._collect() - obj.c_ob_refcnt -= 1 - if DEBUG_REFCOUNT: - debug_refcount("DECREF", obj, obj.c_ob_refcnt, frame_stackdepth=3) - if obj.c_ob_refcnt == 0: - state = space.fromcache(RefcountState) - ptr = rffi.cast(ADDR, obj) - if ptr not in state.py_objects_r2w: - # this is a half-allocated object, lets call the deallocator - # without modifying the r2w/w2r dicts - _Py_Dealloc(space, obj) - else: - w_obj = state.py_objects_r2w[ptr] - del state.py_objects_r2w[ptr] - w_type = space.type(w_obj) - if not 
w_type.is_cpytype(): + +def as_pyobj(space, w_obj): + """ + Returns a 'PyObject *' representing the given intepreter object. + This doesn't give a new reference, but the returned 'PyObject *' + is valid at least as long as 'w_obj' is. **To be safe, you should + use keepalive_until_here(w_obj) some time later.** In case of + doubt, use the safer make_ref(). + """ + if w_obj is not None: + assert not is_pyobj(w_obj) + py_obj = rawrefcount.from_obj(PyObject, w_obj) + if not py_obj: + py_obj = create_ref(space, w_obj) + return py_obj + else: + return lltype.nullptr(PyObject.TO) +as_pyobj._always_inline_ = 'try' +INTERPLEVEL_API['as_pyobj'] = as_pyobj + +def pyobj_has_w_obj(pyobj): + return rawrefcount.to_obj(W_Root, pyobj) is not None +INTERPLEVEL_API['pyobj_has_w_obj'] = staticmethod(pyobj_has_w_obj) + + +def is_pyobj(x): + if x is None or isinstance(x, W_Root): + return False + elif is_PyObject(lltype.typeOf(x)): + return True + else: + raise TypeError(repr(type(x))) +INTERPLEVEL_API['is_pyobj'] = staticmethod(is_pyobj) + +class Entry(ExtRegistryEntry): + _about_ = is_pyobj + def compute_result_annotation(self, s_x): + from rpython.rtyper.llannotation import SomePtr + return self.bookkeeper.immutablevalue(isinstance(s_x, SomePtr)) + def specialize_call(self, hop): + hop.exception_cannot_occur() + return hop.inputconst(lltype.Bool, hop.s_result.const) + + at specialize.ll() +def make_ref(space, obj): + """Increment the reference counter of the PyObject and return it. + Can be called with either a PyObject or a W_Root. 
+ """ + if is_pyobj(obj): + pyobj = rffi.cast(PyObject, obj) + else: + pyobj = as_pyobj(space, obj) + if pyobj: + assert pyobj.c_ob_refcnt > 0 + pyobj.c_ob_refcnt += 1 + if not is_pyobj(obj): + keepalive_until_here(obj) + return pyobj +INTERPLEVEL_API['make_ref'] = make_ref + + + at specialize.ll() +def get_w_obj_and_decref(space, obj): + """Decrement the reference counter of the PyObject and return the + corresponding W_Root object (so the reference count is at least + REFCNT_FROM_PYPY and cannot be zero). Can be called with either + a PyObject or a W_Root. + """ + if is_pyobj(obj): + pyobj = rffi.cast(PyObject, obj) + w_obj = from_ref(space, pyobj) + else: + w_obj = obj + pyobj = as_pyobj(space, w_obj) + if pyobj: + pyobj.c_ob_refcnt -= 1 + assert pyobj.c_ob_refcnt >= rawrefcount.REFCNT_FROM_PYPY + keepalive_until_here(w_obj) + return w_obj +INTERPLEVEL_API['get_w_obj_and_decref'] = get_w_obj_and_decref + + + at specialize.ll() +def incref(space, obj): + make_ref(space, obj) +INTERPLEVEL_API['incref'] = incref + + at specialize.ll() +def decref(space, obj): + if is_pyobj(obj): + obj = rffi.cast(PyObject, obj) + if obj: + assert obj.c_ob_refcnt > 0 + obj.c_ob_refcnt -= 1 + if obj.c_ob_refcnt == 0: _Py_Dealloc(space, obj) - del state.py_objects_w2r[w_obj] - # if the object was a container for borrowed references - state.delete_borrower(w_obj) else: - if not we_are_translated() and obj.c_ob_refcnt < 0: - message = "Negative refcount for obj %s with type %s" % ( - obj, rffi.charp2str(obj.c_ob_type.c_tp_name)) - print >>sys.stderr, message - assert False, message + get_w_obj_and_decref(space, obj) +INTERPLEVEL_API['decref'] = decref + @cpython_api([PyObject], lltype.Void) def Py_IncRef(space, obj): - if not obj: - return - obj.c_ob_refcnt += 1 - assert obj.c_ob_refcnt > 0 - if DEBUG_REFCOUNT: - debug_refcount("INCREF", obj, obj.c_ob_refcnt, frame_stackdepth=3) + incref(space, obj) + + at cpython_api([PyObject], lltype.Void) +def Py_DecRef(space, obj): + decref(space, 
obj) @cpython_api([PyObject], lltype.Void) def _Py_NewReference(space, obj): obj.c_ob_refcnt = 1 w_type = from_ref(space, rffi.cast(PyObject, obj.c_ob_type)) assert isinstance(w_type, W_TypeObject) - get_typedescr(w_type.instancetypedef).realize(space, obj) + get_typedescr(w_type.layout.typedef).realize(space, obj) + at cpython_api([PyObject], lltype.Void) def _Py_Dealloc(space, obj): - from pypy.module.cpyext.api import generic_cpy_call_dont_decref + from pypy.module.cpyext.api import generic_cpy_call pto = obj.c_ob_type #print >>sys.stderr, "Calling dealloc slot", pto.c_tp_dealloc, "of", obj, \ # "'s type which is", rffi.charp2str(pto.c_tp_name) - generic_cpy_call_dont_decref(space, pto.c_tp_dealloc, obj) - -#___________________________________________________________ -# Support for "lifelines" -# -# Object structure must stay alive even when not referenced -# by any C code. - -class PyOLifeline(object): - def __init__(self, space, pyo): - self.pyo = pyo - self.space = space - - def __del__(self): - if self.pyo: - assert self.pyo.c_ob_refcnt == 0 - _Py_Dealloc(self.space, self.pyo) - self.pyo = lltype.nullptr(PyObject.TO) - # XXX handle borrowed objects here - -#___________________________________________________________ -# Support for borrowed references - -def make_borrowed_ref(space, w_container, w_borrowed): - """ - Create a borrowed reference, which will live as long as the container - has a living reference (as a PyObject!) - """ - if w_borrowed is None: - return lltype.nullptr(PyObject.TO) - - state = space.fromcache(RefcountState) - return state.make_borrowed(w_container, w_borrowed) - -class Reference: - def __init__(self, pyobj): - assert not isinstance(pyobj, W_Root) - self.pyobj = pyobj - - def get_ref(self, space): - return self.pyobj - - def get_wrapped(self, space): - return from_ref(space, self.pyobj) - -class BorrowPair(Reference): - """ - Delays the creation of a borrowed reference. 
- """ - def __init__(self, w_container, w_borrowed): - self.w_container = w_container - self.w_borrowed = w_borrowed - - def get_ref(self, space): - return make_borrowed_ref(space, self.w_container, self.w_borrowed) - - def get_wrapped(self, space): - return self.w_borrowed - -def borrow_from(container, borrowed): - return BorrowPair(container, borrowed) - -#___________________________________________________________ + generic_cpy_call(space, pto.c_tp_dealloc, obj) @cpython_api([rffi.VOIDP], lltype.Signed, error=CANNOT_FAIL) def _Py_HashPointer(space, ptr): diff --git a/pypy/module/cpyext/pypyintf.py b/pypy/module/cpyext/pypyintf.py deleted file mode 100644 --- a/pypy/module/cpyext/pypyintf.py +++ /dev/null @@ -1,9 +0,0 @@ -from pypy.module.cpyext.api import cpython_api -from pypy.module.cpyext.pyobject import PyObject, borrow_from - - - at cpython_api([PyObject, PyObject], PyObject) -def PyPy_Borrow(space, w_parentobj, w_obj): - """Returns a borrowed reference to 'obj', borrowing from the 'parentobj'. 
- """ - return borrow_from(w_parentobj, w_obj) diff --git a/pypy/module/cpyext/pytraceback.py b/pypy/module/cpyext/pytraceback.py --- a/pypy/module/cpyext/pytraceback.py +++ b/pypy/module/cpyext/pytraceback.py @@ -3,7 +3,7 @@ PyObjectFields, generic_cpy_call, CONST_STRING, CANNOT_FAIL, Py_ssize_t, cpython_api, bootstrap_function, cpython_struct, build_type_checkers) from pypy.module.cpyext.pyobject import ( - PyObject, make_ref, from_ref, Py_DecRef, make_typedescr, borrow_from) + PyObject, make_ref, from_ref, Py_DecRef, make_typedescr) from pypy.module.cpyext.frameobject import PyFrameObject from rpython.rlib.unroll import unrolling_iterable from pypy.interpreter.error import OperationError diff --git a/pypy/module/cpyext/sequence.py b/pypy/module/cpyext/sequence.py --- a/pypy/module/cpyext/sequence.py +++ b/pypy/module/cpyext/sequence.py @@ -2,7 +2,7 @@ from pypy.interpreter.error import OperationError, oefmt from pypy.module.cpyext.api import ( cpython_api, CANNOT_FAIL, CONST_STRING, Py_ssize_t) -from pypy.module.cpyext.pyobject import PyObject, borrow_from +from pypy.module.cpyext.pyobject import PyObject from rpython.rtyper.lltypesystem import rffi, lltype from pypy.objspace.std import listobject, tupleobject @@ -42,15 +42,19 @@ which case o is returned. Use PySequence_Fast_GET_ITEM() to access the members of the result. Returns NULL on failure. If the object is not a sequence, raises TypeError with m as the message text.""" - if (isinstance(w_obj, listobject.W_ListObject) or - isinstance(w_obj, tupleobject.W_TupleObject)): + if isinstance(w_obj, listobject.W_ListObject): + # make sure we can return a borrowed obj from PySequence_Fast_GET_ITEM + # XXX how does this interact with CPyListStrategy? 
+ w_obj.ensure_object_strategy() + return w_obj + if isinstance(w_obj, tupleobject.W_TupleObject): return w_obj try: return tupleobject.W_TupleObject(space.fixedview(w_obj)) except OperationError: raise OperationError(space.w_TypeError, space.wrap(rffi.charp2str(m))) - at cpython_api([PyObject, Py_ssize_t], PyObject) + at cpython_api([PyObject, Py_ssize_t], PyObject, result_borrowed=True) def PySequence_Fast_GET_ITEM(space, w_obj, index): """Return the ith element of o, assuming that o was returned by PySequence_Fast(), o is not NULL, and that i is within bounds. @@ -60,7 +64,7 @@ else: assert isinstance(w_obj, tupleobject.W_TupleObject) w_res = w_obj.wrappeditems[index] - return borrow_from(w_obj, w_res) + return w_res # borrowed ref @cpython_api([PyObject], Py_ssize_t, error=CANNOT_FAIL) def PySequence_Fast_GET_SIZE(space, w_obj): diff --git a/pypy/module/cpyext/setobject.py b/pypy/module/cpyext/setobject.py --- a/pypy/module/cpyext/setobject.py From pypy.commits at gmail.com Tue Feb 23 17:26:22 2016 From: pypy.commits at gmail.com (Raemi) Date: Tue, 23 Feb 2016 14:26:22 -0800 (PST) Subject: [pypy-commit] pypy fix-longevity: rename to distinguish LiveRange objs from longevity dicts Message-ID: <56ccdc8e.a185c20a.1c81f.fffffee3@mx.google.com> Author: Remi Meier Branch: fix-longevity Changeset: r82465:449462f0839e Date: 2016-02-23 22:38 +0100 http://bitbucket.org/pypy/pypy/changeset/449462f0839e/ Log: rename to distinguish LiveRange objs from longevity dicts diff --git a/rpython/jit/backend/llsupport/regalloc.py b/rpython/jit/backend/llsupport/regalloc.py --- a/rpython/jit/backend/llsupport/regalloc.py +++ b/rpython/jit/backend/llsupport/regalloc.py @@ -276,10 +276,10 @@ save_around_call_regs = [] frame_reg = None - def __init__(self, longevity, frame_manager=None, assembler=None): + def __init__(self, live_ranges, frame_manager=None, assembler=None): self.free_regs = self.all_regs[:] self.free_regs.reverse() - self.longevity = longevity + self.live_ranges = 
live_ranges self.temp_boxes = [] if not we_are_translated(): self.reg_bindings = OrderedDict() @@ -293,12 +293,12 @@ def is_still_alive(self, v): # Check if 'v' is alive at the current position. # Return False if the last usage is strictly before. - return self.longevity.last_use(v) >= self.position + return self.live_ranges.last_use(v) >= self.position def stays_alive(self, v): # Check if 'v' stays alive after the current position. # Return False if the last usage is before or at position. - return self.longevity.last_use(v) > self.position + return self.live_ranges.last_use(v) > self.position def next_instruction(self, incr=1): self.position += incr @@ -315,7 +315,7 @@ self._check_type(v) if isinstance(v, Const): return - if not self.longevity.exists(v) or self.longevity.last_use(v) <= self.position: + if not self.live_ranges.exists(v) or self.live_ranges.last_use(v) <= self.position: if v in self.reg_bindings: self.free_regs.append(self.reg_bindings[v]) del self.reg_bindings[v] @@ -347,9 +347,9 @@ else: assert len(self.reg_bindings) + len(self.free_regs) == len(self.all_regs) assert len(self.temp_boxes) == 0 - if self.longevity: + if self.live_ranges: for v in self.reg_bindings: - assert self.longevity.last_use(v) > self.position + assert self.live_ranges.last_use(v) > self.position def try_allocate_reg(self, v, selected_reg=None, need_lower_byte=False): """ Try to allocate a register, if we have one free. 
@@ -425,7 +425,7 @@ continue if need_lower_byte and reg in self.no_lower_byte_regs: continue - max_age = self.longevity.last_use(next) + max_age = self.live_ranges.last_use(next) if cur_max_age < max_age: cur_max_age = max_age candidate = next @@ -444,7 +444,7 @@ """ self._check_type(v) if isinstance(v, TempVar): - self.longevity.new_live_range(v, self.position, self.position) + self.live_ranges.new_live_range(v, self.position, self.position) loc = self.try_allocate_reg(v, selected_reg, need_lower_byte=need_lower_byte) if loc: @@ -554,7 +554,7 @@ loc = self.force_allocate_reg(v, forbidden_vars) self.assembler.regalloc_mov(prev_loc, loc) assert v in self.reg_bindings - if self.longevity.last_use(v) > self.position: + if self.live_ranges.last_use(v) > self.position: # we need to find a new place for variable v and # store result in the same place loc = self.reg_bindings[v] @@ -583,7 +583,7 @@ 1 (save all), or 2 (save default+PTRs). """ for v, reg in self.reg_bindings.items(): - if v not in force_store and self.longevity.last_use(v) <= self.position: + if v not in force_store and self.live_ranges.last_use(v) <= self.position: # variable dies del self.reg_bindings[v] self.free_regs.append(reg) diff --git a/rpython/jit/backend/x86/regalloc.py b/rpython/jit/backend/x86/regalloc.py --- a/rpython/jit/backend/x86/regalloc.py +++ b/rpython/jit/backend/x86/regalloc.py @@ -515,7 +515,7 @@ # and won't be used after the current operation finishes, # then swap the role of 'x' and 'y' if (symm and isinstance(argloc, RegLoc) and - self.rm.longevity.last_use(y) == self.rm.position): + self.rm.live_ranges.last_use(y) == self.rm.position): x, y = y, x argloc = self.loc(y) # From pypy.commits at gmail.com Tue Feb 23 17:26:20 2016 From: pypy.commits at gmail.com (Raemi) Date: Tue, 23 Feb 2016 14:26:20 -0800 (PST) Subject: [pypy-commit] pypy fix-longevity: (plan_rich, remi) fix broken commit Message-ID: <56ccdc8c.162f1c0a.2f944.2280@mx.google.com> Author: Remi Meier Branch: fix-longevity 
Changeset: r82464:2feb1ca9c35c Date: 2016-02-23 22:36 +0100 http://bitbucket.org/pypy/pypy/changeset/2feb1ca9c35c/ Log: (plan_rich,remi) fix broken commit diff --git a/rpython/jit/backend/llsupport/regalloc.py b/rpython/jit/backend/llsupport/regalloc.py --- a/rpython/jit/backend/llsupport/regalloc.py +++ b/rpython/jit/backend/llsupport/regalloc.py @@ -1,7 +1,7 @@ import os -from rpython.jit.metainterp.history import Const, Box, REF, JitCellToken +from rpython.jit.metainterp.history import Const, REF, JitCellToken from rpython.rlib.objectmodel import we_are_translated, specialize -from rpython.jit.metainterp.resoperation import rop +from rpython.jit.metainterp.resoperation import rop, AbstractValue from rpython.rtyper.lltypesystem import lltype from rpython.rtyper.lltypesystem.lloperation import llop @@ -10,7 +10,7 @@ except ImportError: OrderedDict = dict # too bad -class TempBox(Box): +class TempVar(AbstractValue): def __init__(self): pass @@ -267,6 +267,7 @@ raise NotImplementedError("Purely abstract") class RegisterManager(object): + """ Class that keeps track of register allocations """ box_types = None # or a list of acceptable types @@ -292,19 +293,19 @@ def is_still_alive(self, v): # Check if 'v' is alive at the current position. # Return False if the last usage is strictly before. - return self.longevity[v][1] >= self.position + return self.longevity.last_use(v) >= self.position def stays_alive(self, v): # Check if 'v' stays alive after the current position. # Return False if the last usage is before or at position. 
- return self.longevity[v][1] > self.position + return self.longevity.last_use(v) > self.position def next_instruction(self, incr=1): self.position += incr def _check_type(self, v): if not we_are_translated() and self.box_types is not None: - assert isinstance(v, TempBox) or v.type in self.box_types + assert isinstance(v, TempVar) or v.type in self.box_types def possibly_free_var(self, v): """ If v is stored in a register and v is not used beyond the @@ -314,7 +315,7 @@ self._check_type(v) if isinstance(v, Const): return - if v not in self.longevity or self.longevity[v][1] <= self.position: + if not self.longevity.exists(v) or self.longevity.last_use(v) <= self.position: if v in self.reg_bindings: self.free_regs.append(self.reg_bindings[v]) del self.reg_bindings[v] @@ -348,7 +349,7 @@ assert len(self.temp_boxes) == 0 if self.longevity: for v in self.reg_bindings: - assert self.longevity[v][1] > self.position + assert self.longevity.last_use(v) > self.position def try_allocate_reg(self, v, selected_reg=None, need_lower_byte=False): """ Try to allocate a register, if we have one free. @@ -424,7 +425,7 @@ continue if need_lower_byte and reg in self.no_lower_byte_regs: continue - max_age = self.longevity[next][1] + max_age = self.longevity.last_use(next) if cur_max_age < max_age: cur_max_age = max_age candidate = next @@ -442,8 +443,8 @@ Will not spill a variable from 'forbidden_vars'. 
""" self._check_type(v) - if isinstance(v, TempBox): - self.longevity[v] = (self.position, self.position) + if isinstance(v, TempVar): + self.longevity.new_live_range(v, self.position, self.position) loc = self.try_allocate_reg(v, selected_reg, need_lower_byte=need_lower_byte) if loc: @@ -553,13 +554,14 @@ loc = self.force_allocate_reg(v, forbidden_vars) self.assembler.regalloc_mov(prev_loc, loc) assert v in self.reg_bindings - if self.longevity[v][1] > self.position: + if self.longevity.last_use(v) > self.position: # we need to find a new place for variable v and # store result in the same place loc = self.reg_bindings[v] del self.reg_bindings[v] - if self.frame_manager.get(v) is None: + if self.frame_manager.get(v) is None or self.free_regs: self._move_variable_away(v, loc) + self.reg_bindings[result_v] = loc else: self._reallocate_from_to(v, result_v) @@ -581,7 +583,7 @@ 1 (save all), or 2 (save default+PTRs). """ for v, reg in self.reg_bindings.items(): - if v not in force_store and self.longevity[v][1] <= self.position: + if v not in force_store and self.longevity.last_use(v) <= self.position: # variable dies del self.reg_bindings[v] self.free_regs.append(reg) @@ -633,69 +635,79 @@ locs = [] base_ofs = self.assembler.cpu.get_baseofs_of_frame_field() for box in inputargs: - assert isinstance(box, Box) + assert not isinstance(box, Const) loc = self.fm.get_new_loc(box) locs.append(loc.value - base_ofs) if looptoken.compiled_loop_token is not None: # for tests looptoken.compiled_loop_token._ll_initial_locs = locs - def can_merge_with_next_guard(self, op, i, operations): - if (op.getopnum() == rop.CALL_MAY_FORCE or - op.getopnum() == rop.CALL_ASSEMBLER or - op.getopnum() == rop.CALL_RELEASE_GIL): - assert operations[i + 1].getopnum() == rop.GUARD_NOT_FORCED - return True - if not op.is_comparison(): - if op.is_ovf(): - if (operations[i + 1].getopnum() != rop.GUARD_NO_OVERFLOW and - operations[i + 1].getopnum() != rop.GUARD_OVERFLOW): - not_implemented("int_xxx_ovf 
not followed by " - "guard_(no)_overflow") - return True + def next_op_can_accept_cc(self, operations, i): + op = operations[i] + next_op = operations[i + 1] + opnum = next_op.getopnum() + if (opnum != rop.GUARD_TRUE and opnum != rop.GUARD_FALSE + and opnum != rop.COND_CALL): return False - if (operations[i + 1].getopnum() != rop.GUARD_TRUE and - operations[i + 1].getopnum() != rop.GUARD_FALSE): + if next_op.getarg(0) is not op: return False - if operations[i + 1].getarg(0) is not op.result: + if self.longevity[op][1] > i + 1: return False - if (self.longevity[op.result][1] > i + 1 or - op.result in operations[i + 1].getfailargs()): - return False + if opnum != rop.COND_CALL: + if op in operations[i + 1].getfailargs(): + return False + else: + if op in operations[i + 1].getarglist()[1:]: + return False return True - def locs_for_call_assembler(self, op, guard_op): + def locs_for_call_assembler(self, op): descr = op.getdescr() assert isinstance(descr, JitCellToken) if op.numargs() == 2: self.rm._sync_var(op.getarg(1)) return [self.loc(op.getarg(0)), self.fm.loc(op.getarg(1))] else: + assert op.numargs() == 1 return [self.loc(op.getarg(0))] -def compute_vars_longevity(inputargs, operations): +class LiveRanges(object): + def __init__(self, longevity, last_real_usage, dist_to_next_call): + self.longevity = longevity + self.last_real_usage = last_real_usage + self.dist_to_next_call = dist_to_next_call + + def exists(self, var): + return var in self.longevity + + def last_use(self, var): + return self.longevity[var][1] + + def new_live_range(self, var, start, end): + self.longevity[var] = (start, end) + +def compute_var_live_ranges(inputargs, operations): # compute a dictionary that maps variables to index in # operations that is a "last-time-seen" - # returns a pair longevity/useful. Non-useful variables are ones that + # returns a Longevity object with longevity/useful. 
Non-useful variables are ones that # never appear in the assembler or it does not matter if they appear on # stack or in registers. Main example is loop arguments that go # only to guard operations or to jump or to finish - produced = {} last_used = {} last_real_usage = {} + dist_to_next_call = [0] * len(operations) + last_call_pos = -1 for i in range(len(operations)-1, -1, -1): op = operations[i] - if op.result: - if op.result not in last_used and op.has_no_side_effect(): + if op.type != 'v': + if op not in last_used and op.has_no_side_effect(): continue - assert op.result not in produced - produced[op.result] = i opnum = op.getopnum() for j in range(op.numargs()): arg = op.getarg(j) - if not isinstance(arg, Box): + if isinstance(arg, Const): continue if arg not in last_used: last_used[arg] = i @@ -706,26 +718,40 @@ for arg in op.getfailargs(): if arg is None: # hole continue - assert isinstance(arg, Box) + assert not isinstance(arg, Const) if arg not in last_used: last_used[arg] = i + if op.is_call(): + last_call_pos = i + dist_to_next_call[i] = last_call_pos - i # longevity = {} - for arg in produced: - if arg in last_used: - assert isinstance(arg, Box) - assert produced[arg] < last_used[arg] - longevity[arg] = (produced[arg], last_used[arg]) + for i, arg in enumerate(operations): + if arg.type != 'v' and arg in last_used: + assert not isinstance(arg, Const) + assert i < last_used[arg] + longevity[arg] = (i, last_used[arg]) del last_used[arg] for arg in inputargs: - assert isinstance(arg, Box) + assert not isinstance(arg, Const) if arg not in last_used: longevity[arg] = (-1, -1) else: longevity[arg] = (0, last_used[arg]) del last_used[arg] assert len(last_used) == 0 - return longevity, last_real_usage + + if not we_are_translated(): + produced = {} + for arg in inputargs: + produced[arg] = None + for op in operations: + for arg in op.getarglist(): + if not isinstance(arg, Const): + assert arg in produced + produced[op] = None + + return LiveRanges(longevity, 
last_real_usage, dist_to_next_call) def is_comparison_or_ovf_op(opnum): from rpython.jit.metainterp.resoperation import opclasses @@ -734,7 +760,7 @@ # any instance field, we can use a fake object class Fake(cls): pass - op = Fake(None) + op = Fake() return op.is_comparison() or op.is_ovf() def valid_addressing_size(size): diff --git a/rpython/jit/backend/x86/regalloc.py b/rpython/jit/backend/x86/regalloc.py --- a/rpython/jit/backend/x86/regalloc.py +++ b/rpython/jit/backend/x86/regalloc.py @@ -515,7 +515,7 @@ # and won't be used after the current operation finishes, # then swap the role of 'x' and 'y' if (symm and isinstance(argloc, RegLoc) and - self.rm.live_ranges.last_use(y) == self.rm.position): + self.rm.longevity.last_use(y) == self.rm.position): x, y = y, x argloc = self.loc(y) # From pypy.commits at gmail.com Wed Feb 24 02:30:10 2016 From: pypy.commits at gmail.com (mattip) Date: Tue, 23 Feb 2016 23:30:10 -0800 (PST) Subject: [pypy-commit] pypy ndarray-setitem-filtered: test and fix setitem for ndarrays with a boolean filter Message-ID: <56cd5c02.e6bbc20a.c6f05.64fa@mx.google.com> Author: mattip Branch: ndarray-setitem-filtered Changeset: r82466:965416239e8f Date: 2016-02-24 08:24 +0100 http://bitbucket.org/pypy/pypy/changeset/965416239e8f/ Log: test and fix setitem for ndarrays with a boolean filter From pypy.commits at gmail.com Wed Feb 24 02:30:12 2016 From: pypy.commits at gmail.com (mattip) Date: Tue, 23 Feb 2016 23:30:12 -0800 (PST) Subject: [pypy-commit] pypy ndarray-setitem-filtered: test, move copying to more explicit location Message-ID: <56cd5c04.046f1c0a.c7219.221c@mx.google.com> Author: mattip Branch: ndarray-setitem-filtered Changeset: r82467:d995a378bdd0 Date: 2016-02-24 08:26 +0100 http://bitbucket.org/pypy/pypy/changeset/d995a378bdd0/ Log: test, move copying to more explicit location diff --git a/pypy/module/micronumpy/concrete.py b/pypy/module/micronumpy/concrete.py --- a/pypy/module/micronumpy/concrete.py +++ 
b/pypy/module/micronumpy/concrete.py @@ -298,7 +298,14 @@ except IndexError: # not a single result chunks = self._prepare_slice_args(space, w_index) - return new_view(space, orig_arr, chunks) + copy = False + if isinstance(chunks[0], BooleanChunk): + # numpy compatibility + copy = True + w_ret = new_view(space, orig_arr, chunks) + if copy: + w_ret = w_ret.descr_copy(space, space.wrap(w_ret.get_order())) + return w_ret def descr_setitem(self, space, orig_arr, w_index, w_value): try: diff --git a/pypy/module/micronumpy/ndarray.py b/pypy/module/micronumpy/ndarray.py --- a/pypy/module/micronumpy/ndarray.py +++ b/pypy/module/micronumpy/ndarray.py @@ -22,7 +22,7 @@ from pypy.module.micronumpy.flagsobj import W_FlagsObject from pypy.module.micronumpy.strides import ( get_shape_from_iterable, shape_agreement, shape_agreement_multiple, - is_c_contiguous, is_f_contiguous, calc_strides, new_view) + is_c_contiguous, is_f_contiguous, calc_strides, new_view, BooleanChunk) from pypy.module.micronumpy.casting import can_cast_array from pypy.module.micronumpy.descriptor import get_dtype_cache @@ -204,7 +204,13 @@ if iter_shape is None: # w_index is a list of slices, return a view chunks = self.implementation._prepare_slice_args(space, w_index) - return new_view(space, self, chunks) + copy = False + if isinstance(chunks[0], BooleanChunk): + copy = True + w_ret = new_view(space, self, chunks) + if copy: + w_ret = w_ret.descr_copy(space, space.wrap(w_ret.get_order())) + return w_ret shape = res_shape + self.get_shape()[len(indexes):] w_res = W_NDimArray.from_shape(space, shape, self.get_dtype(), self.get_order(), w_instance=self) diff --git a/pypy/module/micronumpy/strides.py b/pypy/module/micronumpy/strides.py --- a/pypy/module/micronumpy/strides.py +++ b/pypy/module/micronumpy/strides.py @@ -100,19 +100,15 @@ w_arr = w_arr.getitem_filter(space, filtr.w_idx, axis=dim) arr = w_arr.implementation chunks[dim] = SliceChunk(space.newslice(space.wrap(0), - space.wrap(-1), space.w_None)) + 
space.w_None, space.w_None)) r = calculate_slice_strides(space, arr.shape, arr.start, arr.get_strides(), arr.get_backstrides(), chunks) else: r = calculate_slice_strides(space, arr.shape, arr.start, arr.get_strides(), arr.get_backstrides(), chunks) shape, start, strides, backstrides = r - w_ret = W_NDimArray.new_slice(space, start, strides[:], backstrides[:], + return W_NDimArray.new_slice(space, start, strides[:], backstrides[:], shape[:], arr, w_arr) - if dim == 0: - # Do not return a view - return w_ret.descr_copy(space, space.wrap(w_ret.get_order())) - return w_ret @jit.unroll_safe def _extend_shape(old_shape, chunks): diff --git a/pypy/module/micronumpy/test/test_ndarray.py b/pypy/module/micronumpy/test/test_ndarray.py --- a/pypy/module/micronumpy/test/test_ndarray.py +++ b/pypy/module/micronumpy/test/test_ndarray.py @@ -2550,8 +2550,10 @@ assert b.base is None b = a[:, np.array([True, False, True])] assert b.base is not None + a[np.array([True, False]), 0] = 100 b = a[np.array([True, False]), 0] - assert (b ==[0]).all() + assert b.shape == (1,) + assert (b ==[100]).all() def test_scalar_indexing(self): import numpy as np From pypy.commits at gmail.com Wed Feb 24 02:30:14 2016 From: pypy.commits at gmail.com (mattip) Date: Tue, 23 Feb 2016 23:30:14 -0800 (PST) Subject: [pypy-commit] pypy ndarray-setitem-filtered: WIP - add comment where the problem occurs Message-ID: <56cd5c06.2179c20a.cf896.6653@mx.google.com> Author: mattip Branch: ndarray-setitem-filtered Changeset: r82468:cae31623079d Date: 2016-02-24 08:29 +0100 http://bitbucket.org/pypy/pypy/changeset/cae31623079d/ Log: WIP - add comment where the problem occurs diff --git a/pypy/module/micronumpy/strides.py b/pypy/module/micronumpy/strides.py --- a/pypy/module/micronumpy/strides.py +++ b/pypy/module/micronumpy/strides.py @@ -97,6 +97,7 @@ # filter by axis dim filtr = chunks[dim] assert isinstance(filtr, BooleanChunk) + # XXX this creates a new array, and fails in setitem w_arr = 
w_arr.getitem_filter(space, filtr.w_idx, axis=dim) arr = w_arr.implementation chunks[dim] = SliceChunk(space.newslice(space.wrap(0), From pypy.commits at gmail.com Wed Feb 24 04:06:41 2016 From: pypy.commits at gmail.com (mjacob) Date: Wed, 24 Feb 2016 01:06:41 -0800 (PST) Subject: [pypy-commit] pypy py3.3: Merged in marky1991/pypy_new/fix_module_repr (pull request #404) Message-ID: <56cd72a1.c9161c0a.2ed0.ffffaff1@mx.google.com> Author: Manuel Jacob Branch: py3.3 Changeset: r82477:79680cd9c664 Date: 2016-02-24 10:05 +0100 http://bitbucket.org/pypy/pypy/changeset/79680cd9c664/ Log: Merged in marky1991/pypy_new/fix_module_repr (pull request #404) 3.3: Fix_module_repr to match PEP 420 and fix reload_builtin diff --git a/pypy/interpreter/module.py b/pypy/interpreter/module.py --- a/pypy/interpreter/module.py +++ b/pypy/interpreter/module.py @@ -121,19 +121,32 @@ return space.newtuple(tup_return) def descr_module__repr__(self, space): - from pypy.interpreter.mixedmodule import MixedModule - if self.w_name is not None: - name = space.unicode_w(space.repr(self.w_name)) - else: + w_loader = space.finditem(self.w_dict, space.wrap('__loader__')) + if w_loader is not None: + try: + return space.call_method(w_loader, "module_repr", + space.wrap(self)) + except OperationError: + pass + try: + w_name = space.getattr(self, space.wrap('__name__')) + name = space.unicode_w(space.repr(w_name)) + except OperationError: name = u"'?'" - if isinstance(self, MixedModule): - return space.wrap(u"" % name) + try: w___file__ = space.getattr(self, space.wrap('__file__')) + except OperationError: + w___file__ = space.w_None + if not space.isinstance_w(w___file__, space.w_unicode): + if w_loader is not None: + w_loader_repr = space.unicode_w(space.repr(w_loader)) + return space.wrap(u"" % (name, w_loader_repr)) + else: + return space.wrap(u"" % (name,)) + else: __file__ = space.unicode_w(space.repr(w___file__)) - except OperationError: - __file__ = u'?' 
- return space.wrap(u"" % (name, __file__)) + return space.wrap(u"" % (name, __file__)) def descr_module__dir__(self, space): w_dict = space.getattr(self, space.wrap('__dict__')) diff --git a/pypy/interpreter/test/test_module.py b/pypy/interpreter/test/test_module.py --- a/pypy/interpreter/test/test_module.py +++ b/pypy/interpreter/test/test_module.py @@ -74,11 +74,88 @@ r'lib_pypy\\_pypy_interact.py' in r.lower()) and r.endswith('>')) nofile = type(_pypy_interact)('nofile', 'foo') - assert repr(nofile) == "" + assert repr(nofile) == "" m = type(_pypy_interact).__new__(type(_pypy_interact)) assert repr(m).startswith("".format(mod_name=repr(module.__name__), + cls=repr(cls.__name__))) + return mod_repr + test_module.__loader__ = CustomLoader + assert repr(test_module) == "" + + def test_repr_with_loader_with_module_repr_wrong_type(self): + import sys + test_module = type(sys)("test_module", "doc") + + # This return value must be a string. + class BuggyCustomLoader: + @classmethod + def module_repr(cls, module): + return 5 + + test_module.__loader__ = BuggyCustomLoader + raises(TypeError, repr, test_module) + + def test_repr_with_loader_with_raising_module_repr(self): + import sys + test_module = type(sys)("test_module", "doc") + # If an exception occurs in module_repr(), the exception is caught + # and discarded, and the calculation of the module’s repr continues + # as if module_repr() did not exist. 
+ class CustomLoaderWithRaisingRepr: + @classmethod + def module_repr(cls, module): + return repr(1/0) + + test_module.__loader__ = CustomLoaderWithRaisingRepr + mod_repr = repr(test_module) + + # The module has no __file__ attribute, so the repr should use + # the loader and name + loader_repr = repr(test_module.__loader__) + expected_repr = "".format(loader_repr) + assert mod_repr == expected_repr + + def test_repr_with_raising_loader_and___file__(self): + import sys + test_module = type(sys)("test_module", "doc") + test_module.__file__ = "/fake_dir/test_module.py" + class CustomLoaderWithRaisingRepr: + """Operates just like the builtin importer, but implements a + module_repr method that raises an exception.""" + @classmethod + def module_repr(cls, module): + return repr(1/0) + + test_module.__loader__ = CustomLoaderWithRaisingRepr + + # If the module has an __file__ attribute, this is used as part + # of the module's repr. + # (If we have a loader that doesn't correctly implement module_repr, + # if we have a path, we always just use name and path. + expected_repr = "" + assert repr(test_module) == expected_repr + + def test_repr_with_missing_name(self): + import sys + test_module = type(sys)("test_module", "doc") + del test_module.__name__ + mod_repr = repr(test_module) + assert mod_repr == "" + def test_dir(self): import sys items = sys.__dir__() diff --git a/pypy/module/imp/interp_imp.py b/pypy/module/imp/interp_imp.py --- a/pypy/module/imp/interp_imp.py +++ b/pypy/module/imp/interp_imp.py @@ -86,7 +86,11 @@ return # force_init is needed to make reload actually reload instead of just # using the already-present module in sys.modules. 
- return space.getbuiltinmodule(name, force_init=True, reuse=False) + + # If the module is already in sys.modules, it must be a reload, so + # we want to reuse (and reinitialize) the existing module object + reuse = space.finditem(space.sys.get('modules'), w_name) is not None + return space.getbuiltinmodule(name, force_init=True, reuse=reuse) def init_frozen(space, w_name): return None From pypy.commits at gmail.com Wed Feb 24 04:06:49 2016 From: pypy.commits at gmail.com (marky1991) Date: Wed, 24 Feb 2016 01:06:49 -0800 (PST) Subject: [pypy-commit] pypy py3.3: If the module is already in sys.modules, it must be a reload, so just initialize the existing module instance and return it. (Fixes test_reload_builtin) Message-ID: <56cd72a9.e83cc20a.2a8a.ffff8673@mx.google.com> Author: Mark Young Branch: py3.3 Changeset: r82469:afa0092349e3 Date: 2016-02-13 10:12 -0500 http://bitbucket.org/pypy/pypy/changeset/afa0092349e3/ Log: If the module is already in sys.modules, it must be a reload, so just initialize the existing module instance and return it. (Fixes test_reload_builtin) diff --git a/pypy/module/imp/interp_imp.py b/pypy/module/imp/interp_imp.py --- a/pypy/module/imp/interp_imp.py +++ b/pypy/module/imp/interp_imp.py @@ -86,7 +86,8 @@ return # force_init is needed to make reload actually reload instead of just # using the already-present module in sys.modules. - return space.getbuiltinmodule(name, force_init=True, reuse=False) + reuse = space.finditem(space.sys.get('modules'), w_name) is not None + return space.getbuiltinmodule(name, force_init=True, reuse=reuse) def init_frozen(space, w_name): return None From pypy.commits at gmail.com Wed Feb 24 04:06:52 2016 From: pypy.commits at gmail.com (marky1991) Date: Wed, 24 Feb 2016 01:06:52 -0800 (PST) Subject: [pypy-commit] pypy py3.3: Fix Module.repr to use space.builtin_modules to determine if a module is builtin. (Fixes regression in test_shadow_extension_2.) 
Message-ID: <56cd72ac.12871c0a.29944.4b05@mx.google.com> Author: Mark Young Branch: py3.3 Changeset: r82470:1139cf119d2a Date: 2016-02-13 11:46 -0500 http://bitbucket.org/pypy/pypy/changeset/1139cf119d2a/ Log: Fix Module.repr to use space.builtin_modules to determine if a module is builtin. (Fixes regression in test_shadow_extension_2.) diff --git a/pypy/interpreter/module.py b/pypy/interpreter/module.py --- a/pypy/interpreter/module.py +++ b/pypy/interpreter/module.py @@ -121,12 +121,11 @@ return space.newtuple(tup_return) def descr_module__repr__(self, space): - from pypy.interpreter.mixedmodule import MixedModule if self.w_name is not None: name = space.unicode_w(space.repr(self.w_name)) else: name = u"'?'" - if isinstance(self, MixedModule): + if self.getname(space) in self.space.builtin_modules: return space.wrap(u"" % name) try: w___file__ = space.getattr(self, space.wrap('__file__')) From pypy.commits at gmail.com Wed Feb 24 04:06:53 2016 From: pypy.commits at gmail.com (marky1991) Date: Wed, 24 Feb 2016 01:06:53 -0800 (PST) Subject: [pypy-commit] pypy py3.3: Translation still not fixed. Committing so I can ask about the problem. Message-ID: <56cd72ad.6507c20a.db17a.ffff8786@mx.google.com> Author: Mark Young Branch: py3.3 Changeset: r82471:4678e88f7915 Date: 2016-02-14 11:28 -0500 http://bitbucket.org/pypy/pypy/changeset/4678e88f7915/ Log: Translation still not fixed. Committing so I can ask about the problem. 
diff --git a/lib-python/3/importlib/_bootstrap.py b/lib-python/3/importlib/_bootstrap.py --- a/lib-python/3/importlib/_bootstrap.py +++ b/lib-python/3/importlib/_bootstrap.py @@ -1496,6 +1496,7 @@ raise TypeError("module name must be str, not {}".format(type(name))) if level < 0: raise ValueError('level must be >= 0') + #print(name, package, level) if package: if not isinstance(package, str): raise TypeError("__package__ not set to a string") diff --git a/pypy/interpreter/module.py b/pypy/interpreter/module.py --- a/pypy/interpreter/module.py +++ b/pypy/interpreter/module.py @@ -123,9 +123,11 @@ def descr_module__repr__(self, space): if self.w_name is not None: name = space.unicode_w(space.repr(self.w_name)) + nonrepr_name = space.unicode_w(self.w_name) else: name = u"'?'" - if self.getname(space) in self.space.builtin_modules: + nonrepr_name = u"?" + if nonrepr_name in self.space.builtin_modules: return space.wrap(u"" % name) try: w___file__ = space.getattr(self, space.wrap('__file__')) From pypy.commits at gmail.com Wed Feb 24 04:06:55 2016 From: pypy.commits at gmail.com (marky1991) Date: Wed, 24 Feb 2016 01:06:55 -0800 (PST) Subject: [pypy-commit] pypy py3.3: Fix translation. Message-ID: <56cd72af.162f1c0a.2f944.ffffadbc@mx.google.com> Author: Mark Young Branch: py3.3 Changeset: r82472:6aef6089a236 Date: 2016-02-14 12:48 -0500 http://bitbucket.org/pypy/pypy/changeset/6aef6089a236/ Log: Fix translation. 
diff --git a/lib-python/3/importlib/_bootstrap.py b/lib-python/3/importlib/_bootstrap.py --- a/lib-python/3/importlib/_bootstrap.py +++ b/lib-python/3/importlib/_bootstrap.py @@ -1496,7 +1496,6 @@ raise TypeError("module name must be str, not {}".format(type(name))) if level < 0: raise ValueError('level must be >= 0') - #print(name, package, level) if package: if not isinstance(package, str): raise TypeError("__package__ not set to a string") diff --git a/pypy/interpreter/module.py b/pypy/interpreter/module.py --- a/pypy/interpreter/module.py +++ b/pypy/interpreter/module.py @@ -123,10 +123,10 @@ def descr_module__repr__(self, space): if self.w_name is not None: name = space.unicode_w(space.repr(self.w_name)) - nonrepr_name = space.unicode_w(self.w_name) + nonrepr_name = self.space.identifier_w(self.w_name) else: name = u"'?'" - nonrepr_name = u"?" + nonrepr_name = "?" if nonrepr_name in self.space.builtin_modules: return space.wrap(u"" % name) try: From pypy.commits at gmail.com Wed Feb 24 04:06:58 2016 From: pypy.commits at gmail.com (marky1991) Date: Wed, 24 Feb 2016 01:06:58 -0800 (PST) Subject: [pypy-commit] pypy fix_module_repr: Add a comment and fix an unintended change in the comment. Message-ID: <56cd72b2.86e31c0a.a5acd.ffffad77@mx.google.com> Author: Mark Young Branch: fix_module_repr Changeset: r82474:5f69cc129429 Date: 2016-02-20 15:27 -0500 http://bitbucket.org/pypy/pypy/changeset/5f69cc129429/ Log: Add a comment and fix an unintended change in the comment. diff --git a/pypy/interpreter/module.py b/pypy/interpreter/module.py --- a/pypy/interpreter/module.py +++ b/pypy/interpreter/module.py @@ -1,5 +1,5 @@ """ -eodule objects. +Module objects. 
""" from pypy.interpreter.baseobjspace import W_Root diff --git a/pypy/module/imp/interp_imp.py b/pypy/module/imp/interp_imp.py --- a/pypy/module/imp/interp_imp.py +++ b/pypy/module/imp/interp_imp.py @@ -86,6 +86,9 @@ return # force_init is needed to make reload actually reload instead of just # using the already-present module in sys.modules. + + # If the module is already in sys.modules, it must be a reload, so + # we want to reuse (and reinitialize) the existing module object reuse = space.finditem(space.sys.get('modules'), w_name) is not None return space.getbuiltinmodule(name, force_init=True, reuse=reuse) From pypy.commits at gmail.com Wed Feb 24 04:07:00 2016 From: pypy.commits at gmail.com (marky1991) Date: Wed, 24 Feb 2016 01:07:00 -0800 (PST) Subject: [pypy-commit] pypy fix_module_repr: Respond to feedback. Message-ID: <56cd72b4.8ee61c0a.acdac.ffffabd9@mx.google.com> Author: Mark Young Branch: fix_module_repr Changeset: r82475:02c4b715132b Date: 2016-02-21 12:40 -0500 http://bitbucket.org/pypy/pypy/changeset/02c4b715132b/ Log: Respond to feedback. 
diff --git a/pypy/interpreter/module.py b/pypy/interpreter/module.py --- a/pypy/interpreter/module.py +++ b/pypy/interpreter/module.py @@ -124,19 +124,15 @@ w_loader = space.finditem(self.w_dict, space.wrap('__loader__')) if w_loader is not None: try: - w_repr = space.call_method(w_loader, "module_repr", - space.wrap(self)) + return space.call_method(w_loader, "module_repr", + space.wrap(self)) except OperationError: - w_repr = None - - if w_repr is not None: - return w_repr - + pass try: w_name = space.getattr(self, space.wrap('__name__')) + name = space.unicode_w(space.repr(w_name)) except OperationError: - w_name = space.wrap(u'?') - name = space.unicode_w(space.repr(w_name)) + name = u"'?'" try: w___file__ = space.getattr(self, space.wrap('__file__')) From pypy.commits at gmail.com Wed Feb 24 04:07:02 2016 From: pypy.commits at gmail.com (marky1991) Date: Wed, 24 Feb 2016 01:07:02 -0800 (PST) Subject: [pypy-commit] pypy fix_module_repr: Respond to mjacob's feedback. Message-ID: <56cd72b6.0ab81c0a.80bcd.4689@mx.google.com> Author: Mark Young Branch: fix_module_repr Changeset: r82476:97351ee2ac6f Date: 2016-02-22 22:27 -0500 http://bitbucket.org/pypy/pypy/changeset/97351ee2ac6f/ Log: Respond to mjacob's feedback. 
diff --git a/pypy/interpreter/module.py b/pypy/interpreter/module.py --- a/pypy/interpreter/module.py +++ b/pypy/interpreter/module.py @@ -138,7 +138,7 @@ w___file__ = space.getattr(self, space.wrap('__file__')) except OperationError: w___file__ = space.w_None - if not space.is_true(space.isinstance(w___file__, space.w_unicode)): + if not space.isinstance_w(w___file__, space.w_unicode): if w_loader is not None: w_loader_repr = space.unicode_w(space.repr(w_loader)) return space.wrap(u"" % (name, w_loader_repr)) diff --git a/pypy/interpreter/test/test_module.py b/pypy/interpreter/test/test_module.py --- a/pypy/interpreter/test/test_module.py +++ b/pypy/interpreter/test/test_module.py @@ -80,15 +80,13 @@ assert repr(m).startswith("" def test_repr_with_loader_with_module_repr_wrong_type(self): - import _frozen_importlib, sys + import sys test_module = type(sys)("test_module", "doc") # This return value must be a string. - class BuggyCustomLoader(_frozen_importlib.BuiltinImporter): - """Operates just like the builtin importer, but implements a - module_repr method that returns a non-string value.""" + class BuggyCustomLoader: @classmethod def module_repr(cls, module): return 5 test_module.__loader__ = BuggyCustomLoader - try: - repr(test_module) - assert False, "module_repr must fail if it returns a nonstring." - except TypeError: - pass + raises(TypeError, repr, test_module) def test_repr_with_loader_with_raising_module_repr(self): - import _frozen_importlib, sys + import sys test_module = type(sys)("test_module", "doc") # If an exception occurs in module_repr(), the exception is caught # and discarded, and the calculation of the module’s repr continues # as if module_repr() did not exist. 
- class CustomLoaderWithRaisingRepr(_frozen_importlib.BuiltinImporter): - """Operates just like the builtin importer, but implements a - module_repr method that raises an exception.""" + class CustomLoaderWithRaisingRepr: @classmethod def module_repr(cls, module): return repr(1/0) @@ -140,10 +130,10 @@ assert mod_repr == expected_repr def test_repr_with_raising_loader_and___file__(self): - import _frozen_importlib, sys + import sys test_module = type(sys)("test_module", "doc") test_module.__file__ = "/fake_dir/test_module.py" - class CustomLoaderWithRaisingRepr(_frozen_importlib.BuiltinImporter): + class CustomLoaderWithRaisingRepr: """Operates just like the builtin importer, but implements a module_repr method that raises an exception.""" @classmethod @@ -160,13 +150,12 @@ assert repr(test_module) == expected_repr def test_repr_with_missing_name(self): - import _frozen_importlib, sys + import sys test_module = type(sys)("test_module", "doc") del test_module.__name__ mod_repr = repr(test_module) assert mod_repr == "" - def test_dir(self): import sys items = sys.__dir__() From pypy.commits at gmail.com Wed Feb 24 04:06:57 2016 From: pypy.commits at gmail.com (marky1991) Date: Wed, 24 Feb 2016 01:06:57 -0800 (PST) Subject: [pypy-commit] pypy fix_module_repr: Fix module repr as per PEP 420. Message-ID: <56cd72b1.02931c0a.dfa51.ffffac7d@mx.google.com> Author: Mark Young Branch: fix_module_repr Changeset: r82473:89cec61d8f3d Date: 2016-02-20 13:46 -0500 http://bitbucket.org/pypy/pypy/changeset/89cec61d8f3d/ Log: Fix module repr as per PEP 420. diff --git a/pypy/interpreter/module.py b/pypy/interpreter/module.py --- a/pypy/interpreter/module.py +++ b/pypy/interpreter/module.py @@ -1,5 +1,5 @@ """ -Module objects. +eodule objects. 
""" from pypy.interpreter.baseobjspace import W_Root @@ -121,20 +121,36 @@ return space.newtuple(tup_return) def descr_module__repr__(self, space): - if self.w_name is not None: - name = space.unicode_w(space.repr(self.w_name)) - nonrepr_name = self.space.identifier_w(self.w_name) - else: - name = u"'?'" - nonrepr_name = "?" - if nonrepr_name in self.space.builtin_modules: - return space.wrap(u"" % name) + w_loader = space.finditem(self.w_dict, space.wrap('__loader__')) + if w_loader is not None: + try: + w_repr = space.call_method(w_loader, "module_repr", + space.wrap(self)) + except OperationError: + w_repr = None + + if w_repr is not None: + return w_repr + + try: + w_name = space.getattr(self, space.wrap('__name__')) + except OperationError: + w_name = space.wrap(u'?') + name = space.unicode_w(space.repr(w_name)) + try: w___file__ = space.getattr(self, space.wrap('__file__')) + except OperationError: + w___file__ = space.w_None + if not space.is_true(space.isinstance(w___file__, space.w_unicode)): + if w_loader is not None: + w_loader_repr = space.unicode_w(space.repr(w_loader)) + return space.wrap(u"" % (name, w_loader_repr)) + else: + return space.wrap(u"" % (name,)) + else: __file__ = space.unicode_w(space.repr(w___file__)) - except OperationError: - __file__ = u'?' 
- return space.wrap(u"" % (name, __file__)) + return space.wrap(u"" % (name, __file__)) def descr_module__dir__(self, space): w_dict = space.getattr(self, space.wrap('__dict__')) diff --git a/pypy/interpreter/test/test_module.py b/pypy/interpreter/test/test_module.py --- a/pypy/interpreter/test/test_module.py +++ b/pypy/interpreter/test/test_module.py @@ -74,11 +74,99 @@ r'lib_pypy\\_pypy_interact.py' in r.lower()) and r.endswith('>')) nofile = type(_pypy_interact)('nofile', 'foo') - assert repr(nofile) == "" + assert repr(nofile) == "" m = type(_pypy_interact).__new__(type(_pypy_interact)) assert repr(m).startswith("".format(mod_name=repr(module.__name__), + cls=repr(cls.__name__))) + return mod_repr + test_module.__loader__ = CustomLoader + assert repr(test_module) == "" + + def test_repr_with_loader_with_module_repr_wrong_type(self): + import _frozen_importlib, sys + test_module = type(sys)("test_module", "doc") + + # This return value must be a string. + class BuggyCustomLoader(_frozen_importlib.BuiltinImporter): + """Operates just like the builtin importer, but implements a + module_repr method that returns a non-string value.""" + @classmethod + def module_repr(cls, module): + return 5 + + test_module.__loader__ = BuggyCustomLoader + try: + repr(test_module) + assert False, "module_repr must fail if it returns a nonstring." + except TypeError: + pass + + def test_repr_with_loader_with_raising_module_repr(self): + import _frozen_importlib, sys + test_module = type(sys)("test_module", "doc") + # If an exception occurs in module_repr(), the exception is caught + # and discarded, and the calculation of the module’s repr continues + # as if module_repr() did not exist. 
+ class CustomLoaderWithRaisingRepr(_frozen_importlib.BuiltinImporter): + """Operates just like the builtin importer, but implements a + module_repr method that raises an exception.""" + @classmethod + def module_repr(cls, module): + return repr(1/0) + + test_module.__loader__ = CustomLoaderWithRaisingRepr + mod_repr = repr(test_module) + + # The module has no __file__ attribute, so the repr should use + # the loader and name + loader_repr = repr(test_module.__loader__) + expected_repr = "".format(loader_repr) + assert mod_repr == expected_repr + + def test_repr_with_raising_loader_and___file__(self): + import _frozen_importlib, sys + test_module = type(sys)("test_module", "doc") + test_module.__file__ = "/fake_dir/test_module.py" + class CustomLoaderWithRaisingRepr(_frozen_importlib.BuiltinImporter): + """Operates just like the builtin importer, but implements a + module_repr method that raises an exception.""" + @classmethod + def module_repr(cls, module): + return repr(1/0) + + test_module.__loader__ = CustomLoaderWithRaisingRepr + + # If the module has an __file__ attribute, this is used as part + # of the module's repr. + # (If we have a loader that doesn't correctly implement module_repr, + # if we have a path, we always just use name and path. + expected_repr = "" + assert repr(test_module) == expected_repr + + def test_repr_with_missing_name(self): + import _frozen_importlib, sys + test_module = type(sys)("test_module", "doc") + del test_module.__name__ + mod_repr = repr(test_module) + assert mod_repr == "" + + def test_dir(self): import sys items = sys.__dir__() From pypy.commits at gmail.com Wed Feb 24 04:24:24 2016 From: pypy.commits at gmail.com (mjacob) Date: Wed, 24 Feb 2016 01:24:24 -0800 (PST) Subject: [pypy-commit] pypy py3.3: Small cleanup: don't wrap already wrapped object. 
Message-ID: <56cd76c8.8abb1c0a.92707.468e@mx.google.com> Author: Manuel Jacob Branch: py3.3 Changeset: r82478:864c7d539f4d Date: 2016-02-24 10:24 +0100 http://bitbucket.org/pypy/pypy/changeset/864c7d539f4d/ Log: Small cleanup: don't wrap already wrapped object. diff --git a/pypy/interpreter/module.py b/pypy/interpreter/module.py --- a/pypy/interpreter/module.py +++ b/pypy/interpreter/module.py @@ -124,8 +124,7 @@ w_loader = space.finditem(self.w_dict, space.wrap('__loader__')) if w_loader is not None: try: - return space.call_method(w_loader, "module_repr", - space.wrap(self)) + return space.call_method(w_loader, "module_repr", self) except OperationError: pass try: From pypy.commits at gmail.com Wed Feb 24 04:26:04 2016 From: pypy.commits at gmail.com (mjacob) Date: Wed, 24 Feb 2016 01:26:04 -0800 (PST) Subject: [pypy-commit] pypy default: Add external package to test_tab.py's exclude set. Message-ID: <56cd772c.512f1c0a.5bc2.4852@mx.google.com> Author: Manuel Jacob Branch: Changeset: r82479:f25587392dc9 Date: 2016-02-24 10:26 +0100 http://bitbucket.org/pypy/pypy/changeset/f25587392dc9/ Log: Add external package to test_tab.py's exclude set. diff --git a/pypy/tool/test/test_tab.py b/pypy/tool/test/test_tab.py --- a/pypy/tool/test/test_tab.py +++ b/pypy/tool/test/test_tab.py @@ -6,7 +6,7 @@ from pypy.conftest import pypydir ROOT = os.path.abspath(os.path.join(pypydir, '..')) -EXCLUDE = {} +EXCLUDE = {'/virt_test/lib/python2.7/site-packages/setuptools'} def test_no_tabs(): From pypy.commits at gmail.com Wed Feb 24 04:31:33 2016 From: pypy.commits at gmail.com (mjacob) Date: Wed, 24 Feb 2016 01:31:33 -0800 (PST) Subject: [pypy-commit] pypy default: Add stubs in LLInterp for missing gc_rawrefcount_* operations. 
Message-ID: <56cd7875.03321c0a.43247.41f0@mx.google.com> Author: Manuel Jacob Branch: Changeset: r82480:969fad5c38b9 Date: 2016-02-24 10:31 +0100 http://bitbucket.org/pypy/pypy/changeset/969fad5c38b9/ Log: Add stubs in LLInterp for missing gc_rawrefcount_* operations. diff --git a/rpython/rtyper/llinterp.py b/rpython/rtyper/llinterp.py --- a/rpython/rtyper/llinterp.py +++ b/rpython/rtyper/llinterp.py @@ -925,6 +925,21 @@ def op_gc_gcflag_extra(self, subopnum, *args): return self.heap.gcflag_extra(subopnum, *args) + def op_gc_rawrefcount_init(self, *args): + raise NotImplementedError("gc_rawrefcount_init") + + def op_gc_rawrefcount_to_obj(self, *args): + raise NotImplementedError("gc_rawrefcount_to_obj") + + def op_gc_rawrefcount_from_obj(self, *args): + raise NotImplementedError("gc_rawrefcount_from_obj") + + def op_gc_rawrefcount_create_link_pyobj(self, *args): + raise NotImplementedError("gc_rawrefcount_create_link_pyobj") + + def op_gc_rawrefcount_create_link_pypy(self, *args): + raise NotImplementedError("gc_rawrefcount_create_link_pypy") + def op_do_malloc_fixedsize(self): raise NotImplementedError("do_malloc_fixedsize") def op_do_malloc_fixedsize_clear(self): From pypy.commits at gmail.com Wed Feb 24 06:03:54 2016 From: pypy.commits at gmail.com (Raemi) Date: Wed, 24 Feb 2016 03:03:54 -0800 (PST) Subject: [pypy-commit] pypy stmgc-c8: merge and fix various things Message-ID: <56cd8e1a.657bc20a.8268c.ffffb5a0@mx.google.com> Author: Remi Meier Branch: stmgc-c8 Changeset: r82481:36de0d994b4e Date: 2016-02-24 12:03 +0100 http://bitbucket.org/pypy/pypy/changeset/36de0d994b4e/ Log: merge and fix various things diff too long, truncating to 2000 out of 5848 lines diff --git a/lib-python/2.7/distutils/command/build_ext.py b/lib-python/2.7/distutils/command/build_ext.py --- a/lib-python/2.7/distutils/command/build_ext.py +++ b/lib-python/2.7/distutils/command/build_ext.py @@ -685,13 +685,17 @@ # the previous version of this code did. This should work for # CPython too. 
The point is that on PyPy with cpyext, the # config var 'SO' is just ".so" but we want to return - # ".pypy-VERSION.so" instead. - so_ext = _get_c_extension_suffix() + # ".pypy-VERSION.so" instead. Note a further tweak for cffi's + # embedding mode: if EXT_SUFFIX is also defined, use that + # directly. + so_ext = get_config_var('EXT_SUFFIX') if so_ext is None: - so_ext = get_config_var('SO') # fall-back - # extensions in debug_mode are named 'module_d.pyd' under windows - if os.name == 'nt' and self.debug: - so_ext = '_d.pyd' + so_ext = _get_c_extension_suffix() + if so_ext is None: + so_ext = get_config_var('SO') # fall-back + # extensions in debug_mode are named 'module_d.pyd' under windows + if os.name == 'nt' and self.debug: + so_ext = '_d.pyd' return os.path.join(*ext_path) + so_ext def get_export_symbols (self, ext): diff --git a/lib-python/2.7/test/capath/0e4015b9.0 b/lib-python/2.7/test/capath/0e4015b9.0 new file mode 100644 --- /dev/null +++ b/lib-python/2.7/test/capath/0e4015b9.0 @@ -0,0 +1,16 @@ +-----BEGIN CERTIFICATE----- +MIIClTCCAf6gAwIBAgIJAKGU95wKR8pTMA0GCSqGSIb3DQEBBQUAMHAxCzAJBgNV +BAYTAlhZMRcwFQYDVQQHDA5DYXN0bGUgQW50aHJheDEjMCEGA1UECgwaUHl0aG9u +IFNvZnR3YXJlIEZvdW5kYXRpb24xIzAhBgNVBAMMGnNlbGYtc2lnbmVkLnB5dGhv +bnRlc3QubmV0MB4XDTE0MTEwMjE4MDkyOVoXDTI0MTAzMDE4MDkyOVowcDELMAkG +A1UEBhMCWFkxFzAVBgNVBAcMDkNhc3RsZSBBbnRocmF4MSMwIQYDVQQKDBpQeXRo +b24gU29mdHdhcmUgRm91bmRhdGlvbjEjMCEGA1UEAwwac2VsZi1zaWduZWQucHl0 +aG9udGVzdC5uZXQwgZ8wDQYJKoZIhvcNAQEBBQADgY0AMIGJAoGBANDXQXW9tjyZ +Xt0Iv2tLL1+jinr4wGg36ioLDLFkMf+2Y1GL0v0BnKYG4N1OKlAU15LXGeGer8vm +Sv/yIvmdrELvhAbbo3w4a9TMYQA4XkIVLdvu3mvNOAet+8PMJxn26dbDhG809ALv +EHY57lQsBS3G59RZyBPVqAqmImWNJnVzAgMBAAGjNzA1MCUGA1UdEQQeMByCGnNl +bGYtc2lnbmVkLnB5dGhvbnRlc3QubmV0MAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcN +AQEFBQADgYEAIuzAhgMouJpNdf3URCHIineyoSt6WK/9+eyUcjlKOrDoXNZaD72h +TXMeKYoWvJyVcSLKL8ckPtDobgP2OTt0UkyAaj0n+ZHaqq1lH2yVfGUA1ILJv515 +C8BqbvVZuqm3i7ygmw3bqE/lYMgOrYtXXnqOrz6nvsE6Yc9V9rFflOM= +-----END CERTIFICATE----- diff 
--git a/lib-python/2.7/test/capath/ce7b8643.0 b/lib-python/2.7/test/capath/ce7b8643.0 new file mode 100644 --- /dev/null +++ b/lib-python/2.7/test/capath/ce7b8643.0 @@ -0,0 +1,16 @@ +-----BEGIN CERTIFICATE----- +MIIClTCCAf6gAwIBAgIJAKGU95wKR8pTMA0GCSqGSIb3DQEBBQUAMHAxCzAJBgNV +BAYTAlhZMRcwFQYDVQQHDA5DYXN0bGUgQW50aHJheDEjMCEGA1UECgwaUHl0aG9u +IFNvZnR3YXJlIEZvdW5kYXRpb24xIzAhBgNVBAMMGnNlbGYtc2lnbmVkLnB5dGhv +bnRlc3QubmV0MB4XDTE0MTEwMjE4MDkyOVoXDTI0MTAzMDE4MDkyOVowcDELMAkG +A1UEBhMCWFkxFzAVBgNVBAcMDkNhc3RsZSBBbnRocmF4MSMwIQYDVQQKDBpQeXRo +b24gU29mdHdhcmUgRm91bmRhdGlvbjEjMCEGA1UEAwwac2VsZi1zaWduZWQucHl0 +aG9udGVzdC5uZXQwgZ8wDQYJKoZIhvcNAQEBBQADgY0AMIGJAoGBANDXQXW9tjyZ +Xt0Iv2tLL1+jinr4wGg36ioLDLFkMf+2Y1GL0v0BnKYG4N1OKlAU15LXGeGer8vm +Sv/yIvmdrELvhAbbo3w4a9TMYQA4XkIVLdvu3mvNOAet+8PMJxn26dbDhG809ALv +EHY57lQsBS3G59RZyBPVqAqmImWNJnVzAgMBAAGjNzA1MCUGA1UdEQQeMByCGnNl +bGYtc2lnbmVkLnB5dGhvbnRlc3QubmV0MAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcN +AQEFBQADgYEAIuzAhgMouJpNdf3URCHIineyoSt6WK/9+eyUcjlKOrDoXNZaD72h +TXMeKYoWvJyVcSLKL8ckPtDobgP2OTt0UkyAaj0n+ZHaqq1lH2yVfGUA1ILJv515 +C8BqbvVZuqm3i7ygmw3bqE/lYMgOrYtXXnqOrz6nvsE6Yc9V9rFflOM= +-----END CERTIFICATE----- diff --git a/lib-python/2.7/test/https_svn_python_org_root.pem b/lib-python/2.7/test/https_svn_python_org_root.pem deleted file mode 100644 --- a/lib-python/2.7/test/https_svn_python_org_root.pem +++ /dev/null @@ -1,41 +0,0 @@ ------BEGIN CERTIFICATE----- -MIIHPTCCBSWgAwIBAgIBADANBgkqhkiG9w0BAQQFADB5MRAwDgYDVQQKEwdSb290 -IENBMR4wHAYDVQQLExVodHRwOi8vd3d3LmNhY2VydC5vcmcxIjAgBgNVBAMTGUNB -IENlcnQgU2lnbmluZyBBdXRob3JpdHkxITAfBgkqhkiG9w0BCQEWEnN1cHBvcnRA -Y2FjZXJ0Lm9yZzAeFw0wMzAzMzAxMjI5NDlaFw0zMzAzMjkxMjI5NDlaMHkxEDAO -BgNVBAoTB1Jvb3QgQ0ExHjAcBgNVBAsTFWh0dHA6Ly93d3cuY2FjZXJ0Lm9yZzEi -MCAGA1UEAxMZQ0EgQ2VydCBTaWduaW5nIEF1dGhvcml0eTEhMB8GCSqGSIb3DQEJ -ARYSc3VwcG9ydEBjYWNlcnQub3JnMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIIC -CgKCAgEAziLA4kZ97DYoB1CW8qAzQIxL8TtmPzHlawI229Z89vGIj053NgVBlfkJ 
-8BLPRoZzYLdufujAWGSuzbCtRRcMY/pnCujW0r8+55jE8Ez64AO7NV1sId6eINm6 -zWYyN3L69wj1x81YyY7nDl7qPv4coRQKFWyGhFtkZip6qUtTefWIonvuLwphK42y -fk1WpRPs6tqSnqxEQR5YYGUFZvjARL3LlPdCfgv3ZWiYUQXw8wWRBB0bF4LsyFe7 -w2t6iPGwcswlWyCR7BYCEo8y6RcYSNDHBS4CMEK4JZwFaz+qOqfrU0j36NK2B5jc -G8Y0f3/JHIJ6BVgrCFvzOKKrF11myZjXnhCLotLddJr3cQxyYN/Nb5gznZY0dj4k -epKwDpUeb+agRThHqtdB7Uq3EvbXG4OKDy7YCbZZ16oE/9KTfWgu3YtLq1i6L43q -laegw1SJpfvbi1EinbLDvhG+LJGGi5Z4rSDTii8aP8bQUWWHIbEZAWV/RRyH9XzQ -QUxPKZgh/TMfdQwEUfoZd9vUFBzugcMd9Zi3aQaRIt0AUMyBMawSB3s42mhb5ivU -fslfrejrckzzAeVLIL+aplfKkQABi6F1ITe1Yw1nPkZPcCBnzsXWWdsC4PDSy826 -YreQQejdIOQpvGQpQsgi3Hia/0PsmBsJUUtaWsJx8cTLc6nloQsCAwEAAaOCAc4w -ggHKMB0GA1UdDgQWBBQWtTIb1Mfz4OaO873SsDrusjkY0TCBowYDVR0jBIGbMIGY -gBQWtTIb1Mfz4OaO873SsDrusjkY0aF9pHsweTEQMA4GA1UEChMHUm9vdCBDQTEe -MBwGA1UECxMVaHR0cDovL3d3dy5jYWNlcnQub3JnMSIwIAYDVQQDExlDQSBDZXJ0 -IFNpZ25pbmcgQXV0aG9yaXR5MSEwHwYJKoZIhvcNAQkBFhJzdXBwb3J0QGNhY2Vy -dC5vcmeCAQAwDwYDVR0TAQH/BAUwAwEB/zAyBgNVHR8EKzApMCegJaAjhiFodHRw -czovL3d3dy5jYWNlcnQub3JnL3Jldm9rZS5jcmwwMAYJYIZIAYb4QgEEBCMWIWh0 -dHBzOi8vd3d3LmNhY2VydC5vcmcvcmV2b2tlLmNybDA0BglghkgBhvhCAQgEJxYl -aHR0cDovL3d3dy5jYWNlcnQub3JnL2luZGV4LnBocD9pZD0xMDBWBglghkgBhvhC -AQ0ESRZHVG8gZ2V0IHlvdXIgb3duIGNlcnRpZmljYXRlIGZvciBGUkVFIGhlYWQg -b3ZlciB0byBodHRwOi8vd3d3LmNhY2VydC5vcmcwDQYJKoZIhvcNAQEEBQADggIB -ACjH7pyCArpcgBLKNQodgW+JapnM8mgPf6fhjViVPr3yBsOQWqy1YPaZQwGjiHCc -nWKdpIevZ1gNMDY75q1I08t0AoZxPuIrA2jxNGJARjtT6ij0rPtmlVOKTV39O9lg -18p5aTuxZZKmxoGCXJzN600BiqXfEVWqFcofN8CCmHBh22p8lqOOLlQ+TyGpkO/c -gr/c6EWtTZBzCDyUZbAEmXZ/4rzCahWqlwQ3JNgelE5tDlG+1sSPypZt90Pf6DBl -Jzt7u0NDY8RD97LsaMzhGY4i+5jhe1o+ATc7iwiwovOVThrLm82asduycPAtStvY -sONvRUgzEv/+PDIqVPfE94rwiCPCR/5kenHA0R6mY7AHfqQv0wGP3J8rtsYIqQ+T -SCX8Ev2fQtzzxD72V7DX3WnRBnc0CkvSyqD/HMaMyRa+xMwyN2hzXwj7UfdJUzYF -CpUCTPJ5GhD22Dp1nPMd8aINcGeGG7MW9S/lpOt5hvk9C8JzC6WZrG/8Z7jlLwum -GCSNe9FINSkYQKyTYOGWhlC0elnYjyELn8+CkcY7v2vcB5G5l1YjqrZslMZIBjzk -zk6q5PYvCdxTby78dOs6Y5nCpqyJvKeyRKANihDjbPIky/qbn3BHLt4Ui9SyIAmW 
-omTxJBzcoTWcFbLUvFUufQb1nA5V9FrWk9p2rSVzTMVD ------END CERTIFICATE----- diff --git a/lib-python/2.7/test/selfsigned_pythontestdotnet.pem b/lib-python/2.7/test/selfsigned_pythontestdotnet.pem --- a/lib-python/2.7/test/selfsigned_pythontestdotnet.pem +++ b/lib-python/2.7/test/selfsigned_pythontestdotnet.pem @@ -1,5 +1,5 @@ -----BEGIN CERTIFICATE----- -MIIChzCCAfCgAwIBAgIJAKGU95wKR8pSMA0GCSqGSIb3DQEBBQUAMHAxCzAJBgNV +MIIClTCCAf6gAwIBAgIJAKGU95wKR8pTMA0GCSqGSIb3DQEBBQUAMHAxCzAJBgNV BAYTAlhZMRcwFQYDVQQHDA5DYXN0bGUgQW50aHJheDEjMCEGA1UECgwaUHl0aG9u IFNvZnR3YXJlIEZvdW5kYXRpb24xIzAhBgNVBAMMGnNlbGYtc2lnbmVkLnB5dGhv bnRlc3QubmV0MB4XDTE0MTEwMjE4MDkyOVoXDTI0MTAzMDE4MDkyOVowcDELMAkG @@ -8,9 +8,9 @@ aG9udGVzdC5uZXQwgZ8wDQYJKoZIhvcNAQEBBQADgY0AMIGJAoGBANDXQXW9tjyZ Xt0Iv2tLL1+jinr4wGg36ioLDLFkMf+2Y1GL0v0BnKYG4N1OKlAU15LXGeGer8vm Sv/yIvmdrELvhAbbo3w4a9TMYQA4XkIVLdvu3mvNOAet+8PMJxn26dbDhG809ALv -EHY57lQsBS3G59RZyBPVqAqmImWNJnVzAgMBAAGjKTAnMCUGA1UdEQQeMByCGnNl -bGYtc2lnbmVkLnB5dGhvbnRlc3QubmV0MA0GCSqGSIb3DQEBBQUAA4GBAIOXmdtM -eG9qzP9TiXW/Gc/zI4cBfdCpC+Y4gOfC9bQUC7hefix4iO3+iZjgy3X/FaRxUUoV -HKiXcXIaWqTSUWp45cSh0MbwZXudp6JIAptzdAhvvCrPKeC9i9GvxsPD4LtDAL97 -vSaxQBezA7hdxZd90/EeyMgVZgAnTCnvAWX9 +EHY57lQsBS3G59RZyBPVqAqmImWNJnVzAgMBAAGjNzA1MCUGA1UdEQQeMByCGnNl +bGYtc2lnbmVkLnB5dGhvbnRlc3QubmV0MAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcN +AQEFBQADgYEAIuzAhgMouJpNdf3URCHIineyoSt6WK/9+eyUcjlKOrDoXNZaD72h +TXMeKYoWvJyVcSLKL8ckPtDobgP2OTt0UkyAaj0n+ZHaqq1lH2yVfGUA1ILJv515 +C8BqbvVZuqm3i7ygmw3bqE/lYMgOrYtXXnqOrz6nvsE6Yc9V9rFflOM= -----END CERTIFICATE----- diff --git a/lib-python/2.7/test/test_ssl.py b/lib-python/2.7/test/test_ssl.py --- a/lib-python/2.7/test/test_ssl.py +++ b/lib-python/2.7/test/test_ssl.py @@ -57,7 +57,8 @@ SIGNED_CERTFILE2 = data_file("keycert4.pem") SIGNING_CA = data_file("pycacert.pem") -SVN_PYTHON_ORG_ROOT_CERT = data_file("https_svn_python_org_root.pem") +REMOTE_HOST = "self-signed.pythontest.net" +REMOTE_ROOT_CERT = data_file("selfsigned_pythontestdotnet.pem") EMPTYCERT = 
data_file("nullcert.pem") BADCERT = data_file("badcert.pem") @@ -244,7 +245,7 @@ self.assertEqual(p['subjectAltName'], san) def test_DER_to_PEM(self): - with open(SVN_PYTHON_ORG_ROOT_CERT, 'r') as f: + with open(CAFILE_CACERT, 'r') as f: pem = f.read() d1 = ssl.PEM_cert_to_DER_cert(pem) p2 = ssl.DER_cert_to_PEM_cert(d1) @@ -792,7 +793,7 @@ # Mismatching key and cert ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1) with self.assertRaisesRegexp(ssl.SSLError, "key values mismatch"): - ctx.load_cert_chain(SVN_PYTHON_ORG_ROOT_CERT, ONLYKEY) + ctx.load_cert_chain(CAFILE_CACERT, ONLYKEY) # Password protected key and cert ctx.load_cert_chain(CERTFILE_PROTECTED, password=KEY_PASSWORD) ctx.load_cert_chain(CERTFILE_PROTECTED, password=KEY_PASSWORD.encode()) @@ -1013,7 +1014,7 @@ ctx.load_verify_locations(CERTFILE) self.assertEqual(ctx.cert_store_stats(), {'x509_ca': 0, 'crl': 0, 'x509': 1}) - ctx.load_verify_locations(SVN_PYTHON_ORG_ROOT_CERT) + ctx.load_verify_locations(CAFILE_CACERT) self.assertEqual(ctx.cert_store_stats(), {'x509_ca': 1, 'crl': 0, 'x509': 2}) @@ -1023,8 +1024,8 @@ # CERTFILE is not flagged as X509v3 Basic Constraints: CA:TRUE ctx.load_verify_locations(CERTFILE) self.assertEqual(ctx.get_ca_certs(), []) - # but SVN_PYTHON_ORG_ROOT_CERT is a CA cert - ctx.load_verify_locations(SVN_PYTHON_ORG_ROOT_CERT) + # but CAFILE_CACERT is a CA cert + ctx.load_verify_locations(CAFILE_CACERT) self.assertEqual(ctx.get_ca_certs(), [{'issuer': ((('organizationName', 'Root CA'),), (('organizationalUnitName', 'http://www.cacert.org'),), @@ -1040,7 +1041,7 @@ (('emailAddress', 'support at cacert.org'),)), 'version': 3}]) - with open(SVN_PYTHON_ORG_ROOT_CERT) as f: + with open(CAFILE_CACERT) as f: pem = f.read() der = ssl.PEM_cert_to_DER_cert(pem) self.assertEqual(ctx.get_ca_certs(True), [der]) @@ -1215,11 +1216,11 @@ class NetworkedTests(unittest.TestCase): def test_connect(self): - with support.transient_internet("svn.python.org"): + with support.transient_internet(REMOTE_HOST): s = 
ssl.wrap_socket(socket.socket(socket.AF_INET), cert_reqs=ssl.CERT_NONE) try: - s.connect(("svn.python.org", 443)) + s.connect((REMOTE_HOST, 443)) self.assertEqual({}, s.getpeercert()) finally: s.close() @@ -1228,27 +1229,27 @@ s = ssl.wrap_socket(socket.socket(socket.AF_INET), cert_reqs=ssl.CERT_REQUIRED) self.assertRaisesRegexp(ssl.SSLError, "certificate verify failed", - s.connect, ("svn.python.org", 443)) + s.connect, (REMOTE_HOST, 443)) s.close() # this should succeed because we specify the root cert s = ssl.wrap_socket(socket.socket(socket.AF_INET), cert_reqs=ssl.CERT_REQUIRED, - ca_certs=SVN_PYTHON_ORG_ROOT_CERT) + ca_certs=REMOTE_ROOT_CERT) try: - s.connect(("svn.python.org", 443)) + s.connect((REMOTE_HOST, 443)) self.assertTrue(s.getpeercert()) finally: s.close() def test_connect_ex(self): # Issue #11326: check connect_ex() implementation - with support.transient_internet("svn.python.org"): + with support.transient_internet(REMOTE_HOST): s = ssl.wrap_socket(socket.socket(socket.AF_INET), cert_reqs=ssl.CERT_REQUIRED, - ca_certs=SVN_PYTHON_ORG_ROOT_CERT) + ca_certs=REMOTE_ROOT_CERT) try: - self.assertEqual(0, s.connect_ex(("svn.python.org", 443))) + self.assertEqual(0, s.connect_ex((REMOTE_HOST, 443))) self.assertTrue(s.getpeercert()) finally: s.close() @@ -1256,14 +1257,14 @@ def test_non_blocking_connect_ex(self): # Issue #11326: non-blocking connect_ex() should allow handshake # to proceed after the socket gets ready. 
- with support.transient_internet("svn.python.org"): + with support.transient_internet(REMOTE_HOST): s = ssl.wrap_socket(socket.socket(socket.AF_INET), cert_reqs=ssl.CERT_REQUIRED, - ca_certs=SVN_PYTHON_ORG_ROOT_CERT, + ca_certs=REMOTE_ROOT_CERT, do_handshake_on_connect=False) try: s.setblocking(False) - rc = s.connect_ex(('svn.python.org', 443)) + rc = s.connect_ex((REMOTE_HOST, 443)) # EWOULDBLOCK under Windows, EINPROGRESS elsewhere self.assertIn(rc, (0, errno.EINPROGRESS, errno.EWOULDBLOCK)) # Wait for connect to finish @@ -1285,58 +1286,62 @@ def test_timeout_connect_ex(self): # Issue #12065: on a timeout, connect_ex() should return the original # errno (mimicking the behaviour of non-SSL sockets). - with support.transient_internet("svn.python.org"): + with support.transient_internet(REMOTE_HOST): s = ssl.wrap_socket(socket.socket(socket.AF_INET), cert_reqs=ssl.CERT_REQUIRED, - ca_certs=SVN_PYTHON_ORG_ROOT_CERT, + ca_certs=REMOTE_ROOT_CERT, do_handshake_on_connect=False) try: s.settimeout(0.0000001) - rc = s.connect_ex(('svn.python.org', 443)) + rc = s.connect_ex((REMOTE_HOST, 443)) if rc == 0: - self.skipTest("svn.python.org responded too quickly") + self.skipTest("REMOTE_HOST responded too quickly") self.assertIn(rc, (errno.EAGAIN, errno.EWOULDBLOCK)) finally: s.close() def test_connect_ex_error(self): - with support.transient_internet("svn.python.org"): + with support.transient_internet(REMOTE_HOST): s = ssl.wrap_socket(socket.socket(socket.AF_INET), cert_reqs=ssl.CERT_REQUIRED, - ca_certs=SVN_PYTHON_ORG_ROOT_CERT) + ca_certs=REMOTE_ROOT_CERT) try: - rc = s.connect_ex(("svn.python.org", 444)) + rc = s.connect_ex((REMOTE_HOST, 444)) # Issue #19919: Windows machines or VMs hosted on Windows # machines sometimes return EWOULDBLOCK. 
- self.assertIn(rc, (errno.ECONNREFUSED, errno.EWOULDBLOCK)) + errors = ( + errno.ECONNREFUSED, errno.EHOSTUNREACH, errno.ETIMEDOUT, + errno.EWOULDBLOCK, + ) + self.assertIn(rc, errors) finally: s.close() def test_connect_with_context(self): - with support.transient_internet("svn.python.org"): + with support.transient_internet(REMOTE_HOST): # Same as test_connect, but with a separately created context ctx = ssl.SSLContext(ssl.PROTOCOL_SSLv23) s = ctx.wrap_socket(socket.socket(socket.AF_INET)) - s.connect(("svn.python.org", 443)) + s.connect((REMOTE_HOST, 443)) try: self.assertEqual({}, s.getpeercert()) finally: s.close() # Same with a server hostname s = ctx.wrap_socket(socket.socket(socket.AF_INET), - server_hostname="svn.python.org") - s.connect(("svn.python.org", 443)) + server_hostname=REMOTE_HOST) + s.connect((REMOTE_HOST, 443)) s.close() # This should fail because we have no verification certs ctx.verify_mode = ssl.CERT_REQUIRED s = ctx.wrap_socket(socket.socket(socket.AF_INET)) self.assertRaisesRegexp(ssl.SSLError, "certificate verify failed", - s.connect, ("svn.python.org", 443)) + s.connect, (REMOTE_HOST, 443)) s.close() # This should succeed because we specify the root cert - ctx.load_verify_locations(SVN_PYTHON_ORG_ROOT_CERT) + ctx.load_verify_locations(REMOTE_ROOT_CERT) s = ctx.wrap_socket(socket.socket(socket.AF_INET)) - s.connect(("svn.python.org", 443)) + s.connect((REMOTE_HOST, 443)) try: cert = s.getpeercert() self.assertTrue(cert) @@ -1349,12 +1354,12 @@ # OpenSSL 0.9.8n and 1.0.0, as a result the capath directory must # contain both versions of each certificate (same content, different # filename) for this test to be portable across OpenSSL releases. 
- with support.transient_internet("svn.python.org"): + with support.transient_internet(REMOTE_HOST): ctx = ssl.SSLContext(ssl.PROTOCOL_SSLv23) ctx.verify_mode = ssl.CERT_REQUIRED ctx.load_verify_locations(capath=CAPATH) s = ctx.wrap_socket(socket.socket(socket.AF_INET)) - s.connect(("svn.python.org", 443)) + s.connect((REMOTE_HOST, 443)) try: cert = s.getpeercert() self.assertTrue(cert) @@ -1365,7 +1370,7 @@ ctx.verify_mode = ssl.CERT_REQUIRED ctx.load_verify_locations(capath=BYTES_CAPATH) s = ctx.wrap_socket(socket.socket(socket.AF_INET)) - s.connect(("svn.python.org", 443)) + s.connect((REMOTE_HOST, 443)) try: cert = s.getpeercert() self.assertTrue(cert) @@ -1373,15 +1378,15 @@ s.close() def test_connect_cadata(self): - with open(CAFILE_CACERT) as f: + with open(REMOTE_ROOT_CERT) as f: pem = f.read().decode('ascii') der = ssl.PEM_cert_to_DER_cert(pem) - with support.transient_internet("svn.python.org"): + with support.transient_internet(REMOTE_HOST): ctx = ssl.SSLContext(ssl.PROTOCOL_SSLv23) ctx.verify_mode = ssl.CERT_REQUIRED ctx.load_verify_locations(cadata=pem) with closing(ctx.wrap_socket(socket.socket(socket.AF_INET))) as s: - s.connect(("svn.python.org", 443)) + s.connect((REMOTE_HOST, 443)) cert = s.getpeercert() self.assertTrue(cert) @@ -1390,7 +1395,7 @@ ctx.verify_mode = ssl.CERT_REQUIRED ctx.load_verify_locations(cadata=der) with closing(ctx.wrap_socket(socket.socket(socket.AF_INET))) as s: - s.connect(("svn.python.org", 443)) + s.connect((REMOTE_HOST, 443)) cert = s.getpeercert() self.assertTrue(cert) @@ -1399,9 +1404,9 @@ # Issue #5238: creating a file-like object with makefile() shouldn't # delay closing the underlying "real socket" (here tested with its # file descriptor, hence skipping the test under Windows). 
- with support.transient_internet("svn.python.org"): + with support.transient_internet(REMOTE_HOST): ss = ssl.wrap_socket(socket.socket(socket.AF_INET)) - ss.connect(("svn.python.org", 443)) + ss.connect((REMOTE_HOST, 443)) fd = ss.fileno() f = ss.makefile() f.close() @@ -1415,9 +1420,9 @@ self.assertEqual(e.exception.errno, errno.EBADF) def test_non_blocking_handshake(self): - with support.transient_internet("svn.python.org"): + with support.transient_internet(REMOTE_HOST): s = socket.socket(socket.AF_INET) - s.connect(("svn.python.org", 443)) + s.connect((REMOTE_HOST, 443)) s.setblocking(False) s = ssl.wrap_socket(s, cert_reqs=ssl.CERT_NONE, @@ -1460,12 +1465,12 @@ if support.verbose: sys.stdout.write("\nVerified certificate for %s:%s is\n%s\n" % (host, port ,pem)) - _test_get_server_certificate('svn.python.org', 443, SVN_PYTHON_ORG_ROOT_CERT) + _test_get_server_certificate(REMOTE_HOST, 443, REMOTE_ROOT_CERT) if support.IPV6_ENABLED: _test_get_server_certificate('ipv6.google.com', 443) def test_ciphers(self): - remote = ("svn.python.org", 443) + remote = (REMOTE_HOST, 443) with support.transient_internet(remote[0]): with closing(ssl.wrap_socket(socket.socket(socket.AF_INET), cert_reqs=ssl.CERT_NONE, ciphers="ALL")) as s: @@ -1510,13 +1515,13 @@ def test_get_ca_certs_capath(self): # capath certs are loaded on request - with support.transient_internet("svn.python.org"): + with support.transient_internet(REMOTE_HOST): ctx = ssl.SSLContext(ssl.PROTOCOL_SSLv23) ctx.verify_mode = ssl.CERT_REQUIRED ctx.load_verify_locations(capath=CAPATH) self.assertEqual(ctx.get_ca_certs(), []) s = ctx.wrap_socket(socket.socket(socket.AF_INET)) - s.connect(("svn.python.org", 443)) + s.connect((REMOTE_HOST, 443)) try: cert = s.getpeercert() self.assertTrue(cert) @@ -1527,12 +1532,12 @@ @needs_sni def test_context_setget(self): # Check that the context of a connected socket can be replaced. 
- with support.transient_internet("svn.python.org"): + with support.transient_internet(REMOTE_HOST): ctx1 = ssl.SSLContext(ssl.PROTOCOL_TLSv1) ctx2 = ssl.SSLContext(ssl.PROTOCOL_SSLv23) s = socket.socket(socket.AF_INET) with closing(ctx1.wrap_socket(s)) as ss: - ss.connect(("svn.python.org", 443)) + ss.connect((REMOTE_HOST, 443)) self.assertIs(ss.context, ctx1) self.assertIs(ss._sslobj.context, ctx1) ss.context = ctx2 @@ -3026,7 +3031,7 @@ pass for filename in [ - CERTFILE, SVN_PYTHON_ORG_ROOT_CERT, BYTES_CERTFILE, + CERTFILE, REMOTE_ROOT_CERT, BYTES_CERTFILE, ONLYCERT, ONLYKEY, BYTES_ONLYCERT, BYTES_ONLYKEY, SIGNED_CERTFILE, SIGNED_CERTFILE2, SIGNING_CA, BADCERT, BADKEY, EMPTYCERT]: diff --git a/lib_pypy/cffi.egg-info/PKG-INFO b/lib_pypy/cffi.egg-info/PKG-INFO --- a/lib_pypy/cffi.egg-info/PKG-INFO +++ b/lib_pypy/cffi.egg-info/PKG-INFO @@ -1,6 +1,6 @@ Metadata-Version: 1.1 Name: cffi -Version: 1.4.2 +Version: 1.5.0 Summary: Foreign Function Interface for Python calling C code. Home-page: http://cffi.readthedocs.org Author: Armin Rigo, Maciej Fijalkowski diff --git a/lib_pypy/cffi/__init__.py b/lib_pypy/cffi/__init__.py --- a/lib_pypy/cffi/__init__.py +++ b/lib_pypy/cffi/__init__.py @@ -4,8 +4,8 @@ from .api import FFI, CDefError, FFIError from .ffiplatform import VerificationError, VerificationMissing -__version__ = "1.4.2" -__version_info__ = (1, 4, 2) +__version__ = "1.5.0" +__version_info__ = (1, 5, 0) # The verifier module file names are based on the CRC32 of a string that # contains the following version number. 
It may be older than __version__ diff --git a/lib_pypy/cffi/_cffi_include.h b/lib_pypy/cffi/_cffi_include.h --- a/lib_pypy/cffi/_cffi_include.h +++ b/lib_pypy/cffi/_cffi_include.h @@ -146,8 +146,9 @@ ((Py_ssize_t(*)(CTypeDescrObject *, PyObject *, char **))_cffi_exports[23]) #define _cffi_convert_array_from_object \ ((int(*)(char *, CTypeDescrObject *, PyObject *))_cffi_exports[24]) +#define _CFFI_CPIDX 25 #define _cffi_call_python \ - ((void(*)(struct _cffi_externpy_s *, char *))_cffi_exports[25]) + ((void(*)(struct _cffi_externpy_s *, char *))_cffi_exports[_CFFI_CPIDX]) #define _CFFI_NUM_EXPORTS 26 typedef struct _ctypedescr CTypeDescrObject; @@ -206,7 +207,8 @@ /********** end CPython-specific section **********/ #else _CFFI_UNUSED_FN -static void (*_cffi_call_python)(struct _cffi_externpy_s *, char *); +static void (*_cffi_call_python_org)(struct _cffi_externpy_s *, char *); +# define _cffi_call_python _cffi_call_python_org #endif diff --git a/lib_pypy/cffi/api.py b/lib_pypy/cffi/api.py --- a/lib_pypy/cffi/api.py +++ b/lib_pypy/cffi/api.py @@ -74,6 +74,7 @@ self._windows_unicode = None self._init_once_cache = {} self._cdef_version = None + self._embedding = None if hasattr(backend, 'set_ffi'): backend.set_ffi(self) for name in backend.__dict__: @@ -101,13 +102,21 @@ If 'packed' is specified as True, all structs declared inside this cdef are packed, i.e. laid out without any field alignment at all. 
""" + self._cdef(csource, override=override, packed=packed) + + def embedding_api(self, csource, packed=False): + self._cdef(csource, packed=packed, dllexport=True) + if self._embedding is None: + self._embedding = '' + + def _cdef(self, csource, override=False, **options): if not isinstance(csource, str): # unicode, on Python 2 if not isinstance(csource, basestring): raise TypeError("cdef() argument must be a string") csource = csource.encode('ascii') with self._lock: self._cdef_version = object() - self._parser.parse(csource, override=override, packed=packed) + self._parser.parse(csource, override=override, **options) self._cdefsources.append(csource) if override: for cache in self._function_caches: @@ -533,6 +542,31 @@ ('_UNICODE', '1')] kwds['define_macros'] = defmacros + def _apply_embedding_fix(self, kwds): + # must include an argument like "-lpython2.7" for the compiler + if '__pypy__' in sys.builtin_module_names: + if hasattr(sys, 'prefix'): + import os + libdir = os.path.join(sys.prefix, 'bin') + dirs = kwds.setdefault('library_dirs', []) + if libdir not in dirs: + dirs.append(libdir) + pythonlib = "pypy-c" + else: + if sys.platform == "win32": + template = "python%d%d" + if sys.flags.debug: + template = template + '_d' + else: + template = "python%d.%d" + pythonlib = (template % + (sys.hexversion >> 24, (sys.hexversion >> 16) & 0xff)) + if hasattr(sys, 'abiflags'): + pythonlib += sys.abiflags + libraries = kwds.setdefault('libraries', []) + if pythonlib not in libraries: + libraries.append(pythonlib) + def set_source(self, module_name, source, source_extension='.c', **kwds): if hasattr(self, '_assigned_source'): raise ValueError("set_source() cannot be called several times " @@ -592,14 +626,23 @@ recompile(self, module_name, source, c_file=filename, call_c_compiler=False, **kwds) - def compile(self, tmpdir='.', verbose=0): + def compile(self, tmpdir='.', verbose=0, target=None): + """The 'target' argument gives the final file name of the + compiled DLL. 
Use '*' to force distutils' choice, suitable for + regular CPython C API modules. Use a file name ending in '.*' + to ask for the system's default extension for dynamic libraries + (.so/.dll). + + The default is '*' when building a non-embedded C API extension, + and (module_name + '.*') when building an embedded library. + """ from .recompiler import recompile # if not hasattr(self, '_assigned_source'): raise ValueError("set_source() must be called before compile()") module_name, source, source_extension, kwds = self._assigned_source return recompile(self, module_name, source, tmpdir=tmpdir, - source_extension=source_extension, + target=target, source_extension=source_extension, compiler_verbose=verbose, **kwds) def init_once(self, func, tag): @@ -626,6 +669,32 @@ self._init_once_cache[tag] = (True, result) return result + def embedding_init_code(self, pysource): + if self._embedding: + raise ValueError("embedding_init_code() can only be called once") + # fix 'pysource' before it gets dumped into the C file: + # - remove empty lines at the beginning, so it starts at "line 1" + # - dedent, if all non-empty lines are indented + # - check for SyntaxErrors + import re + match = re.match(r'\s*\n', pysource) + if match: + pysource = pysource[match.end():] + lines = pysource.splitlines() or [''] + prefix = re.match(r'\s*', lines[0]).group() + for i in range(1, len(lines)): + line = lines[i] + if line.rstrip(): + while not line.startswith(prefix): + prefix = prefix[:-1] + i = len(prefix) + lines = [line[i:]+'\n' for line in lines] + pysource = ''.join(lines) + # + compile(pysource, "cffi_init", "exec") + # + self._embedding = pysource + def _load_backend_lib(backend, name, flags): if name is None: diff --git a/lib_pypy/cffi/cparser.py b/lib_pypy/cffi/cparser.py --- a/lib_pypy/cffi/cparser.py +++ b/lib_pypy/cffi/cparser.py @@ -220,8 +220,7 @@ self._included_declarations = set() self._anonymous_counter = 0 self._structnode2type = weakref.WeakKeyDictionary() - self._override 
= False - self._packed = False + self._options = None self._int_constants = {} self._recomplete = [] self._uses_new_feature = None @@ -281,16 +280,15 @@ msg = 'parse error\n%s' % (msg,) raise api.CDefError(msg) - def parse(self, csource, override=False, packed=False): - prev_override = self._override - prev_packed = self._packed + def parse(self, csource, override=False, packed=False, dllexport=False): + prev_options = self._options try: - self._override = override - self._packed = packed + self._options = {'override': override, + 'packed': packed, + 'dllexport': dllexport} self._internal_parse(csource) finally: - self._override = prev_override - self._packed = prev_packed + self._options = prev_options def _internal_parse(self, csource): ast, macros, csource = self._parse(csource) @@ -376,10 +374,13 @@ def _declare_function(self, tp, quals, decl): tp = self._get_type_pointer(tp, quals) - if self._inside_extern_python: - self._declare('extern_python ' + decl.name, tp) + if self._options['dllexport']: + tag = 'dllexport_python ' + elif self._inside_extern_python: + tag = 'extern_python ' else: - self._declare('function ' + decl.name, tp) + tag = 'function ' + self._declare(tag + decl.name, tp) def _parse_decl(self, decl): node = decl.type @@ -449,7 +450,7 @@ prevobj, prevquals = self._declarations[name] if prevobj is obj and prevquals == quals: return - if not self._override: + if not self._options['override']: raise api.FFIError( "multiple declarations of %s (for interactive usage, " "try cdef(xx, override=True))" % (name,)) @@ -728,7 +729,7 @@ if isinstance(tp, model.StructType) and tp.partial: raise NotImplementedError("%s: using both bitfields and '...;'" % (tp,)) - tp.packed = self._packed + tp.packed = self._options['packed'] if tp.completed: # must be re-completed: it is not opaque any more tp.completed = 0 self._recomplete.append(tp) diff --git a/lib_pypy/cffi/ffiplatform.py b/lib_pypy/cffi/ffiplatform.py --- a/lib_pypy/cffi/ffiplatform.py +++ 
b/lib_pypy/cffi/ffiplatform.py @@ -21,12 +21,14 @@ allsources.append(os.path.normpath(src)) return Extension(name=modname, sources=allsources, **kwds) -def compile(tmpdir, ext, compiler_verbose=0): +def compile(tmpdir, ext, compiler_verbose=0, target_extension=None, + embedding=False): """Compile a C extension module using distutils.""" saved_environ = os.environ.copy() try: - outputfilename = _build(tmpdir, ext, compiler_verbose) + outputfilename = _build(tmpdir, ext, compiler_verbose, + target_extension, embedding) outputfilename = os.path.abspath(outputfilename) finally: # workaround for a distutils bugs where some env vars can @@ -36,7 +38,32 @@ os.environ[key] = value return outputfilename -def _build(tmpdir, ext, compiler_verbose=0): +def _save_val(name): + import distutils.sysconfig + config_vars = distutils.sysconfig.get_config_vars() + return config_vars.get(name, Ellipsis) + +def _restore_val(name, value): + import distutils.sysconfig + config_vars = distutils.sysconfig.get_config_vars() + config_vars[name] = value + if value is Ellipsis: + del config_vars[name] + +def _win32_hack_for_embedding(): + from distutils.msvc9compiler import MSVCCompiler + if not hasattr(MSVCCompiler, '_remove_visual_c_ref_CFFI_BAK'): + MSVCCompiler._remove_visual_c_ref_CFFI_BAK = \ + MSVCCompiler._remove_visual_c_ref + MSVCCompiler._remove_visual_c_ref = lambda self,manifest_file: manifest_file + +def _win32_unhack_for_embedding(): + from distutils.msvc9compiler import MSVCCompiler + MSVCCompiler._remove_visual_c_ref = \ + MSVCCompiler._remove_visual_c_ref_CFFI_BAK + +def _build(tmpdir, ext, compiler_verbose=0, target_extension=None, + embedding=False): # XXX compact but horrible :-( from distutils.core import Distribution import distutils.errors, distutils.log @@ -49,18 +76,29 @@ options['build_temp'] = ('ffiplatform', tmpdir) # try: + if sys.platform == 'win32' and embedding: + _win32_hack_for_embedding() old_level = distutils.log.set_threshold(0) or 0 + old_SO = 
_save_val('SO') + old_EXT_SUFFIX = _save_val('EXT_SUFFIX') try: + if target_extension is not None: + _restore_val('SO', target_extension) + _restore_val('EXT_SUFFIX', target_extension) distutils.log.set_verbosity(compiler_verbose) dist.run_command('build_ext') + cmd_obj = dist.get_command_obj('build_ext') + [soname] = cmd_obj.get_outputs() finally: distutils.log.set_threshold(old_level) + _restore_val('SO', old_SO) + _restore_val('EXT_SUFFIX', old_EXT_SUFFIX) + if sys.platform == 'win32' and embedding: + _win32_unhack_for_embedding() except (distutils.errors.CompileError, distutils.errors.LinkError) as e: raise VerificationError('%s: %s' % (e.__class__.__name__, e)) # - cmd_obj = dist.get_command_obj('build_ext') - [soname] = cmd_obj.get_outputs() return soname try: diff --git a/lib_pypy/cffi/recompiler.py b/lib_pypy/cffi/recompiler.py --- a/lib_pypy/cffi/recompiler.py +++ b/lib_pypy/cffi/recompiler.py @@ -3,6 +3,7 @@ from .cffi_opcode import * VERSION = "0x2601" +VERSION_EMBEDDED = "0x2701" class GlobalExpr: @@ -281,6 +282,29 @@ lines[i:i+1] = self._rel_readlines('parse_c_type.h') prnt(''.join(lines)) # + # if we have ffi._embedding != None, we give it here as a macro + # and include an extra file + base_module_name = self.module_name.split('.')[-1] + if self.ffi._embedding is not None: + prnt('#define _CFFI_MODULE_NAME "%s"' % (self.module_name,)) + prnt('#define _CFFI_PYTHON_STARTUP_CODE %s' % + (self._string_literal(self.ffi._embedding),)) + prnt('#ifdef PYPY_VERSION') + prnt('# define _CFFI_PYTHON_STARTUP_FUNC _cffi_pypyinit_%s' % ( + base_module_name,)) + prnt('#elif PY_MAJOR_VERSION >= 3') + prnt('# define _CFFI_PYTHON_STARTUP_FUNC PyInit_%s' % ( + base_module_name,)) + prnt('#else') + prnt('# define _CFFI_PYTHON_STARTUP_FUNC init%s' % ( + base_module_name,)) + prnt('#endif') + lines = self._rel_readlines('_embedding.h') + prnt(''.join(lines)) + version = VERSION_EMBEDDED + else: + version = VERSION + # # then paste the C source given by the user, verbatim. 
prnt('/************************************************************/') prnt() @@ -365,17 +389,16 @@ prnt() # # the init function - base_module_name = self.module_name.split('.')[-1] prnt('#ifdef PYPY_VERSION') prnt('PyMODINIT_FUNC') prnt('_cffi_pypyinit_%s(const void *p[])' % (base_module_name,)) prnt('{') if self._num_externpy: prnt(' if (((intptr_t)p[0]) >= 0x0A03) {') - prnt(' _cffi_call_python = ' + prnt(' _cffi_call_python_org = ' '(void(*)(struct _cffi_externpy_s *, char *))p[1];') prnt(' }') - prnt(' p[0] = (const void *)%s;' % VERSION) + prnt(' p[0] = (const void *)%s;' % version) prnt(' p[1] = &_cffi_type_context;') prnt('}') # on Windows, distutils insists on putting init_cffi_xyz in @@ -394,14 +417,14 @@ prnt('PyInit_%s(void)' % (base_module_name,)) prnt('{') prnt(' return _cffi_init("%s", %s, &_cffi_type_context);' % ( - self.module_name, VERSION)) + self.module_name, version)) prnt('}') prnt('#else') prnt('PyMODINIT_FUNC') prnt('init%s(void)' % (base_module_name,)) prnt('{') prnt(' _cffi_init("%s", %s, &_cffi_type_context);' % ( - self.module_name, VERSION)) + self.module_name, version)) prnt('}') prnt('#endif') @@ -1123,7 +1146,10 @@ assert isinstance(tp, model.FunctionPtrType) self._do_collect_type(tp) - def _generate_cpy_extern_python_decl(self, tp, name): + def _generate_cpy_dllexport_python_collecttype(self, tp, name): + self._generate_cpy_extern_python_collecttype(tp, name) + + def _generate_cpy_extern_python_decl(self, tp, name, dllexport=False): prnt = self._prnt if isinstance(tp.result, model.VoidType): size_of_result = '0' @@ -1156,7 +1182,11 @@ size_of_a = 'sizeof(%s) > %d ? 
sizeof(%s) : %d' % ( tp.result.get_c_name(''), size_of_a, tp.result.get_c_name(''), size_of_a) - prnt('static %s' % tp.result.get_c_name(name_and_arguments)) + if dllexport: + tag = 'CFFI_DLLEXPORT' + else: + tag = 'static' + prnt('%s %s' % (tag, tp.result.get_c_name(name_and_arguments))) prnt('{') prnt(' char a[%s];' % size_of_a) prnt(' char *p = a;') @@ -1174,6 +1204,9 @@ prnt() self._num_externpy += 1 + def _generate_cpy_dllexport_python_decl(self, tp, name): + self._generate_cpy_extern_python_decl(tp, name, dllexport=True) + def _generate_cpy_extern_python_ctx(self, tp, name): if self.target_is_python: raise ffiplatform.VerificationError( @@ -1185,6 +1218,21 @@ self._lsts["global"].append( GlobalExpr(name, '&_cffi_externpy__%s' % name, type_op, name)) + def _generate_cpy_dllexport_python_ctx(self, tp, name): + self._generate_cpy_extern_python_ctx(tp, name) + + def _string_literal(self, s): + def _char_repr(c): + # escape with a '\' the characters '\', '"' or (for trigraphs) '?' + if c in '\\"?': return '\\' + c + if ' ' <= c < '\x7F': return c + if c == '\n': return '\\n' + return '\\%03o' % ord(c) + lines = [] + for line in s.splitlines(True): + lines.append('"%s"' % ''.join([_char_repr(c) for c in line])) + return ' \\\n'.join(lines) + # ---------- # emitting the opcodes for individual types @@ -1311,12 +1359,15 @@ def recompile(ffi, module_name, preamble, tmpdir='.', call_c_compiler=True, c_file=None, source_extension='.c', extradir=None, - compiler_verbose=1, **kwds): + compiler_verbose=1, target=None, **kwds): if not isinstance(module_name, str): module_name = module_name.encode('ascii') if ffi._windows_unicode: ffi._apply_windows_unicode(kwds) if preamble is not None: + embedding = (ffi._embedding is not None) + if embedding: + ffi._apply_embedding_fix(kwds) if c_file is None: c_file, parts = _modname_to_file(tmpdir, module_name, source_extension) @@ -1325,13 +1376,40 @@ ext_c_file = os.path.join(*parts) else: ext_c_file = c_file - ext = 
ffiplatform.get_extension(ext_c_file, module_name, **kwds) + # + if target is None: + if embedding: + target = '%s.*' % module_name + else: + target = '*' + if target == '*': + target_module_name = module_name + target_extension = None # use default + else: + if target.endswith('.*'): + target = target[:-2] + if sys.platform == 'win32': + target += '.dll' + else: + target += '.so' + # split along the first '.' (not the last one, otherwise the + # preceeding dots are interpreted as splitting package names) + index = target.find('.') + if index < 0: + raise ValueError("target argument %r should be a file name " + "containing a '.'" % (target,)) + target_module_name = target[:index] + target_extension = target[index:] + # + ext = ffiplatform.get_extension(ext_c_file, target_module_name, **kwds) updated = make_c_source(ffi, module_name, preamble, c_file) if call_c_compiler: cwd = os.getcwd() try: os.chdir(tmpdir) - outputfilename = ffiplatform.compile('.', ext, compiler_verbose) + outputfilename = ffiplatform.compile('.', ext, compiler_verbose, + target_extension, + embedding=embedding) finally: os.chdir(cwd) return outputfilename diff --git a/pypy/doc/embedding.rst b/pypy/doc/embedding.rst --- a/pypy/doc/embedding.rst +++ b/pypy/doc/embedding.rst @@ -10,6 +10,15 @@ with a ``libpypy-c.so`` or ``pypy-c.dll`` file. This is the default in recent versions of PyPy. +.. note:: + + The interface described in this page is kept for backward compatibility. + From PyPy 4.1, it is recommended to use instead CFFI's `native embedding + support,`__ which gives a simpler approach that works on CPython as well + as PyPy. + +.. __: http://cffi.readthedocs.org/en/latest/embedding.html + The resulting shared library exports very few functions, however they are enough to accomplish everything you need, provided you follow a few principles. 
The API is: diff --git a/pypy/doc/getting-started-dev.rst b/pypy/doc/getting-started-dev.rst --- a/pypy/doc/getting-started-dev.rst +++ b/pypy/doc/getting-started-dev.rst @@ -19,7 +19,9 @@ * Clone this new repo (i.e. the fork) to your local machine with the command ``hg clone ssh://hg at bitbucket.org/yourname/pypy``. It is a very slow - operation but only ever needs to be done once. If you already cloned + operation but only ever needs to be done once. See also + http://pypy.org/download.html#building-from-source . + If you already cloned ``https://bitbucket.org/pypy/pypy`` before, even if some time ago, then you can reuse the same clone by editing the file ``.hg/hgrc`` in your clone to contain the line ``default = diff --git a/pypy/doc/how-to-contribute.rst b/pypy/doc/how-to-contribute.rst --- a/pypy/doc/how-to-contribute.rst +++ b/pypy/doc/how-to-contribute.rst @@ -67,8 +67,8 @@ **module** directory contains extension modules written in RPython * **rpython compiler** that resides in ``rpython/annotator`` and - ``rpython/rtyper`` directories. Consult :doc:`introduction to RPython ` for - further reading + ``rpython/rtyper`` directories. Consult `Getting Started with RPython`_ + for further reading * **JIT generator** lives in ``rpython/jit`` directory. optimizations live in ``rpython/jit/metainterp/optimizeopt``, the main JIT in @@ -80,3 +80,14 @@ The rest of directories serve specific niche goal and are unlikely a good entry point. + + +More documentation +------------------ + +* `Getting Started Developing With PyPy`_ + +* `Getting Started with RPython`_ + +.. _`Getting Started Developing With PyPy`: getting-started-dev.html +.. _`Getting started with RPython`: http://rpython.readthedocs.org/en/latest/getting-started.html diff --git a/pypy/doc/whatsnew-head.rst b/pypy/doc/whatsnew-head.rst --- a/pypy/doc/whatsnew-head.rst +++ b/pypy/doc/whatsnew-head.rst @@ -110,3 +110,16 @@ short-running Python callbacks. 
(CFFI on CPython has a hack to achieve the same result.) This can also be seen as a bug fix: previously, thread-local objects would be reset between two such calls. + +.. branch: globals-quasiimmut + +Optimize global lookups. + +.. branch: cffi-static-callback-embedding + +Updated to CFFI 1.5, which supports a new way to do embedding. +Deprecates http://pypy.readthedocs.org/en/latest/embedding.html. + +.. branch: fix-cpython-ssl-tests-2.7 + +Fix SSL tests by importing cpython's patch diff --git a/pypy/goal/targetpypystandalone.py b/pypy/goal/targetpypystandalone.py --- a/pypy/goal/targetpypystandalone.py +++ b/pypy/goal/targetpypystandalone.py @@ -84,13 +84,6 @@ from rpython.rlib.entrypoint import entrypoint_highlevel from rpython.rtyper.lltypesystem import rffi, lltype - w_pathsetter = space.appexec([], """(): - def f(path): - import sys - sys.path[:] = path - return f - """) - @entrypoint_highlevel('main', [rffi.CCHARP, rffi.INT], c_name='pypy_setup_home') def pypy_setup_home(ll_home, verbose): @@ -109,7 +102,10 @@ " not found in '%s' or in any parent directory" % home1) return rffi.cast(rffi.INT, 1) space.startup() - space.call_function(w_pathsetter, w_path) + space.appexec([w_path], """(path): + import sys + sys.path[:] = path + """) # import site try: space.setattr(space.getbuiltinmodule('sys'), @@ -149,6 +145,9 @@ return os_thread.setup_threads(space) os_thread.bootstrapper.acquire(space, None, None) + # XXX this doesn't really work. 
Don't use os.fork(), and + # if your embedder program uses fork(), don't use any PyPy + # code in the fork rthread.gc_thread_start() os_thread.bootstrapper.nbthreads += 1 os_thread.bootstrapper.release() diff --git a/pypy/module/__builtin__/test/test_classobj.py b/pypy/module/__builtin__/test/test_classobj.py --- a/pypy/module/__builtin__/test/test_classobj.py +++ b/pypy/module/__builtin__/test/test_classobj.py @@ -1084,7 +1084,7 @@ def is_strdict(space, w_class): from pypy.objspace.std.dictmultiobject import BytesDictStrategy w_d = w_class.getdict(space) - return space.wrap(isinstance(w_d.strategy, BytesDictStrategy)) + return space.wrap(isinstance(w_d.get_strategy(), BytesDictStrategy)) cls.w_is_strdict = cls.space.wrap(gateway.interp2app(is_strdict)) diff --git a/pypy/module/_cffi_backend/__init__.py b/pypy/module/_cffi_backend/__init__.py --- a/pypy/module/_cffi_backend/__init__.py +++ b/pypy/module/_cffi_backend/__init__.py @@ -1,8 +1,9 @@ import sys from pypy.interpreter.mixedmodule import MixedModule -from rpython.rlib import rdynload, clibffi +from rpython.rlib import rdynload, clibffi, entrypoint +from rpython.rtyper.lltypesystem import rffi -VERSION = "1.4.2" +VERSION = "1.5.0" FFI_DEFAULT_ABI = clibffi.FFI_DEFAULT_ABI try: @@ -65,6 +66,10 @@ if has_stdcall: interpleveldefs['FFI_STDCALL'] = 'space.wrap(%d)' % FFI_STDCALL + def startup(self, space): + from pypy.module._cffi_backend import embedding + embedding.glob.space = space + def get_dict_rtld_constants(): found = {} @@ -78,3 +83,11 @@ for _name, _value in get_dict_rtld_constants().items(): Module.interpleveldefs[_name] = 'space.wrap(%d)' % _value + + +# write this entrypoint() here, to make sure it is registered early enough + at entrypoint.entrypoint_highlevel('main', [rffi.INT, rffi.VOIDP], + c_name='pypy_init_embedded_cffi_module') +def pypy_init_embedded_cffi_module(version, init_struct): + from pypy.module._cffi_backend import embedding + return embedding.pypy_init_embedded_cffi_module(version, 
init_struct) diff --git a/pypy/module/_cffi_backend/cffi1_module.py b/pypy/module/_cffi_backend/cffi1_module.py --- a/pypy/module/_cffi_backend/cffi1_module.py +++ b/pypy/module/_cffi_backend/cffi1_module.py @@ -9,18 +9,18 @@ VERSION_MIN = 0x2601 -VERSION_MAX = 0x26FF +VERSION_MAX = 0x27FF VERSION_EXPORT = 0x0A03 -initfunctype = lltype.Ptr(lltype.FuncType([rffi.VOIDPP], lltype.Void)) +INITFUNCPTR = lltype.Ptr(lltype.FuncType([rffi.VOIDPP], lltype.Void)) def load_cffi1_module(space, name, path, initptr): # This is called from pypy.module.cpyext.api.load_extension_module() from pypy.module._cffi_backend.call_python import get_ll_cffi_call_python - initfunc = rffi.cast(initfunctype, initptr) + initfunc = rffi.cast(INITFUNCPTR, initptr) with lltype.scoped_alloc(rffi.VOIDPP.TO, 16, zero=True) as p: p[0] = rffi.cast(rffi.VOIDP, VERSION_EXPORT) p[1] = rffi.cast(rffi.VOIDP, get_ll_cffi_call_python()) @@ -41,7 +41,8 @@ w_name = space.wrap(name) module = Module(space, w_name) - module.setdictvalue(space, '__file__', space.wrap(path)) + if path is not None: + module.setdictvalue(space, '__file__', space.wrap(path)) module.setdictvalue(space, 'ffi', space.wrap(ffi)) module.setdictvalue(space, 'lib', space.wrap(lib)) w_modules_dict = space.sys.get('modules') diff --git a/pypy/module/_cffi_backend/embedding.py b/pypy/module/_cffi_backend/embedding.py new file mode 100644 --- /dev/null +++ b/pypy/module/_cffi_backend/embedding.py @@ -0,0 +1,146 @@ +import os +from rpython.rtyper.lltypesystem import lltype, rffi +from rpython.translator.tool.cbuild import ExternalCompilationInfo + +from pypy.interpreter.error import OperationError, oefmt + +# ____________________________________________________________ + + +EMBED_VERSION_MIN = 0xB011 +EMBED_VERSION_MAX = 0xB0FF + +STDERR = 2 +INITSTRUCTPTR = lltype.Ptr(lltype.Struct('CFFI_INIT', + ('name', rffi.CCHARP), + ('func', rffi.VOIDP), + ('code', rffi.CCHARP))) + +def load_embedded_cffi_module(space, version, init_struct): + from 
pypy.module._cffi_backend.cffi1_module import load_cffi1_module + declare_c_function() # translation-time hint only: + # declare _cffi_carefully_make_gil() + # + version = rffi.cast(lltype.Signed, version) + if not (EMBED_VERSION_MIN <= version <= EMBED_VERSION_MAX): + raise oefmt(space.w_ImportError, + "cffi embedded module has got unknown version tag %s", + hex(version)) + # + if space.config.objspace.usemodules.thread: + from pypy.module.thread import os_thread + os_thread.setup_threads(space) + # + name = rffi.charp2str(init_struct.name) + load_cffi1_module(space, name, None, init_struct.func) + code = rffi.charp2str(init_struct.code) + compiler = space.createcompiler() + pycode = compiler.compile(code, "" % name, 'exec', 0) + w_globals = space.newdict(module=True) + space.setitem_str(w_globals, "__builtins__", space.wrap(space.builtin)) + pycode.exec_code(space, w_globals, w_globals) + + +class Global: + pass +glob = Global() + +def pypy_init_embedded_cffi_module(version, init_struct): + # called from __init__.py + name = "?" + try: + init_struct = rffi.cast(INITSTRUCTPTR, init_struct) + name = rffi.charp2str(init_struct.name) + # + space = glob.space + must_leave = False + try: + must_leave = space.threadlocals.try_enter_thread(space) + load_embedded_cffi_module(space, version, init_struct) + res = 0 + except OperationError, operr: + operr.write_unraisable(space, "initialization of '%s'" % name, + with_traceback=True) + space.appexec([], r"""(): + import sys + sys.stderr.write('pypy version: %s.%s.%s\n' % + sys.pypy_version_info[:3]) + sys.stderr.write('sys.path: %r\n' % (sys.path,)) + """) + res = -1 + if must_leave: + space.threadlocals.leave_thread(space) + except Exception, e: + # oups! last-level attempt to recover. 
+ try: + os.write(STDERR, "From initialization of '") + os.write(STDERR, name) + os.write(STDERR, "':\n") + os.write(STDERR, str(e)) + os.write(STDERR, "\n") + except: + pass + res = -1 + return rffi.cast(rffi.INT, res) + +# ____________________________________________________________ + + +eci = ExternalCompilationInfo(separate_module_sources=[ +r""" +/* XXX Windows missing */ +#include +#include +#include + +RPY_EXPORTED void rpython_startup_code(void); +RPY_EXPORTED int pypy_setup_home(char *, int); + +static unsigned char _cffi_ready = 0; +static const char *volatile _cffi_module_name; + +static void _cffi_init_error(const char *msg, const char *extra) +{ + fprintf(stderr, + "\nPyPy initialization failure when loading module '%s':\n%s%s\n", + _cffi_module_name, msg, extra); +} + +static void _cffi_init(void) +{ + Dl_info info; + char *home; + + rpython_startup_code(); + RPyGilAllocate(); + + if (dladdr(&_cffi_init, &info) == 0) { + _cffi_init_error("dladdr() failed: ", dlerror()); + return; + } + home = realpath(info.dli_fname, NULL); + if (pypy_setup_home(home, 1) != 0) { + _cffi_init_error("pypy_setup_home() failed", ""); + return; + } + _cffi_ready = 1; +} + +RPY_EXPORTED +int pypy_carefully_make_gil(const char *name) +{ + /* For CFFI: this initializes the GIL and loads the home path. + It can be called completely concurrently from unrelated threads. + It assumes that we don't hold the GIL before (if it exists), and we + don't hold it afterwards. 
+ */ + static pthread_once_t once_control = PTHREAD_ONCE_INIT; + + _cffi_module_name = name; /* not really thread-safe, but better than + nothing */ + pthread_once(&once_control, _cffi_init); + return (int)_cffi_ready - 1; +} +"""]) + +declare_c_function = rffi.llexternal_use_eci(eci) diff --git a/pypy/module/_cffi_backend/test/_backend_test_c.py b/pypy/module/_cffi_backend/test/_backend_test_c.py --- a/pypy/module/_cffi_backend/test/_backend_test_c.py +++ b/pypy/module/_cffi_backend/test/_backend_test_c.py @@ -1,7 +1,7 @@ # ____________________________________________________________ import sys -assert __version__ == "1.4.2", ("This test_c.py file is for testing a version" +assert __version__ == "1.5.0", ("This test_c.py file is for testing a version" " of cffi that differs from the one that we" " get from 'import _cffi_backend'") if sys.version_info < (3,): diff --git a/pypy/module/cpyext/Doc_stubgen_enable.patch b/pypy/module/cpyext/patches/Doc_stubgen_enable.patch rename from pypy/module/cpyext/Doc_stubgen_enable.patch rename to pypy/module/cpyext/patches/Doc_stubgen_enable.patch diff --git a/pypy/module/pypyjit/test_pypy_c/test_call.py b/pypy/module/pypyjit/test_pypy_c/test_call.py --- a/pypy/module/pypyjit/test_pypy_c/test_call.py +++ b/pypy/module/pypyjit/test_pypy_c/test_call.py @@ -83,9 +83,9 @@ p38 = call_r(ConstClass(_ll_1_threadlocalref_get__Ptr_GcStruct_objectLlT_Signed), #, descr=) p39 = getfield_gc_r(p38, descr=) i40 = force_token() - p41 = getfield_gc_pure_r(p38, descr=) + p41 = getfield_gc_r(p38, descr=) guard_value(p41, ConstPtr(ptr42), descr=...) - i42 = getfield_gc_pure_i(p38, descr=) + i42 = getfield_gc_i(p38, descr=) i43 = int_is_zero(i42) guard_true(i43, descr=...) i50 = force_token() @@ -435,21 +435,21 @@ guard_isnull(p5, descr=...) guard_nonnull_class(p12, ConstClass(W_IntObject), descr=...) guard_value(p2, ConstPtr(ptr21), descr=...) 
- i22 = getfield_gc_pure_i(p12, descr=) + i22 = getfield_gc_i(p12, descr=) i24 = int_lt(i22, 5000) guard_true(i24, descr=...) guard_not_invalidated(descr=...) p29 = call_r(ConstClass(_ll_1_threadlocalref_get__Ptr_GcStruct_objectLlT_Signed), #, descr=) p30 = getfield_gc_r(p29, descr=) p31 = force_token() - p32 = getfield_gc_pure_r(p29, descr=) + p32 = getfield_gc_r(p29, descr=) guard_value(p32, ConstPtr(ptr33), descr=...) - i34 = getfield_gc_pure_i(p29, descr=) + i34 = getfield_gc_i(p29, descr=) i35 = int_is_zero(i34) guard_true(i35, descr=...) p37 = getfield_gc_r(ConstPtr(ptr36), descr=) guard_nonnull_class(p37, ConstClass(W_IntObject), descr=...) - i39 = getfield_gc_pure_i(p37, descr=) + i39 = getfield_gc_i(p37, descr=) i40 = int_add_ovf(i22, i39) guard_no_overflow(descr=...) --TICK-- @@ -466,7 +466,7 @@ """, []) loop, = log.loops_by_id('call') assert loop.match(""" - i8 = getfield_gc_pure_i(p6, descr=) + i8 = getfield_gc_i(p6, descr=) i10 = int_lt(i8, 5000) guard_true(i10, descr=...) guard_not_invalidated? diff --git a/pypy/module/pypyjit/test_pypy_c/test_containers.py b/pypy/module/pypyjit/test_pypy_c/test_containers.py --- a/pypy/module/pypyjit/test_pypy_c/test_containers.py +++ b/pypy/module/pypyjit/test_pypy_c/test_containers.py @@ -84,7 +84,7 @@ guard_no_exception(descr=...) p20 = new_with_vtable(descr=...) call_n(ConstClass(_ll_dict_setitem_lookup_done_trampoline), p13, p10, p20, i12, i17, descr=) - setfield_gc(p20, i5, descr=) + setfield_gc(p20, i5, descr=) guard_no_exception(descr=...) i23 = call_i(ConstClass(ll_call_lookup_function), p13, p10, i12, 0, descr=) guard_no_exception(descr=...) @@ -93,7 +93,7 @@ p28 = getfield_gc_r(p13, descr=) p29 = getinteriorfield_gc_r(p28, i23, descr=>) guard_nonnull_class(p29, ConstClass(W_IntObject), descr=...) - i31 = getfield_gc_pure_i(p29, descr=) + i31 = getfield_gc_i(p29, descr=) i32 = int_sub_ovf(i31, i5) guard_no_overflow(descr=...) 
i34 = int_add_ovf(i32, 1) diff --git a/pypy/module/pypyjit/test_pypy_c/test_micronumpy.py b/pypy/module/pypyjit/test_pypy_c/test_micronumpy.py --- a/pypy/module/pypyjit/test_pypy_c/test_micronumpy.py +++ b/pypy/module/pypyjit/test_pypy_c/test_micronumpy.py @@ -101,13 +101,13 @@ loop = log._filter(log.loops[0]) assert loop.match(""" guard_class(p1, #, descr=...) - p4 = getfield_gc_pure_r(p1, descr=) + p4 = getfield_gc_r(p1, descr=) i5 = getfield_gc_i(p0, descr=) - p6 = getfield_gc_pure_r(p4, descr=) - p7 = getfield_gc_pure_r(p6, descr=) + p6 = getfield_gc_r(p4, descr=) + p7 = getfield_gc_r(p6, descr=) guard_class(p7, ConstClass(Float64), descr=...) - i9 = getfield_gc_pure_i(p4, descr=) - i10 = getfield_gc_pure_i(p6, descr=) + i9 = getfield_gc_i(p4, descr=) + i10 = getfield_gc_i(p6, descr=) i12 = int_eq(i10, 61) i14 = int_eq(i10, 60) i15 = int_or(i12, i14) @@ -117,28 +117,28 @@ i18 = float_ne(f16, 0.000000) guard_true(i18, descr=...) guard_nonnull_class(p2, ConstClass(W_BoolBox), descr=...) - i20 = getfield_gc_pure_i(p2, descr=) + i20 = getfield_gc_i(p2, descr=) i21 = int_is_true(i20) guard_false(i21, descr=...) i22 = getfield_gc_i(p0, descr=) - i23 = getfield_gc_pure_i(p1, descr=) + i23 = getfield_gc_i(p1, descr=) guard_true(i23, descr=...) i25 = int_add(i22, 1) - p26 = getfield_gc_pure_r(p0, descr=) - i27 = getfield_gc_pure_i(p1, descr=) + p26 = getfield_gc_r(p0, descr=) + i27 = getfield_gc_i(p1, descr=) i28 = int_is_true(i27) guard_true(i28, descr=...) - i29 = getfield_gc_pure_i(p6, descr=) + i29 = getfield_gc_i(p6, descr=) guard_value(i29, 8, descr=...) i30 = int_add(i5, 8) - i31 = getfield_gc_pure_i(p1, descr=) + i31 = getfield_gc_i(p1, descr=) i32 = int_ge(i25, i31) guard_false(i32, descr=...) p34 = new_with_vtable(descr=...) {{{ - setfield_gc(p34, p1, descr=) + setfield_gc(p34, p1, descr=) setfield_gc(p34, i25, descr=) - setfield_gc(p34, p26, descr=) + setfield_gc(p34, p26, descr=) setfield_gc(p34, i30, descr=) }}} jump(..., descr=...) 
diff --git a/pypy/module/pypyjit/test_pypy_c/test_min_max.py b/pypy/module/pypyjit/test_pypy_c/test_min_max.py --- a/pypy/module/pypyjit/test_pypy_c/test_min_max.py +++ b/pypy/module/pypyjit/test_pypy_c/test_min_max.py @@ -54,7 +54,7 @@ i19 = int_add(i11, 1) setfield_gc(p2, i19, descr=...) guard_nonnull_class(p18, ConstClass(W_IntObject), descr=...) - i20 = getfield_gc_pure_i(p18, descr=...) + i20 = getfield_gc_i(p18, descr=...) i21 = int_gt(i20, i14) guard_true(i21, descr=...) jump(..., descr=...) diff --git a/pypy/module/pypyjit/test_pypy_c/test_misc.py b/pypy/module/pypyjit/test_pypy_c/test_misc.py --- a/pypy/module/pypyjit/test_pypy_c/test_misc.py +++ b/pypy/module/pypyjit/test_pypy_c/test_misc.py @@ -113,7 +113,7 @@ i12 = int_is_true(i4) guard_true(i12, descr=...) guard_not_invalidated(descr=...) - i10p = getfield_gc_pure_i(p10, descr=...) + i10p = getfield_gc_i(p10, descr=...) i10 = int_mul_ovf(2, i10p) guard_no_overflow(descr=...) i14 = int_add_ovf(i13, i10) diff --git a/pypy/module/pypyjit/test_pypy_c/test_string.py b/pypy/module/pypyjit/test_pypy_c/test_string.py --- a/pypy/module/pypyjit/test_pypy_c/test_string.py +++ b/pypy/module/pypyjit/test_pypy_c/test_string.py @@ -82,7 +82,7 @@ strsetitem(p25, 0, i23) p93 = call_r(ConstClass(fromstr), p25, 16, descr=) guard_no_exception(descr=...) - i95 = getfield_gc_pure_i(p93, descr=) + i95 = getfield_gc_i(p93, descr=) i96 = int_gt(i95, #) guard_false(i96, descr=...) 
i94 = call_i(ConstClass(rbigint._toint_helper), p93, descr=) diff --git a/pypy/module/test_lib_pypy/cffi_tests/cffi0/backend_tests.py b/pypy/module/test_lib_pypy/cffi_tests/cffi0/backend_tests.py --- a/pypy/module/test_lib_pypy/cffi_tests/cffi0/backend_tests.py +++ b/pypy/module/test_lib_pypy/cffi_tests/cffi0/backend_tests.py @@ -1353,8 +1353,8 @@ ffi = FFI(backend=self.Backend()) ffi.cdef("enum foo;") from cffi import __version_info__ - if __version_info__ < (1, 5): - py.test.skip("re-enable me in version 1.5") + if __version_info__ < (1, 6): + py.test.skip("re-enable me in version 1.6") e = py.test.raises(CDefError, ffi.cast, "enum foo", -1) assert str(e.value) == ( "'enum foo' has no values explicitly defined: refusing to guess " diff --git a/pypy/module/test_lib_pypy/cffi_tests/cffi0/test_version.py b/pypy/module/test_lib_pypy/cffi_tests/cffi0/test_version.py --- a/pypy/module/test_lib_pypy/cffi_tests/cffi0/test_version.py +++ b/pypy/module/test_lib_pypy/cffi_tests/cffi0/test_version.py @@ -54,3 +54,10 @@ content = open(p).read() #v = BACKEND_VERSIONS.get(v, v) assert (('assert __version__ == "%s"' % v) in content) + +def test_embedding_h(): + parent = os.path.dirname(os.path.dirname(cffi.__file__)) + v = cffi.__version__ + p = os.path.join(parent, 'cffi', '_embedding.h') + content = open(p).read() + assert ('cffi version: %s"' % (v,)) in content diff --git a/pypy/module/test_lib_pypy/cffi_tests/cffi1/test_new_ffi_1.py b/pypy/module/test_lib_pypy/cffi_tests/cffi1/test_new_ffi_1.py --- a/pypy/module/test_lib_pypy/cffi_tests/cffi1/test_new_ffi_1.py +++ b/pypy/module/test_lib_pypy/cffi_tests/cffi1/test_new_ffi_1.py @@ -1719,3 +1719,10 @@ exec("from _test_import_from_lib.lib import *", d) assert (set(key for key in d if not key.startswith('_')) == set(['myfunc', 'MYFOO'])) + # + # also test "import *" on the module itself, which should be + # equivalent to "import ffi, lib" + d = {} + exec("from _test_import_from_lib import *", d) + assert (sorted([x for x in 
d.keys() if not x.startswith('__')]) == + ['ffi', 'lib']) diff --git a/pypy/module/test_lib_pypy/cffi_tests/cffi1/test_zdist.py b/pypy/module/test_lib_pypy/cffi_tests/cffi1/test_zdist.py --- a/pypy/module/test_lib_pypy/cffi_tests/cffi1/test_zdist.py +++ b/pypy/module/test_lib_pypy/cffi_tests/cffi1/test_zdist.py @@ -60,11 +60,16 @@ if (name.endswith('.so') or name.endswith('.pyd') or name.endswith('.dylib')): found_so = os.path.join(curdir, name) - # foo.cpython-34m.so => foo - name = name.split('.')[0] - # foo_d.so => foo (Python 2 debug builds) + # foo.so => foo + parts = name.split('.') + del parts[-1] + if len(parts) > 1 and parts[-1] != 'bar': + # foo.cpython-34m.so => foo, but foo.bar.so => foo.bar + del parts[-1] + name = '.'.join(parts) + # foo_d => foo (Python 2 debug builds) if name.endswith('_d') and hasattr(sys, 'gettotalrefcount'): - name = name.rsplit('_', 1)[0] + name = name[:-2] name += '.SO' if name.startswith('pycparser') and name.endswith('.egg'): continue # no clue why this shows up sometimes and not others @@ -209,6 +214,58 @@ 'Release': '?'}}) @chdir_to_tmp + def test_api_compile_explicit_target_1(self): + ffi = cffi.FFI() + ffi.set_source("mod_name_in_package.mymod", "/*code would be here*/") + x = ffi.compile(target="foo.bar.*") + if sys.platform != 'win32': + sofile = self.check_produced_files({ + 'foo.bar.SO': None, + 'mod_name_in_package': {'mymod.c': None, + 'mymod.o': None}}) + assert os.path.isabs(x) and os.path.samefile(x, sofile) + else: + self.check_produced_files({ + 'foo.bar.SO': None, + 'mod_name_in_package': {'mymod.c': None}, + 'Release': '?'}) + + @chdir_to_tmp + def test_api_compile_explicit_target_2(self): + ffi = cffi.FFI() + ffi.set_source("mod_name_in_package.mymod", "/*code would be here*/") + x = ffi.compile(target=os.path.join("mod_name_in_package", "foo.bar.*")) + if sys.platform != 'win32': + sofile = self.check_produced_files({ + 'mod_name_in_package': {'foo.bar.SO': None, + 'mymod.c': None, + 'mymod.o': None}}) + 
assert os.path.isabs(x) and os.path.samefile(x, sofile) + else: + self.check_produced_files({ + 'mod_name_in_package': {'foo.bar.SO': None, + 'mymod.c': None}, + 'Release': '?'}) + + @chdir_to_tmp + def test_api_compile_explicit_target_3(self): + ffi = cffi.FFI() + ffi.set_source("mod_name_in_package.mymod", "/*code would be here*/") + x = ffi.compile(target="foo.bar.baz") + if sys.platform != 'win32': + self.check_produced_files({ + 'foo.bar.baz': None, + 'mod_name_in_package': {'mymod.c': None, + 'mymod.o': None}}) + sofile = os.path.join(str(self.udir), 'foo.bar.baz') + assert os.path.isabs(x) and os.path.samefile(x, sofile) + else: + self.check_produced_files({ + 'foo.bar.baz': None, + 'mod_name_in_package': {'mymod.c': None}, + 'Release': '?'}) + + @chdir_to_tmp def test_api_distutils_extension_1(self): ffi = cffi.FFI() ffi.set_source("mod_name_in_package.mymod", "/*code would be here*/") diff --git a/pypy/module/test_lib_pypy/cffi_tests/embedding/__init__.py b/pypy/module/test_lib_pypy/cffi_tests/embedding/__init__.py new file mode 100644 --- /dev/null +++ b/pypy/module/test_lib_pypy/cffi_tests/embedding/__init__.py @@ -0,0 +1,1 @@ +# Generated by pypy/tool/import_cffi.py diff --git a/pypy/module/test_lib_pypy/cffi_tests/embedding/add1-test.c b/pypy/module/test_lib_pypy/cffi_tests/embedding/add1-test.c new file mode 100644 --- /dev/null +++ b/pypy/module/test_lib_pypy/cffi_tests/embedding/add1-test.c @@ -0,0 +1,13 @@ +#include + +extern int add1(int, int); + + +int main(void) +{ + int x, y; + x = add1(40, 2); + y = add1(100, -5); + printf("got: %d %d\n", x, y); + return 0; +} diff --git a/pypy/module/test_lib_pypy/cffi_tests/embedding/add1.py b/pypy/module/test_lib_pypy/cffi_tests/embedding/add1.py new file mode 100644 --- /dev/null +++ b/pypy/module/test_lib_pypy/cffi_tests/embedding/add1.py @@ -0,0 +1,34 @@ +# Generated by pypy/tool/import_cffi.py +import cffi + +ffi = cffi.FFI() + +ffi.embedding_api(""" + int add1(int, int); +""") + 
+ffi.embedding_init_code(r""" + import sys, time + sys.stdout.write("preparing") + for i in range(3): + sys.stdout.flush() + time.sleep(0.02) + sys.stdout.write(".") + sys.stdout.write("\n") + + from _add1_cffi import ffi + + int(ord("A")) # check that built-ins are there + + @ffi.def_extern() + def add1(x, y): + sys.stdout.write("adding %d and %d\n" % (x, y)) + sys.stdout.flush() + return x + y +""") + +ffi.set_source("_add1_cffi", """ +""") + +fn = ffi.compile(verbose=True) +print('FILENAME: %s' % (fn,)) diff --git a/pypy/module/test_lib_pypy/cffi_tests/embedding/add2-test.c b/pypy/module/test_lib_pypy/cffi_tests/embedding/add2-test.c new file mode 100644 --- /dev/null +++ b/pypy/module/test_lib_pypy/cffi_tests/embedding/add2-test.c @@ -0,0 +1,14 @@ +#include + +extern int add1(int, int); +extern int add2(int, int, int); + + +int main(void) +{ + int x, y; + x = add1(40, 2); + y = add2(100, -5, -20); + printf("got: %d %d\n", x, y); + return 0; +} diff --git a/pypy/module/test_lib_pypy/cffi_tests/embedding/add2.py b/pypy/module/test_lib_pypy/cffi_tests/embedding/add2.py new file mode 100644 --- /dev/null +++ b/pypy/module/test_lib_pypy/cffi_tests/embedding/add2.py @@ -0,0 +1,30 @@ +# Generated by pypy/tool/import_cffi.py +import cffi + +ffi = cffi.FFI() + +ffi.embedding_api(""" + int add2(int, int, int); +""") + +ffi.embedding_init_code(r""" + import sys + sys.stdout.write("prepADD2\n") + + assert '_add2_cffi' in sys.modules + m = sys.modules['_add2_cffi'] + import _add2_cffi + ffi = _add2_cffi.ffi + + @ffi.def_extern() + def add2(x, y, z): + sys.stdout.write("adding %d and %d and %d\n" % (x, y, z)) + sys.stdout.flush() + return x + y + z +""") + +ffi.set_source("_add2_cffi", """ +""") + +fn = ffi.compile(verbose=True) +print('FILENAME: %s' % (fn,)) diff --git a/pypy/module/test_lib_pypy/cffi_tests/embedding/add3.py b/pypy/module/test_lib_pypy/cffi_tests/embedding/add3.py new file mode 100644 --- /dev/null +++ 
b/pypy/module/test_lib_pypy/cffi_tests/embedding/add3.py @@ -0,0 +1,25 @@ +# Generated by pypy/tool/import_cffi.py +import cffi + +ffi = cffi.FFI() + +ffi.embedding_api(""" + int add3(int, int, int, int); +""") + +ffi.embedding_init_code(r""" + from _add3_cffi import ffi + import sys + + @ffi.def_extern() + def add3(x, y, z, t): + sys.stdout.write("adding %d, %d, %d, %d\n" % (x, y, z, t)) + sys.stdout.flush() + return x + y + z + t +""") + +ffi.set_source("_add3_cffi", """ +""") + +fn = ffi.compile(verbose=True) +print('FILENAME: %s' % (fn,)) diff --git a/pypy/module/test_lib_pypy/cffi_tests/embedding/add_recursive-test.c b/pypy/module/test_lib_pypy/cffi_tests/embedding/add_recursive-test.c new file mode 100644 --- /dev/null +++ b/pypy/module/test_lib_pypy/cffi_tests/embedding/add_recursive-test.c @@ -0,0 +1,27 @@ +#include + +#ifdef _MSC_VER +# define DLLIMPORT __declspec(dllimport) +#else +# define DLLIMPORT extern +#endif + +DLLIMPORT int add_rec(int, int); +DLLIMPORT int (*my_callback)(int); + +static int some_callback(int x) +{ + printf("some_callback(%d)\n", x); + fflush(stdout); + return add_rec(x, 9); +} + +int main(void) +{ + int x, y; + my_callback = some_callback; + x = add_rec(40, 2); + y = add_rec(100, -5); + printf("got: %d %d\n", x, y); + return 0; +} diff --git a/pypy/module/test_lib_pypy/cffi_tests/embedding/add_recursive.py b/pypy/module/test_lib_pypy/cffi_tests/embedding/add_recursive.py new file mode 100644 --- /dev/null +++ b/pypy/module/test_lib_pypy/cffi_tests/embedding/add_recursive.py @@ -0,0 +1,34 @@ +# Generated by pypy/tool/import_cffi.py +import cffi + +ffi = cffi.FFI() + +ffi.embedding_api(""" + int (*my_callback)(int); + int add_rec(int, int); +""") + +ffi.embedding_init_code(r""" + from _add_recursive_cffi import ffi, lib + import sys + print("preparing REC") + sys.stdout.flush() + + @ffi.def_extern() + def add_rec(x, y): + print("adding %d and %d" % (x, y)) + sys.stdout.flush() + return x + y + + x = lib.my_callback(400) + 
print('<<< %d >>>' % (x,)) +""") + +ffi.set_source("_add_recursive_cffi", """ +/* use CFFI_DLLEXPORT: on windows, it expands to __declspec(dllexport), + which is needed to export a variable from a dll */ +CFFI_DLLEXPORT int (*my_callback)(int); +""") + +fn = ffi.compile(verbose=True) +print('FILENAME: %s' % (fn,)) diff --git a/pypy/module/test_lib_pypy/cffi_tests/embedding/perf-test.c b/pypy/module/test_lib_pypy/cffi_tests/embedding/perf-test.c new file mode 100644 --- /dev/null +++ b/pypy/module/test_lib_pypy/cffi_tests/embedding/perf-test.c @@ -0,0 +1,86 @@ +#include +#include +#include +#ifdef PTEST_USE_THREAD +# include +# include +static sem_t done; +#endif + + +extern int add1(int, int); + + +static double time_delta(struct timeval *stop, struct timeval *start) +{ + return (stop->tv_sec - start->tv_sec) + + 1e-6 * (stop->tv_usec - start->tv_usec); +} + +static double measure(void) +{ + long long i, iterations; + int result; + struct timeval start, stop; + double elapsed; + + add1(0, 0); /* prepare off-line */ + + i = 0; + iterations = 1000; + result = gettimeofday(&start, NULL); + assert(result == 0); + + while (1) { + for (; i < iterations; i++) { + add1(((int)i) & 0xaaaaaa, ((int)i) & 0x555555); + } + result = gettimeofday(&stop, NULL); + assert(result == 0); + + elapsed = time_delta(&stop, &start); + assert(elapsed >= 0.0); + if (elapsed > 2.5) + break; + iterations = iterations * 3 / 2; + } + + return elapsed / (double)iterations; +} + +static void *start_routine(void *arg) +{ + double t = measure(); + printf("time per call: %.3g\n", t); + +#ifdef PTEST_USE_THREAD + int status = sem_post(&done); + assert(status == 0); +#endif + + return arg; +} + + +int main(void) +{ +#ifndef PTEST_USE_THREAD + start_routine(0); +#else + pthread_t th; + int i, status = sem_init(&done, 0, 0); + assert(status == 0); + + add1(0, 0); /* this is the main thread */ + + for (i = 0; i < PTEST_USE_THREAD; i++) { + status = pthread_create(&th, NULL, start_routine, NULL); + 
assert(status == 0); + } + for (i = 0; i < PTEST_USE_THREAD; i++) { + status = sem_wait(&done); + assert(status == 0); + } +#endif + return 0; +} diff --git a/pypy/module/test_lib_pypy/cffi_tests/embedding/perf.py b/pypy/module/test_lib_pypy/cffi_tests/embedding/perf.py new file mode 100644 --- /dev/null +++ b/pypy/module/test_lib_pypy/cffi_tests/embedding/perf.py @@ -0,0 +1,22 @@ +# Generated by pypy/tool/import_cffi.py +import cffi + +ffi = cffi.FFI() + +ffi.embedding_api(""" + int add1(int, int); +""") + +ffi.embedding_init_code(r""" + from _perf_cffi import ffi + + @ffi.def_extern() + def add1(x, y): + return x + y +""") + +ffi.set_source("_perf_cffi", """ +""") + +fn = ffi.compile(verbose=True) +print('FILENAME: %s' % (fn,)) From pypy.commits at gmail.com Wed Feb 24 09:48:27 2016 From: pypy.commits at gmail.com (rlamy) Date: Wed, 24 Feb 2016 06:48:27 -0800 (PST) Subject: [pypy-commit] pypy desc-specialize: Use @specialize decorators instead of direct assignments to ._annspecialcase_ Message-ID: <56cdc2bb.8ee61c0a.acdac.35ad@mx.google.com> Author: Ronan Lamy Branch: desc-specialize Changeset: r82482:fd243b77d69b Date: 2016-02-21 15:00 +0100 http://bitbucket.org/pypy/pypy/changeset/fd243b77d69b/ Log: Use @specialize decorators instead of direct assignments to ._annspecialcase_ diff --git a/pypy/interpreter/baseobjspace.py b/pypy/interpreter/baseobjspace.py --- a/pypy/interpreter/baseobjspace.py +++ b/pypy/interpreter/baseobjspace.py @@ -356,12 +356,12 @@ class BufferInterfaceNotFound(Exception): pass + at specialize.memo() def wrappable_class_name(Class): try: return Class.typedef.name except AttributeError: return 'internal subclass of %s' % (Class.__name__,) -wrappable_class_name._annspecialcase_ = 'specialize:memo' class CannotHaveLock(Exception): """Raised by space.allocate_lock() if we're translating.""" @@ -391,7 +391,7 @@ self.check_signal_action = None # changed by the signal module self.user_del_action = UserDelAction(self) self._code_of_sys_exc_info = 
None - + # can be overridden to a subclass self.initialize() @@ -808,12 +808,13 @@ assert type(s) is str return self.interned_strings.get(s) is not None + @specialize.arg(1) def descr_self_interp_w(self, RequiredClass, w_obj): if not isinstance(w_obj, RequiredClass): raise DescrMismatch() return w_obj - descr_self_interp_w._annspecialcase_ = 'specialize:arg(1)' + @specialize.arg(1) def interp_w(self, RequiredClass, w_obj, can_be_None=False): """ Unwrap w_obj, checking that it is an instance of the required internal @@ -828,7 +829,6 @@ wrappable_class_name(RequiredClass), w_obj.getclass(self)) return w_obj - interp_w._annspecialcase_ = 'specialize:arg(1)' def unpackiterable(self, w_iterable, expected_length=-1): """Unpack an iterable into a real (interpreter-level) list. @@ -1245,6 +1245,7 @@ self.setitem(w_globals, w_key, self.wrap(self.builtin)) return statement.exec_code(self, w_globals, w_locals) + @specialize.arg(2) def appexec(self, posargs_w, source): """ return value from executing given source at applevel. EXPERIMENTAL. 
The source must look like @@ -1256,7 +1257,6 @@ w_func = self.fromcache(AppExecCache).getorbuild(source) args = Arguments(self, list(posargs_w)) return self.call_args(w_func, args) - appexec._annspecialcase_ = 'specialize:arg(2)' def _next_or_none(self, w_it): try: @@ -1266,6 +1266,7 @@ raise return None + @specialize.arg(3) def compare_by_iteration(self, w_iterable1, w_iterable2, op): w_it1 = self.iter(w_iterable1) w_it2 = self.iter(w_iterable2) @@ -1288,7 +1289,6 @@ if op == 'gt': return self.gt(w_x1, w_x2) if op == 'ge': return self.ge(w_x1, w_x2) assert False, "bad value for op" - compare_by_iteration._annspecialcase_ = 'specialize:arg(3)' def decode_index(self, w_index_or_slice, seqlength): """Helper for custom sequence implementations diff --git a/pypy/interpreter/error.py b/pypy/interpreter/error.py --- a/pypy/interpreter/error.py +++ b/pypy/interpreter/error.py @@ -446,6 +446,7 @@ space.wrap(msg)) return OperationError(exc, w_error) + at specialize.arg(3) def wrap_oserror2(space, e, w_filename=None, exception_name='w_OSError', w_exception_class=None): assert isinstance(e, OSError) @@ -473,8 +474,8 @@ w_error = space.call_function(exc, space.wrap(errno), space.wrap(msg)) return OperationError(exc, w_error) -wrap_oserror2._annspecialcase_ = 'specialize:arg(3)' + at specialize.arg(3) def wrap_oserror(space, e, filename=None, exception_name='w_OSError', w_exception_class=None): if filename is not None: @@ -485,7 +486,6 @@ return wrap_oserror2(space, e, None, exception_name=exception_name, w_exception_class=w_exception_class) -wrap_oserror._annspecialcase_ = 'specialize:arg(3)' def exception_from_saved_errno(space, w_type): from rpython.rlib.rposix import get_saved_errno diff --git a/pypy/interpreter/typedef.py b/pypy/interpreter/typedef.py --- a/pypy/interpreter/typedef.py +++ b/pypy/interpreter/typedef.py @@ -138,6 +138,7 @@ # / \ # 5 6 + at specialize.memo() def get_unique_interplevel_subclass(config, cls, hasdict, wants_slots, needsdel=False, 
weakrefable=False): "NOT_RPYTHON: initialization-time only" @@ -153,7 +154,6 @@ assert key not in _subclass_cache _subclass_cache[key] = subcls return subcls -get_unique_interplevel_subclass._annspecialcase_ = "specialize:memo" _subclass_cache = {} def enum_interplevel_subclasses(config, cls): diff --git a/pypy/module/_collections/interp_deque.py b/pypy/module/_collections/interp_deque.py --- a/pypy/module/_collections/interp_deque.py +++ b/pypy/module/_collections/interp_deque.py @@ -1,4 +1,5 @@ import sys +from rpython.rlib.objectmodel import specialize from pypy.interpreter import gateway from pypy.interpreter.baseobjspace import W_Root from pypy.interpreter.typedef import TypeDef, make_weakref_descr @@ -320,12 +321,12 @@ w_currently_in_repr = ec._py_repr = space.newdict() return dequerepr(space, w_currently_in_repr, space.wrap(self)) + @specialize.arg(2) def compare(self, w_other, op): space = self.space if not isinstance(w_other, W_Deque): return space.w_NotImplemented return space.compare_by_iteration(space.wrap(self), w_other, op) - compare._annspecialcase_ = 'specialize:arg(2)' def lt(self, w_other): return self.compare(w_other, 'lt') diff --git a/pypy/module/_pypyjson/targetjson.py b/pypy/module/_pypyjson/targetjson.py --- a/pypy/module/_pypyjson/targetjson.py +++ b/pypy/module/_pypyjson/targetjson.py @@ -4,6 +4,7 @@ sys.path.insert(0, str(ROOT)) import time +from rpython.rlib.objectmodel import specialize from pypy.interpreter.error import OperationError from pypy.module._pypyjson.interp_decoder import loads @@ -91,6 +92,7 @@ def wrapfloat(self, x): return W_Float(x) + @specialize.argtype(1) def wrap(self, x): if isinstance(x, int): return W_Int(x) @@ -100,7 +102,6 @@ ## assert False else: return W_Unicode(unicode(x)) - wrap._annspecialcase_ = "specialize:argtype(1)" fakespace = FakeSpace() diff --git a/pypy/module/_rawffi/interp_rawffi.py b/pypy/module/_rawffi/interp_rawffi.py --- a/pypy/module/_rawffi/interp_rawffi.py +++ 
b/pypy/module/_rawffi/interp_rawffi.py @@ -8,6 +8,7 @@ from rpython.rtyper.tool import rffi_platform from rpython.rlib.unroll import unrolling_iterable import rpython.rlib.rposix as rposix +from rpython.rlib.objectmodel import specialize _MS_WINDOWS = os.name == "nt" @@ -251,6 +252,7 @@ _ARM = rffi_platform.getdefined('__arm__', '') + at specialize.arg(2) def read_ptr(ptr, ofs, TP): T = lltype.Ptr(rffi.CArray(TP)) for c in unroll_letters_for_floats: @@ -270,8 +272,8 @@ return ptr_val else: return rffi.cast(T, ptr)[ofs] -read_ptr._annspecialcase_ = 'specialize:arg(2)' + at specialize.argtype(2) def write_ptr(ptr, ofs, value): TP = lltype.typeOf(value) T = lltype.Ptr(rffi.CArray(TP)) @@ -292,7 +294,6 @@ return else: rffi.cast(T, ptr)[ofs] = value -write_ptr._annspecialcase_ = 'specialize:argtype(2)' def segfault_exception(space, reason): w_mod = space.getbuiltinmodule("_rawffi") @@ -365,14 +366,15 @@ def getrawsize(self): raise NotImplementedError("abstract base class") + at specialize.arg(0) def unwrap_truncate_int(TP, space, w_arg): if space.isinstance_w(w_arg, space.w_int): return rffi.cast(TP, space.int_w(w_arg)) else: return rffi.cast(TP, space.bigint_w(w_arg).ulonglongmask()) -unwrap_truncate_int._annspecialcase_ = 'specialize:arg(0)' + at specialize.arg(1) def unwrap_value(space, push_func, add_arg, argdesc, letter, w_arg): w = space.wrap if letter in TYPEMAP_PTR_LETTERS: @@ -414,10 +416,10 @@ else: raise OperationError(space.w_TypeError, space.wrap("cannot directly write value")) -unwrap_value._annspecialcase_ = 'specialize:arg(1)' ll_typemap_iter = unrolling_iterable(LL_TYPEMAP.items()) + at specialize.arg(1) def wrap_value(space, func, add_arg, argdesc, letter): for c, ll_type in ll_typemap_iter: if letter == c: @@ -430,7 +432,6 @@ return space.wrap(func(add_arg, argdesc, ll_type)) raise OperationError(space.w_TypeError, space.wrap("cannot directly read value")) -wrap_value._annspecialcase_ = 'specialize:arg(1)' class W_FuncPtr(W_Root): diff --git 
a/pypy/module/_rawffi/structure.py b/pypy/module/_rawffi/structure.py --- a/pypy/module/_rawffi/structure.py +++ b/pypy/module/_rawffi/structure.py @@ -18,6 +18,7 @@ from rpython.rlib.rarithmetic import intmask, signedtype, r_uint, \ r_ulonglong from rpython.rtyper.lltypesystem import lltype, rffi +from rpython.rlib.objectmodel import specialize @@ -269,6 +270,7 @@ def NUM_BITS(x): return x >> 16 + at specialize.arg(1) def BIT_MASK(x, ll_t): if ll_t is lltype.SignedLongLong or ll_t is lltype.UnsignedLongLong: one = r_ulonglong(1) @@ -276,8 +278,8 @@ one = r_uint(1) # to avoid left shift by x == sizeof(ll_t) return (((one << (x - 1)) - 1) << 1) + 1 -BIT_MASK._annspecialcase_ = 'specialize:arg(1)' + at specialize.argtype(2) def push_field(self, num, value): ptr = rffi.ptradd(self.ll_buffer, self.shape.ll_positions[num]) TP = lltype.typeOf(value) @@ -298,8 +300,8 @@ value = rffi.cast(TP, current) break write_ptr(ptr, 0, value) -push_field._annspecialcase_ = 'specialize:argtype(2)' + at specialize.arg(2) def cast_pos(self, i, ll_t): pos = rffi.ptradd(self.ll_buffer, self.shape.ll_positions[i]) value = read_ptr(pos, 0, ll_t) @@ -322,7 +324,6 @@ value = rffi.cast(ll_t, value) break return value -cast_pos._annspecialcase_ = 'specialize:arg(2)' class W_StructureInstance(W_DataInstance): def __init__(self, space, shape, address): diff --git a/pypy/module/cppyy/test/test_zjit.py b/pypy/module/cppyy/test/test_zjit.py --- a/pypy/module/cppyy/test/test_zjit.py +++ b/pypy/module/cppyy/test/test_zjit.py @@ -124,13 +124,13 @@ assert isinstance(w_obj, FakeFloat) return w_obj.val + @specialize.arg(1) def interp_w(self, RequiredClass, w_obj, can_be_None=False): if can_be_None and w_obj is None: return None if not isinstance(w_obj, RequiredClass): raise TypeError return w_obj - interp_w._annspecialcase_ = 'specialize:arg(1)' def getarg_w(self, code, w_obj): # for retrieving buffers return FakeBuffer(w_obj) diff --git a/pypy/module/cpyext/api.py b/pypy/module/cpyext/api.py --- 
a/pypy/module/cpyext/api.py +++ b/pypy/module/cpyext/api.py @@ -1107,7 +1107,7 @@ if not use_micronumpy: return use_micronumpy # import to register api functions by side-effect - import pypy.module.cpyext.ndarrayobject + import pypy.module.cpyext.ndarrayobject global GLOBALS, SYMBOLS_C, separate_module_files GLOBALS["PyArray_Type#"]= ('PyTypeObject*', "space.gettypeobject(W_NDimArray.typedef)") SYMBOLS_C += ['PyArray_Type', '_PyArray_FILLWBYTE', '_PyArray_ZEROS'] @@ -1295,9 +1295,8 @@ miniglobals = {'__name__': __name__, # for module name propagation } exec source.compile() in miniglobals - call_external_function = miniglobals['cpy_call_external'] + call_external_function = specialize.ll(miniglobals['cpy_call_external']) call_external_function._dont_inline_ = True - call_external_function._annspecialcase_ = 'specialize:ll' call_external_function._gctransformer_hint_close_stack_ = True # don't inline, as a hack to guarantee that no GC pointer is alive # anywhere in call_external_function diff --git a/pypy/module/math/interp_math.py b/pypy/module/math/interp_math.py --- a/pypy/module/math/interp_math.py +++ b/pypy/module/math/interp_math.py @@ -2,6 +2,7 @@ import sys from rpython.rlib import rfloat +from rpython.rlib.objectmodel import specialize from pypy.interpreter.error import OperationError class State: @@ -17,6 +18,7 @@ else: return space.float_w(space.float(w_x)) + at specialize.arg(1) def math1(space, f, w_x): x = _get_double(space, w_x) try: @@ -28,8 +30,8 @@ raise OperationError(space.w_ValueError, space.wrap("math domain error")) return space.wrap(y) -math1._annspecialcase_ = 'specialize:arg(1)' + at specialize.arg(1) def math1_w(space, f, w_x): x = _get_double(space, w_x) try: @@ -41,8 +43,8 @@ raise OperationError(space.w_ValueError, space.wrap("math domain error")) return r -math1_w._annspecialcase_ = 'specialize:arg(1)' + at specialize.arg(1) def math2(space, f, w_x, w_snd): x = _get_double(space, w_x) snd = _get_double(space, w_snd) @@ -55,7 +57,6 @@ 
raise OperationError(space.w_ValueError, space.wrap("math domain error")) return space.wrap(r) -math2._annspecialcase_ = 'specialize:arg(1)' def trunc(space, w_x): """Truncate x.""" diff --git a/pypy/module/pyexpat/interp_pyexpat.py b/pypy/module/pyexpat/interp_pyexpat.py --- a/pypy/module/pyexpat/interp_pyexpat.py +++ b/pypy/module/pyexpat/interp_pyexpat.py @@ -3,6 +3,7 @@ from pypy.interpreter.gateway import interp2app, unwrap_spec, WrappedDefault from pypy.interpreter.error import OperationError, oefmt from rpython.rlib import rgc, jit +from rpython.rlib.objectmodel import specialize from rpython.rtyper.lltypesystem import rffi, lltype from rpython.rtyper.tool import rffi_platform from rpython.translator.tool.cbuild import ExternalCompilationInfo @@ -565,6 +566,7 @@ return self.w_character_data_handler or space.w_None return self.handlers[index] + @specialize.arg(2) def sethandler(self, space, name, w_handler, index, setter, handler): if name == 'CharacterDataHandler': self.flush_character_buffer(space) @@ -576,8 +578,6 @@ self.handlers[index] = w_handler setter(self.itself, handler) - sethandler._annspecialcase_ = 'specialize:arg(2)' - all_chars = ''.join(chr(i) for i in range(256)) def UnknownEncodingHandler(self, space, name, info): diff --git a/pypy/objspace/descroperation.py b/pypy/objspace/descroperation.py --- a/pypy/objspace/descroperation.py +++ b/pypy/objspace/descroperation.py @@ -9,54 +9,54 @@ from rpython.rlib.objectmodel import specialize from rpython.rlib import jit + at specialize.memo() def object_getattribute(space): "Utility that returns the app-level descriptor object.__getattribute__." w_src, w_getattribute = space.lookup_in_type_where(space.w_object, '__getattribute__') return w_getattribute -object_getattribute._annspecialcase_ = 'specialize:memo' + at specialize.memo() def object_setattr(space): "Utility that returns the app-level descriptor object.__setattr__." 
w_src, w_setattr = space.lookup_in_type_where(space.w_object, '__setattr__') return w_setattr -object_setattr._annspecialcase_ = 'specialize:memo' + at specialize.memo() def object_delattr(space): "Utility that returns the app-level descriptor object.__delattr__." w_src, w_delattr = space.lookup_in_type_where(space.w_object, '__delattr__') return w_delattr -object_delattr._annspecialcase_ = 'specialize:memo' + at specialize.memo() def object_hash(space): "Utility that returns the app-level descriptor object.__hash__." w_src, w_hash = space.lookup_in_type_where(space.w_object, '__hash__') return w_hash -object_hash._annspecialcase_ = 'specialize:memo' + at specialize.memo() def type_eq(space): "Utility that returns the app-level descriptor type.__eq__." w_src, w_eq = space.lookup_in_type_where(space.w_type, '__eq__') return w_eq -type_eq._annspecialcase_ = 'specialize:memo' + at specialize.memo() def list_iter(space): "Utility that returns the app-level descriptor list.__iter__." w_src, w_iter = space.lookup_in_type_where(space.w_list, '__iter__') return w_iter -list_iter._annspecialcase_ = 'specialize:memo' + at specialize.memo() def tuple_iter(space): "Utility that returns the app-level descriptor tuple.__iter__." 
w_src, w_iter = space.lookup_in_type_where(space.w_tuple, '__iter__') return w_iter -tuple_iter._annspecialcase_ = 'specialize:memo' def raiseattrerror(space, w_obj, name, w_descr=None): if w_descr is None: diff --git a/pypy/objspace/fake/objspace.py b/pypy/objspace/fake/objspace.py --- a/pypy/objspace/fake/objspace.py +++ b/pypy/objspace/fake/objspace.py @@ -195,6 +195,7 @@ def wrapbytes(self, x): return w_some_obj() + @specialize.argtype(1) def wrap(self, x): if not we_are_translated(): if isinstance(x, gateway.interp2app): @@ -208,7 +209,6 @@ return w_some_obj() self._wrap_not_rpython(x) return w_some_obj() - wrap._annspecialcase_ = "specialize:argtype(1)" def _wrap_not_rpython(self, x): "NOT_RPYTHON" @@ -293,10 +293,10 @@ is_root(w_complex) return 1.1, 2.2 + @specialize.arg(1) def allocate_instance(self, cls, w_subtype): is_root(w_subtype) return instantiate(cls) - allocate_instance._annspecialcase_ = "specialize:arg(1)" def decode_index(self, w_index_or_slice, seqlength): is_root(w_index_or_slice) diff --git a/pypy/objspace/fake/test/test_checkmodule.py b/pypy/objspace/fake/test/test_checkmodule.py --- a/pypy/objspace/fake/test/test_checkmodule.py +++ b/pypy/objspace/fake/test/test_checkmodule.py @@ -9,9 +9,9 @@ def make_checker(): check = [] + @specialize.memo() def see(): check.append(True) - see._annspecialcase_ = 'specialize:memo' return see, check def test_wrap_interp2app(): diff --git a/pypy/objspace/std/formatting.py b/pypy/objspace/std/formatting.py --- a/pypy/objspace/std/formatting.py +++ b/pypy/objspace/std/formatting.py @@ -2,6 +2,7 @@ import sys from rpython.rlib import jit +from rpython.rlib.objectmodel import specialize from rpython.rlib.rarithmetic import INT_MAX from rpython.rlib.rfloat import DTSF_ALT, formatd, isnan, isinf from rpython.rlib.rstring import StringBuilder, UnicodeBuilder @@ -351,6 +352,7 @@ s, ord(c), self.fmtpos - 1) raise OperationError(space.w_ValueError, space.wrap(msg)) + @specialize.argtype(1) def std_wp(self, r): length 
= len(r) if do_unicode and isinstance(r, str): @@ -376,7 +378,6 @@ if padding > 0: result.append_multiple_char(const(' '), padding) # add any remaining padding at the right - std_wp._annspecialcase_ = 'specialize:argtype(1)' def std_wp_number(self, r, prefix=''): result = self.result diff --git a/pypy/objspace/std/mapdict.py b/pypy/objspace/std/mapdict.py --- a/pypy/objspace/std/mapdict.py +++ b/pypy/objspace/std/mapdict.py @@ -2,6 +2,7 @@ from rpython.rlib import jit, objectmodel, debug, rerased from rpython.rlib.rarithmetic import intmask, r_uint +from rpython.rlib.objectmodel import specialize from pypy.interpreter.baseobjspace import W_Root from pypy.objspace.std.dictmultiobject import ( @@ -507,6 +508,7 @@ class Object(ObjectMixin, BaseMapdictObject, W_Root): pass # mainly for tests + at specialize.arg(1) def get_subclass_of_correct_size(space, cls, w_type): assert space.config.objspace.std.withmapdict map = w_type.terminator @@ -519,11 +521,11 @@ return classes[size] else: return classes[len(classes)-1] -get_subclass_of_correct_size._annspecialcase_ = "specialize:arg(1)" SUBCLASSES_MIN_FIELDS = 5 # XXX tweak these numbers SUBCLASSES_MAX_FIELDS = 5 + at specialize.memo() def memo_get_subclass_of_correct_size(space, supercls): key = space, supercls try: @@ -539,7 +541,6 @@ assert len(set(result)) == 1 _subclass_cache[key] = result return result -memo_get_subclass_of_correct_size._annspecialcase_ = "specialize:memo" _subclass_cache = {} erase_item, unerase_item = rerased.new_erasing_pair("mapdict storage item") diff --git a/pypy/objspace/std/objspace.py b/pypy/objspace/std/objspace.py --- a/pypy/objspace/std/objspace.py +++ b/pypy/objspace/std/objspace.py @@ -340,6 +340,7 @@ assert isinstance(w_starttype, W_TypeObject) return w_type.lookup_starting_at(w_starttype, name) + @specialize.arg(1) def allocate_instance(self, cls, w_subtype): """Allocate the memory needed for an instance of an internal or user-defined type, without actually __init__ializing the 
instance.""" @@ -369,7 +370,6 @@ "%N.__new__(%N): only for the type %N", w_type, w_subtype, w_type) return instance - allocate_instance._annspecialcase_ = "specialize:arg(1)" # two following functions are almost identical, but in fact they # have different return type. First one is a resizable list, second From pypy.commits at gmail.com Wed Feb 24 09:48:29 2016 From: pypy.commits at gmail.com (rlamy) Date: Wed, 24 Feb 2016 06:48:29 -0800 (PST) Subject: [pypy-commit] pypy desc-specialize: Extract normalize_args() method out of funcdesc.specialize Message-ID: <56cdc2bd.0775c20a.81e6.0f87@mx.google.com> Author: Ronan Lamy Branch: desc-specialize Changeset: r82483:5089fd09c1d1 Date: 2016-02-24 15:47 +0100 http://bitbucket.org/pypy/pypy/changeset/5089fd09c1d1/ Log: Extract normalize_args() method out of funcdesc.specialize diff --git a/rpython/annotator/description.py b/rpython/annotator/description.py --- a/rpython/annotator/description.py +++ b/rpython/annotator/description.py @@ -275,19 +275,7 @@ getattr(self.bookkeeper, "position_key", None) is not None): _, block, i = self.bookkeeper.position_key op = block.operations[i] - enforceargs = getattr(self.pyobj, '_annenforceargs_', None) - signature = getattr(self.pyobj, '_signature_', None) - if enforceargs and signature: - raise Exception("%r: signature and enforceargs cannot both be " - "used" % (self,)) - if enforceargs: - if not callable(enforceargs): - from rpython.annotator.signature import Sig - enforceargs = Sig(*enforceargs) - self.pyobj._annenforceargs_ = enforceargs - enforceargs(self, inputcells) # can modify inputcells in-place - if signature: - enforce_signature_args(self, signature[0], inputcells) # mutates inputcells + self.normalize_args(inputcells) if getattr(self.pyobj, '_annspecialcase_', '').endswith("call_location"): return self.specializer(self, inputcells, op) else: @@ -319,6 +307,27 @@ result = unionof(result, s_previous_result) return result + def normalize_args(self, inputs_s): + """ + 
Canonicalize argument annotations into the exact parameter + annotations of a specific specialized graph. + + Note: this method has no return value but mutates its argument instead. + """ + enforceargs = getattr(self.pyobj, '_annenforceargs_', None) + signature = getattr(self.pyobj, '_signature_', None) + if enforceargs and signature: + raise Exception("%r: signature and enforceargs cannot both be " + "used" % (self,)) + if enforceargs: + if not callable(enforceargs): + from rpython.annotator.signature import Sig + enforceargs = Sig(*enforceargs) + self.pyobj._annenforceargs_ = enforceargs + enforceargs(self, inputs_s) # can modify inputs_s in-place + if signature: + enforce_signature_args(self, signature[0], inputs_s) # mutates inputs_s + def get_graph(self, args, op): inputs_s = self.parse_arguments(args) return self.specialize(inputs_s, op) From pypy.commits at gmail.com Wed Feb 24 09:53:06 2016 From: pypy.commits at gmail.com (mattip) Date: Wed, 24 Feb 2016 06:53:06 -0800 (PST) Subject: [pypy-commit] extradoc extradoc: ghost-write a rough draft blog post about cpyexy-gc-support Message-ID: <56cdc3d2.c96cc20a.f363c.0e78@mx.google.com> Author: mattip Branch: extradoc Changeset: r5609:827afaef5ee9 Date: 2016-02-24 15:50 +0100 http://bitbucket.org/pypy/extradoc/changeset/827afaef5ee9/ Log: ghost-write a rough draft blog post about cpyexy-gc-support diff --git a/blog/draft/cpyext-gcsupport.rst b/blog/draft/cpyext-gcsupport.rst new file mode 100644 --- /dev/null +++ b/blog/draft/cpyext-gcsupport.rst @@ -0,0 +1,23 @@ +CAPI Support update +=================== + +I have merged a rewrite of the interaction between c-API c-level objects and +interpreter level objects. Each refcounted c-level object is now reflected in +an interpreter level object, and the garbage collector can release the object +pair only if the refcount is 0 and the interpreter level object is not longer +referenced. 
+ +The rewrite significantly simplifies our previous code, and should make using +the c-API less slow (it is still slower than using pure python though). +XXX citations needed ... + +The good news is that now PyPy can support the upstream `lxml`_ package, which is +is one of the most popular packages on PyPI (specifically version X.X.X with old +PyPy specific hacks removed). We do recommend using the `cffi lxml`_ alternative, +since it will be faster on PyPy. + +We are actively working on extending our c-API support, and hope to soon merge +a branch to support more of the c-API functions. Please try it out and let us +know how it works for you. + +Armin Rigo and the PyPy team From pypy.commits at gmail.com Wed Feb 24 11:55:40 2016 From: pypy.commits at gmail.com (Raemi) Date: Wed, 24 Feb 2016 08:55:40 -0800 (PST) Subject: [pypy-commit] pypy stmgc-c8: another merge Message-ID: <56cde08c.878e1c0a.de005.6409@mx.google.com> Author: Remi Meier Branch: stmgc-c8 Changeset: r82485:3edb1e823e07 Date: 2016-02-24 17:54 +0100 http://bitbucket.org/pypy/pypy/changeset/3edb1e823e07/ Log: another merge diff too long, truncating to 2000 out of 2914 lines diff --git a/.hgignore b/.hgignore --- a/.hgignore +++ b/.hgignore @@ -75,6 +75,7 @@ ^lib_pypy/__pycache__$ ^lib_pypy/ctypes_config_cache/_.+_cache\.py$ ^lib_pypy/ctypes_config_cache/_.+_.+_\.py$ +^lib_pypy/_libmpdec/.+.o$ ^rpython/translator/cli/query-descriptions$ ^pypy/doc/discussion/.+\.html$ ^include/.+\.h$ diff --git a/pypy/doc/faq.rst b/pypy/doc/faq.rst --- a/pypy/doc/faq.rst +++ b/pypy/doc/faq.rst @@ -54,7 +54,8 @@ It is quite common nowadays that xyz is available on PyPI_ and installable with ``pip install xyz``. The simplest solution is to `use virtualenv (as documented here)`_. Then enter (activate) the virtualenv -and type: ``pip install xyz``. +and type: ``pip install xyz``. If you don't know or don't want virtualenv, +you can also install ``pip`` globally by saying ``pypy -m ensurepip``. 
If you get errors from the C compiler, the module is a CPython C Extension module using unsupported features. `See below.`_ diff --git a/pypy/doc/whatsnew-head.rst b/pypy/doc/whatsnew-head.rst --- a/pypy/doc/whatsnew-head.rst +++ b/pypy/doc/whatsnew-head.rst @@ -133,3 +133,9 @@ `rpython/jit/metainterp/optimizeopt/pure.py`, which can result in better codegen for traces containing a large number of pure getfield operations. +.. branch: exctrans + +Try to ensure that no new functions get annotated during the 'source_c' phase. +Refactor sandboxing to operate at a higher level. + +.. branch: cpyext-bootstrap diff --git a/pypy/module/_cffi_backend/embedding.py b/pypy/module/_cffi_backend/embedding.py --- a/pypy/module/_cffi_backend/embedding.py +++ b/pypy/module/_cffi_backend/embedding.py @@ -84,11 +84,68 @@ return rffi.cast(rffi.INT, res) # ____________________________________________________________ +if os.name == 'nt': + do_startup = r''' +#include +#define WIN32_LEAN_AND_MEAN +#include +RPY_EXPORTED void rpython_startup_code(void); +RPY_EXPORTED int pypy_setup_home(char *, int); +static unsigned char _cffi_ready = 0; +static const char *volatile _cffi_module_name; -eci = ExternalCompilationInfo(separate_module_sources=[ -r""" -/* XXX Windows missing */ +static void _cffi_init_error(const char *msg, const char *extra) +{ + fprintf(stderr, + "\nPyPy initialization failure when loading module '%s':\n%s%s\n", + _cffi_module_name, msg, extra); +} + +BOOL CALLBACK _cffi_init(PINIT_ONCE InitOnce, PVOID Parameter, PVOID *lpContex) +{ + + HMODULE hModule; + TCHAR home[_MAX_PATH]; + rpython_startup_code(); + RPyGilAllocate(); + + GetModuleHandleEx(GET_MODULE_HANDLE_EX_FLAG_FROM_ADDRESS | + GET_MODULE_HANDLE_EX_FLAG_UNCHANGED_REFCOUNT, + (LPCTSTR)&_cffi_init, &hModule); + if (hModule == 0 ) { + /* TODO turn the int into a string with FormatMessage */ + + _cffi_init_error("dladdr() failed: ", ""); + return TRUE; + } + GetModuleFileName(hModule, home, _MAX_PATH); + if 
(pypy_setup_home(home, 1) != 0) { + _cffi_init_error("pypy_setup_home() failed", ""); + return TRUE; + } + _cffi_ready = 1; + fprintf(stderr, "startup succeeded, home %s\n", home); + return TRUE; +} + +RPY_EXPORTED +int pypy_carefully_make_gil(const char *name) +{ + /* For CFFI: this initializes the GIL and loads the home path. + It can be called completely concurrently from unrelated threads. + It assumes that we don't hold the GIL before (if it exists), and we + don't hold it afterwards. + */ + static INIT_ONCE s_init_once; + + _cffi_module_name = name; /* not really thread-safe, but better than + nothing */ + InitOnceExecuteOnce(&s_init_once, _cffi_init, NULL, NULL); + return (int)_cffi_ready - 1; +}''' +else: + do_startup = r""" #include #include #include @@ -141,6 +198,7 @@ pthread_once(&once_control, _cffi_init); return (int)_cffi_ready - 1; } -"""]) +""" +eci = ExternalCompilationInfo(separate_module_sources=[do_startup]) declare_c_function = rffi.llexternal_use_eci(eci) diff --git a/pypy/module/_socket/test/test_sock_app.py b/pypy/module/_socket/test/test_sock_app.py --- a/pypy/module/_socket/test/test_sock_app.py +++ b/pypy/module/_socket/test/test_sock_app.py @@ -102,7 +102,7 @@ fd = space.appexec([w_socket, space.wrap(orig_fd.fileno()), space.wrap(socket.AF_INET), space.wrap(socket.SOCK_STREAM), space.wrap(0)], - """(_socket, fd, family, type, proto): + """(_socket, fd, family, type, proto): return _socket.fromfd(fd, family, type, proto)""") assert space.unwrap(space.call_method(fd, 'fileno')) @@ -326,7 +326,7 @@ def test_ntoa_exception(self): import _socket - raises(_socket.error, _socket.inet_ntoa, "ab") + raises(_socket.error, _socket.inet_ntoa, b"ab") def test_aton_exceptions(self): import _socket @@ -418,7 +418,7 @@ # it if there is no connection. 
try: s.connect(("www.python.org", 80)) - except _socket.gaierror, ex: + except _socket.gaierror as ex: skip("GAIError - probably no connection: %s" % str(ex.args)) name = s.getpeername() # Will raise socket.error if not connected assert name[1] == 80 @@ -465,7 +465,7 @@ sizes = {socket.htonl: 32, socket.ntohl: 32, socket.htons: 16, socket.ntohs: 16} for func, size in sizes.items(): - mask = (1L<f_lineno = 48; /* Does not work with CPython */ @@ -51,6 +52,7 @@ Py_XDECREF(empty_string); Py_XDECREF(empty_tuple); Py_XDECREF(py_globals); + Py_XDECREF(py_locals); Py_XDECREF(py_code); Py_XDECREF(py_frame); return NULL; diff --git a/pypy/module/cpyext/test/test_typeobject.py b/pypy/module/cpyext/test/test_typeobject.py --- a/pypy/module/cpyext/test/test_typeobject.py +++ b/pypy/module/cpyext/test/test_typeobject.py @@ -374,6 +374,11 @@ module = self.import_extension('foo', [ ("test_type", "METH_O", ''' + /* "args->ob_type" is a strange way to get at 'type', + which should have a different tp_getattro/tp_setattro + than its tp_base, which is 'object'. + */ + if (!args->ob_type->tp_setattro) { PyErr_SetString(PyExc_ValueError, "missing tp_setattro"); @@ -382,8 +387,12 @@ if (args->ob_type->tp_setattro == args->ob_type->tp_base->tp_setattro) { - PyErr_SetString(PyExc_ValueError, "recursive tp_setattro"); - return NULL; + /* Note that unlike CPython, in PyPy 'type.tp_setattro' + is the same function as 'object.tp_setattro'. This + test used to check that it was not, but that was an + artifact of the bootstrap logic only---in the final + C sources I checked and they are indeed the same. + So we ignore this problem here. 
*/ } if (!args->ob_type->tp_getattro) { diff --git a/pypy/module/cpyext/typeobject.py b/pypy/module/cpyext/typeobject.py --- a/pypy/module/cpyext/typeobject.py +++ b/pypy/module/cpyext/typeobject.py @@ -146,7 +146,7 @@ assert len(slot_names) == 2 struct = getattr(pto, slot_names[0]) if not struct: - assert not space.config.translating + #assert not space.config.translating assert not pto.c_tp_flags & Py_TPFLAGS_HEAPTYPE if slot_names[0] == 'c_tp_as_number': STRUCT_TYPE = PyNumberMethods @@ -310,55 +310,6 @@ realize=type_realize, dealloc=type_dealloc) - # some types are difficult to create because of cycles. - # - object.ob_type = type - # - type.ob_type = type - # - tuple.ob_type = type - # - type.tp_base = object - # - tuple.tp_base = object - # - type.tp_bases is a tuple - # - object.tp_bases is a tuple - # - tuple.tp_bases is a tuple - - # insert null placeholders to please create_ref() - track_reference(space, lltype.nullptr(PyObject.TO), space.w_type) - track_reference(space, lltype.nullptr(PyObject.TO), space.w_object) - track_reference(space, lltype.nullptr(PyObject.TO), space.w_tuple) - track_reference(space, lltype.nullptr(PyObject.TO), space.w_str) - - # create the objects - py_type = create_ref(space, space.w_type) - py_object = create_ref(space, space.w_object) - py_tuple = create_ref(space, space.w_tuple) - py_str = create_ref(space, space.w_str) - # XXX py_str is not initialized here correctly, because we are - # not tracking it, it gets an empty c_ob_type from py_basestring - - # form cycles - pto_type = rffi.cast(PyTypeObjectPtr, py_type) - py_type.c_ob_type = pto_type - py_object.c_ob_type = pto_type - py_tuple.c_ob_type = pto_type - - pto_object = rffi.cast(PyTypeObjectPtr, py_object) - pto_type.c_tp_base = pto_object - pto_tuple = rffi.cast(PyTypeObjectPtr, py_tuple) - pto_tuple.c_tp_base = pto_object - - pto_type.c_tp_bases.c_ob_type = pto_tuple - pto_object.c_tp_bases.c_ob_type = pto_tuple - pto_tuple.c_tp_bases.c_ob_type = pto_tuple - - for 
typ in (py_type, py_object, py_tuple, py_str): - heaptype = rffi.cast(PyHeapTypeObject, typ) - heaptype.c_ht_name.c_ob_type = pto_type - - # Restore the mapping - track_reference(space, py_type, space.w_type, replace=True) - track_reference(space, py_object, space.w_object, replace=True) - track_reference(space, py_tuple, space.w_tuple, replace=True) - track_reference(space, py_str, space.w_str, replace=True) - @cpython_api([PyObject], lltype.Void, external=False) def subtype_dealloc(space, obj): @@ -476,6 +427,8 @@ pto.c_tp_as_sequence = heaptype.c_as_sequence pto.c_tp_as_mapping = heaptype.c_as_mapping pto.c_tp_as_buffer = heaptype.c_as_buffer + pto.c_tp_basicsize = -1 # hopefully this makes malloc bail out + pto.c_tp_itemsize = 0 return rffi.cast(PyObject, heaptype) @@ -511,8 +464,6 @@ pto.c_tp_name = PyString_AsString(space, heaptype.c_ht_name) else: pto.c_tp_name = rffi.str2charp(w_type.name) - pto.c_tp_basicsize = -1 # hopefully this makes malloc bail out - pto.c_tp_itemsize = 0 # uninitialized fields: # c_tp_print, c_tp_getattr, c_tp_setattr # XXX implement @@ -520,8 +471,11 @@ w_base = best_base(space, w_type.bases_w) pto.c_tp_base = rffi.cast(PyTypeObjectPtr, make_ref(space, w_base)) - finish_type_1(space, pto) - finish_type_2(space, pto, w_type) + if hasattr(space, '_cpyext_type_init'): + space._cpyext_type_init.append((pto, w_type)) + else: + finish_type_1(space, pto) + finish_type_2(space, pto, w_type) pto.c_tp_basicsize = rffi.sizeof(typedescr.basestruct) if pto.c_tp_base: diff --git a/pypy/objspace/std/mapdict.py b/pypy/objspace/std/mapdict.py --- a/pypy/objspace/std/mapdict.py +++ b/pypy/objspace/std/mapdict.py @@ -116,7 +116,7 @@ def _find_map_attr(self, name, index): while isinstance(self, PlainAttribute): - if name == self.name and index == self.index: + if index == self.index and name == self.name: return self self = self.back return None @@ -168,7 +168,6 @@ jit.isconstant(name) and jit.isconstant(index)) def add_attr(self, obj, name, index, 
w_value): - # grumble, jit needs this attr = self._get_new_attr(name, index) oldattr = obj._get_mapdict_map() if not jit.we_are_jitted(): @@ -305,7 +304,7 @@ new_obj._get_mapdict_map().add_attr(new_obj, self.name, self.index, w_value) def delete(self, obj, name, index): - if name == self.name and index == self.index: + if index == self.index and name == self.name: # ok, attribute is deleted if not self.ever_mutated: self.ever_mutated = True diff --git a/pypy/objspace/std/setobject.py b/pypy/objspace/std/setobject.py --- a/pypy/objspace/std/setobject.py +++ b/pypy/objspace/std/setobject.py @@ -942,7 +942,7 @@ return False if w_set.length() == 0: return True - # it's possible to have 0-lenght strategy that's not empty + # it's possible to have 0-length strategy that's not empty if w_set.strategy is w_other.strategy: return self._issubset_unwrapped(w_set, w_other) if not self.may_contain_equal_elements(w_other.strategy): diff --git a/pypy/test_all.py b/pypy/test_all.py --- a/pypy/test_all.py +++ b/pypy/test_all.py @@ -26,11 +26,10 @@ #Add toplevel repository dir to sys.path sys.path.insert(0,os.path.dirname(os.path.dirname(os.path.realpath(__file__)))) import pytest - import pytest_cov if sys.platform == 'win32': #Try to avoid opeing a dialog box if one of the tests causes a system error # We do this in runner.py, but buildbots run twisted which ruins inheritance - # in windows subprocesses. + # in windows subprocesses. import ctypes winapi = ctypes.windll.kernel32 SetErrorMode = winapi.SetErrorMode @@ -44,4 +43,4 @@ old_mode = SetErrorMode(flags) SetErrorMode(old_mode | flags) - sys.exit(pytest.main(plugins=[pytest_cov])) + sys.exit(pytest.main()) diff --git a/pytest_cov.py b/pytest_cov.py deleted file mode 100644 --- a/pytest_cov.py +++ /dev/null @@ -1,353 +0,0 @@ -"""produce code coverage reports using the 'coverage' package, including support for distributed testing. - -This plugin produces coverage reports. 
It supports centralised testing and distributed testing in -both load and each modes. It also supports coverage of subprocesses. - -All features offered by the coverage package should be available, either through pytest-cov or -through coverage's config file. - - -Installation ------------- - -The `pytest-cov`_ package may be installed with pip or easy_install:: - - pip install pytest-cov - easy_install pytest-cov - -.. _`pytest-cov`: http://pypi.python.org/pypi/pytest-cov/ - - -Uninstallation --------------- - -Uninstalling packages is supported by pip:: - - pip uninstall pytest-cov - -However easy_install does not provide an uninstall facility. - -.. IMPORTANT:: - - Ensure that you manually delete the init_cov_core.pth file in your site-packages directory. - - This file starts coverage collection of subprocesses if appropriate during site initialisation - at python startup. - - -Usage ------ - -Centralised Testing -~~~~~~~~~~~~~~~~~~~ - -Centralised testing will report on the combined coverage of the main process and all of it's -subprocesses. - -Running centralised testing:: - - py.test --cov myproj tests/ - -Shows a terminal report:: - - -------------------- coverage: platform linux2, python 2.6.4-final-0 --------------------- - Name Stmts Miss Cover - ---------------------------------------- - myproj/__init__ 2 0 100% - myproj/myproj 257 13 94% - myproj/feature4286 94 7 92% - ---------------------------------------- - TOTAL 353 20 94% - - -Distributed Testing: Load -~~~~~~~~~~~~~~~~~~~~~~~~~ - -Distributed testing with dist mode set to load will report on the combined coverage of all slaves. -The slaves may be spread out over any number of hosts and each slave may be located anywhere on the -file system. Each slave will have it's subprocesses measured. 
- -Running distributed testing with dist mode set to load:: - - py.test --cov myproj -n 2 tests/ - -Shows a terminal report:: - - -------------------- coverage: platform linux2, python 2.6.4-final-0 --------------------- - Name Stmts Miss Cover - ---------------------------------------- - myproj/__init__ 2 0 100% - myproj/myproj 257 13 94% - myproj/feature4286 94 7 92% - ---------------------------------------- - TOTAL 353 20 94% - - -Again but spread over different hosts and different directories:: - - py.test --cov myproj --dist load - --tx ssh=memedough at host1//chdir=testenv1 - --tx ssh=memedough at host2//chdir=/tmp/testenv2//python=/tmp/env1/bin/python - --rsyncdir myproj --rsyncdir tests --rsync examples - tests/ - -Shows a terminal report:: - - -------------------- coverage: platform linux2, python 2.6.4-final-0 --------------------- - Name Stmts Miss Cover - ---------------------------------------- - myproj/__init__ 2 0 100% - myproj/myproj 257 13 94% - myproj/feature4286 94 7 92% - ---------------------------------------- - TOTAL 353 20 94% - - -Distributed Testing: Each -~~~~~~~~~~~~~~~~~~~~~~~~~ - -Distributed testing with dist mode set to each will report on the combined coverage of all slaves. -Since each slave is running all tests this allows generating a combined coverage report for multiple -environments. 
- -Running distributed testing with dist mode set to each:: - - py.test --cov myproj --dist each - --tx popen//chdir=/tmp/testenv3//python=/usr/local/python27/bin/python - --tx ssh=memedough at host2//chdir=/tmp/testenv4//python=/tmp/env2/bin/python - --rsyncdir myproj --rsyncdir tests --rsync examples - tests/ - -Shows a terminal report:: - - ---------------------------------------- coverage ---------------------------------------- - platform linux2, python 2.6.5-final-0 - platform linux2, python 2.7.0-final-0 - Name Stmts Miss Cover - ---------------------------------------- - myproj/__init__ 2 0 100% - myproj/myproj 257 13 94% - myproj/feature4286 94 7 92% - ---------------------------------------- - TOTAL 353 20 94% - - -Reporting ---------- - -It is possible to generate any combination of the reports for a single test run. - -The available reports are terminal (with or without missing line numbers shown), HTML, XML and -annotated source code. - -The terminal report without line numbers (default):: - - py.test --cov-report term --cov myproj tests/ - - -------------------- coverage: platform linux2, python 2.6.4-final-0 --------------------- - Name Stmts Miss Cover - ---------------------------------------- - myproj/__init__ 2 0 100% - myproj/myproj 257 13 94% - myproj/feature4286 94 7 92% - ---------------------------------------- - TOTAL 353 20 94% - - -The terminal report with line numbers:: - - py.test --cov-report term-missing --cov myproj tests/ - - -------------------- coverage: platform linux2, python 2.6.4-final-0 --------------------- - Name Stmts Miss Cover Missing - -------------------------------------------------- - myproj/__init__ 2 0 100% - myproj/myproj 257 13 94% 24-26, 99, 149, 233-236, 297-298, 369-370 - myproj/feature4286 94 7 92% 183-188, 197 - -------------------------------------------------- - TOTAL 353 20 94% - - -The remaining three reports output to files without showing anything on the terminal (useful for -when the output is going 
to a continuous integration server):: - - py.test --cov-report html - --cov-report xml - --cov-report annotate - --cov myproj tests/ - - -Coverage Data File ------------------- - -The data file is erased at the beginning of testing to ensure clean data for each test run. - -The data file is left at the end of testing so that it is possible to use normal coverage tools to -examine it. - - -Coverage Config File --------------------- - -This plugin provides a clean minimal set of command line options that are added to pytest. For -further control of coverage use a coverage config file. - -For example if tests are contained within the directory tree being measured the tests may be -excluded if desired by using a .coveragerc file with the omit option set:: - - py.test --cov-config .coveragerc - --cov myproj - myproj/tests/ - -Where the .coveragerc file contains file globs:: - - [run] - omit = tests/* - -For full details refer to the `coverage config file`_ documentation. - -.. _`coverage config file`: http://nedbatchelder.com/code/coverage/config.html - -Note that this plugin controls some options and setting the option in the config file will have no -effect. These include specifying source to be measured (source option) and all data file handling -(data_file and parallel options). - - -Limitations ------------ - -For distributed testing the slaves must have the pytest-cov package installed. This is needed since -the plugin must be registered through setuptools / distribute for pytest to start the plugin on the -slave. - -For subprocess measurement environment variables must make it from the main process to the -subprocess. The python used by the subprocess must have pytest-cov installed. The subprocess must -do normal site initialisation so that the environment variables can be detected and coverage -started. 
- - -Acknowledgements ----------------- - -Whilst this plugin has been built fresh from the ground up it has been influenced by the work done -on pytest-coverage (Ross Lawley, James Mills, Holger Krekel) and nose-cover (Jason Pellerin) which are -other coverage plugins. - -Ned Batchelder for coverage and its ability to combine the coverage results of parallel runs. - -Holger Krekel for pytest with its distributed testing support. - -Jason Pellerin for nose. - -Michael Foord for unittest2. - -No doubt others have contributed to these tools as well. -""" - - -def pytest_addoption(parser): - """Add options to control coverage.""" - - group = parser.getgroup('coverage reporting with distributed testing support') - group.addoption('--cov', action='append', default=[], metavar='path', - dest='cov_source', - help='measure coverage for filesystem path (multi-allowed)') - group.addoption('--cov-report', action='append', default=[], metavar='type', - choices=['term', 'term-missing', 'annotate', 'html', 'xml'], - dest='cov_report', - help='type of report to generate: term, term-missing, annotate, html, xml (multi-allowed)') - group.addoption('--cov-config', action='store', default='.coveragerc', metavar='path', - dest='cov_config', - help='config file for coverage, default: .coveragerc') - - -def pytest_configure(config): - """Activate coverage plugin if appropriate.""" - - if config.getvalue('cov_source'): - config.pluginmanager.register(CovPlugin(), '_cov') - - -class CovPlugin(object): - """Use coverage package to produce code coverage reports. - - Delegates all work to a particular implementation based on whether - this test process is centralised, a distributed master or a - distributed slave. - """ - - def __init__(self): - """Creates a coverage pytest plugin. - - We read the rc file that coverage uses to get the data file - name. This is needed since we give coverage through it's API - the data file name. - """ - - # Our implementation is unknown at this time. 
- self.cov_controller = None - - def pytest_sessionstart(self, session): - """At session start determine our implementation and delegate to it.""" - - import cov_core - - cov_source = session.config.getvalue('cov_source') - cov_report = session.config.getvalue('cov_report') or ['term'] - cov_config = session.config.getvalue('cov_config') - - session_name = session.__class__.__name__ - is_master = (session.config.pluginmanager.hasplugin('dsession') or - session_name == 'DSession') - is_slave = (hasattr(session.config, 'slaveinput') or - session_name == 'SlaveSession') - nodeid = None - - if is_master: - controller_cls = cov_core.DistMaster - elif is_slave: - controller_cls = cov_core.DistSlave - nodeid = session.config.slaveinput.get('slaveid', getattr(session, 'nodeid')) - else: - controller_cls = cov_core.Central - - self.cov_controller = controller_cls(cov_source, - cov_report, - cov_config, - session.config, - nodeid) - - self.cov_controller.start() - - def pytest_configure_node(self, node): - """Delegate to our implementation.""" - - self.cov_controller.configure_node(node) - pytest_configure_node.optionalhook = True - - def pytest_testnodedown(self, node, error): - """Delegate to our implementation.""" - - self.cov_controller.testnodedown(node, error) - pytest_testnodedown.optionalhook = True - - def pytest_sessionfinish(self, session, exitstatus): - """Delegate to our implementation.""" - - self.cov_controller.finish() - - def pytest_terminal_summary(self, terminalreporter): - """Delegate to our implementation.""" - - self.cov_controller.summary(terminalreporter._tw) - - -def pytest_funcarg__cov(request): - """A pytest funcarg that provides access to the underlying coverage object.""" - - # Check with hasplugin to avoid getplugin exception in older pytest. 
- if request.config.pluginmanager.hasplugin('_cov'): - plugin = request.config.pluginmanager.getplugin('_cov') - if plugin.cov_controller: - return plugin.cov_controller.cov - return None diff --git a/rpython/annotator/builtin.py b/rpython/annotator/builtin.py --- a/rpython/annotator/builtin.py +++ b/rpython/annotator/builtin.py @@ -39,8 +39,9 @@ return s_result s_realresult = immutablevalue(realresult) if not s_result.contains(s_realresult): - raise Exception("%s%r returned %r, which is not contained in %s" % ( - func, args, realresult, s_result)) + raise AnnotatorError( + "%s%r returned %r, which is not contained in %s" % ( + func, args, realresult, s_result)) return s_realresult # ____________________________________________________________ @@ -56,14 +57,14 @@ s_start, s_stop = args[:2] s_step = args[2] else: - raise Exception("range() takes 1 to 3 arguments") + raise AnnotatorError("range() takes 1 to 3 arguments") empty = False # so far if not s_step.is_constant(): step = 0 # this case signals a variable step else: step = s_step.const if step == 0: - raise Exception("range() with step zero") + raise AnnotatorError("range() with step zero") if s_start.is_constant() and s_stop.is_constant(): try: if len(xrange(s_start.const, s_stop.const, step)) == 0: @@ -285,7 +286,8 @@ else: @analyzer_for(unicodedata.decimal) def unicodedata_decimal(s_uchr): - raise TypeError("unicodedate.decimal() calls should not happen at interp-level") + raise AnnotatorError( + "unicodedate.decimal() calls should not happen at interp-level") @analyzer_for(OrderedDict) def analyze(): @@ -299,9 +301,9 @@ @analyzer_for(weakref.ref) def weakref_ref(s_obj): if not isinstance(s_obj, SomeInstance): - raise Exception("cannot take a weakref to %r" % (s_obj,)) + raise AnnotatorError("cannot take a weakref to %r" % (s_obj,)) if s_obj.can_be_None: - raise Exception("should assert that the instance we take " + raise AnnotatorError("should assert that the instance we take " "a weakref to cannot be 
None") return SomeWeakRef(s_obj.classdef) @@ -311,3 +313,14 @@ @analyzer_for(rpython.rlib.objectmodel.free_non_gc_object) def robjmodel_free_non_gc_object(obj): pass + +#________________________________ +# pdb + +import pdb + + at analyzer_for(pdb.set_trace) +def pdb_set_trace(*args_s): + raise AnnotatorError( + "you left pdb.set_trace() in your interpreter! " + "If you want to attach a gdb instead, call rlib.debug.attach_gdb()") diff --git a/rpython/jit/backend/llsupport/test/zrpy_vmprof_test.py b/rpython/jit/backend/llsupport/test/zrpy_vmprof_test.py new file mode 100644 --- /dev/null +++ b/rpython/jit/backend/llsupport/test/zrpy_vmprof_test.py @@ -0,0 +1,86 @@ + +import os, py +from rpython.jit.backend.test.support import CCompiledMixin +from rpython.rlib.jit import JitDriver +from rpython.tool.udir import udir +from rpython.translator.translator import TranslationContext +from rpython.jit.backend.detect_cpu import getcpuclass + +class CompiledVmprofTest(CCompiledMixin): + CPUClass = getcpuclass() + + def _get_TranslationContext(self): + t = TranslationContext() + t.config.translation.gc = 'incminimark' + t.config.translation.list_comprehension_operations = True + return t + + def test_vmprof(self): + from rpython.rlib import rvmprof + + class MyCode: + _vmprof_unique_id = 0 + def __init__(self, name): + self.name = name + + def get_name(code): + return code.name + + code2 = MyCode("py:y:foo:4") + rvmprof.register_code(code2, get_name) + + try: + rvmprof.register_code_object_class(MyCode, get_name) + except rvmprof.VMProfPlatformUnsupported, e: + py.test.skip(str(e)) + + def get_unique_id(code): + return rvmprof.get_unique_id(code) + + driver = JitDriver(greens = ['code'], reds = ['i', 's', 'num'], + is_recursive=True, get_unique_id=get_unique_id) + + @rvmprof.vmprof_execute_code("xcode13", lambda code, num: code) + def main(code, num): + return main_jitted(code, num) + + def main_jitted(code, num): + s = 0 + i = 0 + while i < num: + 
driver.jit_merge_point(code=code, i=i, s=s, num=num) + s += (i << 1) + if i % 3 == 0 and code is not code2: + main(code2, 100) + i += 1 + return s + + tmpfilename = str(udir.join('test_rvmprof')) + + def f(num): + code = MyCode("py:x:foo:3") + rvmprof.register_code(code, get_name) + fd = os.open(tmpfilename, os.O_WRONLY | os.O_CREAT, 0666) + period = 0.0001 + rvmprof.enable(fd, period) + res = main(code, num) + #assert res == 499999500000 + rvmprof.disable() + os.close(fd) + return 0 + + def check_vmprof_output(): + from vmprof import read_profile + tmpfile = str(udir.join('test_rvmprof')) + stats = read_profile(tmpfile) + t = stats.get_tree() + assert t.name == 'py:x:foo:3' + assert len(t.children) == 1 # jit + + self.meta_interp(f, [1000000], inline=True) + try: + import vmprof + except ImportError: + pass + else: + check_vmprof_output() \ No newline at end of file diff --git a/rpython/jit/backend/test/test_rvmprof.py b/rpython/jit/backend/test/test_rvmprof.py new file mode 100644 --- /dev/null +++ b/rpython/jit/backend/test/test_rvmprof.py @@ -0,0 +1,49 @@ +import py +from rpython.rlib import jit +from rpython.rtyper.annlowlevel import llhelper +from rpython.rtyper.lltypesystem import lltype, rffi +from rpython.rlib.rvmprof import cintf +from rpython.jit.backend.x86.arch import WORD +from rpython.jit.codewriter.policy import JitPolicy + +class BaseRVMProfTest(object): + def test_one(self): + py.test.skip("needs thread-locals in the JIT, which is only available " + "after translation") + visited = [] + + def helper(): + stack = cintf.vmprof_tl_stack.getraw() + if stack: + # not during tracing + visited.append(stack.c_value) + else: + visited.append(0) + + llfn = llhelper(lltype.Ptr(lltype.FuncType([], lltype.Void)), helper) + + driver = jit.JitDriver(greens=[], reds='auto') + + def f(n): + i = 0 + while i < n: + driver.jit_merge_point() + i += 1 + llfn() + + class Hooks(jit.JitHookInterface): + def after_compile(self, debug_info): + self.raw_start = 
debug_info.asminfo.rawstart + + hooks = Hooks() + + null = lltype.nullptr(cintf.VMPROFSTACK) + cintf.vmprof_tl_stack.setraw(null) # make it empty + self.meta_interp(f, [10], policy=JitPolicy(hooks)) + v = set(visited) + assert 0 in v + v.remove(0) + assert len(v) == 1 + assert 0 <= list(v)[0] - hooks.raw_start <= 10*1024 + assert cintf.vmprof_tl_stack.getraw() == null + # ^^^ make sure we didn't leave anything dangling diff --git a/rpython/jit/backend/x86/arch.py b/rpython/jit/backend/x86/arch.py --- a/rpython/jit/backend/x86/arch.py +++ b/rpython/jit/backend/x86/arch.py @@ -34,7 +34,7 @@ if WORD == 4: # ebp + ebx + esi + edi + 15 extra words = 19 words - FRAME_FIXED_SIZE = 19 + FRAME_FIXED_SIZE = 19 + 4 # 4 for vmprof, XXX make more compact! PASS_ON_MY_FRAME = 15 JITFRAME_FIXED_SIZE = 6 + 8 * 2 # 6 GPR + 8 XMM * 2 WORDS/float # 'threadlocal_addr' is passed as 2nd argument on the stack, @@ -44,7 +44,7 @@ THREADLOCAL_OFS = (FRAME_FIXED_SIZE + 2) * WORD else: # rbp + rbx + r12 + r13 + r14 + r15 + threadlocal + 12 extra words = 19 - FRAME_FIXED_SIZE = 19 + FRAME_FIXED_SIZE = 19 + 4 # 4 for vmprof, XXX make more compact! 
PASS_ON_MY_FRAME = 12 JITFRAME_FIXED_SIZE = 28 # 13 GPR + 15 XMM # 'threadlocal_addr' is passed as 2nd argument in %esi, diff --git a/rpython/jit/backend/x86/assembler.py b/rpython/jit/backend/x86/assembler.py --- a/rpython/jit/backend/x86/assembler.py +++ b/rpython/jit/backend/x86/assembler.py @@ -11,7 +11,7 @@ from rpython.jit.metainterp.compile import ResumeGuardDescr from rpython.rtyper.lltypesystem import lltype, rffi, rstr, llmemory from rpython.rtyper.lltypesystem.lloperation import llop -from rpython.rtyper.annlowlevel import llhelper, cast_instance_to_gcref +from rpython.rtyper.annlowlevel import cast_instance_to_gcref from rpython.rtyper import rclass from rpython.rlib.jit import AsmInfo from rpython.jit.backend.model import CompiledLoopToken @@ -999,11 +999,56 @@ else: return FRAME_FIXED_SIZE + def _call_header_vmprof(self): + from rpython.rlib.rvmprof.rvmprof import cintf, VMPROF_JITTED_TAG + + # tloc = address of pypy_threadlocal_s + if IS_X86_32: + # Can't use esi here, its old value is not saved yet. + # But we can use eax and ecx. + self.mc.MOV_rs(edx.value, THREADLOCAL_OFS) + tloc = edx + old = ecx + else: + # The thread-local value is already in esi. + # We should avoid if possible to use ecx or edx because they + # would be used to pass arguments #3 and #4 (even though, so + # far, the assembler only receives two arguments). 
+ tloc = esi + old = r11 + # eax = address in the stack of a 3-words struct vmprof_stack_s + self.mc.LEA_rs(eax.value, (FRAME_FIXED_SIZE - 4) * WORD) + # old = current value of vmprof_tl_stack + offset = cintf.vmprof_tl_stack.getoffset() + self.mc.MOV_rm(old.value, (tloc.value, offset)) + # eax->next = old + self.mc.MOV_mr((eax.value, 0), old.value) + # eax->value = my esp + self.mc.MOV_mr((eax.value, WORD), esp.value) + # eax->kind = VMPROF_JITTED_TAG + self.mc.MOV_mi((eax.value, WORD * 2), VMPROF_JITTED_TAG) + # save in vmprof_tl_stack the new eax + self.mc.MOV_mr((tloc.value, offset), eax.value) + + def _call_footer_vmprof(self): + from rpython.rlib.rvmprof.rvmprof import cintf + # edx = address of pypy_threadlocal_s + self.mc.MOV_rs(edx.value, THREADLOCAL_OFS) + self.mc.AND_ri(edx.value, ~1) + # eax = (our local vmprof_tl_stack).next + self.mc.MOV_rs(eax.value, (FRAME_FIXED_SIZE - 4 + 0) * WORD) + # save in vmprof_tl_stack the value eax + offset = cintf.vmprof_tl_stack.getoffset() + self.mc.MOV_mr((edx.value, offset), eax.value) + def _call_header(self): self.mc.SUB_ri(esp.value, self._get_whole_frame_size() * WORD) self.mc.MOV_sr(PASS_ON_MY_FRAME * WORD, ebp.value) if IS_X86_64: self.mc.MOV_sr(THREADLOCAL_OFS, esi.value) + if not self.cpu.gc_ll_descr.stm and self.cpu.translate_support_code: + self._call_header_vmprof() # on X86_64, this uses esi + if IS_X86_64: self.mc.MOV_rr(ebp.value, edi.value) else: self.mc.MOV_rs(ebp.value, (self._get_whole_frame_size() + 1) * WORD) @@ -1041,6 +1086,8 @@ self._call_footer_shadowstack() # the return value is the jitframe + if not self.cpu.gc_ll_descr.stm and self.cpu.translate_support_code: + self._call_footer_vmprof() self.mc.MOV_rr(eax.value, ebp.value) for i in range(len(self.cpu.CALLEE_SAVE_REGISTERS)-1, -1, -1): diff --git a/rpython/jit/backend/x86/test/test_rvmprof.py b/rpython/jit/backend/x86/test/test_rvmprof.py new file mode 100644 --- /dev/null +++ b/rpython/jit/backend/x86/test/test_rvmprof.py @@ -0,0 +1,7 @@ + 
+import py +from rpython.jit.backend.test.test_rvmprof import BaseRVMProfTest +from rpython.jit.backend.x86.test.test_basic import Jit386Mixin + +class TestFfiCall(Jit386Mixin, BaseRVMProfTest): + pass \ No newline at end of file diff --git a/rpython/jit/backend/x86/test/test_zrpy_vmprof.py b/rpython/jit/backend/x86/test/test_zrpy_vmprof.py new file mode 100644 --- /dev/null +++ b/rpython/jit/backend/x86/test/test_zrpy_vmprof.py @@ -0,0 +1,7 @@ + +from rpython.jit.backend.llsupport.test.zrpy_vmprof_test import CompiledVmprofTest + +class TestZVMprof(CompiledVmprofTest): + + gcrootfinder = "shadowstack" + gc = "incminimark" \ No newline at end of file diff --git a/rpython/jit/backend/x86/test/test_zvmprof.py b/rpython/jit/backend/x86/test/test_zvmprof.py new file mode 100644 --- /dev/null +++ b/rpython/jit/backend/x86/test/test_zvmprof.py @@ -0,0 +1,7 @@ + +from rpython.jit.backend.llsupport.test.zrpy_vmprof_test import CompiledVmprofTest + +class TestZVMprof(CompiledVmprofTest): + + gcrootfinder = "shadowstack" + gc = "incminimark" \ No newline at end of file diff --git a/rpython/jit/codewriter/test/test_jtransform.py b/rpython/jit/codewriter/test/test_jtransform.py --- a/rpython/jit/codewriter/test/test_jtransform.py +++ b/rpython/jit/codewriter/test/test_jtransform.py @@ -1344,7 +1344,7 @@ tlfield = ThreadLocalField(lltype.Signed, 'foobar_test_', loop_invariant=loop_inv) OS_THREADLOCALREF_GET = effectinfo.EffectInfo.OS_THREADLOCALREF_GET - c = const(tlfield.offset) + c = const(tlfield.getoffset()) v = varoftype(lltype.Signed) op = SpaceOperation('threadlocalref_get', [c], v) cc = FakeBuiltinCallControl() diff --git a/rpython/jit/metainterp/quasiimmut.py b/rpython/jit/metainterp/quasiimmut.py --- a/rpython/jit/metainterp/quasiimmut.py +++ b/rpython/jit/metainterp/quasiimmut.py @@ -51,6 +51,7 @@ class QuasiImmut(object): llopaque = True compress_limit = 30 + looptokens_wrefs = None def __init__(self, cpu): self.cpu = cpu @@ -75,7 +76,7 @@ def 
compress_looptokens_list(self): self.looptokens_wrefs = [wref for wref in self.looptokens_wrefs if wref() is not None] - # NB. we must keep around the looptoken_wrefs that are + # NB. we must keep around the looptokens_wrefs that are # already invalidated; see below self.compress_limit = (len(self.looptokens_wrefs) + 15) * 2 @@ -83,6 +84,9 @@ # When this is called, all the loops that we record become # invalid: all GUARD_NOT_INVALIDATED in these loops (and # in attached bridges) must now fail. + if self.looptokens_wrefs is None: + # can't happen, but helps compiled tests + return wrefs = self.looptokens_wrefs self.looptokens_wrefs = [] for wref in wrefs: diff --git a/rpython/jit/metainterp/test/test_jitdriver.py b/rpython/jit/metainterp/test/test_jitdriver.py --- a/rpython/jit/metainterp/test/test_jitdriver.py +++ b/rpython/jit/metainterp/test/test_jitdriver.py @@ -193,7 +193,7 @@ return pc + 1 driver = JitDriver(greens=["pc"], reds='auto', - get_unique_id=get_unique_id) + get_unique_id=get_unique_id, is_recursive=True) def f(arg): i = 0 diff --git a/rpython/jit/metainterp/test/test_recursive.py b/rpython/jit/metainterp/test/test_recursive.py --- a/rpython/jit/metainterp/test/test_recursive.py +++ b/rpython/jit/metainterp/test/test_recursive.py @@ -1312,7 +1312,7 @@ return (code + 1) * 2 driver = JitDriver(greens=["pc", "code"], reds='auto', - get_unique_id=get_unique_id) + get_unique_id=get_unique_id, is_recursive=True) def f(pc, code): i = 0 diff --git a/rpython/rlib/jit.py b/rpython/rlib/jit.py --- a/rpython/rlib/jit.py +++ b/rpython/rlib/jit.py @@ -624,6 +624,8 @@ raise AttributeError("no 'greens' or 'reds' supplied") if virtualizables is not None: self.virtualizables = virtualizables + if get_unique_id is not None: + assert is_recursive, "get_unique_id and is_recursive must be specified at the same time" for v in self.virtualizables: assert v in self.reds # if reds are automatic, they won't be passed to jit_merge_point, so diff --git a/rpython/rlib/rthread.py 
b/rpython/rlib/rthread.py --- a/rpython/rlib/rthread.py +++ b/rpython/rlib/rthread.py @@ -320,7 +320,7 @@ offset = CDefinedIntSymbolic('RPY_TLOFS_%s' % self.fieldname, default='?') offset.loop_invariant = loop_invariant - self.offset = offset + self._offset = offset # for STM only PSTRUCTTYPE = _field2structptr(FIELDTYPE) @@ -387,7 +387,7 @@ ThreadLocalField.__init__(self, lltype.Signed, 'tlref%d' % unique_id, loop_invariant=loop_invariant) setraw = self.setraw - offset = self.offset + offset = self._offset def get(): if we_are_translated(): diff --git a/rpython/rlib/rvmprof/cintf.py b/rpython/rlib/rvmprof/cintf.py --- a/rpython/rlib/rvmprof/cintf.py +++ b/rpython/rlib/rvmprof/cintf.py @@ -5,41 +5,41 @@ from rpython.rtyper.lltypesystem import lltype, llmemory, rffi from rpython.translator.tool.cbuild import ExternalCompilationInfo from rpython.rtyper.tool import rffi_platform as platform +from rpython.rlib import rthread from rpython.jit.backend import detect_cpu class VMProfPlatformUnsupported(Exception): pass +ROOT = py.path.local(rpythonroot).join('rpython', 'rlib', 'rvmprof') +SRC = ROOT.join('src') + +if sys.platform.startswith('linux'): + _libs = ['dl'] +else: + _libs = [] +eci_kwds = dict( + include_dirs = [SRC], + includes = ['rvmprof.h'], + libraries = _libs, + separate_module_files = [SRC.join('rvmprof.c')], + post_include_bits=['#define RPYTHON_VMPROF\n'], + ) +global_eci = ExternalCompilationInfo(**eci_kwds) + + def setup(): if not detect_cpu.autodetect().startswith(detect_cpu.MODEL_X86_64): raise VMProfPlatformUnsupported("rvmprof only supports" " x86-64 CPUs for now") - - ROOT = py.path.local(rpythonroot).join('rpython', 'rlib', 'rvmprof') - SRC = ROOT.join('src') - - - if sys.platform.startswith('linux'): - libs = ['dl'] - else: - libs = [] - - eci_kwds = dict( - include_dirs = [SRC], - includes = ['rvmprof.h'], - libraries = libs, - separate_module_files = [SRC.join('rvmprof.c')], - post_include_bits=['#define RPYTHON_VMPROF\n'], - ) - eci = 
ExternalCompilationInfo(**eci_kwds) - platform.verify_eci(ExternalCompilationInfo( compile_extra=['-DRPYTHON_LL2CTYPES'], **eci_kwds)) + eci = global_eci vmprof_init = rffi.llexternal("vmprof_init", [rffi.INT, rffi.DOUBLE, rffi.CCHARP], rffi.CCHARP, compilation_info=eci) @@ -55,7 +55,8 @@ rffi.INT, compilation_info=eci) vmprof_ignore_signals = rffi.llexternal("vmprof_ignore_signals", [rffi.INT], lltype.Void, - compilation_info=eci) + compilation_info=eci, + _nowrapper=True) return CInterface(locals()) @@ -67,112 +68,34 @@ def _freeze_(self): return True -def token2lltype(tok): - if tok == 'i': - return lltype.Signed - if tok == 'r': - return llmemory.GCREF - raise NotImplementedError(repr(tok)) -def make_trampoline_function(name, func, token, restok): - from rpython.jit.backend import detect_cpu +# --- copy a few declarations from src/vmprof_stack.h --- - cont_name = 'rpyvmprof_f_%s_%s' % (name, token) - tramp_name = 'rpyvmprof_t_%s_%s' % (name, token) - orig_tramp_name = tramp_name +VMPROF_CODE_TAG = 1 - func.c_name = cont_name - func._dont_inline_ = True +VMPROFSTACK = lltype.ForwardReference() +PVMPROFSTACK = lltype.Ptr(VMPROFSTACK) +VMPROFSTACK.become(rffi.CStruct("vmprof_stack_s", + ('next', PVMPROFSTACK), + ('value', lltype.Signed), + ('kind', lltype.Signed))) +# ---------- - if sys.platform == 'darwin': - # according to internet "At the time UNIX was written in 1974...." - # "... 
all C functions are prefixed with _" - cont_name = '_' + cont_name - tramp_name = '_' + tramp_name - PLT = "" - size_decl = "" - type_decl = "" - extra_align = "" - else: - PLT = "@PLT" - type_decl = "\t.type\t%s, @function" % (tramp_name,) - size_decl = "\t.size\t%s, .-%s" % ( - tramp_name, tramp_name) - extra_align = "\t.cfi_def_cfa_offset 8" - assert detect_cpu.autodetect().startswith(detect_cpu.MODEL_X86_64), ( - "rvmprof only supports x86-64 CPUs for now") +vmprof_tl_stack = rthread.ThreadLocalField(PVMPROFSTACK, "vmprof_tl_stack") +do_use_eci = rffi.llexternal_use_eci( + ExternalCompilationInfo(includes=['vmprof_stack.h'], + include_dirs = [SRC])) - # mapping of argument count (not counting the final uid argument) to - # the register that holds this uid argument - reg = {0: '%rdi', - 1: '%rsi', - 2: '%rdx', - 3: '%rcx', - 4: '%r8', - 5: '%r9', - } - try: - reg = reg[len(token)] - except KeyError: - raise NotImplementedError( - "not supported: %r takes more than 5 arguments" % (func,)) +def enter_code(unique_id): + do_use_eci() + s = lltype.malloc(VMPROFSTACK, flavor='raw') + s.c_next = vmprof_tl_stack.get_or_make_raw() + s.c_value = unique_id + s.c_kind = VMPROF_CODE_TAG + vmprof_tl_stack.setraw(s) + return s - target = udir.join('module_cache') - target.ensure(dir=1) - target = target.join('trampoline_%s_%s.vmprof.s' % (name, token)) - # NOTE! the tabs in this file are absolutely essential, things - # that don't start with \t are silently ignored (: WAT!?) 
- target.write("""\ -\t.text -\t.globl\t%(tramp_name)s -%(type_decl)s -%(tramp_name)s: -\t.cfi_startproc -\tpushq\t%(reg)s -\t.cfi_def_cfa_offset 16 -\tcall %(cont_name)s%(PLT)s -\taddq\t$8, %%rsp -%(extra_align)s -\tret -\t.cfi_endproc -%(size_decl)s -""" % locals()) - - def tok2cname(tok): - if tok == 'i': - return 'long' - if tok == 'r': - return 'void *' - raise NotImplementedError(repr(tok)) - - header = 'RPY_EXTERN %s %s(%s);\n' % ( - tok2cname(restok), - orig_tramp_name, - ', '.join([tok2cname(tok) for tok in token] + ['long'])) - - header += """\ -static int cmp_%s(void *addr) { - if (addr == %s) return 1; -#ifdef VMPROF_ADDR_OF_TRAMPOLINE - return VMPROF_ADDR_OF_TRAMPOLINE(addr); -#undef VMPROF_ADDR_OF_TRAMPOLINE -#else - return 0; -#endif -#define VMPROF_ADDR_OF_TRAMPOLINE cmp_%s -} -""" % (tramp_name, orig_tramp_name, tramp_name) - - eci = ExternalCompilationInfo( - post_include_bits = [header], - separate_module_files = [str(target)], - ) - - return rffi.llexternal( - orig_tramp_name, - [token2lltype(tok) for tok in token] + [lltype.Signed], - token2lltype(restok), - compilation_info=eci, - _nowrapper=True, sandboxsafe=True, - random_effects_on_gcobjs=True) +def leave_code(s): + vmprof_tl_stack.setraw(s.c_next) + lltype.free(s, flavor='raw') diff --git a/rpython/rlib/rvmprof/rvmprof.py b/rpython/rlib/rvmprof/rvmprof.py --- a/rpython/rlib/rvmprof/rvmprof.py +++ b/rpython/rlib/rvmprof/rvmprof.py @@ -4,12 +4,19 @@ from rpython.rlib.rvmprof import cintf from rpython.rtyper.annlowlevel import cast_instance_to_gcref from rpython.rtyper.annlowlevel import cast_base_ptr_to_instance -from rpython.rtyper.lltypesystem import rffi +from rpython.rtyper.lltypesystem import rffi, llmemory +from rpython.rtyper.lltypesystem.lloperation import llop MAX_FUNC_NAME = 1023 # ____________________________________________________________ +# keep in sync with vmprof_stack.h +VMPROF_CODE_TAG = 1 +VMPROF_BLACKHOLE_TAG = 2 +VMPROF_JITTED_TAG = 3 +VMPROF_JITTING_TAG = 4 
+VMPROF_GC_TAG = 5 class VMProfError(Exception): def __init__(self, msg): @@ -19,17 +26,16 @@ class VMProf(object): + _immutable_fields_ = ['is_enabled?'] + def __init__(self): "NOT_RPYTHON: use _get_vmprof()" self._code_classes = set() self._gather_all_code_objs = lambda: None self._cleanup_() - if sys.maxint == 2147483647: - self._code_unique_id = 0 # XXX this is wrong, it won't work on 32bit - else: - self._code_unique_id = 0x7000000000000000 + self._code_unique_id = 4 self.cintf = cintf.setup() - + def _cleanup_(self): self.is_enabled = False @@ -127,7 +133,6 @@ if self.cintf.vmprof_register_virtual_function(name, uid, 500000) < 0: raise VMProfError("vmprof buffers full! disk full or too slow") - def vmprof_execute_code(name, get_code_fn, result_class=None): """Decorator to be used on the function that interprets a code object. @@ -136,12 +141,7 @@ 'get_code_fn(*args)' is called to extract the code object from the arguments given to the decorated function. - The original function can return None, an integer, or an instance. - In the latter case (only), 'result_class' must be set. - - NOTE: for now, this assumes that the decorated functions only takes - instances or plain integer arguments, and at most 5 of them - (including 'self' if applicable). + 'result_class' is ignored (backward compatibility). 
""" def decorate(func): try: @@ -149,52 +149,19 @@ except cintf.VMProfPlatformUnsupported: return func - if hasattr(func, 'im_self'): - assert func.im_self is None - func = func.im_func - - def lower(*args): - if len(args) == 0: - return (), "" - ll_args, token = lower(*args[1:]) - ll_arg = args[0] - if isinstance(ll_arg, int): - tok = "i" - else: - tok = "r" From pypy.commits at gmail.com Wed Feb 24 11:55:38 2016 From: pypy.commits at gmail.com (Raemi) Date: Wed, 24 Feb 2016 08:55:38 -0800 (PST) Subject: [pypy-commit] pypy stmgc-c8: merge and fix tons of things Message-ID: <56cde08a.03321c0a.43247.ffffefdd@mx.google.com> Author: Remi Meier Branch: stmgc-c8 Changeset: r82484:4d0277154891 Date: 2016-02-24 17:34 +0100 http://bitbucket.org/pypy/pypy/changeset/4d0277154891/ Log: merge and fix tons of things diff --git a/pypy/doc/whatsnew-head.rst b/pypy/doc/whatsnew-head.rst --- a/pypy/doc/whatsnew-head.rst +++ b/pypy/doc/whatsnew-head.rst @@ -123,3 +123,13 @@ .. branch: fix-cpython-ssl-tests-2.7 Fix SSL tests by importing cpython's patch + +.. branch: remove-getfield-pure + +Remove pure variants of ``getfield_gc_*`` operations from the JIT. Relevant +optimizations instead consult the field descriptor to determine the purity of +the operation. Additionally, pure ``getfield`` operations are now handled +entirely by `rpython/jit/metainterp/optimizeopt/heap.py` rather than +`rpython/jit/metainterp/optimizeopt/pure.py`, which can result in better codegen +for traces containing a large number of pure getfield operations. 
+ diff --git a/pypy/module/cpyext/dictobject.py b/pypy/module/cpyext/dictobject.py --- a/pypy/module/cpyext/dictobject.py +++ b/pypy/module/cpyext/dictobject.py @@ -59,7 +59,7 @@ return None return borrow_from(w_dict, w_res) - at cpython_api([PyObject, rffi.CCHARP], rffi.INT_real, error=-1) + at cpython_api([PyObject, CONST_STRING], rffi.INT_real, error=-1) def PyDict_DelItemString(space, w_dict, key_ptr): """Remove the entry in dictionary p which has a key specified by the string key. Return 0 on success or -1 on failure.""" diff --git a/pypy/module/cpyext/eval.py b/pypy/module/cpyext/eval.py --- a/pypy/module/cpyext/eval.py +++ b/pypy/module/cpyext/eval.py @@ -128,7 +128,7 @@ filename = "" return run_string(space, source, filename, start, w_globals, w_locals) - at cpython_api([rffi.CCHARP, rffi.INT_real, PyObject, PyObject, + at cpython_api([CONST_STRING, rffi.INT_real, PyObject, PyObject, PyCompilerFlagsPtr], PyObject) def PyRun_StringFlags(space, source, start, w_globals, w_locals, flagsptr): """Execute Python source code from str in the context specified by the @@ -189,7 +189,7 @@ pi[0] = space.getindex_w(w_obj, None) return 1 - at cpython_api([rffi.CCHARP, rffi.CCHARP, rffi.INT_real, PyCompilerFlagsPtr], + at cpython_api([CONST_STRING, CONST_STRING, rffi.INT_real, PyCompilerFlagsPtr], PyObject) def Py_CompileStringFlags(space, source, filename, start, flagsptr): """Parse and compile the Python source code in str, returning the diff --git a/pypy/module/cpyext/pystrtod.py b/pypy/module/cpyext/pystrtod.py --- a/pypy/module/cpyext/pystrtod.py +++ b/pypy/module/cpyext/pystrtod.py @@ -1,6 +1,6 @@ import errno from pypy.interpreter.error import OperationError -from pypy.module.cpyext.api import cpython_api +from pypy.module.cpyext.api import cpython_api, CONST_STRING from pypy.module.cpyext.pyobject import PyObject from rpython.rlib import rdtoa from rpython.rlib import rfloat @@ -22,7 +22,7 @@ rfloat.DIST_NAN: Py_DTST_NAN } - at cpython_api([rffi.CCHARP, 
rffi.CCHARPP, PyObject], rffi.DOUBLE, error=-1.0) + at cpython_api([CONST_STRING, rffi.CCHARPP, PyObject], rffi.DOUBLE, error=-1.0) @jit.dont_look_inside # direct use of _get_errno() def PyOS_string_to_double(space, s, endptr, w_overflow_exception): """Convert a string s to a double, raising a Python diff --git a/rpython/jit/backend/llsupport/rewrite.py b/rpython/jit/backend/llsupport/rewrite.py --- a/rpython/jit/backend/llsupport/rewrite.py +++ b/rpython/jit/backend/llsupport/rewrite.py @@ -241,7 +241,6 @@ self.emit_gc_store_or_indexed(op, ptr_box, index_box, value_box, fieldsize, itemsize, ofs) elif op.getopnum() in (rop.GETFIELD_GC_I, rop.GETFIELD_GC_F, rop.GETFIELD_GC_R, - rop.GETFIELD_GC_PURE_I, rop.GETFIELD_GC_PURE_F, rop.GETFIELD_GC_PURE_R, rop.GETFIELD_RAW_I, rop.GETFIELD_RAW_F, rop.GETFIELD_RAW_R): ofs, itemsize, sign = unpack_fielddescr(op.getdescr()) ptr_box = op.getarg(0) diff --git a/rpython/jit/backend/llsupport/stmrewrite.py b/rpython/jit/backend/llsupport/stmrewrite.py --- a/rpython/jit/backend/llsupport/stmrewrite.py +++ b/rpython/jit/backend/llsupport/stmrewrite.py @@ -1,7 +1,8 @@ from rpython.jit.backend.llsupport.rewrite import GcRewriterAssembler from rpython.jit.backend.llsupport.descr import ( CallDescr, ArrayOrFieldDescr, unpack_fielddescr) -from rpython.jit.metainterp.resoperation import ResOperation, rop +from rpython.jit.metainterp.resoperation import ( + ResOperation, rop, ResOpWithDescr, OpHelpers) from rpython.jit.metainterp.history import ConstInt from rpython.rlib.objectmodel import specialize from rpython.rlib.debug import (have_debug_prints, debug_start, debug_stop, @@ -129,12 +130,17 @@ newop = GcRewriterAssembler.emit_gc_load_or_indexed( self, op, ptr_box, index_box, itemsize, factor, offset, sign, type) ptr_box = newop.getarg(0) - if (op and not op.is_always_pure() and ptr_box.type == 'r' - and ptr_box not in self.read_barrier_applied - and not self.write_barrier_applied(ptr_box)): - op1 = ResOperation(rop.STM_READ, [ptr_box], 
None) - self.read_barrier_applied[ptr_box] = None - self.emit_op(op1) + if op: + is_pure = op.is_always_pure() + if not is_pure and isinstance(op, ResOpWithDescr): + is_pure = OpHelpers.is_pure_with_descr(op.getopnum(), op.getdescr()) + if (ptr_box.type == 'r' # not raw + and not is_pure # needs stm_read + and ptr_box not in self.read_barrier_applied + and not self.write_barrier_applied(ptr_box)): + op1 = ResOperation(rop.STM_READ, [ptr_box], None) + self.read_barrier_applied[ptr_box] = None + self.emit_op(op1) return newop diff --git a/rpython/jit/backend/llsupport/test/test_stmrewrite.py b/rpython/jit/backend/llsupport/test/test_stmrewrite.py --- a/rpython/jit/backend/llsupport/test/test_stmrewrite.py +++ b/rpython/jit/backend/llsupport/test/test_stmrewrite.py @@ -1338,7 +1338,7 @@ self.check_rewrite(""" [p1, p3, i1, p4] - p2 = getfield_gc%(pure)s_r(p1, descr=uxdescr) + p2 = getfield_gc_r(p1, descr=uxdescr) i4 = getarrayitem_gc%(pure)s_i(p4, i1, descr=vdescr) jump(p2) """ % d, """ diff --git a/rpython/jit/metainterp/resoperation.py b/rpython/jit/metainterp/resoperation.py --- a/rpython/jit/metainterp/resoperation.py +++ b/rpython/jit/metainterp/resoperation.py @@ -1156,7 +1156,6 @@ 'ARRAYLEN_GC/1d/i', 'STRLEN/1/i', 'STRGETITEM/2/i', - 'GETFIELD_GC_PURE/1d/rfi', 'GETARRAYITEM_GC_PURE/2d/rfi', #'GETFIELD_RAW_PURE/1d/rfi', these two operations not useful and #'GETARRAYITEM_RAW_PURE/2d/fi', dangerous when unrolling speculatively diff --git a/rpython/memory/gctransform/boehm.py b/rpython/memory/gctransform/boehm.py --- a/rpython/memory/gctransform/boehm.py +++ b/rpython/memory/gctransform/boehm.py @@ -77,7 +77,7 @@ def gct_fv_gc_malloc_varsize(self, hop, flags, TYPE, v_length, c_const_size, c_item_size, c_offset_to_length): - # XXX same behavior for zero=True: in theory that's wrong + # XXX same behavior for zero=True: in theory that's wrong if c_offset_to_length is None: v_raw = hop.genop("direct_call", [self.malloc_varsize_no_length_ptr, v_length, @@ -159,6 +159,11 
@@ resulttype = lltype.Signed) hop.genop('int_invert', [v_int], resultvar=hop.spaceop.result) + def gcheader_initdata(self, defnode): + hdr = lltype.malloc(self.HDR, immortal=True) + hdr.hash = lltype.identityhash_nocache(defnode.obj._as_ptr()) + return hdr._obj + ########## weakrefs ########## # Boehm: weakref objects are small structures containing only a Boehm diff --git a/rpython/memory/gctransform/framework.py b/rpython/memory/gctransform/framework.py --- a/rpython/memory/gctransform/framework.py +++ b/rpython/memory/gctransform/framework.py @@ -427,7 +427,6 @@ s_gcref = SomePtr(llmemory.GCREF) gcdata = self.gcdata - translator = self.translator #use the GC flag to find which malloc method to use #malloc_zero_filled == Ture -> malloc_fixedsize/varsize_clear #malloc_zero_filled == Flase -> malloc_fixedsize/varsize @@ -1615,6 +1614,24 @@ resulttype=llmemory.Address) llops.genop('raw_memclear', [v_adr, v_totalsize]) + def gcheader_initdata(self, defnode): + o = lltype.top_container(defnode.obj) + needs_hash = self.get_prebuilt_hash(o) is not None + hdr = self.gc_header_for(o, needs_hash) + return hdr._obj + + def get_prebuilt_hash(self, obj): + # for prebuilt objects that need to have their hash stored and + # restored. Note that only structures that are StructNodes all + # the way have their hash stored (and not e.g. structs with var- + # sized arrays at the end). 'obj' must be the top_container. 
+ TYPE = lltype.typeOf(obj) + if not isinstance(TYPE, lltype.GcStruct): + return None + if TYPE._is_varsize(): + return None + return getattr(obj, '_hash_cache_', None) + def emit_stm_memclearinit(self, llops, v_size, c_size, c_fixedofs, v_a): assert self.translator.config.translation.stm if c_size is None: diff --git a/rpython/memory/gctransform/refcounting.py b/rpython/memory/gctransform/refcounting.py --- a/rpython/memory/gctransform/refcounting.py +++ b/rpython/memory/gctransform/refcounting.py @@ -285,3 +285,7 @@ resulttype=llmemory.Address) hop.genop("direct_call", [self.identityhash_ptr, v_adr], resultvar=hop.spaceop.result) + + def gcheader_initdata(self, defnode): + top = lltype.top_container(defnode.obj) + return self.gcheaderbuilder.header_of_object(top)._obj diff --git a/rpython/memory/gctransform/stmframework.py b/rpython/memory/gctransform/stmframework.py --- a/rpython/memory/gctransform/stmframework.py +++ b/rpython/memory/gctransform/stmframework.py @@ -284,6 +284,14 @@ hop.genop("cast_opaque_ptr", [v_result], resultvar=op.result) hop.genop("stm_set_into_obj", [v_result, c_ofstolength, v_length]) + def get_prebuilt_hash(self, obj): + return None # done differently with the stmgc + + def get_stm_prebuilt_hash(self, obj): + h = BaseFrameworkGCTransformer.get_prebuilt_hash(self, obj) + if h is None: + h = lltype.identityhash(obj._as_ptr()) + return h diff --git a/rpython/memory/gctransform/test/test_framework.py b/rpython/memory/gctransform/test/test_framework.py --- a/rpython/memory/gctransform/test/test_framework.py +++ b/rpython/memory/gctransform/test/test_framework.py @@ -48,7 +48,7 @@ cbuild = CStandaloneBuilder(t, entrypoint, t.config, gcpolicy=gcpolicy) - db = cbuild.generate_graphs_for_llinterp() + db = cbuild.build_database() entrypointptr = cbuild.getentrypointptr() entrygraph = entrypointptr._obj.graph @@ -129,7 +129,7 @@ t.config.translation.gc = gc cbuild = CStandaloneBuilder(t, entrypoint, t.config, gcpolicy=gcpolicy) - db = 
cbuild.generate_graphs_for_llinterp() + db = cbuild.build_database() def test_no_collect_stm(): test_no_collect("stmgc") @@ -161,7 +161,8 @@ t.config.translation.gc = gc cbuild = CStandaloneBuilder(t, entrypoint, t.config, gcpolicy=gcpolicy) - f = py.test.raises(Exception, cbuild.generate_graphs_for_llinterp) + with py.test.raises(Exception) as f: + cbuild.build_database() expected = "'no_collect' function can trigger collection: > f, '};' print >> f, 'static long rpy_prebuilt_hashes[] = {' for _, node in gclist: - h = database.gcpolicy.get_stm_prebuilt_hash(node.obj) + h = database.gctransformer.get_stm_prebuilt_hash(node.obj) print >> f, '\t%s,' % (name_signed(h, database),) print >> f, '};' print >> f @@ -967,7 +955,6 @@ # sg = SourceGenerator(database) sg.set_strategy(targetdir, split) - database.prepare_inline_helpers() sg.gen_readable_parts_of_source(f) headers_to_precompile = sg.headers_to_precompile[:] headers_to_precompile.insert(0, incfilename) diff --git a/rpython/translator/c/node.py b/rpython/translator/c/node.py --- a/rpython/translator/c/node.py +++ b/rpython/translator/c/node.py @@ -3,8 +3,7 @@ Void, OpaqueType, Float, RuntimeTypeInfo, getRuntimeTypeInfo, Char, _subarray) from rpython.rtyper.lltypesystem import llmemory, llgroup -from rpython.translator.c.funcgen import FunctionCodeGenerator -from rpython.translator.c.external import CExternalFunctionCodeGenerator +from rpython.translator.c.funcgen import make_funcgen from rpython.translator.c.support import USESLOTS # set to False if necessary while refactoring from rpython.translator.c.support import cdecl, forward_cdecl, somelettersfrom from rpython.translator.c.support import c_char_array_constant, barebonearray @@ -594,7 +593,17 @@ class StructNode(ContainerNode): nodekind = 'struct' if USESLOTS: - __slots__ = ('is_weakref',) + __slots__ = ('gc_init', 'is_weakref') + + def __init__(self, db, T, obj): + ContainerNode.__init__(self, db, T, obj) + if needs_gcheader(T): + gct = 
self.db.gctransformer + if gct is not None: + self.gc_init = gct.gcheader_initdata(self) + db.getcontainernode(self.gc_init) + else: + self.gc_init = None def basename(self): T = self.getTYPE() @@ -621,8 +630,7 @@ data = [] if needs_gcheader(T): - gc_init = self.db.gcpolicy.struct_gcheader_initdata(self) - data.append(('gcheader', gc_init)) + data.append(('gcheader', self.gc_init)) for name in defnode.fieldnames: data.append((name, getattr(self.obj, name))) @@ -695,7 +703,7 @@ def implementation(self): hash_typename = self.get_hash_typename() - hash = self.db.gcpolicy.get_prebuilt_hash(self.obj) + hash = self.db.gctransformer.get_prebuilt_hash(self.obj) assert hash is not None lines = list(self.initializationexpr()) lines.insert(0, '%s = { {' % ( @@ -705,7 +713,8 @@ return lines def gcstructnode_factory(db, T, obj): - if db.gcpolicy.get_prebuilt_hash(obj) is not None: + if (db.gctransformer and + db.gctransformer.get_prebuilt_hash(obj) is not None): cls = GcStructNodeWithHash else: cls = StructNode @@ -715,7 +724,17 @@ class ArrayNode(ContainerNode): nodekind = 'array' if USESLOTS: - __slots__ = () + __slots__ = ('gc_init',) + + def __init__(self, db, T, obj): + ContainerNode.__init__(self, db, T, obj) + if needs_gcheader(T): + gct = self.db.gctransformer + if gct is not None: + self.gc_init = gct.gcheader_initdata(self) + db.getcontainernode(self.gc_init) + else: + self.gc_init = None def getptrname(self, static=False): if barebonearray(self.getTYPE()): @@ -735,8 +754,7 @@ T = self.getTYPE() yield '{' if needs_gcheader(T): - gc_init = self.db.gcpolicy.array_gcheader_initdata(self) - lines = generic_initializationexpr(self.db, gc_init, 'gcheader', + lines = generic_initializationexpr(self.db, self.gc_init, 'gcheader', '%sgcheader' % (decoration,)) for line in lines: yield line @@ -836,81 +854,64 @@ comma = '' expr += comma i = expr.find('\n') - if i<0: i = len(expr) + if i < 0: + i = len(expr) expr = '%s\t/* %s */%s' % (expr[:i], decoration, expr[i:]) return 
expr.split('\n') # ____________________________________________________________ -class FuncNode(ContainerNode): +class FuncNodeBase(ContainerNode): nodekind = 'func' eci_name = 'compilation_info' # there not so many node of this kind, slots should not # be necessary - - def __init__(self, db, T, obj, forcename=None): + def __init__(self, db, T, obj, ptrname): Node.__init__(self, db) self.globalcontainer = True self.T = T self.obj = obj - callable = getattr(obj, '_callable', None) - if (callable is not None and - getattr(callable, 'c_name', None) is not None): - self.name = forcename or obj._callable.c_name - elif getattr(obj, 'external', None) == 'C' and not db.need_sandboxing(obj): - self.name = forcename or self.basename() - else: - self.name = (forcename or - db.namespace.uniquename('g_' + self.basename())) - self.make_funcgens() + self.name = ptrname self.typename = db.gettype(T) #, who_asks=self) def getptrname(self, static=False): return self.name - def make_funcgens(self): - self.funcgens = select_function_code_generators(self.obj, self.db, self.name) - if self.funcgens: - argnames = self.funcgens[0].argnames() #Assume identical for all funcgens - self.implementationtypename = self.db.gettype(self.T, argnames=argnames) - self._funccodegen_owner = self.funcgens[0] - else: - self._funccodegen_owner = None - def basename(self): return self.obj._name + +class FuncNode(FuncNodeBase): + def __init__(self, db, T, obj, ptrname): + FuncNodeBase.__init__(self, db, T, obj, ptrname) + exception_policy = getattr(obj, 'exception_policy', None) + self.funcgen = make_funcgen(obj.graph, db, exception_policy, ptrname) + argnames = self.funcgen.argnames() + self.implementationtypename = db.gettype(T, argnames=argnames) + self._funccodegen_owner = self.funcgen + def enum_dependencies(self): - if not self.funcgens: - return [] - return self.funcgens[0].allconstantvalues() #Assume identical for all funcgens + return self.funcgen.allconstantvalues() def forward_declaration(self): 
callable = getattr(self.obj, '_callable', None) is_exported = getattr(callable, 'exported_symbol', False) - for funcgen in self.funcgens: - yield '%s;' % ( - forward_cdecl(self.implementationtypename, - funcgen.name(self.name), self.db.standalone, - is_exported=is_exported)) + yield '%s;' % ( + forward_cdecl(self.implementationtypename, + self.name, self.db.standalone, is_exported=is_exported)) + + def graphs_to_patch(self): + for i in self.funcgen.graphs_to_patch(): + yield i def implementation(self): - for funcgen in self.funcgens: - for s in self.funcgen_implementation(funcgen): - yield s - - def graphs_to_patch(self): - for funcgen in self.funcgens: - for i in funcgen.graphs_to_patch(): - yield i - - def funcgen_implementation(self, funcgen): + funcgen = self.funcgen funcgen.implementation_begin() # recompute implementationtypename as the argnames may have changed argnames = funcgen.argnames() implementationtypename = self.db.gettype(self.T, argnames=argnames) - yield '%s {' % cdecl(implementationtypename, funcgen.name(self.name)) + yield '%s {' % cdecl(implementationtypename, self.name) # # declare the local variables # @@ -921,7 +922,7 @@ while start < len(localnames): # pack the local declarations over as few lines as possible total = lengths[start] + 8 - end = start+1 + end = start + 1 while total + lengths[end] < 77: total += lengths[end] + 1 end += 1 @@ -952,44 +953,55 @@ del bodyiter funcgen.implementation_end() -def sandbox_stub(fnobj, db): - # unexpected external function for --sandbox translation: replace it - # with a "Not Implemented" stub. To support these functions, port them - # to the new style registry (e.g. rpython.module.ll_os.RegisterOs). 
- from rpython.translator.sandbox import rsandbox - graph = rsandbox.get_external_function_sandbox_graph(fnobj, db, - force_stub=True) - return [FunctionCodeGenerator(graph, db)] +class ExternalFuncNode(FuncNodeBase): + def __init__(self, db, T, obj, ptrname): + FuncNodeBase.__init__(self, db, T, obj, ptrname) + self._funccodegen_owner = None -def sandbox_transform(fnobj, db): - # for --sandbox: replace a function like os_open_llimpl() with - # code that communicates with the external process to ask it to - # perform the operation. - from rpython.translator.sandbox import rsandbox - graph = rsandbox.get_external_function_sandbox_graph(fnobj, db) - return [FunctionCodeGenerator(graph, db)] + def enum_dependencies(self): + return [] -def select_function_code_generators(fnobj, db, functionname): - sandbox = db.need_sandboxing(fnobj) - if hasattr(fnobj, 'graph'): - if sandbox and sandbox != "if_external": - # apply the sandbox transformation - return sandbox_transform(fnobj, db) - exception_policy = getattr(fnobj, 'exception_policy', None) - return [FunctionCodeGenerator(fnobj.graph, db, exception_policy, - functionname)] - elif getattr(fnobj, 'external', None) is not None: - if sandbox: - return sandbox_stub(fnobj, db) - elif fnobj.external == 'C': - return [] - else: - assert fnobj.external == 'CPython' - return [CExternalFunctionCodeGenerator(fnobj, db)] - elif hasattr(fnobj._callable, "c_name"): - return [] # this case should only be used for entrypoints + def forward_declaration(self): + return [] + + def implementation(self): + return [] + +def new_funcnode(db, T, obj, forcename=None): + if db.sandbox: + if (getattr(obj, 'external', None) is not None and + not obj._safe_not_sandboxed): + from rpython.translator.sandbox import rsandbox + obj.__dict__['graph'] = rsandbox.get_sandbox_stub( + obj, db.translator.rtyper) + obj.__dict__.pop('_safe_not_sandboxed', None) + obj.__dict__.pop('external', None) + if forcename: + name = forcename else: - raise 
ValueError("don't know how to generate code for %r" % (fnobj,)) + name = _select_name(db, obj) + if hasattr(obj, 'graph'): + return FuncNode(db, T, obj, name) + elif getattr(obj, 'external', None) is not None: + assert obj.external == 'C' + if db.sandbox: + assert obj._safe_not_sandboxed + return ExternalFuncNode(db, T, obj, name) + elif hasattr(obj._callable, "c_name"): + return ExternalFuncNode(db, T, obj, name) # this case should only be used for entrypoints + else: + raise ValueError("don't know how to generate code for %r" % (obj,)) + + +def _select_name(db, obj): + try: + return obj._callable.c_name + except AttributeError: + pass + if getattr(obj, 'external', None) == 'C': + return obj._name + return db.namespace.uniquename('g_' + obj._name) + class ExtType_OpaqueNode(ContainerNode): nodekind = 'rpyopaque' @@ -1104,7 +1116,7 @@ Array: ArrayNode, GcArray: ArrayNode, FixedSizeArray: FixedSizeArrayNode, - FuncType: FuncNode, + FuncType: new_funcnode, OpaqueType: opaquenode_factory, llmemory._WeakRefType: weakrefnode_factory, llgroup.GroupType: GroupNode, diff --git a/rpython/translator/c/test/test_database.py b/rpython/translator/c/test/test_database.py --- a/rpython/translator/c/test/test_database.py +++ b/rpython/translator/c/test/test_database.py @@ -9,8 +9,6 @@ def dump_on_stdout(database): - if database.gctransformer: - database.prepare_inline_helpers() print '/*********************************/' structdeflist = database.getstructdeflist() for node in structdeflist: @@ -171,7 +169,7 @@ F = FuncType([Signed], Signed) f = functionptr(F, "f", graph=graph) - db = LowLevelDatabase(t) + db = LowLevelDatabase(t, exctransformer=t.getexceptiontransformer()) db.get(f) db.complete() dump_on_stdout(db) @@ -186,7 +184,7 @@ return p.x * p.y t, graph = makegraph(ll_f, [int]) - db = LowLevelDatabase(t) + db = LowLevelDatabase(t, exctransformer=t.getexceptiontransformer()) db.get(getfunctionptr(graph)) db.complete() dump_on_stdout(db) @@ -207,7 +205,7 @@ return s.ptr1.x * 
s.ptr2.x t, graph = makegraph(ll_f, [int]) - db = LowLevelDatabase(t) + db = LowLevelDatabase(t, exctransformer=t.getexceptiontransformer()) db.get(getfunctionptr(graph)) db.complete() dump_on_stdout(db) diff --git a/rpython/translator/c/test/test_refcount.py b/rpython/translator/c/test/test_refcount.py --- a/rpython/translator/c/test/test_refcount.py +++ b/rpython/translator/c/test/test_refcount.py @@ -106,37 +106,6 @@ assert fn(1) == 4 assert fn(0) == 5 - def test_del_basic(self): - py.test.skip("xxx fix or kill") - S = lltype.GcStruct('S', ('x', lltype.Signed), rtti=True) - TRASH = lltype.GcStruct('TRASH', ('x', lltype.Signed)) - GLOBAL = lltype.Struct('GLOBAL', ('x', lltype.Signed)) - glob = lltype.malloc(GLOBAL, immortal=True) - def destructor(s): - glob.x = s.x + 1 - def type_info_S(s): - return lltype.getRuntimeTypeInfo(S) - - def g(n): - s = lltype.malloc(S) - s.x = n - # now 's' should go away - def entrypoint(n): - g(n) - # llop.gc__collect(lltype.Void) - return glob.x - - t = TranslationContext() - t.buildannotator().build_types(entrypoint, [int]) - rtyper = t.buildrtyper() - destrptr = rtyper.annotate_helper_fn(destructor, [lltype.Ptr(S)]) - rtyper.attachRuntimeTypeInfoFunc(S, type_info_S, destrptr=destrptr) - rtyper.specialize() - fn = self.compile_func(entrypoint, None, t) - - res = fn(123) - assert res == 124 - def test_del_catches(self): import os def g(): diff --git a/rpython/translator/platform/windows.py b/rpython/translator/platform/windows.py --- a/rpython/translator/platform/windows.py +++ b/rpython/translator/platform/windows.py @@ -151,7 +151,7 @@ # Increase stack size, for the linker and the stack check code. 
stack_size = 8 << 20 # 8 Mb - self.link_flags.append('/STACK:%d' % stack_size) + self.link_flags = self.link_flags + ('/STACK:%d' % stack_size,) # The following symbol is used in c/src/stack.h self.cflags.append('/DMAX_STACK_SIZE=%d' % (stack_size - 1024)) diff --git a/rpython/translator/sandbox/rsandbox.py b/rpython/translator/sandbox/rsandbox.py --- a/rpython/translator/sandbox/rsandbox.py +++ b/rpython/translator/sandbox/rsandbox.py @@ -16,7 +16,6 @@ from rpython.rlib import rposix from rpython.rtyper.lltypesystem import lltype, rffi from rpython.rtyper.llannotation import lltype_to_annotation -from rpython.tool.sourcetools import func_with_new_name from rpython.rtyper.annlowlevel import MixLevelHelperAnnotator from rpython.tool.ansi_print import ansi_log @@ -37,7 +36,8 @@ sandboxsafe=True) - at signature(types.int(), types.ptr(rffi.CCHARP.TO), types.int(), returns=types.none()) + at signature(types.int(), types.ptr(rffi.CCHARP.TO), types.int(), + returns=types.none()) def writeall_not_sandboxed(fd, buf, length): while length > 0: size = rffi.cast(rffi.SIZE_T, length) @@ -85,15 +85,24 @@ return loader def reraise_error(error, loader): - if error == 1: raise OSError(load_int(loader), "external error") - elif error == 2: raise IOError - elif error == 3: raise OverflowError - elif error == 4: raise ValueError - elif error == 5: raise ZeroDivisionError - elif error == 6: raise MemoryError - elif error == 7: raise KeyError - elif error == 8: raise IndexError - else: raise RuntimeError + if error == 1: + raise OSError(load_int(loader), "external error") + elif error == 2: + raise IOError + elif error == 3: + raise OverflowError + elif error == 4: + raise ValueError + elif error == 5: + raise ZeroDivisionError + elif error == 6: + raise MemoryError + elif error == 7: + raise KeyError + elif error == 8: + raise IndexError + else: + raise RuntimeError @signature(types.str(), returns=types.impossible()) @@ -101,51 +110,46 @@ STDERR = 2 with rffi.scoped_str2charp(msg + 
'\n') as buf: writeall_not_sandboxed(STDERR, buf, len(msg) + 1) - raise RuntimeError(msg) # XXX in RPython, the msg is ignored at the moment + raise RuntimeError(msg) # XXX in RPython, the msg is ignored + +def make_stub(fnname, msg): + """Build always-raising stub function to replace unsupported external.""" + log.WARNING(msg) + + def execute(*args): + not_implemented_stub(msg) + execute.__name__ = 'sandboxed_%s' % (fnname,) + return execute + +def sig_ll(fnobj): + FUNCTYPE = lltype.typeOf(fnobj) + args_s = [lltype_to_annotation(ARG) for ARG in FUNCTYPE.ARGS] + s_result = lltype_to_annotation(FUNCTYPE.RESULT) + return args_s, s_result dump_string = rmarshal.get_marshaller(str) -load_int = rmarshal.get_loader(int) +load_int = rmarshal.get_loader(int) -def get_external_function_sandbox_graph(fnobj, db, force_stub=False): - """Build the graph of a helper trampoline function to be used - in place of real calls to the external function 'fnobj'. The - trampoline marshals its input arguments, dumps them to STDOUT, - and waits for an answer on STDIN. +def get_sandbox_stub(fnobj, rtyper): + fnname = fnobj._name + args_s, s_result = sig_ll(fnobj) + msg = "Not implemented: sandboxing for external function '%s'" % (fnname,) + execute = make_stub(fnname, msg) + return _annotate(rtyper, execute, args_s, s_result) + +def make_sandbox_trampoline(fnname, args_s, s_result): + """Create a trampoline function with the specified signature. + + The trampoline is meant to be used in place of real calls to the external + function named 'fnname'. It marshals its input arguments, dumps them to + STDOUT, and waits for an answer on STDIN. 
""" - if getattr(getattr(fnobj, '_callable', None), - '_sandbox_external_name', None): - fnname = fnobj._callable._sandbox_external_name - else: - fnname = fnobj._name - if hasattr(fnobj, 'graph'): - # get the annotation of the input arguments and the result - graph = fnobj.graph - annotator = db.translator.annotator - args_s = [annotator.binding(v) for v in graph.getargs()] - s_result = annotator.binding(graph.getreturnvar()) - else: - # pure external function - fall back to the annotations - # corresponding to the ll types - FUNCTYPE = lltype.typeOf(fnobj) - args_s = [lltype_to_annotation(ARG) for ARG in FUNCTYPE.ARGS] - s_result = lltype_to_annotation(FUNCTYPE.RESULT) - try: - if force_stub: # old case - don't try to support suggested_primitive - raise NotImplementedError("sandboxing for external function '%s'" - % (fnname,)) - dump_arguments = rmarshal.get_marshaller(tuple(args_s)) load_result = rmarshal.get_loader(s_result) - - except (NotImplementedError, - rmarshal.CannotMarshal, - rmarshal.CannotUnmarshall), e: - msg = 'Not Implemented: %s' % (e,) - log.WARNING(msg) - def execute(*args): - not_implemented_stub(msg) - + except (rmarshal.CannotMarshal, rmarshal.CannotUnmarshall) as e: + msg = "Cannot sandbox function '%s': %s" % (fnname, e) + execute = make_stub(fnname, msg) else: def execute(*args): # marshal the function name and input arguments @@ -158,9 +162,12 @@ result = load_result(loader) loader.check_finished() return result - execute = func_with_new_name(execute, 'sandboxed_' + fnname) + execute.__name__ = 'sandboxed_%s' % (fnname,) + return execute - ann = MixLevelHelperAnnotator(db.translator.rtyper) - graph = ann.getgraph(execute, args_s, s_result) + +def _annotate(rtyper, f, args_s, s_result): + ann = MixLevelHelperAnnotator(rtyper) + graph = ann.getgraph(f, args_s, s_result) ann.finish() return graph diff --git a/rpython/translator/stm/funcgen.py b/rpython/translator/stm/funcgen.py --- a/rpython/translator/stm/funcgen.py +++ 
b/rpython/translator/stm/funcgen.py @@ -33,15 +33,19 @@ def __init__(self, db, T, obj): assert isinstance(obj._name, int) - self.db = db - self.T = T - self.obj = obj + ContainerNode.__init__(self, db, T, obj) def initializationexpr(self, decoration=''): yield '{ { }, %s }' % ( name_small_integer(self.obj.typeid16, self.db)) # self.obj.prebuilt_hash + def enum_dependencies(self): + return [] + + def basename(self): + return self.nodekind + def stm_hint_commit_soon(funcgen, op): return ('if (!stm_is_atomic(&stm_thread_local))\n' From pypy.commits at gmail.com Wed Feb 24 13:09:25 2016 From: pypy.commits at gmail.com (Raemi) Date: Wed, 24 Feb 2016 10:09:25 -0800 (PST) Subject: [pypy-commit] pypy stmgc-c8: fix Message-ID: <56cdf1d5.8ee61c0a.acdac.ffff841b@mx.google.com> Author: Remi Meier Branch: stmgc-c8 Changeset: r82487:26b15c64b274 Date: 2016-02-24 19:08 +0100 http://bitbucket.org/pypy/pypy/changeset/26b15c64b274/ Log: fix diff --git a/pypy/objspace/std/mapdict.py b/pypy/objspace/std/mapdict.py --- a/pypy/objspace/std/mapdict.py +++ b/pypy/objspace/std/mapdict.py @@ -145,7 +145,7 @@ return self._size_estimate >> NUM_DIGITS @jit.dont_look_inside - def _update_size_estimate(self, new_size_estimate): + def _update_size_estimate(self, obj): oldattr = self attr = obj._get_mapdict_map() size_est = (oldattr._size_estimate + attr.size_estimate() @@ -183,7 +183,7 @@ def add_attr(self, obj, name, index, w_value): self._reorder_and_add(obj, name, index, w_value) if not jit.we_are_jitted(): - oldattr._update_size_estimate(attr.size_estimate()) + self._update_size_estimate(obj) def _add_attr_without_reordering(self, obj, name, index, w_value): attr = self._get_new_attr(name, index) From pypy.commits at gmail.com Wed Feb 24 17:30:40 2016 From: pypy.commits at gmail.com (Raemi) Date: Wed, 24 Feb 2016 14:30:40 -0800 (PST) Subject: [pypy-commit] pypy stmgc-c8: fix for changed method signature Message-ID: <56ce2f10.455e1c0a.91c53.166f@mx.google.com> Author: Remi Meier Branch: 
stmgc-c8 Changeset: r82488:ad865d0c916c Date: 2016-02-24 23:29 +0100 http://bitbucket.org/pypy/pypy/changeset/ad865d0c916c/ Log: fix for changed method signature diff --git a/rpython/translator/backendopt/gilanalysis.py b/rpython/translator/backendopt/gilanalysis.py --- a/rpython/translator/backendopt/gilanalysis.py +++ b/rpython/translator/backendopt/gilanalysis.py @@ -32,7 +32,7 @@ return graphanalyze.BoolGraphAnalyzer.analyze_direct_call( self, graph, seen) - def analyze_external_call(self, op, seen=None): + def analyze_external_call(self, funcobj, seen=None): # if 'funcobj' releases the GIL, then the GIL-releasing # functions themselves will call enter/leave transactional # zone. This case is covered above. diff --git a/rpython/translator/backendopt/writeanalyze.py b/rpython/translator/backendopt/writeanalyze.py --- a/rpython/translator/backendopt/writeanalyze.py +++ b/rpython/translator/backendopt/writeanalyze.py @@ -61,15 +61,14 @@ def _array_result(self, TYPE): return frozenset([("array", TYPE)]) - def analyze_external_call(self, op, seen=None): + def analyze_external_call(self, funcobj, seen=None): try: - funcobj = op.args[0].value._obj random = funcobj.random_effects_on_gcobjs - except (AttributeError, lltype.DelayedPointer): + except AttributeError: random = True if random: return self.top_result() - return graphanalyze.GraphAnalyzer.analyze_external_call(self, op, seen) + return graphanalyze.GraphAnalyzer.analyze_external_call(self, funcobj, seen) def _interiorfield_result(self, TYPE, fieldname): return frozenset([("interiorfield", TYPE, fieldname)]) From pypy.commits at gmail.com Thu Feb 25 04:02:31 2016 From: pypy.commits at gmail.com (Raemi) Date: Thu, 25 Feb 2016 01:02:31 -0800 (PST) Subject: [pypy-commit] pypy stmgc-c8: disable cpyext on stm Message-ID: <56cec327.890bc30a.2c3bc.35ad@mx.google.com> Author: Remi Meier Branch: stmgc-c8 Changeset: r82489:19cfa846e509 Date: 2016-02-25 10:01 +0100 http://bitbucket.org/pypy/pypy/changeset/19cfa846e509/ 
Log: disable cpyext on stm We don't support the rawrefcount functionality in our GC, and I think there is no fallback available. diff --git a/pypy/goal/targetpypystandalone.py b/pypy/goal/targetpypystandalone.py --- a/pypy/goal/targetpypystandalone.py +++ b/pypy/goal/targetpypystandalone.py @@ -231,6 +231,9 @@ # for now, disable _vmprof: the JIT's stm parts are not adapted # to track the stack depth config.objspace.usemodules._vmprof = False + # we don't support rlib.rawrefcount for our GC, so we need + # to disable cpyext... + config.objspace.usemodules.cpyext = False if config.objspace.allworkingmodules: from pypy.config.pypyoption import enable_allworkingmodules From pypy.commits at gmail.com Thu Feb 25 04:07:43 2016 From: pypy.commits at gmail.com (rlamy) Date: Thu, 25 Feb 2016 01:07:43 -0800 (PST) Subject: [pypy-commit] pypy desc-specialize: Create special FunctionDesc subclass for @specialize.memo() functions Message-ID: <56cec45f.a185c20a.f07ce.36bf@mx.google.com> Author: Ronan Lamy Branch: desc-specialize Changeset: r82490:67633b1da4fa Date: 2016-02-24 19:00 +0100 http://bitbucket.org/pypy/pypy/changeset/67633b1da4fa/ Log: Create special FunctionDesc subclass for @specialize.memo() functions diff --git a/rpython/annotator/bookkeeper.py b/rpython/annotator/bookkeeper.py --- a/rpython/annotator/bookkeeper.py +++ b/rpython/annotator/bookkeeper.py @@ -22,6 +22,7 @@ from rpython.annotator import description from rpython.annotator.signature import annotationoftype from rpython.annotator.argument import simple_args +from rpython.annotator.specialize import memo from rpython.rlib.objectmodel import r_dict, r_ordereddict, Symbolic from rpython.tool.algo.unionfind import UnionFind from rpython.rtyper import extregistry @@ -417,6 +418,8 @@ # (if any), according to the current policy tag = getattr(pyfunc, '_annspecialcase_', None) specializer = self.annotator.policy.get_specializer(tag) + if specializer is memo: + return description.MemoDesc(self, pyfunc, name, 
signature, defaults, specializer) return description.FunctionDesc(self, pyfunc, name, signature, defaults, specializer) def getfrozen(self, pyobj): diff --git a/rpython/annotator/description.py b/rpython/annotator/description.py --- a/rpython/annotator/description.py +++ b/rpython/annotator/description.py @@ -395,6 +395,15 @@ return s_sigs +class MemoDesc(FunctionDesc): + def pycall(self, whence, args, s_previous_result, op=None): + inputcells = self.parse_arguments(args) + s_result = self.specialize(inputcells, op) + assert not isinstance(s_result, FunctionGraph) + assert s_result.contains(s_previous_result) + return s_result + + class MethodDesc(Desc): knowntype = types.MethodType From pypy.commits at gmail.com Thu Feb 25 04:07:45 2016 From: pypy.commits at gmail.com (rlamy) Date: Thu, 25 Feb 2016 01:07:45 -0800 (PST) Subject: [pypy-commit] pypy desc-specialize: fixes Message-ID: <56cec461.2457c20a.45871.3992@mx.google.com> Author: Ronan Lamy Branch: desc-specialize Changeset: r82491:aef099eea410 Date: 2016-02-25 10:06 +0100 http://bitbucket.org/pypy/pypy/changeset/aef099eea410/ Log: fixes diff --git a/pypy/module/cpyext/api.py b/pypy/module/cpyext/api.py --- a/pypy/module/cpyext/api.py +++ b/pypy/module/cpyext/api.py @@ -1295,7 +1295,7 @@ miniglobals = {'__name__': __name__, # for module name propagation } exec source.compile() in miniglobals - call_external_function = specialize.ll(miniglobals['cpy_call_external']) + call_external_function = specialize.ll()(miniglobals['cpy_call_external']) call_external_function._dont_inline_ = True call_external_function._gctransformer_hint_close_stack_ = True # don't inline, as a hack to guarantee that no GC pointer is alive diff --git a/rpython/annotator/description.py b/rpython/annotator/description.py --- a/rpython/annotator/description.py +++ b/rpython/annotator/description.py @@ -6,7 +6,7 @@ from rpython.annotator.argument import rawshape, ArgErr, simple_args from rpython.tool.sourcetools import valid_identifier from 
rpython.tool.pairtype import extendabletype -from rpython.annotator.model import AnnotatorError, s_ImpossibleValue +from rpython.annotator.model import AnnotatorError, s_ImpossibleValue, unionof class CallFamily(object): """A family of Desc objects that could be called from common call sites. @@ -116,7 +116,6 @@ self.s_value = s_ImpossibleValue # union of possible values def update(self, other): - from rpython.annotator.model import unionof self.descs.update(other.descs) self.read_locations.update(other.read_locations) self.s_value = unionof(self.s_value, other.s_value) @@ -303,7 +302,6 @@ # Some specializations may break the invariant of returning # annotations that are always more general than the previous time. # We restore it here: - from rpython.annotator.model import unionof result = unionof(result, s_previous_result) return result @@ -399,8 +397,9 @@ def pycall(self, whence, args, s_previous_result, op=None): inputcells = self.parse_arguments(args) s_result = self.specialize(inputcells, op) - assert not isinstance(s_result, FunctionGraph) - assert s_result.contains(s_previous_result) + if isinstance(s_result, FunctionGraph): + s_result = s_result.getreturnvar().annotation + s_result = unionof(s_result, s_previous_result) return s_result diff --git a/rpython/rtyper/lltypesystem/llmemory.py b/rpython/rtyper/lltypesystem/llmemory.py --- a/rpython/rtyper/lltypesystem/llmemory.py +++ b/rpython/rtyper/lltypesystem/llmemory.py @@ -377,7 +377,6 @@ def _sizeof_none(TYPE): assert not TYPE._is_varsize() return ItemOffset(TYPE) -_sizeof_none._annspecialcase_ = 'specialize:memo' @specialize.memo() def _internal_array_field(TYPE): diff --git a/rpython/rtyper/rpbc.py b/rpython/rtyper/rpbc.py --- a/rpython/rtyper/rpbc.py +++ b/rpython/rtyper/rpbc.py @@ -362,9 +362,9 @@ def get_concrete_llfn(self, s_pbc, args_s, op): bk = self.rtyper.annotator.bookkeeper funcdesc, = s_pbc.descriptions - args = simple_args(args_s) with bk.at_position(None): - graph = funcdesc.get_graph(args, 
op) + argspec = simple_args(args_s) + graph = funcdesc.get_graph(argspec, op) llfn = self.rtyper.getcallable(graph) return inputconst(typeOf(llfn), llfn) From pypy.commits at gmail.com Thu Feb 25 04:23:16 2016 From: pypy.commits at gmail.com (fijal) Date: Thu, 25 Feb 2016 01:23:16 -0800 (PST) Subject: [pypy-commit] benchmarks single-run: fix Message-ID: <56cec804.aa0ac20a.a021f.39a3@mx.google.com> Author: fijal Branch: single-run Changeset: r349:898cedf9e20c Date: 2016-02-25 10:23 +0100 http://bitbucket.org/pypy/benchmarks/changeset/898cedf9e20c/ Log: fix diff --git a/bench-data.json b/bench-data.json --- a/bench-data.json +++ b/bench-data.json @@ -31,7 +31,9 @@ "warmup": 40 }, "bm_mdp": { - "description": "Some AI ..." + "description": "Some AI that uses a lot of fractions", + "total_runs": 2, + "warmup": 1 }, "pypy_interp": { "description": "interpreting py.py", diff --git a/benchmarks.py b/benchmarks.py --- a/benchmarks.py +++ b/benchmarks.py @@ -202,7 +202,7 @@ print err raise Exception("sphinx-build.py failed") res = float(out.splitlines()[-1]) - return RawResult([res]) + return RawResult([res], None) BM_cpython_doc.benchmark_name = 'sphinx' From pypy.commits at gmail.com Thu Feb 25 05:01:16 2016 From: pypy.commits at gmail.com (mjacob) Date: Thu, 25 Feb 2016 02:01:16 -0800 (PST) Subject: [pypy-commit] pypy llvm-translation-backend: hg merge default (+ fixes) Message-ID: <56ced0ec.8ee61c0a.e599c.ffffafdc@mx.google.com> Author: Manuel Jacob Branch: llvm-translation-backend Changeset: r82492:d01ebac8cf46 Date: 2016-02-24 10:56 +0100 http://bitbucket.org/pypy/pypy/changeset/d01ebac8cf46/ Log: hg merge default (+ fixes) diff too long, truncating to 2000 out of 11381 lines diff --git a/.hgignore b/.hgignore --- a/.hgignore +++ b/.hgignore @@ -22,6 +22,7 @@ ^pypy/module/cpyext/test/.+\.obj$ ^pypy/module/cpyext/test/.+\.manifest$ ^pypy/module/test_lib_pypy/ctypes_tests/.+\.o$ +^pypy/module/test_lib_pypy/ctypes_tests/_ctypes_test\.o$ ^pypy/module/cppyy/src/.+\.o$ 
^pypy/module/cppyy/bench/.+\.so$ ^pypy/module/cppyy/bench/.+\.root$ @@ -35,7 +36,6 @@ ^pypy/module/test_lib_pypy/cffi_tests/__pycache__.+$ ^pypy/doc/.+\.html$ ^pypy/doc/config/.+\.rst$ -^pypy/doc/basicblock\.asc$ ^pypy/doc/.+\.svninfo$ ^rpython/translator/c/src/libffi_msvc/.+\.obj$ ^rpython/translator/c/src/libffi_msvc/.+\.dll$ @@ -45,53 +45,33 @@ ^rpython/translator/c/src/cjkcodecs/.+\.obj$ ^rpython/translator/c/src/stacklet/.+\.o$ ^rpython/translator/c/src/.+\.o$ -^rpython/translator/jvm/\.project$ -^rpython/translator/jvm/\.classpath$ -^rpython/translator/jvm/eclipse-bin$ -^rpython/translator/jvm/src/pypy/.+\.class$ -^rpython/translator/benchmark/docutils$ -^rpython/translator/benchmark/templess$ -^rpython/translator/benchmark/gadfly$ -^rpython/translator/benchmark/mako$ -^rpython/translator/benchmark/bench-custom\.benchmark_result$ -^rpython/translator/benchmark/shootout_benchmarks$ +^rpython/translator/llvm/.+\.so$ ^rpython/translator/goal/target.+-c$ ^rpython/translator/goal/.+\.exe$ ^rpython/translator/goal/.+\.dll$ ^pypy/goal/pypy-translation-snapshot$ ^pypy/goal/pypy-c -^pypy/goal/pypy-jvm -^pypy/goal/pypy-jvm.jar ^pypy/goal/.+\.exe$ ^pypy/goal/.+\.dll$ ^pypy/goal/.+\.lib$ ^pypy/_cache$ -^pypy/doc/statistic/.+\.html$ -^pypy/doc/statistic/.+\.eps$ -^pypy/doc/statistic/.+\.pdf$ -^rpython/translator/cli/src/pypylib\.dll$ -^rpython/translator/cli/src/query\.exe$ -^rpython/translator/cli/src/main\.exe$ +^lib-python/2.7/lib2to3/.+\.pickle$ ^lib_pypy/__pycache__$ ^lib_pypy/ctypes_config_cache/_.+_cache\.py$ ^lib_pypy/ctypes_config_cache/_.+_.+_\.py$ ^lib_pypy/_libmpdec/.+.o$ -^rpython/translator/cli/query-descriptions$ ^pypy/doc/discussion/.+\.html$ ^include/.+\.h$ ^include/.+\.inl$ ^pypy/doc/_build/.*$ ^pypy/doc/config/.+\.html$ ^pypy/doc/config/style\.css$ -^pypy/doc/jit/.+\.html$ -^pypy/doc/jit/style\.css$ ^pypy/doc/image/lattice1\.png$ ^pypy/doc/image/lattice2\.png$ ^pypy/doc/image/lattice3\.png$ ^pypy/doc/image/stackless_informal\.png$ 
^pypy/doc/image/parsing_example.+\.png$ ^rpython/doc/_build/.*$ -^pypy/module/test_lib_pypy/ctypes_tests/_ctypes_test\.o$ ^compiled ^.git/ ^release/ diff --git a/LICENSE b/LICENSE --- a/LICENSE +++ b/LICENSE @@ -41,29 +41,29 @@ Amaury Forgeot d'Arc Antonio Cuni Samuele Pedroni + Matti Picus Alex Gaynor Brian Kearns - Matti Picus Philip Jenvey Michael Hudson + Ronan Lamy David Schneider + Manuel Jacob Holger Krekel Christian Tismer Hakan Ardo - Manuel Jacob - Ronan Lamy Benjamin Peterson + Richard Plangger Anders Chrigstrom Eric van Riet Paap Wim Lavrijsen - Richard Plangger Richard Emslie Alexander Schremmer Dan Villiom Podlaski Christiansen + Remi Meier Lukas Diekmann Sven Hager Anders Lehmann - Remi Meier Aurelien Campeas Niklaus Haldimann Camillo Bruni @@ -72,8 +72,8 @@ Romain Guillebert Leonardo Santagada Seo Sanghyeon + Ronny Pfannschmidt Justin Peel - Ronny Pfannschmidt David Edelsohn Anders Hammarquist Jakub Gustak @@ -95,6 +95,7 @@ Tyler Wade Michael Foord Stephan Diehl + Vincent Legoll Stefan Schwarzer Valentino Volonghi Tomek Meka @@ -105,9 +106,9 @@ Jean-Paul Calderone Timo Paulssen Squeaky + Marius Gedminas Alexandre Fayolle Simon Burton - Marius Gedminas Martin Matusiak Konstantin Lopuhin Wenzhu Man @@ -116,16 +117,20 @@ Ivan Sichmann Freitas Greg Price Dario Bertini + Stefano Rivera Mark Pearse Simon Cross Andreas Stührk - Stefano Rivera + Edd Barrett Jean-Philippe St. Pierre Guido van Rossum Pavel Vinogradov + Jeremy Thurgood Paweł Piotr Przeradowski + Spenser Bauman Paul deGrandis Ilya Osadchiy + marky1991 Tobias Oberstein Adrian Kuhn Boris Feigin @@ -134,14 +139,12 @@ Georg Brandl Bert Freudenberg Stian Andreassen - Edd Barrett + Tobias Pape Wanja Saatkamp Gerald Klix Mike Blume - Tobias Pape Oscar Nierstrasz Stefan H. 
Muller - Jeremy Thurgood Rami Chowdhury Eugene Oden Henry Mason @@ -153,6 +156,8 @@ Lukas Renggli Guenter Jantzen Ned Batchelder + Tim Felgentreff + Anton Gulenko Amit Regmi Ben Young Nicolas Chauvat @@ -162,12 +167,12 @@ Nicholas Riley Jason Chu Igor Trindade Oliveira - Tim Felgentreff + Yichao Yu Rocco Moretti Gintautas Miliauskas Michael Twomey Lucian Branescu Mihaila - Yichao Yu + Devin Jeanpierre Gabriel Lavoie Olivier Dormond Jared Grubb @@ -191,33 +196,33 @@ Stanislaw Halik Mikael Schönenberg Berkin Ilbeyi - Elmo M?ntynen + Elmo Mäntynen + Faye Zhao Jonathan David Riehl Anders Qvist Corbin Simpson Chirag Jadwani Beatrice During Alex Perry - Vincent Legoll + Vaibhav Sood Alan McIntyre - Spenser Bauman + William Leslie Alexander Sedov Attila Gobi + Jasper.Schulz Christopher Pope - Devin Jeanpierre - Vaibhav Sood Christian Tismer Marc Abramowitz Dan Stromberg Arjun Naik Valentina Mukhamedzhanova Stefano Parmesan + Mark Young Alexis Daboville Jens-Uwe Mager Carl Meyer Karl Ramm Pieter Zieschang - Anton Gulenko Gabriel Lukas Vacek Andrew Dalke @@ -225,6 +230,7 @@ Jakub Stasiak Nathan Taylor Vladimir Kryachko + Omer Katz Jacek Generowicz Alejandro J. Cura Jacob Oscarson @@ -239,6 +245,7 @@ Lars Wassermann Philipp Rustemeuer Henrik Vendelbo + Richard Lancaster Dan Buch Miguel de Val Borro Artur Lisiecki @@ -250,18 +257,18 @@ Tomo Cocoa Kim Jin Su Toni Mattis + Amber Brown Lucas Stadler Julian Berman Markus Holtermann roberto at goyle Yury V. 
Zaytsev Anna Katrina Dominguez - William Leslie Bobby Impollonia - Faye Zhao timo at eistee.fritz.box Andrew Thompson Yusei Tahara + Aaron Tubbs Ben Darnell Roberto De Ioris Juan Francisco Cantero Hurtado @@ -273,6 +280,7 @@ Christopher Armstrong Michael Hudson-Doyle Anders Sigfridsson + Nikolay Zinov Yasir Suhail Jason Michalski rafalgalczynski at gmail.com @@ -282,6 +290,7 @@ Gustavo Niemeyer Stephan Busemann Rafał Gałczyński + Matt Bogosian Christian Muirhead Berker Peksag James Lan @@ -316,9 +325,9 @@ Stefan Marr jiaaro Mads Kiilerich - Richard Lancaster opassembler.py Antony Lee + Jason Madden Yaroslav Fedevych Jim Hunziker Markus Unterwaditzer @@ -327,6 +336,7 @@ squeaky Zearin soareschen + Jonas Pfannschmidt Kurt Griffiths Mike Bayer Matthew Miller diff --git a/lib-python/2.7/distutils/command/build_ext.py b/lib-python/2.7/distutils/command/build_ext.py --- a/lib-python/2.7/distutils/command/build_ext.py +++ b/lib-python/2.7/distutils/command/build_ext.py @@ -188,7 +188,7 @@ # the 'libs' directory is for binary installs - we assume that # must be the *native* platform. But we don't really support # cross-compiling via a binary install anyway, so we let it go. - self.library_dirs.append(os.path.join(sys.exec_prefix, 'include')) + self.library_dirs.append(os.path.join(sys.exec_prefix, 'libs')) if self.debug: self.build_temp = os.path.join(self.build_temp, "Debug") else: diff --git a/lib_pypy/_pypy_testcapi.py b/lib_pypy/_pypy_testcapi.py --- a/lib_pypy/_pypy_testcapi.py +++ b/lib_pypy/_pypy_testcapi.py @@ -7,6 +7,7 @@ content = fid.read() # from cffi's Verifier() key = '\x00'.join([sys.version[:3], content]) + key += 'cpyext-gc-support-2' # this branch requires recompilation! 
if sys.version_info >= (3,): key = key.encode('utf-8') k1 = hex(binascii.crc32(key[0::2]) & 0xffffffff) @@ -62,7 +63,7 @@ if sys.platform == 'win32': # XXX pyconfig.h uses a pragma to link to the import library, # which is currently python27.lib - library = os.path.join(thisdir, '..', 'include', 'python27') + library = os.path.join(thisdir, '..', 'libs', 'python27') if not os.path.exists(library + '.lib'): # For a local translation or nightly build library = os.path.join(thisdir, '..', 'pypy', 'goal', 'python27') diff --git a/lib_pypy/cffi.egg-info/PKG-INFO b/lib_pypy/cffi.egg-info/PKG-INFO --- a/lib_pypy/cffi.egg-info/PKG-INFO +++ b/lib_pypy/cffi.egg-info/PKG-INFO @@ -1,6 +1,6 @@ Metadata-Version: 1.1 Name: cffi -Version: 1.5.1 +Version: 1.5.2 Summary: Foreign Function Interface for Python calling C code. Home-page: http://cffi.readthedocs.org Author: Armin Rigo, Maciej Fijalkowski diff --git a/lib_pypy/cffi/__init__.py b/lib_pypy/cffi/__init__.py --- a/lib_pypy/cffi/__init__.py +++ b/lib_pypy/cffi/__init__.py @@ -4,8 +4,8 @@ from .api import FFI, CDefError, FFIError from .ffiplatform import VerificationError, VerificationMissing -__version__ = "1.5.1" -__version_info__ = (1, 5, 1) +__version__ = "1.5.2" +__version_info__ = (1, 5, 2) # The verifier module file names are based on the CRC32 of a string that # contains the following version number. 
It may be older than __version__ diff --git a/lib_pypy/cffi/_embedding.h b/lib_pypy/cffi/_embedding.h --- a/lib_pypy/cffi/_embedding.h +++ b/lib_pypy/cffi/_embedding.h @@ -233,7 +233,7 @@ f = PySys_GetObject((char *)"stderr"); if (f != NULL && f != Py_None) { PyFile_WriteString("\nFrom: " _CFFI_MODULE_NAME - "\ncompiled with cffi version: 1.5.1" + "\ncompiled with cffi version: 1.5.2" "\n_cffi_backend module: ", f); modules = PyImport_GetModuleDict(); mod = PyDict_GetItemString(modules, "_cffi_backend"); diff --git a/lib_pypy/cffi/api.py b/lib_pypy/cffi/api.py --- a/lib_pypy/cffi/api.py +++ b/lib_pypy/cffi/api.py @@ -1,4 +1,4 @@ -import sys, sysconfig, types +import sys, types from .lock import allocate_lock try: @@ -550,16 +550,34 @@ lst.append(value) # if '__pypy__' in sys.builtin_module_names: + import os + if sys.platform == "win32": + # we need 'libpypy-c.lib'. Current distributions of + # pypy (>= 4.1) contain it as 'libs/python27.lib'. + pythonlib = "python27" + if hasattr(sys, 'prefix'): + ensure('library_dirs', os.path.join(sys.prefix, 'libs')) + else: + # we need 'libpypy-c.{so,dylib}', which should be by + # default located in 'sys.prefix/bin' for installed + # systems. + pythonlib = "pypy-c" + if hasattr(sys, 'prefix'): + ensure('library_dirs', os.path.join(sys.prefix, 'bin')) + # On uninstalled pypy's, the libpypy-c is typically found in + # .../pypy/goal/. 
if hasattr(sys, 'prefix'): - import os - ensure('library_dirs', os.path.join(sys.prefix, 'bin')) - pythonlib = "pypy-c" + ensure('library_dirs', os.path.join(sys.prefix, 'pypy', 'goal')) else: if sys.platform == "win32": template = "python%d%d" if hasattr(sys, 'gettotalrefcount'): template += '_d' else: + try: + import sysconfig + except ImportError: # 2.6 + from distutils import sysconfig template = "python%d.%d" if sysconfig.get_config_var('DEBUG_EXT'): template += sysconfig.get_config_var('DEBUG_EXT') diff --git a/lib_pypy/cffi/vengine_cpy.py b/lib_pypy/cffi/vengine_cpy.py --- a/lib_pypy/cffi/vengine_cpy.py +++ b/lib_pypy/cffi/vengine_cpy.py @@ -1,3 +1,6 @@ +# +# DEPRECATED: implementation for ffi.verify() +# import sys, imp from . import model, ffiplatform diff --git a/lib_pypy/cffi/vengine_gen.py b/lib_pypy/cffi/vengine_gen.py --- a/lib_pypy/cffi/vengine_gen.py +++ b/lib_pypy/cffi/vengine_gen.py @@ -1,3 +1,6 @@ +# +# DEPRECATED: implementation for ffi.verify() +# import sys, os import types diff --git a/lib_pypy/cffi/verifier.py b/lib_pypy/cffi/verifier.py --- a/lib_pypy/cffi/verifier.py +++ b/lib_pypy/cffi/verifier.py @@ -1,3 +1,6 @@ +# +# DEPRECATED: implementation for ffi.verify() +# import sys, os, binascii, shutil, io from . import __version_verifier_modules__ from . 
import ffiplatform diff --git a/pypy/config/pypyoption.py b/pypy/config/pypyoption.py --- a/pypy/config/pypyoption.py +++ b/pypy/config/pypyoption.py @@ -36,13 +36,16 @@ "cStringIO", "thread", "itertools", "pyexpat", "_ssl", "cpyext", "array", "binascii", "_multiprocessing", '_warnings', "_collections", "_multibytecodec", "micronumpy", "_continuation", "_cffi_backend", - "_csv", "cppyy", "_pypyjson" + "_csv", "cppyy", "_pypyjson", ]) -if ((sys.platform.startswith('linux') or sys.platform == 'darwin') - and os.uname()[4] == 'x86_64' and sys.maxint > 2**32): - # it's not enough that we get x86_64 - working_modules.add('_vmprof') +from rpython.jit.backend import detect_cpu +try: + if detect_cpu.autodetect().startswith('x86'): + working_modules.add('_vmprof') +except detect_cpu.ProcessorAutodetectError: + pass + translation_modules = default_modules.copy() translation_modules.update([ diff --git a/pypy/doc/discussion/rawrefcount.rst b/pypy/doc/discussion/rawrefcount.rst new file mode 100644 --- /dev/null +++ b/pypy/doc/discussion/rawrefcount.rst @@ -0,0 +1,158 @@ +====================== +Rawrefcount and the GC +====================== + + +GC Interface +------------ + +"PyObject" is a raw structure with at least two fields, ob_refcnt and +ob_pypy_link. The ob_refcnt is the reference counter as used on +CPython. If the PyObject structure is linked to a live PyPy object, +its current address is stored in ob_pypy_link and ob_refcnt is bumped +by either the constant REFCNT_FROM_PYPY, or the constant +REFCNT_FROM_PYPY_LIGHT (== REFCNT_FROM_PYPY + SOME_HUGE_VALUE) +(to mean "light finalizer"). + +Most PyPy objects exist outside cpyext, and conversely in cpyext it is +possible that a lot of PyObjects exist without being seen by the rest +of PyPy. At the interface, however, we can "link" a PyPy object and a +PyObject. 
There are two kinds of link: + +rawrefcount.create_link_pypy(p, ob) + + Makes a link between an exising object gcref 'p' and a newly + allocated PyObject structure 'ob'. ob->ob_refcnt must be + initialized to either REFCNT_FROM_PYPY, or + REFCNT_FROM_PYPY_LIGHT. (The second case is an optimization: + when the GC finds the PyPy object and PyObject no longer + referenced, it can just free() the PyObject.) + +rawrefcount.create_link_pyobj(p, ob) + + Makes a link from an existing PyObject structure 'ob' to a newly + allocated W_CPyExtPlaceHolderObject 'p'. You must also add + REFCNT_FROM_PYPY to ob->ob_refcnt. For cases where the PyObject + contains all the data, and the PyPy object is just a proxy. The + W_CPyExtPlaceHolderObject should have only a field that contains + the address of the PyObject, but that's outside the scope of the + GC. + +rawrefcount.from_obj(p) + + If there is a link from object 'p' made with create_link_pypy(), + returns the corresponding 'ob'. Otherwise, returns NULL. + +rawrefcount.to_obj(Class, ob) + + Returns ob->ob_pypy_link, cast to an instance of 'Class'. + + +Collection logic +---------------- + +Objects existing purely on the C side have ob->ob_pypy_link == 0; +these are purely reference counted. On the other hand, if +ob->ob_pypy_link != 0, then ob->ob_refcnt is at least REFCNT_FROM_PYPY +and the object is part of a "link". + +The idea is that links whose 'p' is not reachable from other PyPy +objects *and* whose 'ob->ob_refcnt' is REFCNT_FROM_PYPY or +REFCNT_FROM_PYPY_LIGHT are the ones who die. But it is more messy +because PyObjects still (usually) need to have a tp_dealloc called, +and this cannot occur immediately (and can do random things like +accessing other references this object points to, or resurrecting the +object). + +Let P = list of links created with rawrefcount.create_link_pypy() +and O = list of links created with rawrefcount.create_link_pyobj(). 
+The PyPy objects in the list O are all W_CPyExtPlaceHolderObject: all +the data is in the PyObjects, and all outsite references (if any) are +in C, as "PyObject *" fields. + +So, during the collection we do this about P links: + + for (p, ob) in P: + if ob->ob_refcnt != REFCNT_FROM_PYPY + and ob->ob_refcnt != REFCNT_FROM_PYPY_LIGHT: + mark 'p' as surviving, as well as all its dependencies + +At the end of the collection, the P and O links are both handled like +this: + + for (p, ob) in P + O: + if p is not surviving: # even if 'ob' might be surviving + unlink p and ob + if ob->ob_refcnt == REFCNT_FROM_PYPY_LIGHT: + free(ob) + elif ob->ob_refcnt > REFCNT_FROM_PYPY_LIGHT: + ob->ob_refcnt -= REFCNT_FROM_PYPY_LIGHT + else: + ob->ob_refcnt -= REFCNT_FROM_PYPY + if ob->ob_refcnt == 0: + invoke _Py_Dealloc(ob) later, outside the GC + + +GC Implementation +----------------- + +We need two copies of both the P list and O list, for young or old +objects. All four lists can be regular AddressLists of 'ob' objects. + +We also need an AddressDict mapping 'p' to 'ob' for all links in the P +list, and update it when PyPy objects move. + + +Further notes +------------- + +XXX +XXX the rest is the ideal world, but as a first step, we'll look +XXX for the minimal tweaks needed to adapt the existing cpyext +XXX + +For objects that are opaque in CPython, like , we always create +a PyPy object, and then when needed we make an empty PyObject and +attach it with create_link_pypy()/REFCNT_FROM_PYPY_LIGHT. + +For and objects, the corresponding PyObjects contain a +"long" or "double" field too. We link them with create_link_pypy() +and we can use REFCNT_FROM_PYPY_LIGHT too: 'tp_dealloc' doesn't +need to be called, and instead just calling free() is fine. + +For objects, we need both a PyPy and a PyObject side. These +are made with create_link_pypy()/REFCNT_FROM_PYPY. + +For custom PyXxxObjects allocated from the C extension module, we +need create_link_pyobj(). 
+ +For or objects coming from PyPy, we use +create_link_pypy()/REFCNT_FROM_PYPY_LIGHT with a PyObject +preallocated with the size of the string. We copy the string +lazily into that area if PyString_AS_STRING() is called. + +For , , or objects in the C extension +module, we first allocate it as only a PyObject, which supports +mutation of the data from C, like CPython. When it is exported to +PyPy we could make a W_CPyExtPlaceHolderObject with +create_link_pyobj(). + +For objects coming from PyPy, if they are not specialized, +then the PyPy side holds a regular reference to the items. Then we +can allocate a PyTupleObject and store in it borrowed PyObject +pointers to the items. Such a case is created with +create_link_pypy()/REFCNT_FROM_PYPY_LIGHT. If it is specialized, +then it doesn't work because the items are created just-in-time on the +PyPy side. In this case, the PyTupleObject needs to hold real +references to the PyObject items, and we use create_link_pypy()/ +REFCNT_FROM_PYPY. In all cases, we have a C array of PyObjects +that we can directly return from PySequence_Fast_ITEMS, PyTuple_ITEMS, +PyTuple_GetItem, and so on. + +For objects coming from PyPy, we can use a cpyext list +strategy. The list turns into a PyListObject, as if it had been +allocated from C in the first place. The special strategy can hold +(only) a direct reference to the PyListObject, and we can use either +create_link_pyobj() or create_link_pypy() (to be decided). +PySequence_Fast_ITEMS then works for lists too, and PyList_GetItem +can return a borrowed reference, and so on. 
diff --git a/pypy/doc/tool/makecontributor.py b/pypy/doc/tool/makecontributor.py --- a/pypy/doc/tool/makecontributor.py +++ b/pypy/doc/tool/makecontributor.py @@ -72,6 +72,7 @@ 'Anton Gulenko':['anton gulenko', 'anton_gulenko'], 'Richard Lancaster':['richardlancaster'], 'William Leslie':['William ML Leslie'], + 'Spenser Bauman':['Spenser Andrew Bauman'], } alias_map = {} diff --git a/pypy/doc/whatsnew-head.rst b/pypy/doc/whatsnew-head.rst --- a/pypy/doc/whatsnew-head.rst +++ b/pypy/doc/whatsnew-head.rst @@ -7,6 +7,9 @@ Fixed ``_PyLong_FromByteArray()``, which was buggy. +Fixed a crash with stacklets (or greenlets) on non-Linux machines +which showed up if you forget stacklets without resuming them. + .. branch: numpy-1.10 Fix tests to run cleanly with -A and start to fix micronumpy for upstream numpy @@ -38,7 +41,8 @@ .. branch: compress-numbering -Improve the memory signature of numbering instances in the JIT. +Improve the memory signature of numbering instances in the JIT. This should massively +decrease the amount of memory consumed by the JIT, which is significant for most programs. .. branch: fix-trace-too-long-heuristic @@ -124,6 +128,7 @@ Fix SSL tests by importing cpython's patch + .. branch: remove-getfield-pure Remove pure variants of ``getfield_gc_*`` operations from the JIT. Relevant @@ -148,3 +153,21 @@ Seperate structmember.h from Python.h Also enhance creating api functions to specify which header file they appear in (previously only pypy_decl.h) + +.. branch: llimpl + +Refactor register_external(), remove running_on_llinterp mechanism and +apply sandbox transform on externals at the end of annotation. + +.. branch: cffi-embedding-win32 + +.. branch: windows-vmprof-support + +vmprof should work on Windows. + + +.. branch: reorder-map-attributes + +When creating instances and adding attributes in several different orders +depending on some condition, the JIT would create too much code. This is now +fixed. 
\ No newline at end of file diff --git a/pypy/goal/targetpypystandalone.py b/pypy/goal/targetpypystandalone.py --- a/pypy/goal/targetpypystandalone.py +++ b/pypy/goal/targetpypystandalone.py @@ -239,6 +239,9 @@ raise Exception("Cannot use the --output option with PyPy " "when --shared is on (it is by default). " "See issue #1971.") + if sys.platform == 'win32': + config.translation.libname = '..\\..\\libs\\python27.lib' + thisdir.join('..', '..', 'libs').ensure(dir=1) if config.translation.thread: config.objspace.usemodules.thread = True diff --git a/pypy/interpreter/baseobjspace.py b/pypy/interpreter/baseobjspace.py --- a/pypy/interpreter/baseobjspace.py +++ b/pypy/interpreter/baseobjspace.py @@ -27,7 +27,7 @@ class W_Root(object): """This is the abstract root class of all wrapped objects that live in a 'normal' object space like StdObjSpace.""" - __slots__ = () + __slots__ = ('__weakref__',) user_overridden_class = False def getdict(self, space): @@ -306,7 +306,7 @@ return None -class W_InterpIterable(W_Root): +class InterpIterable(object): def __init__(self, space, w_iterable): self.w_iter = space.iter(w_iterable) self.space = space @@ -745,9 +745,13 @@ return self.int_w(self.hash(w_obj)) def len_w(self, w_obj): - """shotcut for space.int_w(space.len(w_obj))""" + """shortcut for space.int_w(space.len(w_obj))""" return self.int_w(self.len(w_obj)) + def contains_w(self, w_container, w_item): + """shortcut for space.is_true(space.contains(w_container, w_item))""" + return self.is_true(self.contains(w_container, w_item)) + def setitem_str(self, w_obj, key, w_value): return self.setitem(w_obj, self.wrap(key), w_value) @@ -846,7 +850,7 @@ return lst_w[:] # make the resulting list resizable def iteriterable(self, w_iterable): - return W_InterpIterable(self, w_iterable) + return InterpIterable(self, w_iterable) def _unpackiterable_unknown_length(self, w_iterator, w_iterable): """Unpack an iterable of unknown length into an interp-level @@ -1237,7 +1241,7 @@ if not 
isinstance(statement, PyCode): raise TypeError('space.exec_(): expected a string, code or PyCode object') w_key = self.wrap('__builtins__') - if not self.is_true(self.contains(w_globals, w_key)): + if not self.contains_w(w_globals, w_key): self.setitem(w_globals, w_key, self.wrap(self.builtin)) return statement.exec_code(self, w_globals, w_locals) diff --git a/pypy/interpreter/pyparser/pytokenizer.py b/pypy/interpreter/pyparser/pytokenizer.py --- a/pypy/interpreter/pyparser/pytokenizer.py +++ b/pypy/interpreter/pyparser/pytokenizer.py @@ -91,6 +91,7 @@ strstart = (0, 0, "") for line in lines: lnum = lnum + 1 + line = universal_newline(line) pos, max = 0, len(line) if contstr: @@ -259,3 +260,14 @@ token_list.append((tokens.ENDMARKER, '', lnum, pos, line)) return token_list + + +def universal_newline(line): + # show annotator that indexes below are non-negative + line_len_m2 = len(line) - 2 + if line_len_m2 >= 0 and line[-2] == '\r' and line[-1] == '\n': + return line[:line_len_m2] + '\n' + line_len_m1 = len(line) - 1 + if line_len_m1 >= 0 and line[-1] == '\r': + return line[:line_len_m1] + '\n' + return line diff --git a/pypy/interpreter/pyparser/test/test_pyparse.py b/pypy/interpreter/pyparser/test/test_pyparse.py --- a/pypy/interpreter/pyparser/test/test_pyparse.py +++ b/pypy/interpreter/pyparser/test/test_pyparse.py @@ -158,3 +158,10 @@ def test_print_function(self): self.parse("from __future__ import print_function\nx = print\n") + + def test_universal_newlines(self): + fmt = 'stuff = """hello%sworld"""' + expected_tree = self.parse(fmt % '\n') + for linefeed in ["\r\n","\r"]: + tree = self.parse(fmt % linefeed) + assert expected_tree == tree diff --git a/pypy/interpreter/typedef.py b/pypy/interpreter/typedef.py --- a/pypy/interpreter/typedef.py +++ b/pypy/interpreter/typedef.py @@ -156,20 +156,6 @@ get_unique_interplevel_subclass._annspecialcase_ = "specialize:memo" _subclass_cache = {} -def enum_interplevel_subclasses(config, cls): - """Return a list of all 
the extra interp-level subclasses of 'cls' that - can be built by get_unique_interplevel_subclass().""" - result = [] - for flag1 in (False, True): - for flag2 in (False, True): - for flag3 in (False, True): - for flag4 in (False, True): - result.append(get_unique_interplevel_subclass( - config, cls, flag1, flag2, flag3, flag4)) - result = dict.fromkeys(result) - assert len(result) <= 6 - return result.keys() - def _getusercls(config, cls, wants_dict, wants_slots, wants_del, weakrefable): typedef = cls.typedef if wants_dict and typedef.hasdict: @@ -262,7 +248,7 @@ def user_setup(self, space, w_subtype): self.space = space self.w__class__ = w_subtype - self.user_setup_slots(w_subtype.nslots) + self.user_setup_slots(w_subtype.layout.nslots) def user_setup_slots(self, nslots): assert nslots == 0 diff --git a/pypy/module/__builtin__/interp_classobj.py b/pypy/module/__builtin__/interp_classobj.py --- a/pypy/module/__builtin__/interp_classobj.py +++ b/pypy/module/__builtin__/interp_classobj.py @@ -20,7 +20,7 @@ if not space.isinstance_w(w_dict, space.w_dict): raise_type_err(space, 'bases', 'tuple', w_bases) - if not space.is_true(space.contains(w_dict, space.wrap("__doc__"))): + if not space.contains_w(w_dict, space.wrap("__doc__")): space.setitem(w_dict, space.wrap("__doc__"), space.w_None) # XXX missing: lengthy and obscure logic about "__module__" diff --git a/pypy/module/_cffi_backend/__init__.py b/pypy/module/_cffi_backend/__init__.py --- a/pypy/module/_cffi_backend/__init__.py +++ b/pypy/module/_cffi_backend/__init__.py @@ -3,7 +3,7 @@ from rpython.rlib import rdynload, clibffi, entrypoint from rpython.rtyper.lltypesystem import rffi -VERSION = "1.5.1" +VERSION = "1.5.2" FFI_DEFAULT_ABI = clibffi.FFI_DEFAULT_ABI try: diff --git a/pypy/module/_cffi_backend/embedding.py b/pypy/module/_cffi_backend/embedding.py --- a/pypy/module/_cffi_backend/embedding.py +++ b/pypy/module/_cffi_backend/embedding.py @@ -57,7 +57,7 @@ # pypy_init_embedded_cffi_module(). 
if not glob.patched_sys: space.appexec([], """(): - import os + import os, sys sys.stdin = sys.__stdin__ = os.fdopen(0, 'rb', 0) sys.stdout = sys.__stdout__ = os.fdopen(1, 'wb', 0) sys.stderr = sys.__stderr__ = os.fdopen(2, 'wb', 0) diff --git a/pypy/module/_cffi_backend/test/_backend_test_c.py b/pypy/module/_cffi_backend/test/_backend_test_c.py --- a/pypy/module/_cffi_backend/test/_backend_test_c.py +++ b/pypy/module/_cffi_backend/test/_backend_test_c.py @@ -1,7 +1,7 @@ # ____________________________________________________________ import sys -assert __version__ == "1.5.1", ("This test_c.py file is for testing a version" +assert __version__ == "1.5.2", ("This test_c.py file is for testing a version" " of cffi that differs from the one that we" " get from 'import _cffi_backend'") if sys.version_info < (3,): diff --git a/pypy/module/_demo/test/test_import.py b/pypy/module/_demo/test/test_import.py --- a/pypy/module/_demo/test/test_import.py +++ b/pypy/module/_demo/test/test_import.py @@ -12,8 +12,7 @@ w_modules = space.sys.get('modules') assert _demo.Module.demo_events == ['setup'] - assert not space.is_true(space.contains(w_modules, - space.wrap('_demo'))) + assert not space.contains_w(w_modules, space.wrap('_demo')) # first import w_import = space.builtin.get('__import__') diff --git a/pypy/module/_vmprof/interp_vmprof.py b/pypy/module/_vmprof/interp_vmprof.py --- a/pypy/module/_vmprof/interp_vmprof.py +++ b/pypy/module/_vmprof/interp_vmprof.py @@ -60,7 +60,7 @@ Must be smaller than 1.0 """ w_modules = space.sys.get('modules') - if space.is_true(space.contains(w_modules, space.wrap('_continuation'))): + if space.contains_w(w_modules, space.wrap('_continuation')): space.warn(space.wrap("Using _continuation/greenlet/stacklet together " "with vmprof will crash"), space.w_RuntimeWarning) diff --git a/pypy/module/cpyext/__init__.py b/pypy/module/cpyext/__init__.py --- a/pypy/module/cpyext/__init__.py +++ b/pypy/module/cpyext/__init__.py @@ -34,7 +34,7 @@ import 
pypy.module.cpyext.pyerrors import pypy.module.cpyext.typeobject import pypy.module.cpyext.object -import pypy.module.cpyext.stringobject +import pypy.module.cpyext.bytesobject import pypy.module.cpyext.tupleobject import pypy.module.cpyext.setobject import pypy.module.cpyext.dictobject @@ -60,7 +60,6 @@ import pypy.module.cpyext.funcobject import pypy.module.cpyext.frameobject import pypy.module.cpyext.classobject -import pypy.module.cpyext.pypyintf import pypy.module.cpyext.memoryobject import pypy.module.cpyext.codecs import pypy.module.cpyext.pyfile diff --git a/pypy/module/cpyext/api.py b/pypy/module/cpyext/api.py --- a/pypy/module/cpyext/api.py +++ b/pypy/module/cpyext/api.py @@ -9,7 +9,7 @@ from rpython.rtyper.tool import rffi_platform from rpython.rtyper.lltypesystem import ll2ctypes from rpython.rtyper.annlowlevel import llhelper -from rpython.rlib.objectmodel import we_are_translated +from rpython.rlib.objectmodel import we_are_translated, keepalive_until_here from rpython.translator import cdir from rpython.translator.tool.cbuild import ExternalCompilationInfo from rpython.translator.gensupp import NameManager @@ -30,13 +30,13 @@ from rpython.rlib.rposix import is_valid_fd, validate_fd from rpython.rlib.unroll import unrolling_iterable from rpython.rlib.objectmodel import specialize -from rpython.rlib.exports import export_struct from pypy.module import exceptions from pypy.module.exceptions import interp_exceptions # CPython 2.4 compatibility from py.builtin import BaseException from rpython.tool.sourcetools import func_with_new_name from rpython.rtyper.lltypesystem.lloperation import llop +from rpython.rlib import rawrefcount DEBUG_WRAPPER = True @@ -194,7 +194,7 @@ class ApiFunction: def __init__(self, argtypes, restype, callable, error=_NOT_SPECIFIED, - c_name=None, gil=None): + c_name=None, gil=None, result_borrowed=False): self.argtypes = argtypes self.restype = restype self.functype = lltype.Ptr(lltype.FuncType(argtypes, restype)) @@ -211,17 
+211,15 @@ self.argnames = argnames[1:] assert len(self.argnames) == len(self.argtypes) self.gil = gil + self.result_borrowed = result_borrowed + # + def get_llhelper(space): + return llhelper(self.functype, self.get_wrapper(space)) + self.get_llhelper = get_llhelper def _freeze_(self): return True - def get_llhelper(self, space): - llh = getattr(self, '_llhelper', None) - if llh is None: - llh = llhelper(self.functype, self.get_wrapper(space)) - self._llhelper = llh - return llh - @specialize.memo() def get_wrapper(self, space): wrapper = getattr(self, '_wrapper', None) @@ -234,7 +232,7 @@ return wrapper def cpython_api(argtypes, restype, error=_NOT_SPECIFIED, header='pypy_decl.h', - gil=None): + gil=None, result_borrowed=False): """ Declares a function to be exported. - `argtypes`, `restype` are lltypes and describe the function signature. @@ -263,13 +261,15 @@ rffi.cast(restype, 0) == 0) def decorate(func): + func._always_inline_ = 'try' func_name = func.func_name if header is not None: c_name = None else: c_name = func_name api_function = ApiFunction(argtypes, restype, func, error, - c_name=c_name, gil=gil) + c_name=c_name, gil=gil, + result_borrowed=result_borrowed) func.api_func = api_function if header is not None: @@ -280,6 +280,10 @@ raise ValueError("function %s has no return value for exceptions" % func) def make_unwrapper(catch_exception): + # ZZZ is this whole logic really needed??? It seems to be only + # for RPython code calling PyXxx() functions directly. 
I would + # think that usually directly calling the function is clean + # enough now names = api_function.argnames types_names_enum_ui = unrolling_iterable(enumerate( zip(api_function.argtypes, @@ -287,56 +291,58 @@ @specialize.ll() def unwrapper(space, *args): - from pypy.module.cpyext.pyobject import Py_DecRef - from pypy.module.cpyext.pyobject import make_ref, from_ref - from pypy.module.cpyext.pyobject import Reference + from pypy.module.cpyext.pyobject import Py_DecRef, is_pyobj + from pypy.module.cpyext.pyobject import from_ref, as_pyobj newargs = () - to_decref = [] + keepalives = () assert len(args) == len(api_function.argtypes) for i, (ARG, is_wrapped) in types_names_enum_ui: input_arg = args[i] if is_PyObject(ARG) and not is_wrapped: - # build a reference - if input_arg is None: - arg = lltype.nullptr(PyObject.TO) - elif isinstance(input_arg, W_Root): - ref = make_ref(space, input_arg) - to_decref.append(ref) - arg = rffi.cast(ARG, ref) + # build a 'PyObject *' (not holding a reference) + if not is_pyobj(input_arg): + keepalives += (input_arg,) + arg = rffi.cast(ARG, as_pyobj(space, input_arg)) + else: + arg = rffi.cast(ARG, input_arg) + elif is_PyObject(ARG) and is_wrapped: + # build a W_Root, possibly from a 'PyObject *' + if is_pyobj(input_arg): + arg = from_ref(space, input_arg) else: arg = input_arg - elif is_PyObject(ARG) and is_wrapped: - # convert to a wrapped object - if input_arg is None: - arg = input_arg - elif isinstance(input_arg, W_Root): - arg = input_arg - else: - try: - arg = from_ref(space, - rffi.cast(PyObject, input_arg)) - except TypeError, e: - err = OperationError(space.w_TypeError, - space.wrap( - "could not cast arg to PyObject")) - if not catch_exception: - raise err - state = space.fromcache(State) - state.set_exception(err) - if is_PyObject(restype): - return None - else: - return api_function.error_value + + ## ZZZ: for is_pyobj: + ## try: + ## arg = from_ref(space, + ## rffi.cast(PyObject, input_arg)) + ## except TypeError, 
e: + ## err = OperationError(space.w_TypeError, + ## space.wrap( + ## "could not cast arg to PyObject")) + ## if not catch_exception: + ## raise err + ## state = space.fromcache(State) + ## state.set_exception(err) + ## if is_PyObject(restype): + ## return None + ## else: + ## return api_function.error_value else: - # convert to a wrapped object + # arg is not declared as PyObject, no magic arg = input_arg newargs += (arg, ) - try: + if not catch_exception: + try: + res = func(space, *newargs) + finally: + keepalive_until_here(*keepalives) + else: + # non-rpython variant + assert not we_are_translated() try: res = func(space, *newargs) except OperationError, e: - if not catch_exception: - raise if not hasattr(api_function, "error_value"): raise state = space.fromcache(State) @@ -345,21 +351,13 @@ return None else: return api_function.error_value - if not we_are_translated(): - got_integer = isinstance(res, (int, long, float)) - assert got_integer == expect_integer,'got %r not integer' % res - if res is None: - return None - elif isinstance(res, Reference): - return res.get_wrapped(space) - else: - return res - finally: - for arg in to_decref: - Py_DecRef(space, arg) + # 'keepalives' is alive here (it's not rpython) + got_integer = isinstance(res, (int, long, float)) + assert got_integer == expect_integer, ( + 'got %r not integer' % (res,)) + return res unwrapper.func = func unwrapper.api_func = api_function - unwrapper._always_inline_ = 'try' return unwrapper unwrapper_catch = make_unwrapper(True) @@ -501,7 +499,7 @@ GLOBALS['%s#' % (cpyname, )] = ('PyTypeObject*', pypyexpr) for cpyname in '''PyMethodObject PyListObject PyLongObject - PyDictObject PyTupleObject PyClassObject'''.split(): + PyDictObject PyClassObject'''.split(): FORWARD_DECLS.append('typedef struct { PyObject_HEAD } %s' % (cpyname, )) build_exported_objects() @@ -514,14 +512,16 @@ "PyIntObject*": PyIntObject, "PyDateTime_CAPI*": lltype.Ptr(PyDateTime_CAPI)}[ctype] +# Note: as a special case, 
"PyObject" is the pointer type in RPython, +# corresponding to "PyObject *" in C. We do that only for PyObject. +# For example, "PyTypeObject" is the struct type even in RPython. PyTypeObject = lltype.ForwardReference() PyTypeObjectPtr = lltype.Ptr(PyTypeObject) -# It is important that these PyObjects are allocated in a raw fashion -# Thus we cannot save a forward pointer to the wrapped object -# So we need a forward and backward mapping in our State instance PyObjectStruct = lltype.ForwardReference() PyObject = lltype.Ptr(PyObjectStruct) -PyObjectFields = (("ob_refcnt", lltype.Signed), ("ob_type", PyTypeObjectPtr)) +PyObjectFields = (("ob_refcnt", lltype.Signed), + ("ob_pypy_link", lltype.Signed), + ("ob_type", PyTypeObjectPtr)) PyVarObjectFields = PyObjectFields + (("ob_size", Py_ssize_t), ) cpython_struct('PyObject', PyObjectFields, PyObjectStruct) PyVarObjectStruct = cpython_struct("PyVarObject", PyVarObjectFields) @@ -618,8 +618,8 @@ @specialize.ll() def wrapper(*args): - from pypy.module.cpyext.pyobject import make_ref, from_ref - from pypy.module.cpyext.pyobject import Reference + from pypy.module.cpyext.pyobject import make_ref, from_ref, is_pyobj + from pypy.module.cpyext.pyobject import as_pyobj # we hope that malloc removal removes the newtuple() that is # inserted exactly here by the varargs specializer if gil_acquire: @@ -628,6 +628,7 @@ llop.gc_stack_bottom(lltype.Void) # marker for trackgcroot.py retval = fatal_value boxed_args = () + tb = None try: if not we_are_translated() and DEBUG_WRAPPER: print >>sys.stderr, callable, @@ -635,10 +636,8 @@ for i, (typ, is_wrapped) in argtypes_enum_ui: arg = args[i] if is_PyObject(typ) and is_wrapped: - if arg: - arg_conv = from_ref(space, rffi.cast(PyObject, arg)) - else: - arg_conv = None + assert is_pyobj(arg) + arg_conv = from_ref(space, rffi.cast(PyObject, arg)) else: arg_conv = arg boxed_args += (arg_conv, ) @@ -653,6 +652,7 @@ except BaseException, e: failed = True if not we_are_translated(): + tb = 
sys.exc_info()[2] message = repr(e) import traceback traceback.print_exc() @@ -671,29 +671,34 @@ retval = error_value elif is_PyObject(callable.api_func.restype): - if result is None: - retval = rffi.cast(callable.api_func.restype, - make_ref(space, None)) - elif isinstance(result, Reference): - retval = result.get_ref(space) - elif not rffi._isllptr(result): - retval = rffi.cast(callable.api_func.restype, - make_ref(space, result)) + if is_pyobj(result): + retval = result else: - retval = result + if result is not None: + if callable.api_func.result_borrowed: + retval = as_pyobj(space, result) + else: + retval = make_ref(space, result) + retval = rffi.cast(callable.api_func.restype, retval) + else: + retval = lltype.nullptr(PyObject.TO) elif callable.api_func.restype is not lltype.Void: retval = rffi.cast(callable.api_func.restype, result) except Exception, e: print 'Fatal error in cpyext, CPython compatibility layer, calling', callable.__name__ print 'Either report a bug or consider not using this particular extension' if not we_are_translated(): + if tb is None: + tb = sys.exc_info()[2] import traceback traceback.print_exc() - print str(e) + if sys.stdout == sys.__stdout__: + import pdb; pdb.post_mortem(tb) # we can't do much here, since we're in ctypes, swallow else: print str(e) pypy_debug_catch_fatal_exception() + assert False rffi.stackcounter.stacks_counter -= 1 if gil_release: rgil.release() @@ -827,6 +832,19 @@ outputfilename=str(udir / "module_cache" / "pypyapi")) modulename = py.path.local(eci.libraries[-1]) + def dealloc_trigger(): + from pypy.module.cpyext.pyobject import _Py_Dealloc + print 'dealloc_trigger...' 
+ while True: + ob = rawrefcount.next_dead(PyObject) + if not ob: + break + print ob + _Py_Dealloc(space, ob) + print 'dealloc_trigger DONE' + return "RETRY" + rawrefcount.init(dealloc_trigger) + run_bootstrap_functions(space) # load the bridge, and init structure @@ -836,9 +854,9 @@ space.fromcache(State).install_dll(eci) # populate static data - builder = StaticObjectBuilder(space) + builder = space.fromcache(StaticObjectBuilder) for name, (typ, expr) in GLOBALS.iteritems(): - from pypy.module import cpyext + from pypy.module import cpyext # for the eval() below w_obj = eval(expr) if name.endswith('#'): name = name[:-1] @@ -894,27 +912,44 @@ class StaticObjectBuilder: def __init__(self, space): self.space = space - self.to_attach = [] + self.static_pyobjs = [] + self.static_objs_w = [] + self.cpyext_type_init = None + # + # add a "method" that is overridden in setup_library() + # ('self.static_pyobjs' is completely ignored in that case) + self.get_static_pyobjs = lambda: self.static_pyobjs def prepare(self, py_obj, w_obj): - from pypy.module.cpyext.pyobject import track_reference - py_obj.c_ob_refcnt = 1 - track_reference(self.space, py_obj, w_obj) - self.to_attach.append((py_obj, w_obj)) + "NOT_RPYTHON" + if py_obj: + py_obj.c_ob_refcnt = 1 # 1 for kept immortal + self.static_pyobjs.append(py_obj) + self.static_objs_w.append(w_obj) def attach_all(self): + # this is RPython, called once in pypy-c when it imports cpyext from pypy.module.cpyext.pyobject import get_typedescr, make_ref from pypy.module.cpyext.typeobject import finish_type_1, finish_type_2 + from pypy.module.cpyext.pyobject import track_reference + # space = self.space - space._cpyext_type_init = [] - for py_obj, w_obj in self.to_attach: + static_pyobjs = self.get_static_pyobjs() + static_objs_w = self.static_objs_w + for i in range(len(static_objs_w)): + track_reference(space, static_pyobjs[i], static_objs_w[i]) + # + self.cpyext_type_init = [] + for i in range(len(static_objs_w)): + py_obj = 
static_pyobjs[i] + w_obj = static_objs_w[i] w_type = space.type(w_obj) - typedescr = get_typedescr(w_type.instancetypedef) + typedescr = get_typedescr(w_type.layout.typedef) py_obj.c_ob_type = rffi.cast(PyTypeObjectPtr, make_ref(space, w_type)) typedescr.attach(space, py_obj, w_obj) - cpyext_type_init = space._cpyext_type_init - del space._cpyext_type_init + cpyext_type_init = self.cpyext_type_init + self.cpyext_type_init = None for pto, w_type in cpyext_type_init: finish_type_1(space, pto) finish_type_2(space, pto, w_type) @@ -1067,7 +1102,7 @@ if name.endswith('#'): structs.append('%s %s;' % (typ[:-1], name[:-1])) elif name.startswith('PyExc_'): - structs.append('extern PyTypeObject _%s;' % (name,)) + structs.append('PyTypeObject _%s;' % (name,)) structs.append('PyObject* %s = (PyObject*)&_%s;' % (name, name)) elif typ == 'PyDateTime_CAPI*': structs.append('%s %s = NULL;' % (typ, name)) @@ -1107,7 +1142,7 @@ if not use_micronumpy: return use_micronumpy # import to register api functions by side-effect - import pypy.module.cpyext.ndarrayobject + import pypy.module.cpyext.ndarrayobject global GLOBALS, SYMBOLS_C, separate_module_files GLOBALS["PyArray_Type#"]= ('PyTypeObject*', "space.gettypeobject(W_NDimArray.typedef)") SYMBOLS_C += ['PyArray_Type', '_PyArray_FILLWBYTE', '_PyArray_ZEROS'] @@ -1116,10 +1151,8 @@ def setup_library(space): "NOT_RPYTHON" - from pypy.module.cpyext.pyobject import make_ref use_micronumpy = setup_micronumpy(space) - - export_symbols = list(FUNCTIONS) + SYMBOLS_C + list(GLOBALS) + export_symbols = sorted(FUNCTIONS) + sorted(SYMBOLS_C) + sorted(GLOBALS) from rpython.translator.c.database import LowLevelDatabase db = LowLevelDatabase() @@ -1135,41 +1168,37 @@ run_bootstrap_functions(space) setup_va_functions(eci) - from pypy.module import cpyext # for eval() below - - # Set up the types. Needs a special case, because of the - # immediate cycle involving 'c_ob_type', and because we don't - # want these types to be Py_TPFLAGS_HEAPTYPE. 
- static_types = {} - for name, (typ, expr) in GLOBALS.items(): - if typ == 'PyTypeObject*': - pto = lltype.malloc(PyTypeObject, immortal=True, - zero=True, flavor='raw') - pto.c_ob_refcnt = 1 - pto.c_tp_basicsize = -1 - static_types[name] = pto - builder = StaticObjectBuilder(space) - for name, pto in static_types.items(): - pto.c_ob_type = static_types['PyType_Type#'] - w_type = eval(GLOBALS[name][1]) - builder.prepare(rffi.cast(PyObject, pto), w_type) - builder.attach_all() - - # populate static data - for name, (typ, expr) in GLOBALS.iteritems(): - name = name.replace("#", "") - if name.startswith('PyExc_'): + # emit uninitialized static data + builder = space.fromcache(StaticObjectBuilder) + lines = ['PyObject *pypy_static_pyobjs[] = {\n'] + include_lines = ['RPY_EXTERN PyObject *pypy_static_pyobjs[];\n'] + for name, (typ, expr) in sorted(GLOBALS.items()): + if name.endswith('#'): + assert typ in ('PyObject*', 'PyTypeObject*', 'PyIntObject*') + typ, name = typ[:-1], name[:-1] + elif name.startswith('PyExc_'): + typ = 'PyTypeObject' name = '_' + name - w_obj = eval(expr) - if typ in ('PyObject*', 'PyTypeObject*', 'PyIntObject*'): - struct_ptr = make_ref(space, w_obj) elif typ == 'PyDateTime_CAPI*': continue else: assert False, "Unknown static data: %s %s" % (typ, name) - struct = rffi.cast(get_structtype_for_ctype(typ), struct_ptr)._obj - struct._compilation_info = eci - export_struct(name, struct) + + from pypy.module import cpyext # for the eval() below + w_obj = eval(expr) + builder.prepare(None, w_obj) + lines.append('\t(PyObject *)&%s,\n' % (name,)) + include_lines.append('RPY_EXPORTED %s %s;\n' % (typ, name)) + + lines.append('};\n') + eci2 = CConfig._compilation_info_.merge(ExternalCompilationInfo( + separate_module_sources = [''.join(lines)], + post_include_bits = [''.join(include_lines)], + )) + # override this method to return a pointer to this C array directly + builder.get_static_pyobjs = rffi.CExternVariable( + PyObjectP, 'pypy_static_pyobjs', 
eci2, c_type='PyObject **', + getter_only=True, declare_as_extern=False) for name, func in FUNCTIONS.iteritems(): newname = mangle_name('PyPy', name) or name @@ -1180,6 +1209,10 @@ trunk_include = pypydir.dirpath() / 'include' copy_header_files(trunk_include, use_micronumpy) +def init_static_data_translated(space): + builder = space.fromcache(StaticObjectBuilder) + builder.attach_all() + def _load_from_cffi(space, name, path, initptr): from pypy.module._cffi_backend import cffi1_module cffi1_module.load_cffi1_module(space, name, path, initptr) @@ -1262,22 +1295,18 @@ @specialize.ll() def generic_cpy_call(space, func, *args): FT = lltype.typeOf(func).TO - return make_generic_cpy_call(FT, True, False)(space, func, *args) - - at specialize.ll() -def generic_cpy_call_dont_decref(space, func, *args): - FT = lltype.typeOf(func).TO - return make_generic_cpy_call(FT, False, False)(space, func, *args) + return make_generic_cpy_call(FT, False)(space, func, *args) @specialize.ll() def generic_cpy_call_expect_null(space, func, *args): FT = lltype.typeOf(func).TO - return make_generic_cpy_call(FT, True, True)(space, func, *args) + return make_generic_cpy_call(FT, True)(space, func, *args) @specialize.memo() -def make_generic_cpy_call(FT, decref_args, expect_null): +def make_generic_cpy_call(FT, expect_null): from pypy.module.cpyext.pyobject import make_ref, from_ref, Py_DecRef - from pypy.module.cpyext.pyobject import RefcountState + from pypy.module.cpyext.pyobject import is_pyobj, as_pyobj + from pypy.module.cpyext.pyobject import get_w_obj_and_decref from pypy.module.cpyext.pyerrors import PyErr_Occurred unrolling_arg_types = unrolling_iterable(enumerate(FT.ARGS)) RESULT_TYPE = FT.RESULT @@ -1305,65 +1334,49 @@ @specialize.ll() def generic_cpy_call(space, func, *args): boxed_args = () - to_decref = [] + keepalives = () assert len(args) == len(FT.ARGS) for i, ARG in unrolling_arg_types: arg = args[i] if is_PyObject(ARG): - if arg is None: - boxed_args += 
(lltype.nullptr(PyObject.TO),) - elif isinstance(arg, W_Root): - ref = make_ref(space, arg) - boxed_args += (ref,) - if decref_args: - to_decref.append(ref) - else: - boxed_args += (arg,) - else: - boxed_args += (arg,) + if not is_pyobj(arg): + keepalives += (arg,) + arg = as_pyobj(space, arg) + boxed_args += (arg,) try: - # create a new container for borrowed references - state = space.fromcache(RefcountState) - old_container = state.swap_borrow_container(None) - try: - # Call the function - result = call_external_function(func, *boxed_args) - finally: - state.swap_borrow_container(old_container) + # Call the function + result = call_external_function(func, *boxed_args) + finally: + keepalive_until_here(*keepalives) - if is_PyObject(RESULT_TYPE): - if result is None: - ret = result - elif isinstance(result, W_Root): - ret = result + if is_PyObject(RESULT_TYPE): + if not is_pyobj(result): + ret = result + else: + # The object reference returned from a C function + # that is called from Python must be an owned reference + # - ownership is transferred from the function to its caller. + if result: + ret = get_w_obj_and_decref(space, result) else: - ret = from_ref(space, result) - # The object reference returned from a C function - # that is called from Python must be an owned reference - # - ownership is transferred from the function to its caller. 
- if result: - Py_DecRef(space, result) + ret = None - # Check for exception consistency - has_error = PyErr_Occurred(space) is not None - has_result = ret is not None - if has_error and has_result: - raise OperationError(space.w_SystemError, space.wrap( - "An exception was set, but function returned a value")) - elif not expect_null and not has_error and not has_result: - raise OperationError(space.w_SystemError, space.wrap( - "Function returned a NULL result without setting an exception")) + # Check for exception consistency + has_error = PyErr_Occurred(space) is not None + has_result = ret is not None + if has_error and has_result: + raise OperationError(space.w_SystemError, space.wrap( + "An exception was set, but function returned a value")) + elif not expect_null and not has_error and not has_result: + raise OperationError(space.w_SystemError, space.wrap( + "Function returned a NULL result without setting an exception")) - if has_error: - state = space.fromcache(State) - state.check_and_raise_exception() + if has_error: + state = space.fromcache(State) + state.check_and_raise_exception() - return ret - return result - finally: - if decref_args: - for ref in to_decref: - Py_DecRef(space, ref) + return ret + return result + return generic_cpy_call - diff --git a/pypy/module/cpyext/bufferobject.py b/pypy/module/cpyext/bufferobject.py --- a/pypy/module/cpyext/bufferobject.py +++ b/pypy/module/cpyext/bufferobject.py @@ -25,7 +25,7 @@ @bootstrap_function def init_bufferobject(space): "Type description of PyBufferObject" - make_typedescr(space.w_buffer.instancetypedef, + make_typedescr(space.w_buffer.layout.typedef, basestruct=PyBufferObject.TO, attach=buffer_attach, dealloc=buffer_dealloc, diff --git a/pypy/module/cpyext/bytesobject.py b/pypy/module/cpyext/bytesobject.py new file mode 100644 --- /dev/null +++ b/pypy/module/cpyext/bytesobject.py @@ -0,0 +1,319 @@ +from pypy.interpreter.error import OperationError +from rpython.rtyper.lltypesystem import rffi, lltype 
+from pypy.module.cpyext.api import ( + cpython_api, cpython_struct, bootstrap_function, build_type_checkers, + PyObjectFields, Py_ssize_t, CONST_STRING, CANNOT_FAIL) +from pypy.module.cpyext.pyerrors import PyErr_BadArgument +from pypy.module.cpyext.pyobject import ( + PyObject, PyObjectP, Py_DecRef, make_ref, from_ref, track_reference, + make_typedescr, get_typedescr) + +## +## Implementation of PyStringObject +## ================================ +## +## The problem +## ----------- +## +## PyString_AsString() must return a (non-movable) pointer to the underlying +## buffer, whereas pypy strings are movable. C code may temporarily store +## this address and use it, as long as it owns a reference to the PyObject. +## There is no "release" function to specify that the pointer is not needed +## any more. +## +## Also, the pointer may be used to fill the initial value of string. This is +## valid only when the string was just allocated, and is not used elsewhere. +## +## Solution +## -------- +## +## PyStringObject contains two additional members: the size and a pointer to a +## char buffer; it may be NULL. +## +## - A string allocated by pypy will be converted into a PyStringObject with a +## NULL buffer. The first time PyString_AsString() is called, memory is +## allocated (with flavor='raw') and content is copied. +## +## - A string allocated with PyString_FromStringAndSize(NULL, size) will +## allocate a PyStringObject structure, and a buffer with the specified +## size, but the reference won't be stored in the global map; there is no +## corresponding object in pypy. When from_ref() or Py_INCREF() is called, +## the pypy string is created, and added to the global map of tracked +## objects. The buffer is then supposed to be immutable. +## +## - _PyString_Resize() works only on not-yet-pypy'd strings, and returns a +## similar object. +## +## - PyString_Size() doesn't need to force the object. +## +## - There could be an (expensive!) 
check in from_ref() that the buffer still +## corresponds to the pypy gc-managed string. +## + +PyStringObjectStruct = lltype.ForwardReference() +PyStringObject = lltype.Ptr(PyStringObjectStruct) +PyStringObjectFields = PyObjectFields + \ + (("buffer", rffi.CCHARP), ("size", Py_ssize_t)) +cpython_struct("PyStringObject", PyStringObjectFields, PyStringObjectStruct) + + at bootstrap_function +def init_stringobject(space): + "Type description of PyStringObject" + make_typedescr(space.w_str.layout.typedef, + basestruct=PyStringObject.TO, + attach=string_attach, + dealloc=string_dealloc, + realize=string_realize) + +PyString_Check, PyString_CheckExact = build_type_checkers("String", "w_str") + +def new_empty_str(space, length): + """ + Allocate a PyStringObject and its buffer, but without a corresponding + interpreter object. The buffer may be mutated, until string_realize() is + called. Refcount of the result is 1. + """ + typedescr = get_typedescr(space.w_str.layout.typedef) + py_obj = typedescr.allocate(space, space.w_str) + py_str = rffi.cast(PyStringObject, py_obj) + + buflen = length + 1 + py_str.c_size = length + py_str.c_buffer = lltype.malloc(rffi.CCHARP.TO, buflen, + flavor='raw', zero=True) + return py_str + +def string_attach(space, py_obj, w_obj): + """ + Fills a newly allocated PyStringObject with the given string object. The + buffer must not be modified. + """ + py_str = rffi.cast(PyStringObject, py_obj) + py_str.c_size = len(space.str_w(w_obj)) + py_str.c_buffer = lltype.nullptr(rffi.CCHARP.TO) + +def string_realize(space, py_obj): + """ + Creates the string in the interpreter. The PyStringObject buffer must not + be modified after this call. 
+ """ + py_str = rffi.cast(PyStringObject, py_obj) + s = rffi.charpsize2str(py_str.c_buffer, py_str.c_size) + w_obj = space.wrap(s) + track_reference(space, py_obj, w_obj) + return w_obj + + at cpython_api([PyObject], lltype.Void, header=None) +def string_dealloc(space, py_obj): + """Frees allocated PyStringObject resources. + """ + py_str = rffi.cast(PyStringObject, py_obj) + if py_str.c_buffer: + lltype.free(py_str.c_buffer, flavor="raw") + from pypy.module.cpyext.object import PyObject_dealloc + PyObject_dealloc(space, py_obj) + +#_______________________________________________________________________ + + at cpython_api([CONST_STRING, Py_ssize_t], PyObject) +def PyString_FromStringAndSize(space, char_p, length): + if char_p: + s = rffi.charpsize2str(char_p, length) + return make_ref(space, space.wrap(s)) + else: + return rffi.cast(PyObject, new_empty_str(space, length)) + + at cpython_api([CONST_STRING], PyObject) +def PyString_FromString(space, char_p): + s = rffi.charp2str(char_p) + return space.wrap(s) + + at cpython_api([PyObject], rffi.CCHARP, error=0) +def PyString_AsString(space, ref): + if from_ref(space, rffi.cast(PyObject, ref.c_ob_type)) is space.w_str: + pass # typecheck returned "ok" without forcing 'ref' at all + elif not PyString_Check(space, ref): # otherwise, use the alternate way + raise OperationError(space.w_TypeError, space.wrap( + "PyString_AsString only support strings")) + ref_str = rffi.cast(PyStringObject, ref) + if not ref_str.c_buffer: + # copy string buffer + w_str = from_ref(space, ref) + s = space.str_w(w_str) + ref_str.c_buffer = rffi.str2charp(s) + return ref_str.c_buffer + + at cpython_api([PyObject, rffi.CCHARPP, rffi.CArrayPtr(Py_ssize_t)], rffi.INT_real, error=-1) +def PyString_AsStringAndSize(space, ref, buffer, length): + if not PyString_Check(space, ref): + raise OperationError(space.w_TypeError, space.wrap( + "PyString_AsStringAndSize only support strings")) + ref_str = rffi.cast(PyStringObject, ref) + if not 
ref_str.c_buffer: + # copy string buffer + w_str = from_ref(space, ref) + s = space.str_w(w_str) + ref_str.c_buffer = rffi.str2charp(s) + buffer[0] = ref_str.c_buffer + if length: + length[0] = ref_str.c_size + else: + i = 0 + while ref_str.c_buffer[i] != '\0': + i += 1 + if i != ref_str.c_size: + raise OperationError(space.w_TypeError, space.wrap( + "expected string without null bytes")) + return 0 + + at cpython_api([PyObject], Py_ssize_t, error=-1) +def PyString_Size(space, ref): + if from_ref(space, rffi.cast(PyObject, ref.c_ob_type)) is space.w_str: + ref = rffi.cast(PyStringObject, ref) + return ref.c_size + else: + w_obj = from_ref(space, ref) + return space.len_w(w_obj) + + at cpython_api([PyObjectP, Py_ssize_t], rffi.INT_real, error=-1) +def _PyString_Resize(space, ref, newsize): + """A way to resize a string object even though it is "immutable". Only use this to + build up a brand new string object; don't use this if the string may already be + known in other parts of the code. It is an error to call this function if the + refcount on the input string object is not one. Pass the address of an existing + string object as an lvalue (it may be written into), and the new size desired. + On success, *string holds the resized string object and 0 is returned; + the address in *string may differ from its input value. If the reallocation + fails, the original string object at *string is deallocated, *string is + set to NULL, a memory exception is set, and -1 is returned. 
+ """ + # XXX always create a new string so far + py_str = rffi.cast(PyStringObject, ref[0]) + if not py_str.c_buffer: + raise OperationError(space.w_SystemError, space.wrap( + "_PyString_Resize called on already created string")) + try: + py_newstr = new_empty_str(space, newsize) + except MemoryError: + Py_DecRef(space, ref[0]) + ref[0] = lltype.nullptr(PyObject.TO) + raise + to_cp = newsize + oldsize = py_str.c_size + if oldsize < newsize: + to_cp = oldsize + for i in range(to_cp): + py_newstr.c_buffer[i] = py_str.c_buffer[i] + Py_DecRef(space, ref[0]) + ref[0] = rffi.cast(PyObject, py_newstr) + return 0 + + at cpython_api([PyObject, PyObject], rffi.INT, error=CANNOT_FAIL) +def _PyString_Eq(space, w_str1, w_str2): + return space.eq_w(w_str1, w_str2) + + at cpython_api([PyObjectP, PyObject], lltype.Void) +def PyString_Concat(space, ref, w_newpart): + """Create a new string object in *string containing the contents of newpart + appended to string; the caller will own the new reference. The reference to + the old value of string will be stolen. If the new string cannot be created, + the old reference to string will still be discarded and the value of + *string will be set to NULL; the appropriate exception will be set.""" + + if not ref[0]: + return + + if w_newpart is None or not PyString_Check(space, ref[0]) or \ + not PyString_Check(space, w_newpart): + Py_DecRef(space, ref[0]) + ref[0] = lltype.nullptr(PyObject.TO) + return + w_str = from_ref(space, ref[0]) + w_newstr = space.add(w_str, w_newpart) + Py_DecRef(space, ref[0]) + ref[0] = make_ref(space, w_newstr) + + at cpython_api([PyObjectP, PyObject], lltype.Void) +def PyString_ConcatAndDel(space, ref, newpart): + """Create a new string object in *string containing the contents of newpart + appended to string. 
This version decrements the reference count of newpart.""" + PyString_Concat(space, ref, newpart) + Py_DecRef(space, newpart) + + at cpython_api([PyObject, PyObject], PyObject) +def PyString_Format(space, w_format, w_args): + """Return a new string object from format and args. Analogous to format % + args. The args argument must be a tuple.""" + return space.mod(w_format, w_args) + + at cpython_api([CONST_STRING], PyObject) +def PyString_InternFromString(space, string): + """A combination of PyString_FromString() and + PyString_InternInPlace(), returning either a new string object that has + been interned, or a new ("owned") reference to an earlier interned string + object with the same value.""" + s = rffi.charp2str(string) + return space.new_interned_str(s) + + at cpython_api([PyObjectP], lltype.Void) +def PyString_InternInPlace(space, string): + """Intern the argument *string in place. The argument must be the + address of a pointer variable pointing to a Python string object. + If there is an existing interned string that is the same as + *string, it sets *string to it (decrementing the reference count + of the old string object and incrementing the reference count of + the interned string object), otherwise it leaves *string alone and + interns it (incrementing its reference count). (Clarification: + even though there is a lot of talk about reference counts, think + of this function as reference-count-neutral; you own the object + after the call if and only if you owned it before the call.) + + This function is not available in 3.x and does not have a PyBytes + alias.""" + w_str = from_ref(space, string[0]) + w_str = space.new_interned_w_str(w_str) + Py_DecRef(space, string[0]) + string[0] = make_ref(space, w_str) + + at cpython_api([PyObject, CONST_STRING, CONST_STRING], PyObject) +def PyString_AsEncodedObject(space, w_str, encoding, errors): + """Encode a string object using the codec registered for encoding and return + the result as Python object. 
encoding and errors have the same meaning as + the parameters of the same name in the string encode() method. The codec to + be used is looked up using the Python codec registry. Return NULL if an + exception was raised by the codec. + + This function is not available in 3.x and does not have a PyBytes alias.""" + if not PyString_Check(space, w_str): + PyErr_BadArgument(space) + + w_encoding = w_errors = None + if encoding: + w_encoding = space.wrap(rffi.charp2str(encoding)) + if errors: + w_errors = space.wrap(rffi.charp2str(errors)) + return space.call_method(w_str, 'encode', w_encoding, w_errors) + + at cpython_api([PyObject, CONST_STRING, CONST_STRING], PyObject) +def PyString_AsDecodedObject(space, w_str, encoding, errors): + """Decode a string object by passing it to the codec registered + for encoding and return the result as Python object. encoding and + errors have the same meaning as the parameters of the same name in + the string encode() method. The codec to be used is looked up + using the Python codec registry. Return NULL if an exception was + raised by the codec. + + This function is not available in 3.x and does not have a PyBytes alias.""" + if not PyString_Check(space, w_str): + PyErr_BadArgument(space) + + w_encoding = w_errors = None + if encoding: + w_encoding = space.wrap(rffi.charp2str(encoding)) + if errors: + w_errors = space.wrap(rffi.charp2str(errors)) + return space.call_method(w_str, "decode", w_encoding, w_errors) + + at cpython_api([PyObject, PyObject], PyObject) +def _PyString_Join(space, w_sep, w_seq): + return space.call_method(w_sep, 'join', w_seq) diff --git a/pypy/module/cpyext/complexobject.py b/pypy/module/cpyext/complexobject.py --- a/pypy/module/cpyext/complexobject.py +++ b/pypy/module/cpyext/complexobject.py @@ -43,7 +43,7 @@ # lltype does not handle functions returning a structure. This implements a # helper function, which takes as argument a reference to the return value. 
- at cpython_api([PyObject, Py_complex_ptr], lltype.Void) + at cpython_api([PyObject, Py_complex_ptr], rffi.INT_real, error=-1) def _PyComplex_AsCComplex(space, w_obj, result): """Return the Py_complex value of the complex number op. @@ -60,7 +60,7 @@ # if the above did not work, interpret obj as a float giving the # real part of the result, and fill in the imaginary part as 0. result.c_real = PyFloat_AsDouble(space, w_obj) # -1 on failure - return + return 0 if not PyComplex_Check(space, w_obj): raise OperationError(space.w_TypeError, space.wrap( @@ -69,3 +69,4 @@ assert isinstance(w_obj, W_ComplexObject) result.c_real = w_obj.realval result.c_imag = w_obj.imagval + return 0 diff --git a/pypy/module/cpyext/dictobject.py b/pypy/module/cpyext/dictobject.py --- a/pypy/module/cpyext/dictobject.py +++ b/pypy/module/cpyext/dictobject.py @@ -2,8 +2,7 @@ from pypy.module.cpyext.api import ( cpython_api, CANNOT_FAIL, build_type_checkers, Py_ssize_t, Py_ssize_tP, CONST_STRING) -from pypy.module.cpyext.pyobject import PyObject, PyObjectP, borrow_from -from pypy.module.cpyext.pyobject import RefcountState +from pypy.module.cpyext.pyobject import PyObject, PyObjectP, as_pyobj from pypy.module.cpyext.pyerrors import PyErr_BadInternalCall from pypy.interpreter.error import OperationError from rpython.rlib.objectmodel import specialize @@ -14,13 +13,17 @@ PyDict_Check, PyDict_CheckExact = build_type_checkers("Dict") - at cpython_api([PyObject, PyObject], PyObject, error=CANNOT_FAIL) + at cpython_api([PyObject, PyObject], PyObject, error=CANNOT_FAIL, + result_borrowed=True) def PyDict_GetItem(space, w_dict, w_key): try: w_res = space.getitem(w_dict, w_key) except: return None - return borrow_from(w_dict, w_res) + # NOTE: this works so far because all our dict strategies store + # *values* as full objects, which stay alive as long as the dict is + # alive and not modified. So we can return a borrowed ref. 
+ return w_res @cpython_api([PyObject, PyObject, PyObject], rffi.INT_real, error=-1) From pypy.commits at gmail.com Thu Feb 25 05:01:18 2016 From: pypy.commits at gmail.com (mjacob) Date: Thu, 25 Feb 2016 02:01:18 -0800 (PST) Subject: [pypy-commit] pypy default: Add app level tests for sys.dont_write_bytecode. Message-ID: <56ced0ee.8a921c0a.fd536.ffffb544@mx.google.com> Author: Manuel Jacob Branch: Changeset: r82493:423372b7d89b Date: 2016-02-25 10:23 +0100 http://bitbucket.org/pypy/pypy/changeset/423372b7d89b/ Log: Add app level tests for sys.dont_write_bytecode. diff --git a/pypy/module/imp/test/test_import.py b/pypy/module/imp/test/test_import.py --- a/pypy/module/imp/test/test_import.py +++ b/pypy/module/imp/test/test_import.py @@ -98,6 +98,10 @@ 'a=5\nb=6\rc="""hello\r\nworld"""\r', mode='wb') p.join('mod.py').write( 'a=15\nb=16\rc="""foo\r\nbar"""\r', mode='wb') + setuppkg("test_bytecode", + a = '', + b = '', + c = '') # create compiled/x.py and a corresponding pyc file p = setuppkg("compiled", x = "x = 84") @@ -1342,6 +1346,36 @@ assert isinstance(importer, zipimport.zipimporter) +class AppTestWriteBytecode(object): + def setup(cls): + cls.saved_modules = _setup(cls.space) + + def teardown(cls): + _teardown(cls.space, cls.saved_modules) + + def test_default(self): + import os.path + from test_bytecode import a + assert a.__file__.endswith('a.py') + assert os.path.exists(a.__file__ + 'c') + + def test_write_bytecode(self): + import os.path + import sys + sys.dont_write_bytecode = False + from test_bytecode import b + assert b.__file__.endswith('b.py') + assert os.path.exists(b.__file__ + 'c') + + def test_dont_write_bytecode(self): + import os.path + import sys + sys.dont_write_bytecode = True + from test_bytecode import c + assert c.__file__.endswith('c.py') + assert not os.path.exists(c.__file__ + 'c') + + class AppTestNoPycFile(object): spaceconfig = { "objspace.usepycfiles": False, From pypy.commits at gmail.com Thu Feb 25 06:43:08 2016 From: pypy.commits 
at gmail.com (plan_rich) Date: Thu, 25 Feb 2016 03:43:08 -0800 (PST) Subject: [pypy-commit] pypy fix-longevity: (remi, plan_rich) missing changes that should have been commited earlier? Message-ID: <56cee8cc.8ab71c0a.797c2.ffffe0c4@mx.google.com> Author: Richard Plangger Branch: fix-longevity Changeset: r82494:dad960bd604f Date: 2016-02-25 12:42 +0100 http://bitbucket.org/pypy/pypy/changeset/dad960bd604f/ Log: (remi, plan_rich) missing changes that should have been commited earlier? diff --git a/rpython/jit/backend/llsupport/regalloc.py b/rpython/jit/backend/llsupport/regalloc.py --- a/rpython/jit/backend/llsupport/regalloc.py +++ b/rpython/jit/backend/llsupport/regalloc.py @@ -276,6 +276,64 @@ save_around_call_regs = [] frame_reg = None + free_callee_regs = [reg for reg in all_reg if reg not in save_around_call_regs] + free_caller_regs = save_around_call_regs[:] + is_callee_lookup = [True] * len(all_regs) + for reg in save_around_call_regs: + is_callee_lookup[reg.index] = False + + def get_lower_byte_free_register(self, reg): + # try to return a volatile register first! 
+ for i, caller in enumerate(self.free_caller_regs): + if caller not in self.no_lower_byte_regs: + del self.free_caller_regs[i] + return caller + # in any case, we might want to try callee ones as well + for i, callee in enumerate(self.free_callee_regs): + if callee not in self.no_lower_byte_regs: + del self.free_callee_regs[i] + return callee + return None + + def get_free_register(self, var, callee=False, target_reg=None): + if callee: + target_pool = self.free_callee_regs + second_pool = self.free_caller_regs + else: + target_pool = self.free_caller_regs + second_pool = self.free_callee_regs + if target_pool: + return target_pool.pop() + if second_pool: + return second_pool.pop() + assert 0, "not free register, check this before calling" + + def has_free_registers(self): + return self.free_callee_regs or self.free_caller_regs + + def allocate_new(self, var): + if self.live_ranges.survives_call(var, self.position): + # we want a callee save register + return self.get_free_register(var, callee=True) + else: + return self.get_free_register(var, callee=False, target_reg=None) + + def remove_free_register(self, reg): + if is_callee_lookup[reg.index]: + self.free_callee_regs = [fr for fr in self.free_callee_regs if fr is not r] + else: + self.free_caller_regs = [fr for fr in self.free_caller_regs if fr is not r] + + def put_back_register(self, reg): + if is_callee_lookup[reg.index]: + self.free_callee_regs.push(reg) + else: + self.free_caller_regs.push(reg) + + def is_free(self, reg): + return reg in self.free_callee_regs or \ + reg in self.free_caller_regs + def __init__(self, live_ranges, frame_manager=None, assembler=None): self.free_regs = self.all_regs[:] self.free_regs.reverse() @@ -317,7 +375,7 @@ return if not self.live_ranges.exists(v) or self.live_ranges.last_use(v) <= self.position: if v in self.reg_bindings: - self.free_regs.append(self.reg_bindings[v]) + self.put_back_register(self.reg_bindings[v]) del self.reg_bindings[v] if self.frame_manager is not 
None: self.frame_manager.mark_as_free(v) @@ -337,17 +395,20 @@ self.temp_boxes = [] def _check_invariants(self): + free_count = len(self.free_callee_regs) + len(self.free_caller_regs) if not we_are_translated(): # make sure no duplicates assert len(dict.fromkeys(self.reg_bindings.values())) == len(self.reg_bindings) rev_regs = dict.fromkeys(self.reg_bindings.values()) - for reg in self.free_regs: + for reg in self.free_caller_regs: assert reg not in rev_regs - assert len(rev_regs) + len(self.free_regs) == len(self.all_regs) + for reg in self.free_callee_regs: + assert reg not in rev_regs + assert len(rev_regs) + free_count == len(self.all_regs) else: - assert len(self.reg_bindings) + len(self.free_regs) == len(self.all_regs) + assert len(self.reg_bindings) + free_count == len(self.all_regs) assert len(self.temp_boxes) == 0 - if self.live_ranges: + if self.live_ranges.longevity: for v in self.reg_bindings: assert self.live_ranges.last_use(v) > self.position @@ -368,32 +429,30 @@ return res else: del self.reg_bindings[v] - self.free_regs.append(res) - if selected_reg in self.free_regs: - self.free_regs = [reg for reg in self.free_regs - if reg is not selected_reg] + self.put_back_register(res) + if self.is_free(selected_reg): + self.remove_free_register(selected_reg) self.reg_bindings[v] = selected_reg return selected_reg return None + if need_lower_byte: loc = self.reg_bindings.get(v, None) if loc is not None and loc not in self.no_lower_byte_regs: + # yes, this location is a no_lower_byte_register return loc - for i in range(len(self.free_regs) - 1, -1, -1): - reg = self.free_regs[i] - if reg not in self.no_lower_byte_regs: - if loc is not None: - self.free_regs[i] = loc - else: - del self.free_regs[i] - self.reg_bindings[v] = reg - return reg - return None + # find a free register that is also a lower byte register + if loc: + self.put_back_register(loc) + reg = self.get_lower_byte_free_register(v) + self.reg_bindings[v] = reg + return reg + try: return 
self.reg_bindings[v] except KeyError: - if self.free_regs: - loc = self.free_regs.pop() + if self.has_free_registers(): + loc = self.allocate_new(v) self.reg_bindings[v] = loc return loc @@ -453,7 +512,7 @@ need_lower_byte=need_lower_byte) prev_loc = self.reg_bindings.get(v, None) if prev_loc is not None: - self.free_regs.append(prev_loc) + self.put_back_register(prev_loc) self.reg_bindings[v] = loc return loc @@ -466,7 +525,7 @@ try: loc = self.reg_bindings[var] del self.reg_bindings[var] - self.free_regs.append(loc) + self.put_back_register(loc) except KeyError: pass # 'var' is already not in a register @@ -493,11 +552,11 @@ assert isinstance(v, Const) immloc = self.convert_to_imm(v) if selected_reg: - if selected_reg in self.free_regs: + if self.is_free(selected_reg): self.assembler.regalloc_mov(immloc, selected_reg) return selected_reg loc = self._spill_var(v, forbidden_vars, selected_reg) - self.free_regs.append(loc) + self.put_back_register(loc) self.assembler.regalloc_mov(immloc, loc) return loc return immloc @@ -526,8 +585,8 @@ self.reg_bindings[to_v] = reg def _move_variable_away(self, v, prev_loc): - if self.free_regs: - loc = self.free_regs.pop() + if self.has_free_registers(): + loc = self.allocate_new(v) self.reg_bindings[v] = loc self.assembler.regalloc_mov(prev_loc, loc) else: @@ -542,8 +601,8 @@ self._check_type(result_v) self._check_type(v) if isinstance(v, Const): - if self.free_regs: - loc = self.free_regs.pop() + if self.has_free_registers(): + loc = self.allocate_new(v) else: loc = self._spill_var(v, forbidden_vars, None) self.assembler.regalloc_mov(self.convert_to_imm(v), loc) @@ -559,7 +618,7 @@ # store result in the same place loc = self.reg_bindings[v] del self.reg_bindings[v] - if self.frame_manager.get(v) is None or self.free_regs: + if self.frame_manager.get(v) is None or self.has_free_registers(): self._move_variable_away(v, loc) self.reg_bindings[result_v] = loc @@ -586,7 +645,7 @@ if v not in force_store and 
self.live_ranges.last_use(v) <= self.position: # variable dies del self.reg_bindings[v] - self.free_regs.append(reg) + self.put_back_register(reg) continue if save_all_regs != 1 and reg not in self.save_around_call_regs: if save_all_regs == 0: @@ -595,7 +654,7 @@ continue # only save GC pointers self._sync_var(v) del self.reg_bindings[v] - self.free_regs.append(reg) + self.put_back_register(reg) def after_call(self, v): """ Adjust registers according to the result of the call, @@ -606,7 +665,7 @@ if not we_are_translated(): assert r not in self.reg_bindings.values() self.reg_bindings[v] = r - self.free_regs = [fr for fr in self.free_regs if fr is not r] + self.remove_free_register(r) return r # abstract methods, override @@ -638,8 +697,7 @@ assert not isinstance(box, Const) loc = self.fm.get_new_loc(box) locs.append(loc.value - base_ofs) - if looptoken.compiled_loop_token is not None: - # for tests + if looptoken.compiled_loop_token is not None: # <- for tests looptoken.compiled_loop_token._ll_initial_locs = locs def next_op_can_accept_cc(self, operations, i): @@ -687,6 +745,15 @@ def new_live_range(self, var, start, end): self.longevity[var] = (start, end) + def survives_call(self, var, position): + start, end = self.longevity[var] + dist = self.dist_to_next_call[position] + assert end >= position + if end-position <= dist: + # it is 'live during a call' if it live range ends after the call + return True + return False + def compute_var_live_ranges(inputargs, operations): # compute a dictionary that maps variables to index in # operations that is a "last-time-seen" From pypy.commits at gmail.com Thu Feb 25 06:45:55 2016 From: pypy.commits at gmail.com (Raemi) Date: Thu, 25 Feb 2016 03:45:55 -0800 (PST) Subject: [pypy-commit] pypy fix-longevity: (plan_rich, remi) fix trivial errors Message-ID: <56cee973.cf0b1c0a.39176.ffffe1bf@mx.google.com> Author: Remi Meier Branch: fix-longevity Changeset: r82495:4d5d168ea4e5 Date: 2016-02-25 12:45 +0100 
http://bitbucket.org/pypy/pypy/changeset/4d5d168ea4e5/ Log: (plan_rich,remi) fix trivial errors diff --git a/rpython/jit/backend/llsupport/regalloc.py b/rpython/jit/backend/llsupport/regalloc.py --- a/rpython/jit/backend/llsupport/regalloc.py +++ b/rpython/jit/backend/llsupport/regalloc.py @@ -276,7 +276,7 @@ save_around_call_regs = [] frame_reg = None - free_callee_regs = [reg for reg in all_reg if reg not in save_around_call_regs] + free_callee_regs = [reg for reg in all_regs if reg not in save_around_call_regs] free_caller_regs = save_around_call_regs[:] is_callee_lookup = [True] * len(all_regs) for reg in save_around_call_regs: @@ -319,13 +319,13 @@ return self.get_free_register(var, callee=False, target_reg=None) def remove_free_register(self, reg): - if is_callee_lookup[reg.index]: - self.free_callee_regs = [fr for fr in self.free_callee_regs if fr is not r] + if self.is_callee_lookup[reg.index]: + self.free_callee_regs = [fr for fr in self.free_callee_regs if fr is not reg] else: - self.free_caller_regs = [fr for fr in self.free_caller_regs if fr is not r] + self.free_caller_regs = [fr for fr in self.free_caller_regs if fr is not reg] def put_back_register(self, reg): - if is_callee_lookup[reg.index]: + if self.is_callee_lookup[reg.index]: self.free_callee_regs.push(reg) else: self.free_caller_regs.push(reg) @@ -752,7 +752,7 @@ if end-position <= dist: # it is 'live during a call' if it live range ends after the call return True - return False + return False def compute_var_live_ranges(inputargs, operations): # compute a dictionary that maps variables to index in From pypy.commits at gmail.com Thu Feb 25 07:12:02 2016 From: pypy.commits at gmail.com (plan_rich) Date: Thu, 25 Feb 2016 04:12:02 -0800 (PST) Subject: [pypy-commit] pypy fix-longevity: (remi, plan_rich) fixed all broken tests because of our refactoring Message-ID: <56ceef92.ca56c20a.b7719.7fa8@mx.google.com> Author: Richard Plangger Branch: fix-longevity Changeset: r82496:1b6e563e6cb0 Date: 
2016-02-25 13:11 +0100 http://bitbucket.org/pypy/pypy/changeset/1b6e563e6cb0/ Log: (remi, plan_rich) fixed all broken tests because of our refactoring diff --git a/rpython/jit/backend/llsupport/regalloc.py b/rpython/jit/backend/llsupport/regalloc.py --- a/rpython/jit/backend/llsupport/regalloc.py +++ b/rpython/jit/backend/llsupport/regalloc.py @@ -276,11 +276,9 @@ save_around_call_regs = [] frame_reg = None - free_callee_regs = [reg for reg in all_regs if reg not in save_around_call_regs] - free_caller_regs = save_around_call_regs[:] - is_callee_lookup = [True] * len(all_regs) - for reg in save_around_call_regs: - is_callee_lookup[reg.index] = False + free_callee_regs = [] + free_caller_regs = [] + is_callee_lookup = None def get_lower_byte_free_register(self, reg): # try to return a volatile register first! @@ -326,17 +324,24 @@ def put_back_register(self, reg): if self.is_callee_lookup[reg.index]: - self.free_callee_regs.push(reg) + self.free_callee_regs.append(reg) else: - self.free_caller_regs.push(reg) + self.free_caller_regs.append(reg) + + def free_register_count(self): + return len(self.free_callee_regs) + len(self.free_caller_regs) def is_free(self, reg): return reg in self.free_callee_regs or \ reg in self.free_caller_regs def __init__(self, live_ranges, frame_manager=None, assembler=None): - self.free_regs = self.all_regs[:] - self.free_regs.reverse() + self.free_callee_regs = [reg for reg in self.all_regs if reg not in self.save_around_call_regs] + self.free_caller_regs = self.save_around_call_regs[:] + self.is_callee_lookup = [True] * len(self.all_regs) + for reg in self.save_around_call_regs: + self.is_callee_lookup[reg.index] = False + self.live_ranges = live_ranges self.temp_boxes = [] if not we_are_translated(): @@ -395,7 +400,7 @@ self.temp_boxes = [] def _check_invariants(self): - free_count = len(self.free_callee_regs) + len(self.free_caller_regs) + free_count = self.free_register_count() if not we_are_translated(): # make sure no duplicates 
assert len(dict.fromkeys(self.reg_bindings.values())) == len(self.reg_bindings) @@ -442,11 +447,15 @@ # yes, this location is a no_lower_byte_register return loc # find a free register that is also a lower byte register - if loc: - self.put_back_register(loc) + if not self.has_free_registers(): + return None reg = self.get_lower_byte_free_register(v) - self.reg_bindings[v] = reg - return reg + if reg is not None: + if loc: + self.put_back_register(loc) + self.reg_bindings[v] = reg + return reg + return None try: return self.reg_bindings[v] @@ -737,22 +746,25 @@ self.dist_to_next_call = dist_to_next_call def exists(self, var): - return var in self.longevity + return var in self.longevity def last_use(self, var): - return self.longevity[var][1] + return self.longevity[var][1] def new_live_range(self, var, start, end): - self.longevity[var] = (start, end) + self.longevity[var] = (start, end) def survives_call(self, var, position): - start, end = self.longevity[var] - dist = self.dist_to_next_call[position] - assert end >= position - if end-position <= dist: - # it is 'live during a call' if it live range ends after the call - return True - return False + if not we_are_translated(): + if self.dist_to_next_call is None: + return False + start, end = self.longevity[var] + dist = self.dist_to_next_call[position] + assert end >= position + if end-position <= dist: + # it is 'live during a call' if it live range ends after the call + return True + return False def compute_var_live_ranges(inputargs, operations): # compute a dictionary that maps variables to index in diff --git a/rpython/jit/backend/llsupport/test/test_regalloc.py b/rpython/jit/backend/llsupport/test/test_regalloc.py --- a/rpython/jit/backend/llsupport/test/test_regalloc.py +++ b/rpython/jit/backend/llsupport/test/test_regalloc.py @@ -23,9 +23,9 @@ class FakeReg(object): def __init__(self, i): - self.n = i + self.index = i def __repr__(self): - return 'r%d' % self.n + return 'r%d' % self.index r0, r1, r2, r3 
= [FakeReg(i) for i in range(4)] regs = [r0, r1, r2, r3] @@ -83,22 +83,22 @@ for b in b0, b1, b2: rm.try_allocate_reg(b) rm._check_invariants() - assert len(rm.free_regs) == 1 + assert rm.free_register_count() == 1 assert len(rm.reg_bindings) == 3 rm.possibly_free_vars([b0, b1, b2]) - assert len(rm.free_regs) == 1 + assert rm.free_register_count() == 1 assert len(rm.reg_bindings) == 3 rm._check_invariants() rm.next_instruction() rm.possibly_free_vars([b0, b1, b2]) rm._check_invariants() - assert len(rm.free_regs) == 2 + assert rm.free_register_count() == 2 assert len(rm.reg_bindings) == 2 rm._check_invariants() rm.next_instruction() rm.possibly_free_vars([b0, b1, b2]) rm._check_invariants() - assert len(rm.free_regs) == 4 + assert rm.free_register_count() == 4 assert len(rm.reg_bindings) == 0 def test_register_exhaustion(self): @@ -230,7 +230,7 @@ rm.next_instruction() for b in b0, b1, b2, b3: rm.force_allocate_reg(b) - assert not len(rm.free_regs) + assert not rm.has_free_registers() rm._check_invariants() rm.next_instruction() rm.force_result_in_reg(b4, b0) @@ -259,8 +259,8 @@ fm = TFrameManager() asm = MockAsm() rm = RegisterManager(LiveRanges(longevity, None, None), frame_manager=fm, assembler=asm) - rm.free_regs = rm.free_regs[:1] - rm.all_regs = rm.free_regs[:] + rm.free_callee_regs = rm.free_callee_regs[:1] + rm.all_regs = rm.free_callee_regs[:] rm.next_instruction() fm.loc(b0) rm.force_result_in_reg(b1, b0) @@ -388,7 +388,7 @@ rm.next_instruction() for b in b0, b1, b2, b3: rm.force_allocate_reg(b) - assert len(rm.free_regs) == 0 + assert not rm.has_free_registers() rm.next_instruction() loc = rm.loc(b3) spilled = rm.force_allocate_reg(b4) From pypy.commits at gmail.com Thu Feb 25 07:22:27 2016 From: pypy.commits at gmail.com (plan_rich) Date: Thu, 25 Feb 2016 04:22:27 -0800 (PST) Subject: [pypy-commit] pypy fix-longevity: (remi, plan_rich) renamed index property to value Message-ID: <56cef203.a185c20a.f07ce.ffff84b8@mx.google.com> Author: Richard Plangger 
Branch: fix-longevity Changeset: r82497:ed33bb43fa34 Date: 2016-02-25 13:21 +0100 http://bitbucket.org/pypy/pypy/changeset/ed33bb43fa34/ Log: (remi, plan_rich) renamed index property to value diff --git a/rpython/jit/backend/llsupport/regalloc.py b/rpython/jit/backend/llsupport/regalloc.py --- a/rpython/jit/backend/llsupport/regalloc.py +++ b/rpython/jit/backend/llsupport/regalloc.py @@ -317,13 +317,13 @@ return self.get_free_register(var, callee=False, target_reg=None) def remove_free_register(self, reg): - if self.is_callee_lookup[reg.index]: + if self.is_callee_lookup[reg.value]: self.free_callee_regs = [fr for fr in self.free_callee_regs if fr is not reg] else: self.free_caller_regs = [fr for fr in self.free_caller_regs if fr is not reg] def put_back_register(self, reg): - if self.is_callee_lookup[reg.index]: + if self.is_callee_lookup[reg.value]: self.free_callee_regs.append(reg) else: self.free_caller_regs.append(reg) @@ -340,7 +340,7 @@ self.free_caller_regs = self.save_around_call_regs[:] self.is_callee_lookup = [True] * len(self.all_regs) for reg in self.save_around_call_regs: - self.is_callee_lookup[reg.index] = False + self.is_callee_lookup[reg.value] = False self.live_ranges = live_ranges self.temp_boxes = [] diff --git a/rpython/jit/backend/llsupport/test/test_regalloc_call.py b/rpython/jit/backend/llsupport/test/test_regalloc_call.py --- a/rpython/jit/backend/llsupport/test/test_regalloc_call.py +++ b/rpython/jit/backend/llsupport/test/test_regalloc_call.py @@ -11,12 +11,6 @@ from rpython.rtyper.annlowlevel import llhelper from rpython.jit.codewriter.effectinfo import EffectInfo -class FakeReg(object): - def __init__(self, i): - self.n = i - def __repr__(self): - return 'r%d' % self.n - eax, ecx, edx, ebx, esp, ebp, esi, edi, r8, r9, r10, r11, r12, r13, r14, r15 = REGLOCS caller_saved = [] callee_saved = [] From pypy.commits at gmail.com Thu Feb 25 07:43:10 2016 From: pypy.commits at gmail.com (Raemi) Date: Thu, 25 Feb 2016 04:43:10 -0800 (PST) 
Subject: [pypy-commit] pypy fix-longevity: (plan_rich, remi) small fixes and allow updating the free regs lists Message-ID: <56cef6de.ca56c20a.b7719.ffff8b7e@mx.google.com> Author: Remi Meier Branch: fix-longevity Changeset: r82498:d6ea9d88da47 Date: 2016-02-25 13:42 +0100 http://bitbucket.org/pypy/pypy/changeset/d6ea9d88da47/ Log: (plan_rich,remi) small fixes and allow updating the free regs lists diff --git a/rpython/jit/backend/llsupport/regalloc.py b/rpython/jit/backend/llsupport/regalloc.py --- a/rpython/jit/backend/llsupport/regalloc.py +++ b/rpython/jit/backend/llsupport/regalloc.py @@ -310,12 +310,18 @@ return self.free_callee_regs or self.free_caller_regs def allocate_new(self, var): - if self.live_ranges.survives_call(var, self.position): + if self.live_ranges.exists(var) and self.live_ranges.survives_call(var, self.position): # we want a callee save register return self.get_free_register(var, callee=True) else: return self.get_free_register(var, callee=False, target_reg=None) + def update_free_registers(self, regs_in_use): + # XXX: slow? 
+ self._reset_free_regs() + for r in regs_in_use: + self.remove_free_register(r) + def remove_free_register(self, reg): if self.is_callee_lookup[reg.value]: self.free_callee_regs = [fr for fr in self.free_callee_regs if fr is not reg] @@ -335,9 +341,13 @@ return reg in self.free_callee_regs or \ reg in self.free_caller_regs + def _reset_free_regs(self): + self.free_callee_regs = [reg for reg in self.all_regs + if reg not in self.save_around_call_regs] + self.free_caller_regs = self.save_around_call_regs[:] + def __init__(self, live_ranges, frame_manager=None, assembler=None): - self.free_callee_regs = [reg for reg in self.all_regs if reg not in self.save_around_call_regs] - self.free_caller_regs = self.save_around_call_regs[:] + self._reset_free_regs() self.is_callee_lookup = [True] * len(self.all_regs) for reg in self.save_around_call_regs: self.is_callee_lookup[reg.value] = False diff --git a/rpython/jit/backend/llsupport/test/test_regalloc.py b/rpython/jit/backend/llsupport/test/test_regalloc.py --- a/rpython/jit/backend/llsupport/test/test_regalloc.py +++ b/rpython/jit/backend/llsupport/test/test_regalloc.py @@ -23,9 +23,9 @@ class FakeReg(object): def __init__(self, i): - self.index = i + self.value = i def __repr__(self): - return 'r%d' % self.index + return 'r%d' % self.value r0, r1, r2, r3 = [FakeReg(i) for i in range(4)] regs = [r0, r1, r2, r3] diff --git a/rpython/jit/backend/x86/regalloc.py b/rpython/jit/backend/x86/regalloc.py --- a/rpython/jit/backend/x86/regalloc.py +++ b/rpython/jit/backend/x86/regalloc.py @@ -266,11 +266,13 @@ else: return self.xrm.make_sure_var_in_reg(var, forbidden_vars) - def _update_bindings(self, locs, inputargs): + def _update_bindings(self, arglocs, inputargs): # XXX this should probably go to llsupport/regalloc.py - used = {} + used = set() i = 0 - for loc in locs: + # manually set the register and frame bindings for + # all inputargs (for a bridge) + for loc in arglocs: if loc is None: # xxx bit kludgy loc = ebp arg = 
inputargs[i] @@ -278,23 +280,18 @@ if isinstance(loc, RegLoc): if arg.type == FLOAT: self.xrm.reg_bindings[arg] = loc - used[loc] = None + used.add(loc) else: if loc is ebp: self.rm.bindings_to_frame_reg[arg] = None else: self.rm.reg_bindings[arg] = loc - used[loc] = None + used.add(loc) else: self.fm.bind(arg, loc) - self.rm.free_regs = [] - for reg in self.rm.all_regs: - if reg not in used: - self.rm.free_regs.append(reg) - self.xrm.free_regs = [] - for reg in self.xrm.all_regs: - if reg not in used: - self.xrm.free_regs.append(reg) + # + self.rm.update_free_registers(used) + self.xrm.update_free_registers(used) self.possibly_free_vars(list(inputargs)) self.fm.finish_binding() self.rm._check_invariants() From pypy.commits at gmail.com Thu Feb 25 07:57:20 2016 From: pypy.commits at gmail.com (Raemi) Date: Thu, 25 Feb 2016 04:57:20 -0800 (PST) Subject: [pypy-commit] pypy fix-longevity: (plan_rich, remi) support changing of register set for tests Message-ID: <56cefa30.e7bec20a.c67fb.ffff9698@mx.google.com> Author: Remi Meier Branch: fix-longevity Changeset: r82499:851789a1560c Date: 2016-02-25 13:56 +0100 http://bitbucket.org/pypy/pypy/changeset/851789a1560c/ Log: (plan_rich,remi) support changing of register set for tests diff --git a/rpython/jit/backend/llsupport/regalloc.py b/rpython/jit/backend/llsupport/regalloc.py --- a/rpython/jit/backend/llsupport/regalloc.py +++ b/rpython/jit/backend/llsupport/regalloc.py @@ -318,7 +318,7 @@ def update_free_registers(self, regs_in_use): # XXX: slow? 
- self._reset_free_regs() + self._reinit_free_regs() for r in regs_in_use: self.remove_free_register(r) @@ -341,16 +341,22 @@ return reg in self.free_callee_regs or \ reg in self.free_caller_regs - def _reset_free_regs(self): + def _reinit_free_regs(self): self.free_callee_regs = [reg for reg in self.all_regs if reg not in self.save_around_call_regs] self.free_caller_regs = self.save_around_call_regs[:] + def _change_regs(self, all_regs, save_around_call_regs): + self.all_regs = all_regs + self.save_around_call_regs = save_around_call_regs + self._reinit_free_regs() + self.is_callee_lookup = [True] * max( + [r.value + 1 for r in self.all_regs]) + for reg in save_around_call_regs: + self.is_callee_lookup[reg.value] = False + def __init__(self, live_ranges, frame_manager=None, assembler=None): - self._reset_free_regs() - self.is_callee_lookup = [True] * len(self.all_regs) - for reg in self.save_around_call_regs: - self.is_callee_lookup[reg.value] = False + self._change_regs(self.all_regs, self.save_around_call_regs) self.live_ranges = live_ranges self.temp_boxes = [] diff --git a/rpython/jit/backend/llsupport/test/test_regalloc_call.py b/rpython/jit/backend/llsupport/test/test_regalloc_call.py --- a/rpython/jit/backend/llsupport/test/test_regalloc_call.py +++ b/rpython/jit/backend/llsupport/test/test_regalloc_call.py @@ -117,15 +117,15 @@ self.regalloc.rm.reg_bindings[var] = reg # instead of having all machine registers, we want only to provide some - fr = self.regalloc.free_regs + self.regalloc.rm._change_regs(self.regalloc.all_regs, + self.regalloc.caller_saved) if free_regs is None: - self.regalloc.rm.free_regs = [reg for reg in fr - if reg not in self.initial_binding.values()] + self.regalloc.rm.update_free_registers( + self.initial_binding.values()) else: - self.regalloc.rm.free_regs = free_regs - self.regalloc.rm.all_regs = self.regalloc.all_regs - self.regalloc.rm.save_around_call_regs = self.regalloc.caller_saved - + self.regalloc.rm.update_free_registers( + 
set(self.regalloc.all_regs) - set(free_regs)) + self.regalloc.rm._check_invariants() # invoke the allocator! self.regalloc.walk_operations(inputargs, operations) From pypy.commits at gmail.com Thu Feb 25 08:04:05 2016 From: pypy.commits at gmail.com (mjacob) Date: Thu, 25 Feb 2016 05:04:05 -0800 (PST) Subject: [pypy-commit] pypy default: Try to make import tests more independent from each other. Message-ID: <56cefbc5.654fc20a.b76fd.ffff92f0@mx.google.com> Author: Manuel Jacob Branch: Changeset: r82501:34a4d2b7e371 Date: 2016-02-25 14:04 +0100 http://bitbucket.org/pypy/pypy/changeset/34a4d2b7e371/ Log: Try to make import tests more independent from each other. diff --git a/pypy/module/imp/test/test_import.py b/pypy/module/imp/test/test_import.py --- a/pypy/module/imp/test/test_import.py +++ b/pypy/module/imp/test/test_import.py @@ -150,6 +150,8 @@ """) def _teardown(space, w_saved_modules): + p = udir.join('impsubdir') + p.remove() space.appexec([w_saved_modules], """ ((saved_path, saved_modules)): import sys From pypy.commits at gmail.com Thu Feb 25 08:04:03 2016 From: pypy.commits at gmail.com (mjacob) Date: Thu, 25 Feb 2016 05:04:03 -0800 (PST) Subject: [pypy-commit] pypy default: Reset sys.dont_write_bytecode because apparently the tests are not independent from each other. Message-ID: <56cefbc3.d30e1c0a.b6372.0141@mx.google.com> Author: Manuel Jacob Branch: Changeset: r82500:b94753e4f5cc Date: 2016-02-25 13:25 +0100 http://bitbucket.org/pypy/pypy/changeset/b94753e4f5cc/ Log: Reset sys.dont_write_bytecode because apparently the tests are not independent from each other. 
diff --git a/pypy/module/imp/test/test_import.py b/pypy/module/imp/test/test_import.py --- a/pypy/module/imp/test/test_import.py +++ b/pypy/module/imp/test/test_import.py @@ -1347,11 +1347,16 @@ class AppTestWriteBytecode(object): - def setup(cls): + def setup_class(cls): cls.saved_modules = _setup(cls.space) - def teardown(cls): + def teardown_class(cls): _teardown(cls.space, cls.saved_modules) + cls.space.appexec([], """ + (): + import sys + sys.dont_write_bytecode = False + """) def test_default(self): import os.path From pypy.commits at gmail.com Thu Feb 25 08:34:54 2016 From: pypy.commits at gmail.com (Raemi) Date: Thu, 25 Feb 2016 05:34:54 -0800 (PST) Subject: [pypy-commit] pypy fix-longevity: (plan_rich, remi) faster update_free_registers Message-ID: <56cf02fe.4577c20a.7d582.ffffa156@mx.google.com> Author: Remi Meier Branch: fix-longevity Changeset: r82502:3271c1578a13 Date: 2016-02-25 14:34 +0100 http://bitbucket.org/pypy/pypy/changeset/3271c1578a13/ Log: (plan_rich,remi) faster update_free_registers diff --git a/rpython/jit/backend/llsupport/regalloc.py b/rpython/jit/backend/llsupport/regalloc.py --- a/rpython/jit/backend/llsupport/regalloc.py +++ b/rpython/jit/backend/llsupport/regalloc.py @@ -317,7 +317,6 @@ return self.get_free_register(var, callee=False, target_reg=None) def update_free_registers(self, regs_in_use): - # XXX: slow? 
self._reinit_free_regs() for r in regs_in_use: self.remove_free_register(r) @@ -342,17 +341,18 @@ reg in self.free_caller_regs def _reinit_free_regs(self): - self.free_callee_regs = [reg for reg in self.all_regs - if reg not in self.save_around_call_regs] + self.free_callee_regs = self.save_in_callee_regs[:] self.free_caller_regs = self.save_around_call_regs[:] def _change_regs(self, all_regs, save_around_call_regs): self.all_regs = all_regs self.save_around_call_regs = save_around_call_regs + self.save_in_callee_regs = [reg for reg in all_regs + if reg not in save_around_call_regs] self._reinit_free_regs() self.is_callee_lookup = [True] * max( [r.value + 1 for r in self.all_regs]) - for reg in save_around_call_regs: + for reg in self.save_around_call_regs: self.is_callee_lookup[reg.value] = False def __init__(self, live_ranges, frame_manager=None, assembler=None): From pypy.commits at gmail.com Thu Feb 25 08:37:06 2016 From: pypy.commits at gmail.com (arigo) Date: Thu, 25 Feb 2016 05:37:06 -0800 (PST) Subject: [pypy-commit] pypy cpyext-ext: (matti, arigo, ronan around) Message-ID: <56cf0382.11301c0a.3d95a.08e7@mx.google.com> Author: Armin Rigo Branch: cpyext-ext Changeset: r82503:1dfe3b071dc6 Date: 2016-02-25 14:35 +0100 http://bitbucket.org/pypy/pypy/changeset/1dfe3b071dc6/ Log: (matti, arigo, ronan around) Test and fix: initialize ob_pypy_link correctly to 0 diff --git a/pypy/module/cpyext/object.py b/pypy/module/cpyext/object.py --- a/pypy/module/cpyext/object.py +++ b/pypy/module/cpyext/object.py @@ -16,8 +16,9 @@ @cpython_api([Py_ssize_t], rffi.VOIDP) def PyObject_Malloc(space, size): + # returns non-zero-initialized memory, like CPython return lltype.malloc(rffi.VOIDP.TO, size, - flavor='raw', zero=True) + flavor='raw') @cpython_api([rffi.VOIDP], lltype.Void) def PyObject_Free(space, ptr): @@ -189,6 +190,7 @@ if not obj: PyErr_NoMemory(space) obj.c_ob_type = type + obj.c_ob_pypy_link = 0 obj.c_ob_refcnt = 1 return obj diff --git 
a/pypy/module/cpyext/pyobject.py b/pypy/module/cpyext/pyobject.py --- a/pypy/module/cpyext/pyobject.py +++ b/pypy/module/cpyext/pyobject.py @@ -53,6 +53,7 @@ flavor='raw', zero=True) pyobj = rffi.cast(PyObject, buf) pyobj.c_ob_refcnt = 1 + #pyobj.c_ob_pypy_link should get assigned very quickly pyobj.c_ob_type = pytype return pyobj @@ -325,6 +326,7 @@ @cpython_api([PyObject], lltype.Void) def _Py_NewReference(space, obj): obj.c_ob_refcnt = 1 + # XXX is it always useful to create the W_Root object here? w_type = from_ref(space, rffi.cast(PyObject, obj.c_ob_type)) assert isinstance(w_type, W_TypeObject) get_typedescr(w_type.layout.typedef).realize(space, obj) diff --git a/pypy/module/cpyext/test/test_object.py b/pypy/module/cpyext/test/test_object.py --- a/pypy/module/cpyext/test/test_object.py +++ b/pypy/module/cpyext/test/test_object.py @@ -217,6 +217,20 @@ AppTestCpythonExtensionBase.setup_class.im_func(cls) cls.w_tmpname = cls.space.wrap(str(py.test.ensuretemp("out", dir=0))) + def test_object_malloc(self): + module = self.import_extension('foo', [ + ("malloctest", "METH_NOARGS", + """ + PyObject *obj = PyObject_MALLOC(sizeof(PyIntObject)); + obj = PyObject_Init(obj, &PyInt_Type); + if (obj != NULL) + ((PyIntObject *)obj)->ob_ival = -424344; + return obj; + """)]) + x = module.malloctest() + assert type(x) is int + assert x == -424344 + def test_TypeCheck(self): module = self.import_extension('foo', [ ("typecheck", "METH_VARARGS", diff --git a/pypy/module/cpyext/typeobject.py b/pypy/module/cpyext/typeobject.py --- a/pypy/module/cpyext/typeobject.py +++ b/pypy/module/cpyext/typeobject.py @@ -506,6 +506,7 @@ flavor='raw', zero=True) pto = heaptype.c_ht_type pto.c_ob_refcnt = 1 + pto.c_ob_pypy_link = 0 pto.c_ob_type = metatype pto.c_tp_flags |= Py_TPFLAGS_HEAPTYPE pto.c_tp_as_number = heaptype.c_as_number From pypy.commits at gmail.com Thu Feb 25 08:39:12 2016 From: pypy.commits at gmail.com (mattip) Date: Thu, 25 Feb 2016 05:39:12 -0800 (PST) Subject: [pypy-commit] 
pypy default: start updating for bug-fix releases Message-ID: <56cf0400.d22e1c0a.af3a5.0886@mx.google.com> Author: mattip Branch: Changeset: r82504:2828af388188 Date: 2016-02-25 12:58 +0100 http://bitbucket.org/pypy/pypy/changeset/2828af388188/ Log: start updating for bug-fix releases diff --git a/pypy/doc/how-to-release.rst b/pypy/doc/how-to-release.rst --- a/pypy/doc/how-to-release.rst +++ b/pypy/doc/how-to-release.rst @@ -1,5 +1,20 @@ -Making a PyPy Release -===================== +The PyPy Release Process +======================== + +Release Policy +++++++++++++++ + +We try to create a stable release a few times a year. These are released on +a branch named like release-2.x or release-4.x, and each release is tagged, +for instance release-4.0.1. + +After release, inevitably there are bug fixes. It is the responsibility of +the commiter who fixes a bug to make sure this fix is on the release branch, +so that we can then create a tagged bug-fix release, which will hopefully +happen more often than stable releases. 
+ +How to Create a PyPy Release +++++++++++++++++++++++++++++ Overview -------- From pypy.commits at gmail.com Thu Feb 25 09:03:50 2016 From: pypy.commits at gmail.com (Raemi) Date: Thu, 25 Feb 2016 06:03:50 -0800 (PST) Subject: [pypy-commit] pypy fix-longevity: (plan_rich, remi) test and fix for call_dists calculation Message-ID: <56cf09c6.a151c20a.cf0a8.ffffa90a@mx.google.com> Author: Remi Meier Branch: fix-longevity Changeset: r82505:4e99dd1d0654 Date: 2016-02-25 15:03 +0100 http://bitbucket.org/pypy/pypy/changeset/4e99dd1d0654/ Log: (plan_rich,remi) test and fix for call_dists calculation diff --git a/rpython/jit/backend/llsupport/regalloc.py b/rpython/jit/backend/llsupport/regalloc.py --- a/rpython/jit/backend/llsupport/regalloc.py +++ b/rpython/jit/backend/llsupport/regalloc.py @@ -796,6 +796,9 @@ last_call_pos = -1 for i in range(len(operations)-1, -1, -1): op = operations[i] + if op.is_call(): + last_call_pos = i + dist_to_next_call[i] = last_call_pos - i if op.type != 'v': if op not in last_used and op.has_no_side_effect(): continue @@ -816,9 +819,6 @@ assert not isinstance(arg, Const) if arg not in last_used: last_used[arg] = i - if op.is_call(): - last_call_pos = i - dist_to_next_call[i] = last_call_pos - i # longevity = {} for i, arg in enumerate(operations): diff --git a/rpython/jit/backend/llsupport/test/test_regalloc_call.py b/rpython/jit/backend/llsupport/test/test_regalloc_call.py --- a/rpython/jit/backend/llsupport/test/test_regalloc_call.py +++ b/rpython/jit/backend/llsupport/test/test_regalloc_call.py @@ -200,8 +200,9 @@ """, namespace=self.namespace) i1 = ops.operations[0] i2 = ops.operations[1] - trace_alloc = TraceAllocation(ops, [eax, edx], [r8, r9, r10], [eax, r10], tt) - trace_alloc.run_allocation([r8,r9,edx]) + trace_alloc = TraceAllocation(ops, [eax, edx, get_param(0)], + [r8, r9, r10], [eax, r10], tt) + trace_alloc.run_allocation() # we force the allocation to immediately take the first call parameter register # the new regalloc will 
not shuffle register binding around (other than spilling) # in the best case this will reduce a lot of movement diff --git a/rpython/jit/backend/llsupport/test/test_regalloc_integration.py b/rpython/jit/backend/llsupport/test/test_regalloc_integration.py --- a/rpython/jit/backend/llsupport/test/test_regalloc_integration.py +++ b/rpython/jit/backend/llsupport/test/test_regalloc_integration.py @@ -190,6 +190,20 @@ assert lrs.dist_to_next_call == [3, 2, 1, 0, 1, 0, -7, -8] + def test_compute_call_distances2(self): + ops = ''' + [p0,i0] + i1 = int_add(i0,i0) + i2 = int_sub(i0,i1) + call_n(p0, i1, descr=raising_calldescr) + i3 = int_mul(i2,i0) + jump(p0,i2) + ''' + loop = self.parse(ops) + lrs = compute_var_live_ranges(loop.inputargs, loop.operations) + assert lrs.dist_to_next_call == [2, 1, 0, -4, -5] + + def test_simple_loop(self): ops = ''' [i0] From pypy.commits at gmail.com Thu Feb 25 09:43:54 2016 From: pypy.commits at gmail.com (mjacob) Date: Thu, 25 Feb 2016 06:43:54 -0800 (PST) Subject: [pypy-commit] pypy default: Remove objspace.usepycfiles option. Message-ID: <56cf132a.aa17c20a.8b68b.76e7@mx.google.com> Author: Manuel Jacob Branch: Changeset: r82506:bc2f7f711496 Date: 2016-02-25 15:44 +0100 http://bitbucket.org/pypy/pypy/changeset/bc2f7f711496/ Log: Remove objspace.usepycfiles option. This option was needed for the sandbox feature, where it might not be allowed to write to the file system. Nowadays Python has a sys.dont_write_bytecode flag, which can be used for that. By default, this flags is `False`, unless when the sandbox feature is enabled; in this case it's set to `True`. 
diff --git a/pypy/config/pypyoption.py b/pypy/config/pypyoption.py --- a/pypy/config/pypyoption.py +++ b/pypy/config/pypyoption.py @@ -170,12 +170,8 @@ cmdline="--translationmodules", suggests=[("objspace.allworkingmodules", False)]), - BoolOption("usepycfiles", "Write and read pyc files when importing", - default=True), - BoolOption("lonepycfiles", "Import pyc files with no matching py file", - default=False, - requires=[("objspace.usepycfiles", True)]), + default=False), StrOption("soabi", "Tag to differentiate extension modules built for different Python interpreters", diff --git a/pypy/goal/targetpypystandalone.py b/pypy/goal/targetpypystandalone.py --- a/pypy/goal/targetpypystandalone.py +++ b/pypy/goal/targetpypystandalone.py @@ -277,7 +277,6 @@ if config.translation.sandbox: config.objspace.lonepycfiles = False - config.objspace.usepycfiles = False config.translating = True diff --git a/pypy/module/imp/importing.py b/pypy/module/imp/importing.py --- a/pypy/module/imp/importing.py +++ b/pypy/module/imp/importing.py @@ -85,7 +85,7 @@ # The "imp" module does not respect this, and is allowed to find # lone .pyc files. 
# check the .pyc file - if space.config.objspace.usepycfiles and space.config.objspace.lonepycfiles: + if space.config.objspace.lonepycfiles: pycfile = filepart + ".pyc" if file_exists(pycfile): # existing .pyc file @@ -888,17 +888,11 @@ """ w = space.wrap - if space.config.objspace.usepycfiles: - src_stat = os.fstat(fd) - cpathname = pathname + 'c' - mtime = int(src_stat[stat.ST_MTIME]) - mode = src_stat[stat.ST_MODE] - stream = check_compiled_module(space, cpathname, mtime) - else: - cpathname = None - mtime = 0 - mode = 0 - stream = None + src_stat = os.fstat(fd) + cpathname = pathname + 'c' + mtime = int(src_stat[stat.ST_MTIME]) + mode = src_stat[stat.ST_MODE] + stream = check_compiled_module(space, cpathname, mtime) if stream: # existing and up-to-date .pyc file @@ -913,7 +907,7 @@ else: code_w = parse_source_module(space, pathname, source) - if space.config.objspace.usepycfiles and write_pyc: + if write_pyc: if not space.is_true(space.sys.get('dont_write_bytecode')): write_compiled_module(space, code_w, cpathname, mode, mtime) diff --git a/pypy/module/imp/test/test_import.py b/pypy/module/imp/test/test_import.py --- a/pypy/module/imp/test/test_import.py +++ b/pypy/module/imp/test/test_import.py @@ -123,7 +123,7 @@ stream.try_to_find_file_descriptor()) finally: stream.close() - if space.config.objspace.usepycfiles: + if not space.config.translation.sandbox: # also create a lone .pyc file p.join('lone.pyc').write(p.join('x.pyc').read(mode='rb'), mode='wb') @@ -1349,8 +1349,14 @@ class AppTestWriteBytecode(object): + spaceconfig = { + "translation.sandbox": False + } + def setup_class(cls): cls.saved_modules = _setup(cls.space) + sandbox = cls.spaceconfig['translation.sandbox'] + cls.w_sandbox = cls.space.wrap(sandbox) def teardown_class(cls): _teardown(cls.space, cls.saved_modules) @@ -1364,7 +1370,7 @@ import os.path from test_bytecode import a assert a.__file__.endswith('a.py') - assert os.path.exists(a.__file__ + 'c') + assert os.path.exists(a.__file__ + 
'c') == (not self.sandbox) def test_write_bytecode(self): import os.path @@ -1383,15 +1389,15 @@ assert not os.path.exists(c.__file__ + 'c') -class AppTestNoPycFile(object): +class AppTestWriteBytecodeSandbox(AppTestWriteBytecode): spaceconfig = { - "objspace.usepycfiles": False, - "objspace.lonepycfiles": False + "translation.sandbox": True } + + +class _AppTestLonePycFileBase(object): def setup_class(cls): - usepycfiles = cls.spaceconfig['objspace.usepycfiles'] lonepycfiles = cls.spaceconfig['objspace.lonepycfiles'] - cls.w_usepycfiles = cls.space.wrap(usepycfiles) cls.w_lonepycfiles = cls.space.wrap(lonepycfiles) cls.saved_modules = _setup(cls.space) @@ -1400,10 +1406,7 @@ def test_import_possibly_from_pyc(self): from compiled import x - if self.usepycfiles: - assert x.__file__.endswith('x.pyc') - else: - assert x.__file__.endswith('x.py') + assert x.__file__.endswith('x.pyc') try: from compiled import lone except ImportError: @@ -1412,15 +1415,13 @@ assert self.lonepycfiles, "should not have found 'lone.pyc'" assert lone.__file__.endswith('lone.pyc') -class AppTestNoLonePycFile(AppTestNoPycFile): +class AppTestNoLonePycFile(_AppTestLonePycFileBase): spaceconfig = { - "objspace.usepycfiles": True, "objspace.lonepycfiles": False } -class AppTestLonePycFile(AppTestNoPycFile): +class AppTestLonePycFile(_AppTestLonePycFileBase): spaceconfig = { - "objspace.usepycfiles": True, "objspace.lonepycfiles": True } diff --git a/pypy/module/sys/__init__.py b/pypy/module/sys/__init__.py --- a/pypy/module/sys/__init__.py +++ b/pypy/module/sys/__init__.py @@ -77,7 +77,7 @@ 'meta_path' : 'space.wrap([])', 'path_hooks' : 'space.wrap([])', 'path_importer_cache' : 'space.wrap({})', - 'dont_write_bytecode' : 'space.w_False', + 'dont_write_bytecode' : 'space.wrap(space.config.translation.sandbox)', 'getdefaultencoding' : 'interp_encoding.getdefaultencoding', 'setdefaultencoding' : 'interp_encoding.setdefaultencoding', From pypy.commits at gmail.com Thu Feb 25 10:00:54 2016 From: 
pypy.commits at gmail.com (plan_rich) Date: Thu, 25 Feb 2016 07:00:54 -0800 (PST) Subject: [pypy-commit] pypy fix-longevity: (remi, plan_rich) forward with argument allocation for calls Message-ID: <56cf1726.46fac20a.5f8c7.ffffc031@mx.google.com> Author: Richard Plangger Branch: fix-longevity Changeset: r82507:5678f7fbd0b3 Date: 2016-02-25 15:49 +0100 http://bitbucket.org/pypy/pypy/changeset/5678f7fbd0b3/ Log: (remi, plan_rich) forward with argument allocation for calls diff --git a/rpython/jit/backend/llsupport/regalloc.py b/rpython/jit/backend/llsupport/regalloc.py --- a/rpython/jit/backend/llsupport/regalloc.py +++ b/rpython/jit/backend/llsupport/regalloc.py @@ -276,6 +276,7 @@ save_around_call_regs = [] frame_reg = None + # TODO would be good to keep free_caller_regs sorted (according to the ABI) free_callee_regs = [] free_caller_regs = [] is_callee_lookup = None @@ -300,6 +301,16 @@ else: target_pool = self.free_caller_regs second_pool = self.free_callee_regs + if target_reg is not None: + # try to allocate this regsiter to a special register + for i,reg in enumerate(target_pool): + if reg is target_reg: + del target_pool[i] + return reg + for i,reg in enumerate(second_pool): + if reg is target_reg: + del second_pool[i] + return reg if target_pool: return target_pool.pop() if second_pool: @@ -309,12 +320,25 @@ def has_free_registers(self): return self.free_callee_regs or self.free_caller_regs + def get_abi_param_register(self, i): + raise NotImplementedError + def allocate_new(self, var): if self.live_ranges.exists(var) and self.live_ranges.survives_call(var, self.position): # we want a callee save register return self.get_free_register(var, callee=True) else: - return self.get_free_register(var, callee=False, target_reg=None) + # if survives_call indicates that the live range ends at the call site + # we would like to allocate the register directly to the parameter + # register + + index = self.live_ranges.get_call_argument_index(var, self.position) + 
target_reg = None + if index != -1: + target_reg = self.get_abi_param_register(index) + + + return self.get_free_register(var, callee=False, target_reg=target_reg) def update_free_registers(self, regs_in_use): self._reinit_free_regs() @@ -641,13 +665,20 @@ if self.live_ranges.last_use(v) > self.position: # we need to find a new place for variable v and # store result in the same place + loc = self.reg_bindings[v] - del self.reg_bindings[v] - if self.frame_manager.get(v) is None or self.has_free_registers(): - self._move_variable_away(v, loc) + # only spill variable is allowed to reassign a new register to a live range + result_loc = self.force_allocate_reg(result_v, forbidden_vars=forbidden_vars) + self.assembler.regalloc_mov(loc, result_loc) + loc = result_loc - self.reg_bindings[result_v] = loc + #del self.reg_bindings[v] + #if self.frame_manager.get(v) is None or self.has_free_registers(): + # self._move_variable_away(v, loc) + + #self.reg_bindings[result_v] = loc else: + import pdb; pdb.set_trace() self._reallocate_from_to(v, result_v) loc = self.reg_bindings[result_v] return loc @@ -756,10 +787,11 @@ class LiveRanges(object): - def __init__(self, longevity, last_real_usage, dist_to_next_call): + def __init__(self, longevity, last_real_usage, dist_to_next_call, operations): self.longevity = longevity self.last_real_usage = last_real_usage self.dist_to_next_call = dist_to_next_call + self.operations = operations def exists(self, var): return var in self.longevity @@ -770,6 +802,14 @@ def new_live_range(self, var, start, end): self.longevity[var] = (start, end) + def get_call_argument_index(self, var, pos): + assert self.dist_to_next_call[pos] >= 0 + op = self.operations[pos + self.dist_to_next_call[pos]] + for i,arg in enumerate(op.getarglist()): + if arg is var: + return i-1 # first parameter is the functionh + return -1 + def survives_call(self, var, position): if not we_are_translated(): if self.dist_to_next_call is None: @@ -777,8 +817,8 @@ start, end = 
self.longevity[var] dist = self.dist_to_next_call[position] assert end >= position - if end-position <= dist: - # it is 'live during a call' if it live range ends after the call + if end-position < dist: + # the variable is used after the call instr return True return False @@ -796,6 +836,9 @@ last_call_pos = -1 for i in range(len(operations)-1, -1, -1): op = operations[i] + if op.is_call(): + last_call_pos = i + dist_to_next_call[i] = last_call_pos - i if op.type != 'v': if op not in last_used and op.has_no_side_effect(): continue @@ -816,9 +859,6 @@ assert not isinstance(arg, Const) if arg not in last_used: last_used[arg] = i - if op.is_call(): - last_call_pos = i - dist_to_next_call[i] = last_call_pos - i # longevity = {} for i, arg in enumerate(operations): @@ -846,7 +886,9 @@ assert arg in produced produced[op] = None - return LiveRanges(longevity, last_real_usage, dist_to_next_call) + lr = LiveRanges(longevity, last_real_usage, + dist_to_next_call, operations) + return lr def is_comparison_or_ovf_op(opnum): from rpython.jit.metainterp.resoperation import opclasses diff --git a/rpython/jit/backend/llsupport/test/test_regalloc_call.py b/rpython/jit/backend/llsupport/test/test_regalloc_call.py --- a/rpython/jit/backend/llsupport/test/test_regalloc_call.py +++ b/rpython/jit/backend/llsupport/test/test_regalloc_call.py @@ -196,12 +196,13 @@ i2 = int_sub(i0,i1) call_n(p0, i1, descr=calldescr) i3 = int_mul(i2,i0) + guard_true(i3) [] jump(p0,i2) """, namespace=self.namespace) i1 = ops.operations[0] i2 = ops.operations[1] - trace_alloc = TraceAllocation(ops, [eax, edx], [r8, r9, r10], [eax, r10], tt) - trace_alloc.run_allocation([r8,r9,edx]) + trace_alloc = TraceAllocation(ops, [eax, edx, get_param(0)], [r8, r9, r12], [eax, r12], tt) + trace_alloc.run_allocation() # we force the allocation to immediately take the first call parameter register # the new regalloc will not shuffle register binding around (other than spilling) # in the best case this will reduce a lot of 
movement diff --git a/rpython/jit/backend/x86/regalloc.py b/rpython/jit/backend/x86/regalloc.py --- a/rpython/jit/backend/x86/regalloc.py +++ b/rpython/jit/backend/x86/regalloc.py @@ -58,7 +58,12 @@ all_regs = [ecx, eax, edx, ebx, esi, edi, r8, r9, r10, r12, r13, r14, r15] no_lower_byte_regs = [] - save_around_call_regs = [eax, ecx, edx, esi, edi, r8, r9, r10] + abi_param_regs = [edi, esi, ecx, r8, r9] + save_around_call_regs = abi_param_regs + [eax, edx, r10] + + def get_abi_param_register(self, i): + assert i >= 0 and i < len(self.abi_param_regs) + return self.abi_param_regs[i] class X86XMMRegisterManager(RegisterManager): box_types = [FLOAT, INT] # yes INT! From pypy.commits at gmail.com Thu Feb 25 10:00:56 2016 From: pypy.commits at gmail.com (plan_rich) Date: Thu, 25 Feb 2016 07:00:56 -0800 (PST) Subject: [pypy-commit] pypy fix-longevity: merged fix-longevity Message-ID: <56cf1728.11301c0a.3d95a.27c5@mx.google.com> Author: Richard Plangger Branch: fix-longevity Changeset: r82508:947d61d6c498 Date: 2016-02-25 15:51 +0100 http://bitbucket.org/pypy/pypy/changeset/947d61d6c498/ Log: merged fix-longevity diff --git a/rpython/jit/backend/llsupport/test/test_regalloc_integration.py b/rpython/jit/backend/llsupport/test/test_regalloc_integration.py --- a/rpython/jit/backend/llsupport/test/test_regalloc_integration.py +++ b/rpython/jit/backend/llsupport/test/test_regalloc_integration.py @@ -190,6 +190,20 @@ assert lrs.dist_to_next_call == [3, 2, 1, 0, 1, 0, -7, -8] + def test_compute_call_distances2(self): + ops = ''' + [p0,i0] + i1 = int_add(i0,i0) + i2 = int_sub(i0,i1) + call_n(p0, i1, descr=raising_calldescr) + i3 = int_mul(i2,i0) + jump(p0,i2) + ''' + loop = self.parse(ops) + lrs = compute_var_live_ranges(loop.inputargs, loop.operations) + assert lrs.dist_to_next_call == [2, 1, 0, -4, -5] + + def test_simple_loop(self): ops = ''' [i0] From pypy.commits at gmail.com Thu Feb 25 10:17:43 2016 From: pypy.commits at gmail.com (Raemi) Date: Thu, 25 Feb 2016 07:17:43 -0800 
(PST) Subject: [pypy-commit] pypy fix-longevity: (plan_rich, remi) fix and test for call survival condition Message-ID: <56cf1b17.a185c20a.f07ce.ffffc6ef@mx.google.com> Author: Remi Meier Branch: fix-longevity Changeset: r82509:a1c4102f1903 Date: 2016-02-25 16:16 +0100 http://bitbucket.org/pypy/pypy/changeset/a1c4102f1903/ Log: (plan_rich,remi) fix and test for call survival condition diff --git a/rpython/jit/backend/llsupport/regalloc.py b/rpython/jit/backend/llsupport/regalloc.py --- a/rpython/jit/backend/llsupport/regalloc.py +++ b/rpython/jit/backend/llsupport/regalloc.py @@ -817,7 +817,7 @@ start, end = self.longevity[var] dist = self.dist_to_next_call[position] assert end >= position - if end-position < dist: + if dist >= 0 and position + dist < end: # the variable is used after the call instr return True return False diff --git a/rpython/jit/backend/llsupport/test/test_regalloc_integration.py b/rpython/jit/backend/llsupport/test/test_regalloc_integration.py --- a/rpython/jit/backend/llsupport/test/test_regalloc_integration.py +++ b/rpython/jit/backend/llsupport/test/test_regalloc_integration.py @@ -203,6 +203,24 @@ lrs = compute_var_live_ranges(loop.inputargs, loop.operations) assert lrs.dist_to_next_call == [2, 1, 0, -4, -5] + def test_survives_call(self): + ops = ''' + [p0,i0] + i1 = int_add(i0,i0) + i2 = int_sub(i0,i1) + call_n(p0, i1, descr=raising_calldescr) + i3 = int_mul(i2,i0) + guard_true(i3) [] + i5 = int_mul(i2,i0) + jump(p0,i2) + ''' + loop = self.parse(ops) + lrs = compute_var_live_ranges(loop.inputargs, loop.operations) + assert not lrs.survives_call(loop.operations[0], 0) + assert lrs.survives_call(loop.operations[1], 1) + assert not lrs.survives_call(loop.operations[3], 3) + assert not lrs.exists(loop.operations[5]) + def test_simple_loop(self): ops = ''' From pypy.commits at gmail.com Thu Feb 25 10:18:32 2016 From: pypy.commits at gmail.com (arigo) Date: Thu, 25 Feb 2016 07:18:32 -0800 (PST) Subject: [pypy-commit] extradoc extradoc: updates 
Message-ID: <56cf1b48.02931c0a.f6ac0.2c48@mx.google.com> Author: Armin Rigo Branch: extradoc Changeset: r5610:a8a9dcfcf523 Date: 2016-02-25 16:18 +0100 http://bitbucket.org/pypy/extradoc/changeset/a8a9dcfcf523/ Log: updates diff --git a/blog/draft/cpyext-gcsupport.rst b/blog/draft/cpyext-gcsupport.rst --- a/blog/draft/cpyext-gcsupport.rst +++ b/blog/draft/cpyext-gcsupport.rst @@ -1,23 +1,50 @@ CAPI Support update =================== -I have merged a rewrite of the interaction between c-API c-level objects and -interpreter level objects. Each refcounted c-level object is now reflected in -an interpreter level object, and the garbage collector can release the object -pair only if the refcount is 0 and the interpreter level object is not longer -referenced. +As you know, PyPy can emulate the CPython C API to some extent. It is +done by passing around ``PyObject *`` pointers. Inside PyPy, the +objects don't have the same ``PyObject *`` structure at all; and +additionally their memory address can change. PyPy handles the +difference by maintaining two sets of objects. More precisely, starting +from a PyPy object, it can allocate on demand a ``PyObject`` structure +and fill it with information that points back to the original PyPy +objects; and conversely, starting from a C-level object, it can allocate +a PyPy-level object and fill it with information in the opposite +direction. -The rewrite significantly simplifies our previous code, and should make using -the c-API less slow (it is still slower than using pure python though). -XXX citations needed ... +I have merged a rewrite of the interaction between C-API C-level objects +and PyPy's interpreter level objects. This is mostly a simplification +based on a small hack in our garbage collector. This hack makes the +garbage collector aware of the reference-counted ``PyObject`` +structures. 
When it considers a pair consisting of a PyPy object and a +``PyObject``, it will always free either none or both of them at the +same time. They both stay alive if *either* there is a regular GC +reference to the PyPy object, *or* the reference counter in the +``PyObject`` is bigger than zero. -The good news is that now PyPy can support the upstream `lxml`_ package, which is -is one of the most popular packages on PyPI (specifically version X.X.X with old -PyPy specific hacks removed). We do recommend using the `cffi lxml`_ alternative, -since it will be faster on PyPy. +This gives a more stable result. Previously, a PyPy object might grow a +corresponding ``PyObject``, loose it (when its reference counter goes to +zero), and later have another corresponding ``PyObject`` re-created at a +different address. Now, once a link is created, it remains alive until +both objects die. -We are actively working on extending our c-API support, and hope to soon merge -a branch to support more of the c-API functions. Please try it out and let us -know how it works for you. +The rewrite significantly simplifies our previous code (which used to be +based on at least 4 different dictionaries), and should make using the +C-API less slow (it is still slower than using pure python or cffi). + +So, the good news is that now PyPy actually supports the upstream +`lxml`_ package---which is is one of the most popular packages on PyPI. +(Specifically, you need version 3.5.0 with +https://github.com/lxml/lxml/pull/187 to remove old PyPy-specific hacks +that were not really working.) At this point, we no longer recommend +using the `cffi lxml`_ alternative: although it may still be faster, it +might be incomplete and old. + +We are actively working on extending our C-API support, and hope to soon +merge a branch to support more of the C-API functions (some numpy news +coming!). Please `try it out`_ and let us know how it works for you. 
+ +_`lxml`: https://github.com/lxml/lxml +_`try it out`: http://buildbot.pypy.org/nightly/trunk/ Armin Rigo and the PyPy team From pypy.commits at gmail.com Thu Feb 25 10:23:50 2016 From: pypy.commits at gmail.com (arigo) Date: Thu, 25 Feb 2016 07:23:50 -0800 (PST) Subject: [pypy-commit] extradoc extradoc: updates Message-ID: <56cf1c86.07811c0a.53bc4.2e7c@mx.google.com> Author: Armin Rigo Branch: extradoc Changeset: r5611:ff9f6252e503 Date: 2016-02-25 16:23 +0100 http://bitbucket.org/pypy/extradoc/changeset/ff9f6252e503/ Log: updates diff --git a/blog/draft/cpyext-gcsupport.rst b/blog/draft/cpyext-gcsupport.rst --- a/blog/draft/cpyext-gcsupport.rst +++ b/blog/draft/cpyext-gcsupport.rst @@ -36,15 +36,16 @@ `lxml`_ package---which is is one of the most popular packages on PyPI. (Specifically, you need version 3.5.0 with https://github.com/lxml/lxml/pull/187 to remove old PyPy-specific hacks -that were not really working.) At this point, we no longer recommend -using the `cffi lxml`_ alternative: although it may still be faster, it -might be incomplete and old. +that were not really working. See details__.) At this point, we no +longer recommend using the cffi-lxml alternative: although it may +still be faster, it might be incomplete and old. We are actively working on extending our C-API support, and hope to soon merge a branch to support more of the C-API functions (some numpy news coming!). Please `try it out`_ and let us know how it works for you. 
_`lxml`: https://github.com/lxml/lxml +__: https://bitbucket.org/pypy/compatibility/wiki/lxml _`try it out`: http://buildbot.pypy.org/nightly/trunk/ Armin Rigo and the PyPy team From pypy.commits at gmail.com Thu Feb 25 10:33:42 2016 From: pypy.commits at gmail.com (plan_rich) Date: Thu, 25 Feb 2016 07:33:42 -0800 (PST) Subject: [pypy-commit] pypy fix-longevity: (remi, plan_rich) the first version that correctly considers call args and live ranges that survive calls Message-ID: <56cf1ed6.4c181c0a.a178c.3339@mx.google.com> Author: Richard Plangger Branch: fix-longevity Changeset: r82510:9823e109f391 Date: 2016-02-25 16:30 +0100 http://bitbucket.org/pypy/pypy/changeset/9823e109f391/ Log: (remi, plan_rich) the first version that correctly considers call args and live ranges that survive calls diff --git a/rpython/jit/backend/llsupport/test/test_regalloc_call.py b/rpython/jit/backend/llsupport/test/test_regalloc_call.py --- a/rpython/jit/backend/llsupport/test/test_regalloc_call.py +++ b/rpython/jit/backend/llsupport/test/test_regalloc_call.py @@ -201,7 +201,7 @@ """, namespace=self.namespace) i1 = ops.operations[0] i2 = ops.operations[1] - trace_alloc = TraceAllocation(ops, [eax, edx, get_param(0)], [r8, r9, r12], [eax, r12], tt) + trace_alloc = TraceAllocation(ops, [r13, edx, get_param(0)], [r8, r9, r12], [r13, r12], tt) trace_alloc.run_allocation() # we force the allocation to immediately take the first call parameter register # the new regalloc will not shuffle register binding around (other than spilling) @@ -209,7 +209,10 @@ assert trace_alloc.initial_register(i1) == get_param(0) assert trace_alloc.is_caller_saved(i1) assert trace_alloc.is_callee_saved(i2) - assert trace_alloc.move_count() == 1 + # two moves to preserve the live range -> register mapping (i0,i2) + # p0 is reloaded before the jump (because it is a gc pointer) + # i2 is not at the right location + assert trace_alloc.move_count() == 4 def test_call_allocate_first_param_to_callee2(self): tt, ops = 
parse_loop(""" From pypy.commits at gmail.com Thu Feb 25 10:38:07 2016 From: pypy.commits at gmail.com (plan_rich) Date: Thu, 25 Feb 2016 07:38:07 -0800 (PST) Subject: [pypy-commit] pypy fix-longevity: (remi, plan_rich) did not save two files Message-ID: <56cf1fdf.44e01c0a.4fffa.3a7a@mx.google.com> Author: Richard Plangger Branch: fix-longevity Changeset: r82511:bcfdedbf3019 Date: 2016-02-25 16:35 +0100 http://bitbucket.org/pypy/pypy/changeset/bcfdedbf3019/ Log: (remi, plan_rich) did not save two files diff --git a/rpython/jit/backend/llsupport/regalloc.py b/rpython/jit/backend/llsupport/regalloc.py --- a/rpython/jit/backend/llsupport/regalloc.py +++ b/rpython/jit/backend/llsupport/regalloc.py @@ -672,13 +672,13 @@ self.assembler.regalloc_mov(loc, result_loc) loc = result_loc + #del self.reg_bindings[v] #if self.frame_manager.get(v) is None or self.has_free_registers(): # self._move_variable_away(v, loc) #self.reg_bindings[result_v] = loc else: - import pdb; pdb.set_trace() self._reallocate_from_to(v, result_v) loc = self.reg_bindings[result_v] return loc @@ -804,7 +804,10 @@ def get_call_argument_index(self, var, pos): assert self.dist_to_next_call[pos] >= 0 - op = self.operations[pos + self.dist_to_next_call[pos]] + dist_to_call = self.dist_to_next_call[pos] + if dist_to_call < 0: + return -1 + op = self.operations[pos + dist_to_call] for i,arg in enumerate(op.getarglist()): if arg is var: return i-1 # first parameter is the functionh @@ -817,7 +820,7 @@ start, end = self.longevity[var] dist = self.dist_to_next_call[position] assert end >= position - if dist >= 0 and position + dist < end: + if dist >= 0 and position+dist < end: # the variable is used after the call instr return True return False From pypy.commits at gmail.com Thu Feb 25 10:47:03 2016 From: pypy.commits at gmail.com (Raemi) Date: Thu, 25 Feb 2016 07:47:03 -0800 (PST) Subject: [pypy-commit] pypy fix-longevity: (plan_rich, remi) fix for updating bridge argument register assignment Message-ID: 
<56cf21f7.890bc30a.2c3bc.ffffd289@mx.google.com> Author: Remi Meier Branch: fix-longevity Changeset: r82512:af0b03bc7ce0 Date: 2016-02-25 16:46 +0100 http://bitbucket.org/pypy/pypy/changeset/af0b03bc7ce0/ Log: (plan_rich,remi) fix for updating bridge argument register assignment diff --git a/rpython/jit/backend/llsupport/regalloc.py b/rpython/jit/backend/llsupport/regalloc.py --- a/rpython/jit/backend/llsupport/regalloc.py +++ b/rpython/jit/backend/llsupport/regalloc.py @@ -803,7 +803,6 @@ self.longevity[var] = (start, end) def get_call_argument_index(self, var, pos): - assert self.dist_to_next_call[pos] >= 0 dist_to_call = self.dist_to_next_call[pos] if dist_to_call < 0: return -1 diff --git a/rpython/jit/backend/x86/regalloc.py b/rpython/jit/backend/x86/regalloc.py --- a/rpython/jit/backend/x86/regalloc.py +++ b/rpython/jit/backend/x86/regalloc.py @@ -274,6 +274,7 @@ def _update_bindings(self, arglocs, inputargs): # XXX this should probably go to llsupport/regalloc.py used = set() + used_xmm = set() i = 0 # manually set the register and frame bindings for # all inputargs (for a bridge) @@ -285,7 +286,7 @@ if isinstance(loc, RegLoc): if arg.type == FLOAT: self.xrm.reg_bindings[arg] = loc - used.add(loc) + used_xmm.add(loc) else: if loc is ebp: self.rm.bindings_to_frame_reg[arg] = None @@ -296,7 +297,7 @@ self.fm.bind(arg, loc) # self.rm.update_free_registers(used) - self.xrm.update_free_registers(used) + self.xrm.update_free_registers(used_xmm) self.possibly_free_vars(list(inputargs)) self.fm.finish_binding() self.rm._check_invariants() From pypy.commits at gmail.com Thu Feb 25 11:02:39 2016 From: pypy.commits at gmail.com (rlamy) Date: Thu, 25 Feb 2016 08:02:39 -0800 (PST) Subject: [pypy-commit] pypy cpyext-ext: Fix refcount bug in test Message-ID: <56cf259f.162f1c0a.a2d88.400b@mx.google.com> Author: Ronan Lamy Branch: cpyext-ext Changeset: r82513:4f809c093ddd Date: 2016-02-25 17:00 +0100 http://bitbucket.org/pypy/pypy/changeset/4f809c093ddd/ Log: Fix refcount 
bug in test diff --git a/pypy/module/cpyext/test/test_dictobject.py b/pypy/module/cpyext/test/test_dictobject.py --- a/pypy/module/cpyext/test/test_dictobject.py +++ b/pypy/module/cpyext/test/test_dictobject.py @@ -169,9 +169,8 @@ w_proxy, space.wrap('sys')) raises(OperationError, space.call_method, w_proxy, 'clear') assert api.PyDictProxy_Check(w_proxy) - + class AppTestDictObject(AppTestCpythonExtensionBase): - #@py.test.mark.xfail(reason='make_frozendict memoize only works translated') def test_dictproxytype(self): module = self.import_extension('foo', [ ("dict_proxy", "METH_VARARGS", @@ -182,12 +181,11 @@ if (!PyArg_ParseTuple(args, "O", &dict)) return NULL; proxydict = PyDictProxy_New(dict); - Py_DECREF(dict); if (!PyDictProxy_Check(proxydict)) { Py_DECREF(proxydict); PyErr_SetNone(PyExc_ValueError); return NULL; - } + } if (!PyDictProxy_CheckExact(proxydict)) { Py_DECREF(proxydict); PyErr_SetNone(PyExc_ValueError); @@ -195,7 +193,7 @@ } i = PyObject_Size(proxydict); Py_DECREF(proxydict); - return PyLong_FromLong(i); + return PyLong_FromLong(i); """), ]) assert module.dict_proxy({'a': 1, 'b': 2}) == 2 From pypy.commits at gmail.com Thu Feb 25 11:03:58 2016 From: pypy.commits at gmail.com (Raemi) Date: Thu, 25 Feb 2016 08:03:58 -0800 (PST) Subject: [pypy-commit] pypy fix-longevity: (plan_rich, remi) add additional arg to LiveRanges() in test and fix test Message-ID: <56cf25ee.44e21c0a.c3594.45cc@mx.google.com> Author: Remi Meier Branch: fix-longevity Changeset: r82514:46c6887af094 Date: 2016-02-25 17:03 +0100 http://bitbucket.org/pypy/pypy/changeset/46c6887af094/ Log: (plan_rich,remi) add additional arg to LiveRanges() in test and fix test diff --git a/rpython/jit/backend/llsupport/regalloc.py b/rpython/jit/backend/llsupport/regalloc.py --- a/rpython/jit/backend/llsupport/regalloc.py +++ b/rpython/jit/backend/llsupport/regalloc.py @@ -643,9 +643,10 @@ self.assembler.regalloc_mov(prev_loc, loc) def force_result_in_reg(self, result_v, v, forbidden_vars=[]): - """ 
Make sure that result is in the same register as v. - The variable v is copied away if it's further used. The meaning - of 'forbidden_vars' is the same as in 'force_allocate_reg'. + """ Allocate a register for result_v and copy the contents of v's + register to result_v's register. v will stay in the register it + was initially assigned to. The meaning of 'forbidden_vars' is the + same as in 'force_allocate_reg'. """ self._check_type(result_v) self._check_type(v) @@ -803,6 +804,9 @@ self.longevity[var] = (start, end) def get_call_argument_index(self, var, pos): + if not we_are_translated(): # tests + if self.dist_to_next_call is None: + return -1 dist_to_call = self.dist_to_next_call[pos] if dist_to_call < 0: return -1 diff --git a/rpython/jit/backend/llsupport/test/test_regalloc.py b/rpython/jit/backend/llsupport/test/test_regalloc.py --- a/rpython/jit/backend/llsupport/test/test_regalloc.py +++ b/rpython/jit/backend/llsupport/test/test_regalloc.py @@ -78,7 +78,7 @@ def test_freeing_vars(self): b0, b1, b2 = newboxes(0, 0, 0) longevity = {b0: (0, 1), b1: (0, 2), b2: (0, 2)} - rm = RegisterManager(LiveRanges(longevity, None, None)) + rm = RegisterManager(LiveRanges(longevity, None, None, None)) rm.next_instruction() for b in b0, b1, b2: rm.try_allocate_reg(b) @@ -103,7 +103,7 @@ def test_register_exhaustion(self): boxes, longevity = boxes_and_longevity(5) - rm = RegisterManager(LiveRanges(longevity, None, None)) + rm = RegisterManager(LiveRanges(longevity, None, None, None)) rm.next_instruction() for b in boxes[:len(regs)]: assert rm.try_allocate_reg(b) @@ -117,7 +117,7 @@ class XRegisterManager(RegisterManager): no_lower_byte_regs = [r2, r3] - rm = XRegisterManager(LiveRanges(longevity, None, None)) + rm = XRegisterManager(LiveRanges(longevity, None, None, None)) rm.next_instruction() loc0 = rm.try_allocate_reg(b0, need_lower_byte=True) assert loc0 not in XRegisterManager.no_lower_byte_regs @@ -131,7 +131,7 @@ def test_specific_register(self): boxes, longevity = 
boxes_and_longevity(5) - rm = RegisterManager(LiveRanges(longevity, None, None)) + rm = RegisterManager(LiveRanges(longevity, None, None, None)) rm.next_instruction() loc = rm.try_allocate_reg(boxes[0], selected_reg=r1) assert loc is r1 @@ -152,7 +152,7 @@ class XRegisterManager(RegisterManager): no_lower_byte_regs = [r2, r3] - rm = XRegisterManager(LiveRanges(longevity, None, None), + rm = XRegisterManager(LiveRanges(longevity, None, None, None), frame_manager=fm, assembler=MockAsm()) rm.next_instruction() @@ -178,7 +178,7 @@ def test_make_sure_var_in_reg(self): boxes, longevity = boxes_and_longevity(5) fm = TFrameManager() - rm = RegisterManager(LiveRanges(longevity, None, None), frame_manager=fm, + rm = RegisterManager(LiveRanges(longevity, None, None, None), frame_manager=fm, assembler=MockAsm()) rm.next_instruction() # allocate a stack position @@ -194,7 +194,7 @@ longevity = {b0: (0, 1), b1: (1, 3)} fm = TFrameManager() asm = MockAsm() - rm = RegisterManager(LiveRanges(longevity, None, None), frame_manager=fm, assembler=asm) + rm = RegisterManager(LiveRanges(longevity, None, None, None), frame_manager=fm, assembler=asm) rm.next_instruction() # first path, var is already in reg and dies loc0 = rm.force_allocate_reg(b0) @@ -210,14 +210,15 @@ longevity = {b0: (0, 2), b1: (1, 3)} fm = TFrameManager() asm = MockAsm() - rm = RegisterManager(LiveRanges(longevity, None, None), frame_manager=fm, assembler=asm) + rm = RegisterManager(LiveRanges(longevity, None, None, None), frame_manager=fm, assembler=asm) rm.next_instruction() loc0 = rm.force_allocate_reg(b0) rm._check_invariants() rm.next_instruction() loc = rm.force_result_in_reg(b1, b0) - assert loc is loc0 - assert rm.loc(b0) is not loc0 + assert loc is not loc0 + assert rm.loc(b0) is not loc + assert rm.loc(b0) is loc0 assert len(asm.moves) == 1 rm._check_invariants() @@ -226,7 +227,7 @@ longevity = {b0: (0, 2), b1: (0, 2), b3: (0, 2), b2: (0, 2), b4: (1, 3)} fm = TFrameManager() asm = MockAsm() - rm = 
RegisterManager(LiveRanges(longevity, None, None), frame_manager=fm, assembler=asm) + rm = RegisterManager(LiveRanges(longevity, None, None, None), frame_manager=fm, assembler=asm) rm.next_instruction() for b in b0, b1, b2, b3: rm.force_allocate_reg(b) @@ -242,7 +243,7 @@ longevity = {b0: (0, 1), b1: (0, 1)} fm = TFrameManager() asm = MockAsm() - rm = RegisterManager(LiveRanges(longevity, None, None), frame_manager=fm, assembler=asm) + rm = RegisterManager(LiveRanges(longevity, None, None, None), frame_manager=fm, assembler=asm) rm.next_instruction() fm.loc(b0) rm.force_result_in_reg(b1, b0) @@ -258,7 +259,7 @@ longevity = {b0: (0, 1), b1: (0, 1)} fm = TFrameManager() asm = MockAsm() - rm = RegisterManager(LiveRanges(longevity, None, None), frame_manager=fm, assembler=asm) + rm = RegisterManager(LiveRanges(longevity, None, None, None), frame_manager=fm, assembler=asm) rm.free_callee_regs = rm.free_callee_regs[:1] rm.all_regs = rm.free_callee_regs[:] rm.next_instruction() @@ -276,7 +277,7 @@ longevity = {b0: (0, 1)} fm = TFrameManager() asm = MockAsm() - rm = RegisterManager(LiveRanges(longevity, None, None), frame_manager=fm, assembler=asm) + rm = RegisterManager(LiveRanges(longevity, None, None, None), frame_manager=fm, assembler=asm) rm.next_instruction() # invalid call to make_sure_var_in_reg(): box unknown so far py.test.raises(KeyError, rm.make_sure_var_in_reg, b0) @@ -285,7 +286,7 @@ asm = MockAsm() boxes, longevity = boxes_and_longevity(5) fm = TFrameManager() - rm = RegisterManager(LiveRanges(longevity, None, None), assembler=asm, + rm = RegisterManager(LiveRanges(longevity, None, None, None), assembler=asm, frame_manager=fm) rm.next_instruction() loc = rm.return_constant(ConstInt(1), selected_reg=r1) @@ -304,7 +305,7 @@ boxes, longevity = boxes_and_longevity(2) fm = TFrameManager() asm = MockAsm() - rm = RegisterManager(LiveRanges(longevity, None, None), frame_manager=fm, + rm = RegisterManager(LiveRanges(longevity, None, None, None), frame_manager=fm, 
assembler=asm) rm.next_instruction() c = ConstInt(0) @@ -326,7 +327,7 @@ fm = TFrameManager() asm = MockAsm() boxes, longevity = boxes_and_longevity(5) - rm = XRegisterManager(LiveRanges(longevity, None, None), frame_manager=fm, + rm = XRegisterManager(LiveRanges(longevity, None, None, None), frame_manager=fm, assembler=asm) for b in boxes[:-1]: rm.force_allocate_reg(b) @@ -349,7 +350,7 @@ fm = TFrameManager() asm = MockAsm() boxes, longevity = boxes_and_longevity(5) - rm = XRegisterManager(LiveRanges(longevity, None, None), frame_manager=fm, + rm = XRegisterManager(LiveRanges(longevity, None, None, None), frame_manager=fm, assembler=asm) for b in boxes[:-1]: rm.force_allocate_reg(b) @@ -371,10 +372,10 @@ b0 = InputArgInt() longevity = {b0: (0, 1)} asm = MockAsm() - rm = RegisterManager(LiveRanges(longevity, None, None), frame_manager=fm, assembler=asm) + rm = RegisterManager(LiveRanges(longevity, None, None, None), frame_manager=fm, assembler=asm) f0 = InputArgFloat() longevity = {f0: (0, 1)} - xrm = XRegisterManager(LiveRanges(longevity, None, None), frame_manager=fm, assembler=asm) + xrm = XRegisterManager(LiveRanges(longevity, None, None, None), frame_manager=fm, assembler=asm) xrm.loc(f0) rm.loc(b0) assert fm.get_frame_depth() == 3 @@ -384,7 +385,7 @@ longevity = {b0: (0, 3), b1: (0, 3), b3: (0, 5), b2: (0, 2), b4: (1, 4), b5: (1, 3)} fm = TFrameManager() asm = MockAsm() - rm = RegisterManager(LiveRanges(longevity, None, None), frame_manager=fm, assembler=asm) + rm = RegisterManager(LiveRanges(longevity, None, None, None), frame_manager=fm, assembler=asm) rm.next_instruction() for b in b0, b1, b2, b3: rm.force_allocate_reg(b) From pypy.commits at gmail.com Thu Feb 25 11:15:43 2016 From: pypy.commits at gmail.com (mjacob) Date: Thu, 25 Feb 2016 08:15:43 -0800 (PST) Subject: [pypy-commit] pypy py3k: hg merge default Message-ID: <56cf28af.0775c20a.81e6.ffffd974@mx.google.com> Author: Manuel Jacob Branch: py3k Changeset: r82515:299018381016 Date: 2016-02-25 16:38 
+0100 http://bitbucket.org/pypy/pypy/changeset/299018381016/ Log: hg merge default diff --git a/LICENSE b/LICENSE --- a/LICENSE +++ b/LICENSE @@ -41,29 +41,29 @@ Amaury Forgeot d'Arc Antonio Cuni Samuele Pedroni + Matti Picus Alex Gaynor Brian Kearns - Matti Picus Philip Jenvey Michael Hudson + Ronan Lamy David Schneider + Manuel Jacob Holger Krekel Christian Tismer Hakan Ardo - Manuel Jacob - Ronan Lamy Benjamin Peterson + Richard Plangger Anders Chrigstrom Eric van Riet Paap Wim Lavrijsen - Richard Plangger Richard Emslie Alexander Schremmer Dan Villiom Podlaski Christiansen + Remi Meier Lukas Diekmann Sven Hager Anders Lehmann - Remi Meier Aurelien Campeas Niklaus Haldimann Camillo Bruni @@ -72,8 +72,8 @@ Romain Guillebert Leonardo Santagada Seo Sanghyeon + Ronny Pfannschmidt Justin Peel - Ronny Pfannschmidt David Edelsohn Anders Hammarquist Jakub Gustak @@ -95,6 +95,7 @@ Tyler Wade Michael Foord Stephan Diehl + Vincent Legoll Stefan Schwarzer Valentino Volonghi Tomek Meka @@ -105,9 +106,9 @@ Jean-Paul Calderone Timo Paulssen Squeaky + Marius Gedminas Alexandre Fayolle Simon Burton - Marius Gedminas Martin Matusiak Konstantin Lopuhin Wenzhu Man @@ -116,16 +117,20 @@ Ivan Sichmann Freitas Greg Price Dario Bertini + Stefano Rivera Mark Pearse Simon Cross Andreas Stührk - Stefano Rivera + Edd Barrett Jean-Philippe St. Pierre Guido van Rossum Pavel Vinogradov + Jeremy Thurgood Paweł Piotr Przeradowski + Spenser Bauman Paul deGrandis Ilya Osadchiy + marky1991 Tobias Oberstein Adrian Kuhn Boris Feigin @@ -134,14 +139,12 @@ Georg Brandl Bert Freudenberg Stian Andreassen - Edd Barrett + Tobias Pape Wanja Saatkamp Gerald Klix Mike Blume - Tobias Pape Oscar Nierstrasz Stefan H. 
Muller - Jeremy Thurgood Rami Chowdhury Eugene Oden Henry Mason @@ -153,6 +156,8 @@ Lukas Renggli Guenter Jantzen Ned Batchelder + Tim Felgentreff + Anton Gulenko Amit Regmi Ben Young Nicolas Chauvat @@ -162,12 +167,12 @@ Nicholas Riley Jason Chu Igor Trindade Oliveira - Tim Felgentreff + Yichao Yu Rocco Moretti Gintautas Miliauskas Michael Twomey Lucian Branescu Mihaila - Yichao Yu + Devin Jeanpierre Gabriel Lavoie Olivier Dormond Jared Grubb @@ -191,33 +196,33 @@ Stanislaw Halik Mikael Schönenberg Berkin Ilbeyi - Elmo M?ntynen + Elmo Mäntynen + Faye Zhao Jonathan David Riehl Anders Qvist Corbin Simpson Chirag Jadwani Beatrice During Alex Perry - Vincent Legoll + Vaibhav Sood Alan McIntyre - Spenser Bauman + William Leslie Alexander Sedov Attila Gobi + Jasper.Schulz Christopher Pope - Devin Jeanpierre - Vaibhav Sood Christian Tismer Marc Abramowitz Dan Stromberg Arjun Naik Valentina Mukhamedzhanova Stefano Parmesan + Mark Young Alexis Daboville Jens-Uwe Mager Carl Meyer Karl Ramm Pieter Zieschang - Anton Gulenko Gabriel Lukas Vacek Andrew Dalke @@ -225,6 +230,7 @@ Jakub Stasiak Nathan Taylor Vladimir Kryachko + Omer Katz Jacek Generowicz Alejandro J. Cura Jacob Oscarson @@ -239,6 +245,7 @@ Lars Wassermann Philipp Rustemeuer Henrik Vendelbo + Richard Lancaster Dan Buch Miguel de Val Borro Artur Lisiecki @@ -250,18 +257,18 @@ Tomo Cocoa Kim Jin Su Toni Mattis + Amber Brown Lucas Stadler Julian Berman Markus Holtermann roberto at goyle Yury V. 
Zaytsev Anna Katrina Dominguez - William Leslie Bobby Impollonia - Faye Zhao timo at eistee.fritz.box Andrew Thompson Yusei Tahara + Aaron Tubbs Ben Darnell Roberto De Ioris Juan Francisco Cantero Hurtado @@ -273,6 +280,7 @@ Christopher Armstrong Michael Hudson-Doyle Anders Sigfridsson + Nikolay Zinov Yasir Suhail Jason Michalski rafalgalczynski at gmail.com @@ -282,6 +290,7 @@ Gustavo Niemeyer Stephan Busemann Rafał Gałczyński + Matt Bogosian Christian Muirhead Berker Peksag James Lan @@ -316,9 +325,9 @@ Stefan Marr jiaaro Mads Kiilerich - Richard Lancaster opassembler.py Antony Lee + Jason Madden Yaroslav Fedevych Jim Hunziker Markus Unterwaditzer @@ -327,6 +336,7 @@ squeaky Zearin soareschen + Jonas Pfannschmidt Kurt Griffiths Mike Bayer Matthew Miller diff --git a/pypy/config/pypyoption.py b/pypy/config/pypyoption.py --- a/pypy/config/pypyoption.py +++ b/pypy/config/pypyoption.py @@ -172,9 +172,6 @@ cmdline="--translationmodules", suggests=[("objspace.allworkingmodules", False)]), - BoolOption("usepycfiles", "Write and read pyc files when importing", - default=True), - StrOption("soabi", "Tag to differentiate extension modules built for different Python interpreters", cmdline="--soabi", diff --git a/pypy/doc/how-to-release.rst b/pypy/doc/how-to-release.rst --- a/pypy/doc/how-to-release.rst +++ b/pypy/doc/how-to-release.rst @@ -1,5 +1,20 @@ -Making a PyPy Release -===================== +The PyPy Release Process +======================== + +Release Policy +++++++++++++++ + +We try to create a stable release a few times a year. These are released on +a branch named like release-2.x or release-4.x, and each release is tagged, +for instance release-4.0.1. + +After release, inevitably there are bug fixes. It is the responsibility of +the commiter who fixes a bug to make sure this fix is on the release branch, +so that we can then create a tagged bug-fix release, which will hopefully +happen more often than stable releases. 
+ +How to Create a PyPy Release +++++++++++++++++++++++++++++ Overview -------- diff --git a/pypy/doc/tool/makecontributor.py b/pypy/doc/tool/makecontributor.py --- a/pypy/doc/tool/makecontributor.py +++ b/pypy/doc/tool/makecontributor.py @@ -72,6 +72,7 @@ 'Anton Gulenko':['anton gulenko', 'anton_gulenko'], 'Richard Lancaster':['richardlancaster'], 'William Leslie':['William ML Leslie'], + 'Spenser Bauman':['Spenser Andrew Bauman'], } alias_map = {} diff --git a/pypy/goal/targetpypystandalone.py b/pypy/goal/targetpypystandalone.py --- a/pypy/goal/targetpypystandalone.py +++ b/pypy/goal/targetpypystandalone.py @@ -282,9 +282,6 @@ elif config.objspace.usemodules.pypyjit: config.translation.jit = True - if config.translation.sandbox: - config.objspace.usepycfiles = False - config.translating = True import translate diff --git a/pypy/module/imp/importing.py b/pypy/module/imp/importing.py --- a/pypy/module/imp/importing.py +++ b/pypy/module/imp/importing.py @@ -76,13 +76,6 @@ if file_exists(pyfile): return PY_SOURCE, ".pyw", "U" - # The .py file does not exist, check the .pyc file - if space.config.objspace.usepycfiles: - pycfile = filepart + ".pyc" - if file_exists(pycfile): - # existing .pyc file - return PY_COMPILED, ".pyc", "rb" - if has_so_extension(space): so_extension = get_so_extension(space) pydfile = filepart + so_extension @@ -978,17 +971,11 @@ """ w = space.wrap - if space.config.objspace.usepycfiles: - src_stat = os.fstat(fd) - cpathname = make_compiled_pathname(pathname) - mtime = int(src_stat[stat.ST_MTIME]) - mode = src_stat[stat.ST_MODE] - stream = check_compiled_module(space, cpathname, mtime) - else: - cpathname = None - mtime = 0 - mode = 0 - stream = None + src_stat = os.fstat(fd) + cpathname = make_compiled_pathname(pathname) + mtime = int(src_stat[stat.ST_MTIME]) + mode = src_stat[stat.ST_MODE] + stream = check_compiled_module(space, cpathname, mtime) if stream: # existing and up-to-date .pyc file @@ -1002,7 +989,7 @@ else: code_w = 
parse_source_module(space, pathname, source) - if space.config.objspace.usepycfiles and write_pyc: + if write_pyc: if not space.is_true(space.sys.get('dont_write_bytecode')): write_compiled_module(space, code_w, cpathname, mode, mtime) diff --git a/pypy/module/imp/test/test_import.py b/pypy/module/imp/test/test_import.py --- a/pypy/module/imp/test/test_import.py +++ b/pypy/module/imp/test/test_import.py @@ -104,6 +104,10 @@ 'a=5\nb=6\rc="""hello\r\nworld"""\r', mode='wb') p.join('mod.py').write( 'a=15\nb=16\rc="""foo\r\nbar"""\r', mode='wb') + setuppkg("test_bytecode", + a = '', + b = '', + c = '') p = setuppkg("encoded", # actually a line 2, setuppkg() sets up a line1 line2 = "# encoding: iso-8859-1\n", @@ -159,7 +163,7 @@ stream.try_to_find_file_descriptor()) finally: stream.close() - if space.config.objspace.usepycfiles: + if not space.config.translation.sandbox: # also create a lone .pyc file p.join('lone.pyc').write(p.join(pycname).read(mode='rb'), mode='wb') @@ -190,6 +194,9 @@ """) def _teardown(space, w_saved_modules): + p = udir.join('impsubdir') + if p.check(): + p.remove() space.appexec([w_saved_modules], """ (path_and_modules): saved_path, saved_modules = path_and_modules @@ -1501,13 +1508,55 @@ assert isinstance(importer, zipimport.zipimporter) -class AppTestNoPycFile(object): +class AppTestWriteBytecode(object): spaceconfig = { - "objspace.usepycfiles": False, + "translation.sandbox": False } + def setup_class(cls): - usepycfiles = cls.spaceconfig['objspace.usepycfiles'] - cls.w_usepycfiles = cls.space.wrap(usepycfiles) + cls.saved_modules = _setup(cls) + sandbox = cls.spaceconfig['translation.sandbox'] + cls.w_sandbox = cls.space.wrap(sandbox) + + def teardown_class(cls): + _teardown(cls.space, cls.saved_modules) + cls.space.appexec([], """ + (): + import sys + sys.dont_write_bytecode = False + """) + + def test_default(self): + import os.path + from test_bytecode import a + assert a.__file__.endswith('a.py') + assert os.path.exists(a.__cached__) == 
(not self.sandbox) + + def test_write_bytecode(self): + import os.path + import sys + sys.dont_write_bytecode = False + from test_bytecode import b + assert b.__file__.endswith('b.py') + assert os.path.exists(b.__cached__) + + def test_dont_write_bytecode(self): + import os.path + import sys + sys.dont_write_bytecode = True + from test_bytecode import c + assert c.__file__.endswith('c.py') + assert not os.path.exists(c.__cached__) + + +class AppTestWriteBytecodeSandbox(AppTestWriteBytecode): + spaceconfig = { + "translation.sandbox": True + } + + +class AppTestNoLonePycFile(object): + def setup_class(cls): cls.saved_modules = _setup(cls) def teardown_class(cls): @@ -1519,15 +1568,10 @@ try: from compiled import lone except ImportError: - assert not self.usepycfiles + pass else: assert lone.__cached__.endswith('.pyc') -class AppTestNoLonePycFile(AppTestNoPycFile): - spaceconfig = { - "objspace.usepycfiles": True, - } - class AppTestMultithreadedImp(object): spaceconfig = dict(usemodules=['thread', 'time']) diff --git a/pypy/module/sys/__init__.py b/pypy/module/sys/__init__.py --- a/pypy/module/sys/__init__.py +++ b/pypy/module/sys/__init__.py @@ -70,7 +70,7 @@ 'meta_path' : 'space.wrap([])', 'path_hooks' : 'space.wrap([])', 'path_importer_cache' : 'space.wrap({})', - 'dont_write_bytecode' : 'space.w_False', + 'dont_write_bytecode' : 'space.wrap(space.config.translation.sandbox)', 'getdefaultencoding' : 'interp_encoding.getdefaultencoding', 'getfilesystemencoding' : 'interp_encoding.getfilesystemencoding', diff --git a/pypy/module/sys/app.py b/pypy/module/sys/app.py --- a/pypy/module/sys/app.py +++ b/pypy/module/sys/app.py @@ -69,11 +69,11 @@ return None copyright_str = """ -Copyright 2003-2014 PyPy development team. +Copyright 2003-2016 PyPy development team. All Rights Reserved. For further information, see -Portions Copyright (c) 2001-2014 Python Software Foundation. +Portions Copyright (c) 2001-2016 Python Software Foundation. All Rights Reserved. 
Portions Copyright (c) 2000 BeOpen.com. diff --git a/pypy/objspace/std/tupleobject.py b/pypy/objspace/std/tupleobject.py --- a/pypy/objspace/std/tupleobject.py +++ b/pypy/objspace/std/tupleobject.py @@ -30,6 +30,11 @@ contains_jmp = jit.JitDriver(greens = ['tp'], reds = 'auto', name = 'tuple.contains') +hash_driver = jit.JitDriver( + name='tuple.hash', + greens=['w_type'], + reds='auto') + class W_AbstractTupleObject(W_Root): __slots__ = () @@ -258,8 +263,14 @@ def length(self): return len(self.wrappeditems) - @jit.look_inside_iff(lambda self, _1: _unroll_condition(self)) def descr_hash(self, space): + if _unroll_condition(self): + return self._descr_hash_unroll(space) + else: + return self._descr_hash_jitdriver(space) + + @jit.unroll_safe + def _descr_hash_unroll(self, space): mult = 1000003 x = 0x345678 z = len(self.wrappeditems) @@ -271,6 +282,20 @@ x += 97531 return space.wrap(intmask(x)) + def _descr_hash_jitdriver(self, space): + mult = 1000003 + x = 0x345678 + z = len(self.wrappeditems) + w_type = space.type(self.wrappeditems[0]) + for w_item in self.wrappeditems: + hash_driver.jit_merge_point(w_type=w_type) + y = space.hash_w(w_item) + x = (x ^ y) * mult + z -= 1 + mult += 82520 + z + z + x += 97531 + return space.wrap(intmask(x)) + def descr_eq(self, space, w_other): if not isinstance(w_other, W_AbstractTupleObject): return space.w_NotImplemented diff --git a/pypy/tool/test/test_tab.py b/pypy/tool/test/test_tab.py --- a/pypy/tool/test/test_tab.py +++ b/pypy/tool/test/test_tab.py @@ -6,7 +6,7 @@ from pypy.conftest import pypydir ROOT = os.path.abspath(os.path.join(pypydir, '..')) -EXCLUDE = {} +EXCLUDE = {'/virt_test/lib/python2.7/site-packages/setuptools'} def test_no_tabs(): diff --git a/rpython/jit/codewriter/jtransform.py b/rpython/jit/codewriter/jtransform.py --- a/rpython/jit/codewriter/jtransform.py +++ b/rpython/jit/codewriter/jtransform.py @@ -2042,6 +2042,11 @@ self.vable_flags[op.args[0]] = op.args[2].value return [] + def 
rewrite_op_jit_enter_portal_frame(self, op): + return [op] + def rewrite_op_jit_leave_portal_frame(self, op): + return [op] + # --------- # ll_math.sqrt_nonneg() diff --git a/rpython/jit/metainterp/blackhole.py b/rpython/jit/metainterp/blackhole.py --- a/rpython/jit/metainterp/blackhole.py +++ b/rpython/jit/metainterp/blackhole.py @@ -944,6 +944,14 @@ pass @arguments("i") + def bhimpl_jit_enter_portal_frame(x): + pass + + @arguments() + def bhimpl_jit_leave_portal_frame(): + pass + + @arguments("i") def bhimpl_int_assert_green(x): pass @arguments("r") diff --git a/rpython/jit/metainterp/pyjitpl.py b/rpython/jit/metainterp/pyjitpl.py --- a/rpython/jit/metainterp/pyjitpl.py +++ b/rpython/jit/metainterp/pyjitpl.py @@ -1358,6 +1358,17 @@ self.metainterp.attach_debug_info(op) @arguments("box") + def opimpl_jit_enter_portal_frame(self, uniqueidbox): + unique_id = uniqueidbox.getint() + jd_no = self.metainterp.jitdriver_sd.mainjitcode.index # fish + self.metainterp.enter_portal_frame(jd_no, unique_id) + + @arguments() + def opimpl_jit_leave_portal_frame(self): + jd_no = self.metainterp.jitdriver_sd.mainjitcode.index # fish + self.metainterp.leave_portal_frame(jd_no) + + @arguments("box") def _opimpl_assert_green(self, box): if not isinstance(box, Const): msg = "assert_green failed at %s:%d" % ( diff --git a/rpython/jit/metainterp/test/test_jitdriver.py b/rpython/jit/metainterp/test/test_jitdriver.py --- a/rpython/jit/metainterp/test/test_jitdriver.py +++ b/rpython/jit/metainterp/test/test_jitdriver.py @@ -213,6 +213,21 @@ if op.getopname() == 'enter_portal_frame': assert op.getarg(0).getint() == 0 assert op.getarg(1).getint() == 1 - + + def test_manual_leave_enter_portal_frame(self): + from rpython.rlib import jit + driver = JitDriver(greens=[], reds='auto', is_recursive=True) + + def f(arg): + i = 0 + while i < 100: + driver.jit_merge_point() + jit.enter_portal_frame(42) + jit.leave_portal_frame() + i += 1 + + self.meta_interp(f, [0]) + 
self.check_resops(enter_portal_frame=1, leave_portal_frame=1) + class TestLLtype(MultipleJitDriversTests, LLJitMixin): pass diff --git a/rpython/rlib/jit.py b/rpython/rlib/jit.py --- a/rpython/rlib/jit.py +++ b/rpython/rlib/jit.py @@ -284,7 +284,7 @@ def loop_unrolling_heuristic(lst, size, cutoff=2): """ In which cases iterating over items of lst can be unrolled """ - return isvirtual(lst) or (isconstant(size) and size <= cutoff) + return size == 0 or isvirtual(lst) or (isconstant(size) and size <= cutoff) class Entry(ExtRegistryEntry): _about_ = hint @@ -1168,6 +1168,24 @@ hop.exception_is_here() return hop.genop('jit_conditional_call', args_v) +def enter_portal_frame(unique_id): + """call this when starting to interpret a function. calling this is not + necessary for almost all interpreters. The only exception is stackless + interpreters where the portal never calls itself. + """ + from rpython.rtyper.lltypesystem import lltype + from rpython.rtyper.lltypesystem.lloperation import llop + llop.jit_enter_portal_frame(lltype.Void, unique_id) + +def leave_portal_frame(): + """call this after the end of executing a function. calling this is not + necessary for almost all interpreters. The only exception is stackless + interpreters where the portal never calls itself. 
+ """ + from rpython.rtyper.lltypesystem import lltype + from rpython.rtyper.lltypesystem.lloperation import llop + llop.jit_leave_portal_frame(lltype.Void) + class Counters(object): counters=""" TRACING diff --git a/rpython/rlib/test/test_jit.py b/rpython/rlib/test/test_jit.py --- a/rpython/rlib/test/test_jit.py +++ b/rpython/rlib/test/test_jit.py @@ -4,7 +4,8 @@ from rpython.annotator.model import UnionError from rpython.rlib.jit import (hint, we_are_jitted, JitDriver, elidable_promote, JitHintError, oopspec, isconstant, conditional_call, - elidable, unroll_safe, dont_look_inside) + elidable, unroll_safe, dont_look_inside, + enter_portal_frame, leave_portal_frame) from rpython.rlib.rarithmetic import r_uint from rpython.rtyper.test.tool import BaseRtypingTest from rpython.rtyper.lltypesystem import lltype @@ -300,3 +301,11 @@ mix = MixLevelHelperAnnotator(t.rtyper) mix.getgraph(later, [annmodel.s_Bool], annmodel.s_None) mix.finish() + + def test_enter_leave_portal_frame(self): + from rpython.translator.interactive import Translation + def g(): + enter_portal_frame(1) + leave_portal_frame() + t = Translation(g, []) + t.compile_c() # does not crash diff --git a/rpython/rtyper/llinterp.py b/rpython/rtyper/llinterp.py --- a/rpython/rtyper/llinterp.py +++ b/rpython/rtyper/llinterp.py @@ -925,6 +925,21 @@ def op_gc_gcflag_extra(self, subopnum, *args): return self.heap.gcflag_extra(subopnum, *args) + def op_gc_rawrefcount_init(self, *args): + raise NotImplementedError("gc_rawrefcount_init") + + def op_gc_rawrefcount_to_obj(self, *args): + raise NotImplementedError("gc_rawrefcount_to_obj") + + def op_gc_rawrefcount_from_obj(self, *args): + raise NotImplementedError("gc_rawrefcount_from_obj") + + def op_gc_rawrefcount_create_link_pyobj(self, *args): + raise NotImplementedError("gc_rawrefcount_create_link_pyobj") + + def op_gc_rawrefcount_create_link_pypy(self, *args): + raise NotImplementedError("gc_rawrefcount_create_link_pypy") + def op_do_malloc_fixedsize(self): raise 
NotImplementedError("do_malloc_fixedsize") def op_do_malloc_fixedsize_clear(self): diff --git a/rpython/rtyper/lltypesystem/lloperation.py b/rpython/rtyper/lltypesystem/lloperation.py --- a/rpython/rtyper/lltypesystem/lloperation.py +++ b/rpython/rtyper/lltypesystem/lloperation.py @@ -453,6 +453,8 @@ 'jit_record_exact_class' : LLOp(canrun=True), 'jit_ffi_save_result': LLOp(canrun=True), 'jit_conditional_call': LLOp(), + 'jit_enter_portal_frame': LLOp(canrun=True), + 'jit_leave_portal_frame': LLOp(canrun=True), 'get_exception_addr': LLOp(), 'get_exc_value_addr': LLOp(), 'do_malloc_fixedsize':LLOp(canmallocgc=True), diff --git a/rpython/rtyper/lltypesystem/opimpl.py b/rpython/rtyper/lltypesystem/opimpl.py --- a/rpython/rtyper/lltypesystem/opimpl.py +++ b/rpython/rtyper/lltypesystem/opimpl.py @@ -624,6 +624,12 @@ def op_jit_ffi_save_result(*args): pass +def op_jit_enter_portal_frame(x): + pass + +def op_jit_leave_portal_frame(): + pass + def op_get_group_member(TYPE, grpptr, memberoffset): from rpython.rtyper.lltypesystem import llgroup assert isinstance(memberoffset, llgroup.GroupMemberOffset) diff --git a/rpython/translator/c/funcgen.py b/rpython/translator/c/funcgen.py --- a/rpython/translator/c/funcgen.py +++ b/rpython/translator/c/funcgen.py @@ -842,6 +842,12 @@ def OP_JIT_FFI_SAVE_RESULT(self, op): return '/* JIT_FFI_SAVE_RESULT %s */' % op + def OP_JIT_ENTER_PORTAL_FRAME(self, op): + return '/* JIT_ENTER_PORTAL_FRAME %s */' % op + + def OP_JIT_LEAVE_PORTAL_FRAME(self, op): + return '/* JIT_LEAVE_PORTAL_FRAME %s */' % op + def OP_GET_GROUP_MEMBER(self, op): typename = self.db.gettype(op.result.concretetype) return '%s = (%s)_OP_GET_GROUP_MEMBER(%s, %s);' % ( From pypy.commits at gmail.com Thu Feb 25 11:15:46 2016 From: pypy.commits at gmail.com (mjacob) Date: Thu, 25 Feb 2016 08:15:46 -0800 (PST) Subject: [pypy-commit] pypy py3.3: hg merge py3k Message-ID: <56cf28b2.6bb8c20a.36ad2.ffffd832@mx.google.com> Author: Manuel Jacob Branch: py3.3 Changeset: 
r82516:868d62ac2a40 Date: 2016-02-25 17:15 +0100 http://bitbucket.org/pypy/pypy/changeset/868d62ac2a40/ Log: hg merge py3k diff --git a/LICENSE b/LICENSE --- a/LICENSE +++ b/LICENSE @@ -41,29 +41,29 @@ Amaury Forgeot d'Arc Antonio Cuni Samuele Pedroni + Matti Picus Alex Gaynor Brian Kearns - Matti Picus Philip Jenvey Michael Hudson + Ronan Lamy David Schneider + Manuel Jacob Holger Krekel Christian Tismer Hakan Ardo - Manuel Jacob - Ronan Lamy Benjamin Peterson + Richard Plangger Anders Chrigstrom Eric van Riet Paap Wim Lavrijsen - Richard Plangger Richard Emslie Alexander Schremmer Dan Villiom Podlaski Christiansen + Remi Meier Lukas Diekmann Sven Hager Anders Lehmann - Remi Meier Aurelien Campeas Niklaus Haldimann Camillo Bruni @@ -72,8 +72,8 @@ Romain Guillebert Leonardo Santagada Seo Sanghyeon + Ronny Pfannschmidt Justin Peel - Ronny Pfannschmidt David Edelsohn Anders Hammarquist Jakub Gustak @@ -95,6 +95,7 @@ Tyler Wade Michael Foord Stephan Diehl + Vincent Legoll Stefan Schwarzer Valentino Volonghi Tomek Meka @@ -105,9 +106,9 @@ Jean-Paul Calderone Timo Paulssen Squeaky + Marius Gedminas Alexandre Fayolle Simon Burton - Marius Gedminas Martin Matusiak Konstantin Lopuhin Wenzhu Man @@ -116,16 +117,20 @@ Ivan Sichmann Freitas Greg Price Dario Bertini + Stefano Rivera Mark Pearse Simon Cross Andreas Stührk - Stefano Rivera + Edd Barrett Jean-Philippe St. Pierre Guido van Rossum Pavel Vinogradov + Jeremy Thurgood Paweł Piotr Przeradowski + Spenser Bauman Paul deGrandis Ilya Osadchiy + marky1991 Tobias Oberstein Adrian Kuhn Boris Feigin @@ -134,14 +139,12 @@ Georg Brandl Bert Freudenberg Stian Andreassen - Edd Barrett + Tobias Pape Wanja Saatkamp Gerald Klix Mike Blume - Tobias Pape Oscar Nierstrasz Stefan H. 
Muller - Jeremy Thurgood Rami Chowdhury Eugene Oden Henry Mason @@ -153,6 +156,8 @@ Lukas Renggli Guenter Jantzen Ned Batchelder + Tim Felgentreff + Anton Gulenko Amit Regmi Ben Young Nicolas Chauvat @@ -162,12 +167,12 @@ Nicholas Riley Jason Chu Igor Trindade Oliveira - Tim Felgentreff + Yichao Yu Rocco Moretti Gintautas Miliauskas Michael Twomey Lucian Branescu Mihaila - Yichao Yu + Devin Jeanpierre Gabriel Lavoie Olivier Dormond Jared Grubb @@ -191,33 +196,33 @@ Stanislaw Halik Mikael Schönenberg Berkin Ilbeyi - Elmo M?ntynen + Elmo Mäntynen + Faye Zhao Jonathan David Riehl Anders Qvist Corbin Simpson Chirag Jadwani Beatrice During Alex Perry - Vincent Legoll + Vaibhav Sood Alan McIntyre - Spenser Bauman + William Leslie Alexander Sedov Attila Gobi + Jasper.Schulz Christopher Pope - Devin Jeanpierre - Vaibhav Sood Christian Tismer Marc Abramowitz Dan Stromberg Arjun Naik Valentina Mukhamedzhanova Stefano Parmesan + Mark Young Alexis Daboville Jens-Uwe Mager Carl Meyer Karl Ramm Pieter Zieschang - Anton Gulenko Gabriel Lukas Vacek Andrew Dalke @@ -225,6 +230,7 @@ Jakub Stasiak Nathan Taylor Vladimir Kryachko + Omer Katz Jacek Generowicz Alejandro J. Cura Jacob Oscarson @@ -239,6 +245,7 @@ Lars Wassermann Philipp Rustemeuer Henrik Vendelbo + Richard Lancaster Dan Buch Miguel de Val Borro Artur Lisiecki @@ -250,18 +257,18 @@ Tomo Cocoa Kim Jin Su Toni Mattis + Amber Brown Lucas Stadler Julian Berman Markus Holtermann roberto at goyle Yury V. 
Zaytsev Anna Katrina Dominguez - William Leslie Bobby Impollonia - Faye Zhao timo at eistee.fritz.box Andrew Thompson Yusei Tahara + Aaron Tubbs Ben Darnell Roberto De Ioris Juan Francisco Cantero Hurtado @@ -273,6 +280,7 @@ Christopher Armstrong Michael Hudson-Doyle Anders Sigfridsson + Nikolay Zinov Yasir Suhail Jason Michalski rafalgalczynski at gmail.com @@ -282,6 +290,7 @@ Gustavo Niemeyer Stephan Busemann Rafał Gałczyński + Matt Bogosian Christian Muirhead Berker Peksag James Lan @@ -316,9 +325,9 @@ Stefan Marr jiaaro Mads Kiilerich - Richard Lancaster opassembler.py Antony Lee + Jason Madden Yaroslav Fedevych Jim Hunziker Markus Unterwaditzer @@ -327,6 +336,7 @@ squeaky Zearin soareschen + Jonas Pfannschmidt Kurt Griffiths Mike Bayer Matthew Miller diff --git a/pypy/config/pypyoption.py b/pypy/config/pypyoption.py --- a/pypy/config/pypyoption.py +++ b/pypy/config/pypyoption.py @@ -174,9 +174,6 @@ cmdline="--translationmodules", suggests=[("objspace.allworkingmodules", False)]), - BoolOption("usepycfiles", "Write and read pyc files when importing", - default=True), - StrOption("soabi", "Tag to differentiate extension modules built for different Python interpreters", cmdline="--soabi", diff --git a/pypy/doc/how-to-release.rst b/pypy/doc/how-to-release.rst --- a/pypy/doc/how-to-release.rst +++ b/pypy/doc/how-to-release.rst @@ -1,5 +1,20 @@ -Making a PyPy Release -===================== +The PyPy Release Process +======================== + +Release Policy +++++++++++++++ + +We try to create a stable release a few times a year. These are released on +a branch named like release-2.x or release-4.x, and each release is tagged, +for instance release-4.0.1. + +After release, inevitably there are bug fixes. It is the responsibility of +the commiter who fixes a bug to make sure this fix is on the release branch, +so that we can then create a tagged bug-fix release, which will hopefully +happen more often than stable releases. 
+ +How to Create a PyPy Release +++++++++++++++++++++++++++++ Overview -------- diff --git a/pypy/doc/tool/makecontributor.py b/pypy/doc/tool/makecontributor.py --- a/pypy/doc/tool/makecontributor.py +++ b/pypy/doc/tool/makecontributor.py @@ -72,6 +72,7 @@ 'Anton Gulenko':['anton gulenko', 'anton_gulenko'], 'Richard Lancaster':['richardlancaster'], 'William Leslie':['William ML Leslie'], + 'Spenser Bauman':['Spenser Andrew Bauman'], } alias_map = {} diff --git a/pypy/goal/targetpypystandalone.py b/pypy/goal/targetpypystandalone.py --- a/pypy/goal/targetpypystandalone.py +++ b/pypy/goal/targetpypystandalone.py @@ -282,9 +282,6 @@ elif config.objspace.usemodules.pypyjit: config.translation.jit = True - if config.translation.sandbox: - config.objspace.usepycfiles = False - config.translating = True import translate diff --git a/pypy/module/imp/test/test_import.py b/pypy/module/imp/test/test_import.py --- a/pypy/module/imp/test/test_import.py +++ b/pypy/module/imp/test/test_import.py @@ -123,6 +123,10 @@ 'a=5\nb=6\rc="""hello\r\nworld"""\r', mode='wb') p.join('mod.py').write( 'a=15\nb=16\rc="""foo\r\nbar"""\r', mode='wb') + setuppkg("test_bytecode", + a = '', + b = '', + c = '') p = setuppkg("encoded", # actually a line 2, setuppkg() sets up a line1 line2 = "# encoding: iso-8859-1\n", @@ -183,6 +187,9 @@ """) def _teardown(space, w_saved_modules): + p = udir.join('impsubdir') + if p.check(): + p.remove() space.appexec([w_saved_modules], """ (path_and_modules): saved_path, saved_modules = path_and_modules @@ -1232,31 +1239,50 @@ sys.meta_path.pop() -class AppTestNoPycFile(object): +class AppTestWriteBytecode(object): spaceconfig = { - "objspace.usepycfiles": False, + "translation.sandbox": False } + def setup_class(cls): - usepycfiles = cls.spaceconfig['objspace.usepycfiles'] - cls.w_usepycfiles = cls.space.wrap(usepycfiles) cls.saved_modules = _setup(cls) + sandbox = cls.spaceconfig['translation.sandbox'] + cls.w_sandbox = cls.space.wrap(sandbox) def 
teardown_class(cls): _teardown(cls.space, cls.saved_modules) + cls.space.appexec([], """ + (): + import sys + sys.dont_write_bytecode = False + """) - def test_import_possibly_from_pyc(self): - from compiled import x - assert x.__file__.endswith('.py') - try: - from compiled import lone - except ImportError: - assert not self.usepycfiles - else: - assert lone.__cached__.endswith('.pyc') + def test_default(self): + import os.path + from test_bytecode import a + assert a.__file__.endswith('a.py') + assert os.path.exists(a.__cached__) == (not self.sandbox) -class AppTestNoLonePycFile(AppTestNoPycFile): + def test_write_bytecode(self): + import os.path + import sys + sys.dont_write_bytecode = False + from test_bytecode import b + assert b.__file__.endswith('b.py') + assert os.path.exists(b.__cached__) + + def test_dont_write_bytecode(self): + import os.path + import sys + sys.dont_write_bytecode = True + from test_bytecode import c + assert c.__file__.endswith('c.py') + assert not os.path.exists(c.__cached__) + + +class AppTestWriteBytecodeSandbox(AppTestWriteBytecode): spaceconfig = { - "objspace.usepycfiles": True, + "translation.sandbox": True } diff --git a/pypy/module/sys/__init__.py b/pypy/module/sys/__init__.py --- a/pypy/module/sys/__init__.py +++ b/pypy/module/sys/__init__.py @@ -69,7 +69,7 @@ 'meta_path' : 'space.wrap([])', 'path_hooks' : 'space.wrap([])', 'path_importer_cache' : 'space.wrap({})', - 'dont_write_bytecode' : 'space.w_False', + 'dont_write_bytecode' : 'space.wrap(space.config.translation.sandbox)', 'getdefaultencoding' : 'interp_encoding.getdefaultencoding', 'getfilesystemencoding' : 'interp_encoding.getfilesystemencoding', diff --git a/pypy/module/sys/app.py b/pypy/module/sys/app.py --- a/pypy/module/sys/app.py +++ b/pypy/module/sys/app.py @@ -70,11 +70,11 @@ return None copyright_str = """ -Copyright 2003-2014 PyPy development team. +Copyright 2003-2016 PyPy development team. All Rights Reserved. 
For further information, see -Portions Copyright (c) 2001-2014 Python Software Foundation. +Portions Copyright (c) 2001-2016 Python Software Foundation. All Rights Reserved. Portions Copyright (c) 2000 BeOpen.com. diff --git a/pypy/objspace/std/tupleobject.py b/pypy/objspace/std/tupleobject.py --- a/pypy/objspace/std/tupleobject.py +++ b/pypy/objspace/std/tupleobject.py @@ -30,6 +30,11 @@ contains_jmp = jit.JitDriver(greens = ['tp'], reds = 'auto', name = 'tuple.contains') +hash_driver = jit.JitDriver( + name='tuple.hash', + greens=['w_type'], + reds='auto') + class W_AbstractTupleObject(W_Root): __slots__ = () @@ -258,8 +263,14 @@ def length(self): return len(self.wrappeditems) - @jit.look_inside_iff(lambda self, _1: _unroll_condition(self)) def descr_hash(self, space): + if _unroll_condition(self): + return self._descr_hash_unroll(space) + else: + return self._descr_hash_jitdriver(space) + + @jit.unroll_safe + def _descr_hash_unroll(self, space): mult = 1000003 x = 0x345678 z = len(self.wrappeditems) @@ -271,6 +282,20 @@ x += 97531 return space.wrap(intmask(x)) + def _descr_hash_jitdriver(self, space): + mult = 1000003 + x = 0x345678 + z = len(self.wrappeditems) + w_type = space.type(self.wrappeditems[0]) + for w_item in self.wrappeditems: + hash_driver.jit_merge_point(w_type=w_type) + y = space.hash_w(w_item) + x = (x ^ y) * mult + z -= 1 + mult += 82520 + z + z + x += 97531 + return space.wrap(intmask(x)) + def descr_eq(self, space, w_other): if not isinstance(w_other, W_AbstractTupleObject): return space.w_NotImplemented diff --git a/pypy/tool/test/test_tab.py b/pypy/tool/test/test_tab.py --- a/pypy/tool/test/test_tab.py +++ b/pypy/tool/test/test_tab.py @@ -6,7 +6,7 @@ from pypy.conftest import pypydir ROOT = os.path.abspath(os.path.join(pypydir, '..')) -EXCLUDE = {} +EXCLUDE = {'/virt_test/lib/python2.7/site-packages/setuptools'} def test_no_tabs(): diff --git a/rpython/jit/codewriter/jtransform.py b/rpython/jit/codewriter/jtransform.py --- 
a/rpython/jit/codewriter/jtransform.py +++ b/rpython/jit/codewriter/jtransform.py @@ -2042,6 +2042,11 @@ self.vable_flags[op.args[0]] = op.args[2].value return [] + def rewrite_op_jit_enter_portal_frame(self, op): + return [op] + def rewrite_op_jit_leave_portal_frame(self, op): + return [op] + # --------- # ll_math.sqrt_nonneg() diff --git a/rpython/jit/metainterp/blackhole.py b/rpython/jit/metainterp/blackhole.py --- a/rpython/jit/metainterp/blackhole.py +++ b/rpython/jit/metainterp/blackhole.py @@ -944,6 +944,14 @@ pass @arguments("i") + def bhimpl_jit_enter_portal_frame(x): + pass + + @arguments() + def bhimpl_jit_leave_portal_frame(): + pass + + @arguments("i") def bhimpl_int_assert_green(x): pass @arguments("r") diff --git a/rpython/jit/metainterp/pyjitpl.py b/rpython/jit/metainterp/pyjitpl.py --- a/rpython/jit/metainterp/pyjitpl.py +++ b/rpython/jit/metainterp/pyjitpl.py @@ -1358,6 +1358,17 @@ self.metainterp.attach_debug_info(op) @arguments("box") + def opimpl_jit_enter_portal_frame(self, uniqueidbox): + unique_id = uniqueidbox.getint() + jd_no = self.metainterp.jitdriver_sd.mainjitcode.index # fish + self.metainterp.enter_portal_frame(jd_no, unique_id) + + @arguments() + def opimpl_jit_leave_portal_frame(self): + jd_no = self.metainterp.jitdriver_sd.mainjitcode.index # fish + self.metainterp.leave_portal_frame(jd_no) + + @arguments("box") def _opimpl_assert_green(self, box): if not isinstance(box, Const): msg = "assert_green failed at %s:%d" % ( diff --git a/rpython/jit/metainterp/test/test_jitdriver.py b/rpython/jit/metainterp/test/test_jitdriver.py --- a/rpython/jit/metainterp/test/test_jitdriver.py +++ b/rpython/jit/metainterp/test/test_jitdriver.py @@ -213,6 +213,21 @@ if op.getopname() == 'enter_portal_frame': assert op.getarg(0).getint() == 0 assert op.getarg(1).getint() == 1 - + + def test_manual_leave_enter_portal_frame(self): + from rpython.rlib import jit + driver = JitDriver(greens=[], reds='auto', is_recursive=True) + + def f(arg): + i = 0 + 
while i < 100: + driver.jit_merge_point() + jit.enter_portal_frame(42) + jit.leave_portal_frame() + i += 1 + + self.meta_interp(f, [0]) + self.check_resops(enter_portal_frame=1, leave_portal_frame=1) + class TestLLtype(MultipleJitDriversTests, LLJitMixin): pass diff --git a/rpython/rlib/jit.py b/rpython/rlib/jit.py --- a/rpython/rlib/jit.py +++ b/rpython/rlib/jit.py @@ -284,7 +284,7 @@ def loop_unrolling_heuristic(lst, size, cutoff=2): """ In which cases iterating over items of lst can be unrolled """ - return isvirtual(lst) or (isconstant(size) and size <= cutoff) + return size == 0 or isvirtual(lst) or (isconstant(size) and size <= cutoff) class Entry(ExtRegistryEntry): _about_ = hint @@ -1168,6 +1168,24 @@ hop.exception_is_here() return hop.genop('jit_conditional_call', args_v) +def enter_portal_frame(unique_id): + """call this when starting to interpret a function. calling this is not + necessary for almost all interpreters. The only exception is stackless + interpreters where the portal never calls itself. + """ + from rpython.rtyper.lltypesystem import lltype + from rpython.rtyper.lltypesystem.lloperation import llop + llop.jit_enter_portal_frame(lltype.Void, unique_id) + +def leave_portal_frame(): + """call this after the end of executing a function. calling this is not + necessary for almost all interpreters. The only exception is stackless + interpreters where the portal never calls itself. 
+ """ + from rpython.rtyper.lltypesystem import lltype + from rpython.rtyper.lltypesystem.lloperation import llop + llop.jit_leave_portal_frame(lltype.Void) + class Counters(object): counters=""" TRACING diff --git a/rpython/rlib/test/test_jit.py b/rpython/rlib/test/test_jit.py --- a/rpython/rlib/test/test_jit.py +++ b/rpython/rlib/test/test_jit.py @@ -4,7 +4,8 @@ from rpython.annotator.model import UnionError from rpython.rlib.jit import (hint, we_are_jitted, JitDriver, elidable_promote, JitHintError, oopspec, isconstant, conditional_call, - elidable, unroll_safe, dont_look_inside) + elidable, unroll_safe, dont_look_inside, + enter_portal_frame, leave_portal_frame) from rpython.rlib.rarithmetic import r_uint from rpython.rtyper.test.tool import BaseRtypingTest from rpython.rtyper.lltypesystem import lltype @@ -300,3 +301,11 @@ mix = MixLevelHelperAnnotator(t.rtyper) mix.getgraph(later, [annmodel.s_Bool], annmodel.s_None) mix.finish() + + def test_enter_leave_portal_frame(self): + from rpython.translator.interactive import Translation + def g(): + enter_portal_frame(1) + leave_portal_frame() + t = Translation(g, []) + t.compile_c() # does not crash diff --git a/rpython/rtyper/llinterp.py b/rpython/rtyper/llinterp.py --- a/rpython/rtyper/llinterp.py +++ b/rpython/rtyper/llinterp.py @@ -925,6 +925,21 @@ def op_gc_gcflag_extra(self, subopnum, *args): return self.heap.gcflag_extra(subopnum, *args) + def op_gc_rawrefcount_init(self, *args): + raise NotImplementedError("gc_rawrefcount_init") + + def op_gc_rawrefcount_to_obj(self, *args): + raise NotImplementedError("gc_rawrefcount_to_obj") + + def op_gc_rawrefcount_from_obj(self, *args): + raise NotImplementedError("gc_rawrefcount_from_obj") + + def op_gc_rawrefcount_create_link_pyobj(self, *args): + raise NotImplementedError("gc_rawrefcount_create_link_pyobj") + + def op_gc_rawrefcount_create_link_pypy(self, *args): + raise NotImplementedError("gc_rawrefcount_create_link_pypy") + def op_do_malloc_fixedsize(self): raise 
NotImplementedError("do_malloc_fixedsize") def op_do_malloc_fixedsize_clear(self): diff --git a/rpython/rtyper/lltypesystem/lloperation.py b/rpython/rtyper/lltypesystem/lloperation.py --- a/rpython/rtyper/lltypesystem/lloperation.py +++ b/rpython/rtyper/lltypesystem/lloperation.py @@ -453,6 +453,8 @@ 'jit_record_exact_class' : LLOp(canrun=True), 'jit_ffi_save_result': LLOp(canrun=True), 'jit_conditional_call': LLOp(), + 'jit_enter_portal_frame': LLOp(canrun=True), + 'jit_leave_portal_frame': LLOp(canrun=True), 'get_exception_addr': LLOp(), 'get_exc_value_addr': LLOp(), 'do_malloc_fixedsize':LLOp(canmallocgc=True), diff --git a/rpython/rtyper/lltypesystem/opimpl.py b/rpython/rtyper/lltypesystem/opimpl.py --- a/rpython/rtyper/lltypesystem/opimpl.py +++ b/rpython/rtyper/lltypesystem/opimpl.py @@ -624,6 +624,12 @@ def op_jit_ffi_save_result(*args): pass +def op_jit_enter_portal_frame(x): + pass + +def op_jit_leave_portal_frame(): + pass + def op_get_group_member(TYPE, grpptr, memberoffset): from rpython.rtyper.lltypesystem import llgroup assert isinstance(memberoffset, llgroup.GroupMemberOffset) diff --git a/rpython/translator/c/funcgen.py b/rpython/translator/c/funcgen.py --- a/rpython/translator/c/funcgen.py +++ b/rpython/translator/c/funcgen.py @@ -842,6 +842,12 @@ def OP_JIT_FFI_SAVE_RESULT(self, op): return '/* JIT_FFI_SAVE_RESULT %s */' % op + def OP_JIT_ENTER_PORTAL_FRAME(self, op): + return '/* JIT_ENTER_PORTAL_FRAME %s */' % op + + def OP_JIT_LEAVE_PORTAL_FRAME(self, op): + return '/* JIT_LEAVE_PORTAL_FRAME %s */' % op + def OP_GET_GROUP_MEMBER(self, op): typename = self.db.gettype(op.result.concretetype) return '%s = (%s)_OP_GET_GROUP_MEMBER(%s, %s);' % ( From pypy.commits at gmail.com Thu Feb 25 11:37:37 2016 From: pypy.commits at gmail.com (mjacob) Date: Thu, 25 Feb 2016 08:37:37 -0800 (PST) Subject: [pypy-commit] pypy py3.3: Intern keyword arguments. 
Message-ID: <56cf2dd1.c3e01c0a.a48e8.4bcb@mx.google.com> Author: Manuel Jacob Branch: py3.3 Changeset: r82517:34c1abf3cd35 Date: 2016-02-25 17:36 +0100 http://bitbucket.org/pypy/pypy/changeset/34c1abf3cd35/ Log: Intern keyword arguments. diff --git a/pypy/objspace/std/kwargsdict.py b/pypy/objspace/std/kwargsdict.py --- a/pypy/objspace/std/kwargsdict.py +++ b/pypy/objspace/std/kwargsdict.py @@ -11,7 +11,7 @@ def _wrapkey(space, key): - return space.wrap(key.decode('utf-8')) + return space.new_interned_str(key) class EmptyKwargsDictStrategy(EmptyDictStrategy): diff --git a/pypy/objspace/std/test/test_dictmultiobject.py b/pypy/objspace/std/test/test_dictmultiobject.py --- a/pypy/objspace/std/test/test_dictmultiobject.py +++ b/pypy/objspace/std/test/test_dictmultiobject.py @@ -1134,6 +1134,9 @@ def wrapbytes(self, obj): return obj + def new_interned_str(self, s): + return s.decode('utf-8') + def isinstance_w(self, obj, klass): return isinstance(obj, klass) isinstance = isinstance_w From pypy.commits at gmail.com Thu Feb 25 11:44:33 2016 From: pypy.commits at gmail.com (Raemi) Date: Thu, 25 Feb 2016 08:44:33 -0800 (PST) Subject: [pypy-commit] pypy fix-longevity: (plan_rich, remi) possibly silghtly improve corner case Message-ID: <56cf2f71.2a6ec20a.d1a92.69fa@mx.google.com> Author: Remi Meier Branch: fix-longevity Changeset: r82518:ff9c442e6dc8 Date: 2016-02-25 17:43 +0100 http://bitbucket.org/pypy/pypy/changeset/ff9c442e6dc8/ Log: (plan_rich,remi) possibly silghtly improve corner case diff --git a/rpython/jit/backend/llsupport/regalloc.py b/rpython/jit/backend/llsupport/regalloc.py --- a/rpython/jit/backend/llsupport/regalloc.py +++ b/rpython/jit/backend/llsupport/regalloc.py @@ -667,11 +667,22 @@ # we need to find a new place for variable v and # store result in the same place - loc = self.reg_bindings[v] - # only spill variable is allowed to reassign a new register to a live range - result_loc = self.force_allocate_reg(result_v, forbidden_vars=forbidden_vars) - 
self.assembler.regalloc_mov(loc, result_loc) - loc = result_loc + if (not self.has_free_registers() + and self._pick_variable_to_spill(None, forbidden_vars) is v): + # if we would be chosen for spilling, we give up our register + # for result_v (and move us away onto the frame) instead of + # forcing a spill of some other variable. + loc = self.reg_bindings[v] + del self.reg_bindings[v] + if self.frame_manager.get(v) is None: + self._move_variable_away(v, loc) + self.reg_bindings[result_v] = loc + else: + loc = self.reg_bindings[v] + # only spill variable is allowed to reassign a new register to a live range + result_loc = self.force_allocate_reg(result_v, forbidden_vars=forbidden_vars) + self.assembler.regalloc_mov(loc, result_loc) + loc = result_loc #del self.reg_bindings[v] diff --git a/rpython/jit/backend/x86/regalloc.py b/rpython/jit/backend/x86/regalloc.py --- a/rpython/jit/backend/x86/regalloc.py +++ b/rpython/jit/backend/x86/regalloc.py @@ -62,14 +62,22 @@ save_around_call_regs = abi_param_regs + [eax, edx, r10] def get_abi_param_register(self, i): - assert i >= 0 and i < len(self.abi_param_regs) - return self.abi_param_regs[i] + if not IS_X86_32 and 0 <= i < len(self.abi_param_regs): + return self.abi_param_regs[i] + return None + class X86XMMRegisterManager(RegisterManager): box_types = [FLOAT, INT] # yes INT! 
all_regs = [xmm0, xmm1, xmm2, xmm3, xmm4, xmm5, xmm6, xmm7] # we never need lower byte I hope save_around_call_regs = all_regs + abi_param_regs = all_regs + + def get_abi_param_register(self, i): + if not IS_X86_32 and 0 <= i < len(self.abi_param_regs): + return self.abi_param_regs[i] + return None def convert_to_imm(self, c): adr = self.assembler.datablockwrapper.malloc_aligned(8, 8) From pypy.commits at gmail.com Thu Feb 25 11:54:33 2016 From: pypy.commits at gmail.com (fijal) Date: Thu, 25 Feb 2016 08:54:33 -0800 (PST) Subject: [pypy-commit] pypy default: fix the test Message-ID: <56cf31c9.8916c20a.5e2dd.ffffe557@mx.google.com> Author: fijal Branch: Changeset: r82519:a53b593b3b95 Date: 2016-02-25 17:53 +0100 http://bitbucket.org/pypy/pypy/changeset/a53b593b3b95/ Log: fix the test diff --git a/rpython/jit/metainterp/test/test_jitdriver.py b/rpython/jit/metainterp/test/test_jitdriver.py --- a/rpython/jit/metainterp/test/test_jitdriver.py +++ b/rpython/jit/metainterp/test/test_jitdriver.py @@ -227,7 +227,7 @@ i += 1 self.meta_interp(f, [0]) - self.check_resops(enter_portal_frame=1, leave_portal_frame=1) + self.check_simple_loop(enter_portal_frame=1, leave_portal_frame=1) class TestLLtype(MultipleJitDriversTests, LLJitMixin): pass From pypy.commits at gmail.com Thu Feb 25 11:56:10 2016 From: pypy.commits at gmail.com (fijal) Date: Thu, 25 Feb 2016 08:56:10 -0800 (PST) Subject: [pypy-commit] pypy default: fix whatsnew Message-ID: <56cf322a.84b61c0a.21fec.5194@mx.google.com> Author: fijal Branch: Changeset: r82520:a3552a8e8c2c Date: 2016-02-25 17:55 +0100 http://bitbucket.org/pypy/pypy/changeset/a3552a8e8c2c/ Log: fix whatsnew diff --git a/pypy/doc/whatsnew-head.rst b/pypy/doc/whatsnew-head.rst --- a/pypy/doc/whatsnew-head.rst +++ b/pypy/doc/whatsnew-head.rst @@ -170,4 +170,17 @@ When creating instances and adding attributes in several different orders depending on some condition, the JIT would create too much code. This is now -fixed. 
\ No newline at end of file +fixed. + +.. branch: cpyext-gc-support-2 + +Improve CPython C API support, which means lxml now runs unmodified +(after removing pypy hacks, pending pull request) + +.. branch: look-inside-tuple-hash + +Look inside tuple hash, improving mdp benchmark + +.. branch: vlen-resume + +Compress resume data, saving 10-20% of memory consumed by the JIT \ No newline at end of file From pypy.commits at gmail.com Thu Feb 25 11:59:30 2016 From: pypy.commits at gmail.com (plan_rich) Date: Thu, 25 Feb 2016 08:59:30 -0800 (PST) Subject: [pypy-commit] pypy fix-longevity: (remi, plan_rich) merged default Message-ID: <56cf32f2.05e41c0a.cd2ef.504c@mx.google.com> Author: Richard Plangger Branch: fix-longevity Changeset: r82521:3dce5a6242ac Date: 2016-02-25 17:58 +0100 http://bitbucket.org/pypy/pypy/changeset/3dce5a6242ac/ Log: (remi, plan_rich) merged default diff too long, truncating to 2000 out of 2557 lines diff --git a/LICENSE b/LICENSE --- a/LICENSE +++ b/LICENSE @@ -41,29 +41,29 @@ Amaury Forgeot d'Arc Antonio Cuni Samuele Pedroni + Matti Picus Alex Gaynor Brian Kearns - Matti Picus Philip Jenvey Michael Hudson + Ronan Lamy David Schneider + Manuel Jacob Holger Krekel Christian Tismer Hakan Ardo - Manuel Jacob - Ronan Lamy Benjamin Peterson + Richard Plangger Anders Chrigstrom Eric van Riet Paap Wim Lavrijsen - Richard Plangger Richard Emslie Alexander Schremmer Dan Villiom Podlaski Christiansen + Remi Meier Lukas Diekmann Sven Hager Anders Lehmann - Remi Meier Aurelien Campeas Niklaus Haldimann Camillo Bruni @@ -72,8 +72,8 @@ Romain Guillebert Leonardo Santagada Seo Sanghyeon + Ronny Pfannschmidt Justin Peel - Ronny Pfannschmidt David Edelsohn Anders Hammarquist Jakub Gustak @@ -95,6 +95,7 @@ Tyler Wade Michael Foord Stephan Diehl + Vincent Legoll Stefan Schwarzer Valentino Volonghi Tomek Meka @@ -105,9 +106,9 @@ Jean-Paul Calderone Timo Paulssen Squeaky + Marius Gedminas Alexandre Fayolle Simon Burton - Marius Gedminas Martin Matusiak Konstantin 
Lopuhin Wenzhu Man @@ -116,16 +117,20 @@ Ivan Sichmann Freitas Greg Price Dario Bertini + Stefano Rivera Mark Pearse Simon Cross Andreas Stührk - Stefano Rivera + Edd Barrett Jean-Philippe St. Pierre Guido van Rossum Pavel Vinogradov + Jeremy Thurgood Paweł Piotr Przeradowski + Spenser Bauman Paul deGrandis Ilya Osadchiy + marky1991 Tobias Oberstein Adrian Kuhn Boris Feigin @@ -134,14 +139,12 @@ Georg Brandl Bert Freudenberg Stian Andreassen - Edd Barrett + Tobias Pape Wanja Saatkamp Gerald Klix Mike Blume - Tobias Pape Oscar Nierstrasz Stefan H. Muller - Jeremy Thurgood Rami Chowdhury Eugene Oden Henry Mason @@ -153,6 +156,8 @@ Lukas Renggli Guenter Jantzen Ned Batchelder + Tim Felgentreff + Anton Gulenko Amit Regmi Ben Young Nicolas Chauvat @@ -162,12 +167,12 @@ Nicholas Riley Jason Chu Igor Trindade Oliveira - Tim Felgentreff + Yichao Yu Rocco Moretti Gintautas Miliauskas Michael Twomey Lucian Branescu Mihaila - Yichao Yu + Devin Jeanpierre Gabriel Lavoie Olivier Dormond Jared Grubb @@ -191,33 +196,33 @@ Stanislaw Halik Mikael Schönenberg Berkin Ilbeyi - Elmo M?ntynen + Elmo Mäntynen + Faye Zhao Jonathan David Riehl Anders Qvist Corbin Simpson Chirag Jadwani Beatrice During Alex Perry - Vincent Legoll + Vaibhav Sood Alan McIntyre - Spenser Bauman + William Leslie Alexander Sedov Attila Gobi + Jasper.Schulz Christopher Pope - Devin Jeanpierre - Vaibhav Sood Christian Tismer Marc Abramowitz Dan Stromberg Arjun Naik Valentina Mukhamedzhanova Stefano Parmesan + Mark Young Alexis Daboville Jens-Uwe Mager Carl Meyer Karl Ramm Pieter Zieschang - Anton Gulenko Gabriel Lukas Vacek Andrew Dalke @@ -225,6 +230,7 @@ Jakub Stasiak Nathan Taylor Vladimir Kryachko + Omer Katz Jacek Generowicz Alejandro J. 
Cura Jacob Oscarson @@ -239,6 +245,7 @@ Lars Wassermann Philipp Rustemeuer Henrik Vendelbo + Richard Lancaster Dan Buch Miguel de Val Borro Artur Lisiecki @@ -250,18 +257,18 @@ Tomo Cocoa Kim Jin Su Toni Mattis + Amber Brown Lucas Stadler Julian Berman Markus Holtermann roberto at goyle Yury V. Zaytsev Anna Katrina Dominguez - William Leslie Bobby Impollonia - Faye Zhao timo at eistee.fritz.box Andrew Thompson Yusei Tahara + Aaron Tubbs Ben Darnell Roberto De Ioris Juan Francisco Cantero Hurtado @@ -273,6 +280,7 @@ Christopher Armstrong Michael Hudson-Doyle Anders Sigfridsson + Nikolay Zinov Yasir Suhail Jason Michalski rafalgalczynski at gmail.com @@ -282,6 +290,7 @@ Gustavo Niemeyer Stephan Busemann Rafał Gałczyński + Matt Bogosian Christian Muirhead Berker Peksag James Lan @@ -316,9 +325,9 @@ Stefan Marr jiaaro Mads Kiilerich - Richard Lancaster opassembler.py Antony Lee + Jason Madden Yaroslav Fedevych Jim Hunziker Markus Unterwaditzer @@ -327,6 +336,7 @@ squeaky Zearin soareschen + Jonas Pfannschmidt Kurt Griffiths Mike Bayer Matthew Miller diff --git a/pypy/config/pypyoption.py b/pypy/config/pypyoption.py --- a/pypy/config/pypyoption.py +++ b/pypy/config/pypyoption.py @@ -170,12 +170,8 @@ cmdline="--translationmodules", suggests=[("objspace.allworkingmodules", False)]), - BoolOption("usepycfiles", "Write and read pyc files when importing", - default=True), - BoolOption("lonepycfiles", "Import pyc files with no matching py file", - default=False, - requires=[("objspace.usepycfiles", True)]), + default=False), StrOption("soabi", "Tag to differentiate extension modules built for different Python interpreters", diff --git a/pypy/doc/how-to-release.rst b/pypy/doc/how-to-release.rst --- a/pypy/doc/how-to-release.rst +++ b/pypy/doc/how-to-release.rst @@ -1,5 +1,20 @@ -Making a PyPy Release -===================== +The PyPy Release Process +======================== + +Release Policy +++++++++++++++ + +We try to create a stable release a few times a year. 
These are released on +a branch named like release-2.x or release-4.x, and each release is tagged, +for instance release-4.0.1. + +After release, inevitably there are bug fixes. It is the responsibility of +the commiter who fixes a bug to make sure this fix is on the release branch, +so that we can then create a tagged bug-fix release, which will hopefully +happen more often than stable releases. + +How to Create a PyPy Release +++++++++++++++++++++++++++++ Overview -------- diff --git a/pypy/doc/tool/makecontributor.py b/pypy/doc/tool/makecontributor.py --- a/pypy/doc/tool/makecontributor.py +++ b/pypy/doc/tool/makecontributor.py @@ -72,6 +72,7 @@ 'Anton Gulenko':['anton gulenko', 'anton_gulenko'], 'Richard Lancaster':['richardlancaster'], 'William Leslie':['William ML Leslie'], + 'Spenser Bauman':['Spenser Andrew Bauman'], } alias_map = {} diff --git a/pypy/goal/targetpypystandalone.py b/pypy/goal/targetpypystandalone.py --- a/pypy/goal/targetpypystandalone.py +++ b/pypy/goal/targetpypystandalone.py @@ -277,7 +277,6 @@ if config.translation.sandbox: config.objspace.lonepycfiles = False - config.objspace.usepycfiles = False config.translating = True diff --git a/pypy/module/cpyext/__init__.py b/pypy/module/cpyext/__init__.py --- a/pypy/module/cpyext/__init__.py +++ b/pypy/module/cpyext/__init__.py @@ -34,7 +34,7 @@ import pypy.module.cpyext.pyerrors import pypy.module.cpyext.typeobject import pypy.module.cpyext.object -import pypy.module.cpyext.stringobject +import pypy.module.cpyext.bytesobject import pypy.module.cpyext.tupleobject import pypy.module.cpyext.setobject import pypy.module.cpyext.dictobject diff --git a/pypy/module/cpyext/bytesobject.py b/pypy/module/cpyext/bytesobject.py new file mode 100644 --- /dev/null +++ b/pypy/module/cpyext/bytesobject.py @@ -0,0 +1,319 @@ +from pypy.interpreter.error import OperationError +from rpython.rtyper.lltypesystem import rffi, lltype +from pypy.module.cpyext.api import ( + cpython_api, cpython_struct, 
bootstrap_function, build_type_checkers, + PyObjectFields, Py_ssize_t, CONST_STRING, CANNOT_FAIL) +from pypy.module.cpyext.pyerrors import PyErr_BadArgument +from pypy.module.cpyext.pyobject import ( + PyObject, PyObjectP, Py_DecRef, make_ref, from_ref, track_reference, + make_typedescr, get_typedescr) + +## +## Implementation of PyStringObject +## ================================ +## +## The problem +## ----------- +## +## PyString_AsString() must return a (non-movable) pointer to the underlying +## buffer, whereas pypy strings are movable. C code may temporarily store +## this address and use it, as long as it owns a reference to the PyObject. +## There is no "release" function to specify that the pointer is not needed +## any more. +## +## Also, the pointer may be used to fill the initial value of string. This is +## valid only when the string was just allocated, and is not used elsewhere. +## +## Solution +## -------- +## +## PyStringObject contains two additional members: the size and a pointer to a +## char buffer; it may be NULL. +## +## - A string allocated by pypy will be converted into a PyStringObject with a +## NULL buffer. The first time PyString_AsString() is called, memory is +## allocated (with flavor='raw') and content is copied. +## +## - A string allocated with PyString_FromStringAndSize(NULL, size) will +## allocate a PyStringObject structure, and a buffer with the specified +## size, but the reference won't be stored in the global map; there is no +## corresponding object in pypy. When from_ref() or Py_INCREF() is called, +## the pypy string is created, and added to the global map of tracked +## objects. The buffer is then supposed to be immutable. +## +## - _PyString_Resize() works only on not-yet-pypy'd strings, and returns a +## similar object. +## +## - PyString_Size() doesn't need to force the object. +## +## - There could be an (expensive!) check in from_ref() that the buffer still +## corresponds to the pypy gc-managed string. 
+## + +PyStringObjectStruct = lltype.ForwardReference() +PyStringObject = lltype.Ptr(PyStringObjectStruct) +PyStringObjectFields = PyObjectFields + \ + (("buffer", rffi.CCHARP), ("size", Py_ssize_t)) +cpython_struct("PyStringObject", PyStringObjectFields, PyStringObjectStruct) + + at bootstrap_function +def init_stringobject(space): + "Type description of PyStringObject" + make_typedescr(space.w_str.layout.typedef, + basestruct=PyStringObject.TO, + attach=string_attach, + dealloc=string_dealloc, + realize=string_realize) + +PyString_Check, PyString_CheckExact = build_type_checkers("String", "w_str") + +def new_empty_str(space, length): + """ + Allocate a PyStringObject and its buffer, but without a corresponding + interpreter object. The buffer may be mutated, until string_realize() is + called. Refcount of the result is 1. + """ + typedescr = get_typedescr(space.w_str.layout.typedef) + py_obj = typedescr.allocate(space, space.w_str) + py_str = rffi.cast(PyStringObject, py_obj) + + buflen = length + 1 + py_str.c_size = length + py_str.c_buffer = lltype.malloc(rffi.CCHARP.TO, buflen, + flavor='raw', zero=True) + return py_str + +def string_attach(space, py_obj, w_obj): + """ + Fills a newly allocated PyStringObject with the given string object. The + buffer must not be modified. + """ + py_str = rffi.cast(PyStringObject, py_obj) + py_str.c_size = len(space.str_w(w_obj)) + py_str.c_buffer = lltype.nullptr(rffi.CCHARP.TO) + +def string_realize(space, py_obj): + """ + Creates the string in the interpreter. The PyStringObject buffer must not + be modified after this call. + """ + py_str = rffi.cast(PyStringObject, py_obj) + s = rffi.charpsize2str(py_str.c_buffer, py_str.c_size) + w_obj = space.wrap(s) + track_reference(space, py_obj, w_obj) + return w_obj + + at cpython_api([PyObject], lltype.Void, header=None) +def string_dealloc(space, py_obj): + """Frees allocated PyStringObject resources. 
+ """ + py_str = rffi.cast(PyStringObject, py_obj) + if py_str.c_buffer: + lltype.free(py_str.c_buffer, flavor="raw") + from pypy.module.cpyext.object import PyObject_dealloc + PyObject_dealloc(space, py_obj) + +#_______________________________________________________________________ + + at cpython_api([CONST_STRING, Py_ssize_t], PyObject) +def PyString_FromStringAndSize(space, char_p, length): + if char_p: + s = rffi.charpsize2str(char_p, length) + return make_ref(space, space.wrap(s)) + else: + return rffi.cast(PyObject, new_empty_str(space, length)) + + at cpython_api([CONST_STRING], PyObject) +def PyString_FromString(space, char_p): + s = rffi.charp2str(char_p) + return space.wrap(s) + + at cpython_api([PyObject], rffi.CCHARP, error=0) +def PyString_AsString(space, ref): + if from_ref(space, rffi.cast(PyObject, ref.c_ob_type)) is space.w_str: + pass # typecheck returned "ok" without forcing 'ref' at all + elif not PyString_Check(space, ref): # otherwise, use the alternate way + raise OperationError(space.w_TypeError, space.wrap( + "PyString_AsString only support strings")) + ref_str = rffi.cast(PyStringObject, ref) + if not ref_str.c_buffer: + # copy string buffer + w_str = from_ref(space, ref) + s = space.str_w(w_str) + ref_str.c_buffer = rffi.str2charp(s) + return ref_str.c_buffer + + at cpython_api([PyObject, rffi.CCHARPP, rffi.CArrayPtr(Py_ssize_t)], rffi.INT_real, error=-1) +def PyString_AsStringAndSize(space, ref, buffer, length): + if not PyString_Check(space, ref): + raise OperationError(space.w_TypeError, space.wrap( + "PyString_AsStringAndSize only support strings")) + ref_str = rffi.cast(PyStringObject, ref) + if not ref_str.c_buffer: + # copy string buffer + w_str = from_ref(space, ref) + s = space.str_w(w_str) + ref_str.c_buffer = rffi.str2charp(s) + buffer[0] = ref_str.c_buffer + if length: + length[0] = ref_str.c_size + else: + i = 0 + while ref_str.c_buffer[i] != '\0': + i += 1 + if i != ref_str.c_size: + raise OperationError(space.w_TypeError, 
space.wrap( + "expected string without null bytes")) + return 0 + + at cpython_api([PyObject], Py_ssize_t, error=-1) +def PyString_Size(space, ref): + if from_ref(space, rffi.cast(PyObject, ref.c_ob_type)) is space.w_str: + ref = rffi.cast(PyStringObject, ref) + return ref.c_size + else: + w_obj = from_ref(space, ref) + return space.len_w(w_obj) + + at cpython_api([PyObjectP, Py_ssize_t], rffi.INT_real, error=-1) +def _PyString_Resize(space, ref, newsize): + """A way to resize a string object even though it is "immutable". Only use this to + build up a brand new string object; don't use this if the string may already be + known in other parts of the code. It is an error to call this function if the + refcount on the input string object is not one. Pass the address of an existing + string object as an lvalue (it may be written into), and the new size desired. + On success, *string holds the resized string object and 0 is returned; + the address in *string may differ from its input value. If the reallocation + fails, the original string object at *string is deallocated, *string is + set to NULL, a memory exception is set, and -1 is returned. 
+ """ + # XXX always create a new string so far + py_str = rffi.cast(PyStringObject, ref[0]) + if not py_str.c_buffer: + raise OperationError(space.w_SystemError, space.wrap( + "_PyString_Resize called on already created string")) + try: + py_newstr = new_empty_str(space, newsize) + except MemoryError: + Py_DecRef(space, ref[0]) + ref[0] = lltype.nullptr(PyObject.TO) + raise + to_cp = newsize + oldsize = py_str.c_size + if oldsize < newsize: + to_cp = oldsize + for i in range(to_cp): + py_newstr.c_buffer[i] = py_str.c_buffer[i] + Py_DecRef(space, ref[0]) + ref[0] = rffi.cast(PyObject, py_newstr) + return 0 + + at cpython_api([PyObject, PyObject], rffi.INT, error=CANNOT_FAIL) +def _PyString_Eq(space, w_str1, w_str2): + return space.eq_w(w_str1, w_str2) + + at cpython_api([PyObjectP, PyObject], lltype.Void) +def PyString_Concat(space, ref, w_newpart): + """Create a new string object in *string containing the contents of newpart + appended to string; the caller will own the new reference. The reference to + the old value of string will be stolen. If the new string cannot be created, + the old reference to string will still be discarded and the value of + *string will be set to NULL; the appropriate exception will be set.""" + + if not ref[0]: + return + + if w_newpart is None or not PyString_Check(space, ref[0]) or \ + not PyString_Check(space, w_newpart): + Py_DecRef(space, ref[0]) + ref[0] = lltype.nullptr(PyObject.TO) + return + w_str = from_ref(space, ref[0]) + w_newstr = space.add(w_str, w_newpart) + Py_DecRef(space, ref[0]) + ref[0] = make_ref(space, w_newstr) + + at cpython_api([PyObjectP, PyObject], lltype.Void) +def PyString_ConcatAndDel(space, ref, newpart): + """Create a new string object in *string containing the contents of newpart + appended to string. 
This version decrements the reference count of newpart.""" + PyString_Concat(space, ref, newpart) + Py_DecRef(space, newpart) + + at cpython_api([PyObject, PyObject], PyObject) +def PyString_Format(space, w_format, w_args): + """Return a new string object from format and args. Analogous to format % + args. The args argument must be a tuple.""" + return space.mod(w_format, w_args) + + at cpython_api([CONST_STRING], PyObject) +def PyString_InternFromString(space, string): + """A combination of PyString_FromString() and + PyString_InternInPlace(), returning either a new string object that has + been interned, or a new ("owned") reference to an earlier interned string + object with the same value.""" + s = rffi.charp2str(string) + return space.new_interned_str(s) + + at cpython_api([PyObjectP], lltype.Void) +def PyString_InternInPlace(space, string): + """Intern the argument *string in place. The argument must be the + address of a pointer variable pointing to a Python string object. + If there is an existing interned string that is the same as + *string, it sets *string to it (decrementing the reference count + of the old string object and incrementing the reference count of + the interned string object), otherwise it leaves *string alone and + interns it (incrementing its reference count). (Clarification: + even though there is a lot of talk about reference counts, think + of this function as reference-count-neutral; you own the object + after the call if and only if you owned it before the call.) + + This function is not available in 3.x and does not have a PyBytes + alias.""" + w_str = from_ref(space, string[0]) + w_str = space.new_interned_w_str(w_str) + Py_DecRef(space, string[0]) + string[0] = make_ref(space, w_str) + + at cpython_api([PyObject, CONST_STRING, CONST_STRING], PyObject) +def PyString_AsEncodedObject(space, w_str, encoding, errors): + """Encode a string object using the codec registered for encoding and return + the result as Python object. 
encoding and errors have the same meaning as + the parameters of the same name in the string encode() method. The codec to + be used is looked up using the Python codec registry. Return NULL if an + exception was raised by the codec. + + This function is not available in 3.x and does not have a PyBytes alias.""" + if not PyString_Check(space, w_str): + PyErr_BadArgument(space) + + w_encoding = w_errors = None + if encoding: + w_encoding = space.wrap(rffi.charp2str(encoding)) + if errors: + w_errors = space.wrap(rffi.charp2str(errors)) + return space.call_method(w_str, 'encode', w_encoding, w_errors) + + at cpython_api([PyObject, CONST_STRING, CONST_STRING], PyObject) +def PyString_AsDecodedObject(space, w_str, encoding, errors): + """Decode a string object by passing it to the codec registered + for encoding and return the result as Python object. encoding and + errors have the same meaning as the parameters of the same name in + the string encode() method. The codec to be used is looked up + using the Python codec registry. Return NULL if an exception was + raised by the codec. + + This function is not available in 3.x and does not have a PyBytes alias.""" + if not PyString_Check(space, w_str): + PyErr_BadArgument(space) + + w_encoding = w_errors = None + if encoding: + w_encoding = space.wrap(rffi.charp2str(encoding)) + if errors: + w_errors = space.wrap(rffi.charp2str(errors)) + return space.call_method(w_str, "decode", w_encoding, w_errors) + + at cpython_api([PyObject, PyObject], PyObject) +def _PyString_Join(space, w_sep, w_seq): + return space.call_method(w_sep, 'join', w_seq) diff --git a/pypy/module/cpyext/pyobject.py b/pypy/module/cpyext/pyobject.py --- a/pypy/module/cpyext/pyobject.py +++ b/pypy/module/cpyext/pyobject.py @@ -191,7 +191,7 @@ def from_ref(space, ref): """ Finds the interpreter object corresponding to the given reference. If the - object is not yet realized (see stringobject.py), creates it. 
+ object is not yet realized (see bytesobject.py), creates it. """ assert is_pyobj(ref) if not ref: diff --git a/pypy/module/cpyext/stringobject.py b/pypy/module/cpyext/stringobject.py deleted file mode 100644 --- a/pypy/module/cpyext/stringobject.py +++ /dev/null @@ -1,319 +0,0 @@ -from pypy.interpreter.error import OperationError -from rpython.rtyper.lltypesystem import rffi, lltype -from pypy.module.cpyext.api import ( - cpython_api, cpython_struct, bootstrap_function, build_type_checkers, - PyObjectFields, Py_ssize_t, CONST_STRING, CANNOT_FAIL) -from pypy.module.cpyext.pyerrors import PyErr_BadArgument -from pypy.module.cpyext.pyobject import ( - PyObject, PyObjectP, Py_DecRef, make_ref, from_ref, track_reference, - make_typedescr, get_typedescr) - -## -## Implementation of PyStringObject -## ================================ -## -## The problem -## ----------- -## -## PyString_AsString() must return a (non-movable) pointer to the underlying -## buffer, whereas pypy strings are movable. C code may temporarily store -## this address and use it, as long as it owns a reference to the PyObject. -## There is no "release" function to specify that the pointer is not needed -## any more. -## -## Also, the pointer may be used to fill the initial value of string. This is -## valid only when the string was just allocated, and is not used elsewhere. -## -## Solution -## -------- -## -## PyStringObject contains two additional members: the size and a pointer to a -## char buffer; it may be NULL. -## -## - A string allocated by pypy will be converted into a PyStringObject with a -## NULL buffer. The first time PyString_AsString() is called, memory is -## allocated (with flavor='raw') and content is copied. -## -## - A string allocated with PyString_FromStringAndSize(NULL, size) will -## allocate a PyStringObject structure, and a buffer with the specified -## size, but the reference won't be stored in the global map; there is no -## corresponding object in pypy. 
When from_ref() or Py_INCREF() is called, -## the pypy string is created, and added to the global map of tracked -## objects. The buffer is then supposed to be immutable. -## -## - _PyString_Resize() works only on not-yet-pypy'd strings, and returns a -## similar object. -## -## - PyString_Size() doesn't need to force the object. -## -## - There could be an (expensive!) check in from_ref() that the buffer still -## corresponds to the pypy gc-managed string. -## - -PyStringObjectStruct = lltype.ForwardReference() -PyStringObject = lltype.Ptr(PyStringObjectStruct) -PyStringObjectFields = PyObjectFields + \ - (("buffer", rffi.CCHARP), ("size", Py_ssize_t)) -cpython_struct("PyStringObject", PyStringObjectFields, PyStringObjectStruct) - - at bootstrap_function -def init_stringobject(space): - "Type description of PyStringObject" - make_typedescr(space.w_str.layout.typedef, - basestruct=PyStringObject.TO, - attach=string_attach, - dealloc=string_dealloc, - realize=string_realize) - -PyString_Check, PyString_CheckExact = build_type_checkers("String", "w_str") - -def new_empty_str(space, length): - """ - Allocate a PyStringObject and its buffer, but without a corresponding - interpreter object. The buffer may be mutated, until string_realize() is - called. Refcount of the result is 1. - """ - typedescr = get_typedescr(space.w_str.layout.typedef) - py_obj = typedescr.allocate(space, space.w_str) - py_str = rffi.cast(PyStringObject, py_obj) - - buflen = length + 1 - py_str.c_size = length - py_str.c_buffer = lltype.malloc(rffi.CCHARP.TO, buflen, - flavor='raw', zero=True) - return py_str - -def string_attach(space, py_obj, w_obj): - """ - Fills a newly allocated PyStringObject with the given string object. The - buffer must not be modified. - """ - py_str = rffi.cast(PyStringObject, py_obj) - py_str.c_size = len(space.str_w(w_obj)) - py_str.c_buffer = lltype.nullptr(rffi.CCHARP.TO) - -def string_realize(space, py_obj): - """ - Creates the string in the interpreter. 
The PyStringObject buffer must not - be modified after this call. - """ - py_str = rffi.cast(PyStringObject, py_obj) - s = rffi.charpsize2str(py_str.c_buffer, py_str.c_size) - w_obj = space.wrap(s) - track_reference(space, py_obj, w_obj) - return w_obj - - at cpython_api([PyObject], lltype.Void, header=None) -def string_dealloc(space, py_obj): - """Frees allocated PyStringObject resources. - """ - py_str = rffi.cast(PyStringObject, py_obj) - if py_str.c_buffer: - lltype.free(py_str.c_buffer, flavor="raw") - from pypy.module.cpyext.object import PyObject_dealloc - PyObject_dealloc(space, py_obj) - -#_______________________________________________________________________ - - at cpython_api([CONST_STRING, Py_ssize_t], PyObject) -def PyString_FromStringAndSize(space, char_p, length): - if char_p: - s = rffi.charpsize2str(char_p, length) - return make_ref(space, space.wrap(s)) - else: - return rffi.cast(PyObject, new_empty_str(space, length)) - - at cpython_api([CONST_STRING], PyObject) -def PyString_FromString(space, char_p): - s = rffi.charp2str(char_p) - return space.wrap(s) - - at cpython_api([PyObject], rffi.CCHARP, error=0) -def PyString_AsString(space, ref): - if from_ref(space, rffi.cast(PyObject, ref.c_ob_type)) is space.w_str: - pass # typecheck returned "ok" without forcing 'ref' at all - elif not PyString_Check(space, ref): # otherwise, use the alternate way - raise OperationError(space.w_TypeError, space.wrap( - "PyString_AsString only support strings")) - ref_str = rffi.cast(PyStringObject, ref) - if not ref_str.c_buffer: - # copy string buffer - w_str = from_ref(space, ref) - s = space.str_w(w_str) - ref_str.c_buffer = rffi.str2charp(s) - return ref_str.c_buffer - - at cpython_api([PyObject, rffi.CCHARPP, rffi.CArrayPtr(Py_ssize_t)], rffi.INT_real, error=-1) -def PyString_AsStringAndSize(space, ref, buffer, length): - if not PyString_Check(space, ref): - raise OperationError(space.w_TypeError, space.wrap( - "PyString_AsStringAndSize only support 
strings")) - ref_str = rffi.cast(PyStringObject, ref) - if not ref_str.c_buffer: - # copy string buffer - w_str = from_ref(space, ref) - s = space.str_w(w_str) - ref_str.c_buffer = rffi.str2charp(s) - buffer[0] = ref_str.c_buffer - if length: - length[0] = ref_str.c_size - else: - i = 0 - while ref_str.c_buffer[i] != '\0': - i += 1 - if i != ref_str.c_size: - raise OperationError(space.w_TypeError, space.wrap( - "expected string without null bytes")) - return 0 - - at cpython_api([PyObject], Py_ssize_t, error=-1) -def PyString_Size(space, ref): - if from_ref(space, rffi.cast(PyObject, ref.c_ob_type)) is space.w_str: - ref = rffi.cast(PyStringObject, ref) - return ref.c_size - else: - w_obj = from_ref(space, ref) - return space.len_w(w_obj) - - at cpython_api([PyObjectP, Py_ssize_t], rffi.INT_real, error=-1) -def _PyString_Resize(space, ref, newsize): - """A way to resize a string object even though it is "immutable". Only use this to - build up a brand new string object; don't use this if the string may already be - known in other parts of the code. It is an error to call this function if the - refcount on the input string object is not one. Pass the address of an existing - string object as an lvalue (it may be written into), and the new size desired. - On success, *string holds the resized string object and 0 is returned; - the address in *string may differ from its input value. If the reallocation - fails, the original string object at *string is deallocated, *string is - set to NULL, a memory exception is set, and -1 is returned. 
- """ - # XXX always create a new string so far - py_str = rffi.cast(PyStringObject, ref[0]) - if not py_str.c_buffer: - raise OperationError(space.w_SystemError, space.wrap( - "_PyString_Resize called on already created string")) - try: - py_newstr = new_empty_str(space, newsize) - except MemoryError: - Py_DecRef(space, ref[0]) - ref[0] = lltype.nullptr(PyObject.TO) - raise - to_cp = newsize - oldsize = py_str.c_size - if oldsize < newsize: - to_cp = oldsize - for i in range(to_cp): - py_newstr.c_buffer[i] = py_str.c_buffer[i] - Py_DecRef(space, ref[0]) - ref[0] = rffi.cast(PyObject, py_newstr) - return 0 - - at cpython_api([PyObject, PyObject], rffi.INT, error=CANNOT_FAIL) -def _PyString_Eq(space, w_str1, w_str2): - return space.eq_w(w_str1, w_str2) - - at cpython_api([PyObjectP, PyObject], lltype.Void) -def PyString_Concat(space, ref, w_newpart): - """Create a new string object in *string containing the contents of newpart - appended to string; the caller will own the new reference. The reference to - the old value of string will be stolen. If the new string cannot be created, - the old reference to string will still be discarded and the value of - *string will be set to NULL; the appropriate exception will be set.""" - - if not ref[0]: - return - - if w_newpart is None or not PyString_Check(space, ref[0]) or \ - not PyString_Check(space, w_newpart): - Py_DecRef(space, ref[0]) - ref[0] = lltype.nullptr(PyObject.TO) - return - w_str = from_ref(space, ref[0]) - w_newstr = space.add(w_str, w_newpart) - Py_DecRef(space, ref[0]) - ref[0] = make_ref(space, w_newstr) - - at cpython_api([PyObjectP, PyObject], lltype.Void) -def PyString_ConcatAndDel(space, ref, newpart): - """Create a new string object in *string containing the contents of newpart - appended to string. 
This version decrements the reference count of newpart.""" - PyString_Concat(space, ref, newpart) - Py_DecRef(space, newpart) - - at cpython_api([PyObject, PyObject], PyObject) -def PyString_Format(space, w_format, w_args): - """Return a new string object from format and args. Analogous to format % - args. The args argument must be a tuple.""" - return space.mod(w_format, w_args) - - at cpython_api([CONST_STRING], PyObject) -def PyString_InternFromString(space, string): - """A combination of PyString_FromString() and - PyString_InternInPlace(), returning either a new string object that has - been interned, or a new ("owned") reference to an earlier interned string - object with the same value.""" - s = rffi.charp2str(string) - return space.new_interned_str(s) - - at cpython_api([PyObjectP], lltype.Void) -def PyString_InternInPlace(space, string): - """Intern the argument *string in place. The argument must be the - address of a pointer variable pointing to a Python string object. - If there is an existing interned string that is the same as - *string, it sets *string to it (decrementing the reference count - of the old string object and incrementing the reference count of - the interned string object), otherwise it leaves *string alone and - interns it (incrementing its reference count). (Clarification: - even though there is a lot of talk about reference counts, think - of this function as reference-count-neutral; you own the object - after the call if and only if you owned it before the call.) - - This function is not available in 3.x and does not have a PyBytes - alias.""" - w_str = from_ref(space, string[0]) - w_str = space.new_interned_w_str(w_str) - Py_DecRef(space, string[0]) - string[0] = make_ref(space, w_str) - - at cpython_api([PyObject, CONST_STRING, CONST_STRING], PyObject) -def PyString_AsEncodedObject(space, w_str, encoding, errors): - """Encode a string object using the codec registered for encoding and return - the result as Python object. 
encoding and errors have the same meaning as - the parameters of the same name in the string encode() method. The codec to - be used is looked up using the Python codec registry. Return NULL if an - exception was raised by the codec. - - This function is not available in 3.x and does not have a PyBytes alias.""" - if not PyString_Check(space, w_str): - PyErr_BadArgument(space) - - w_encoding = w_errors = None - if encoding: - w_encoding = space.wrap(rffi.charp2str(encoding)) - if errors: - w_errors = space.wrap(rffi.charp2str(errors)) - return space.call_method(w_str, 'encode', w_encoding, w_errors) - - at cpython_api([PyObject, CONST_STRING, CONST_STRING], PyObject) -def PyString_AsDecodedObject(space, w_str, encoding, errors): - """Decode a string object by passing it to the codec registered - for encoding and return the result as Python object. encoding and - errors have the same meaning as the parameters of the same name in - the string encode() method. The codec to be used is looked up - using the Python codec registry. Return NULL if an exception was - raised by the codec. 
- - This function is not available in 3.x and does not have a PyBytes alias.""" - if not PyString_Check(space, w_str): - PyErr_BadArgument(space) - - w_encoding = w_errors = None - if encoding: - w_encoding = space.wrap(rffi.charp2str(encoding)) - if errors: - w_errors = space.wrap(rffi.charp2str(errors)) - return space.call_method(w_str, "decode", w_encoding, w_errors) - - at cpython_api([PyObject, PyObject], PyObject) -def _PyString_Join(space, w_sep, w_seq): - return space.call_method(w_sep, 'join', w_seq) diff --git a/pypy/module/cpyext/structmember.py b/pypy/module/cpyext/structmember.py --- a/pypy/module/cpyext/structmember.py +++ b/pypy/module/cpyext/structmember.py @@ -6,7 +6,7 @@ from pypy.module.cpyext.intobject import PyInt_AsLong, PyInt_AsUnsignedLong from pypy.module.cpyext.pyerrors import PyErr_Occurred from pypy.module.cpyext.pyobject import PyObject, Py_DecRef, from_ref, make_ref -from pypy.module.cpyext.stringobject import ( +from pypy.module.cpyext.bytesobject import ( PyString_FromString, PyString_FromStringAndSize) from pypy.module.cpyext.floatobject import PyFloat_AsDouble from pypy.module.cpyext.longobject import ( diff --git a/pypy/module/cpyext/test/test_bytesobject.py b/pypy/module/cpyext/test/test_bytesobject.py new file mode 100644 --- /dev/null +++ b/pypy/module/cpyext/test/test_bytesobject.py @@ -0,0 +1,329 @@ +from rpython.rtyper.lltypesystem import rffi, lltype +from pypy.module.cpyext.test.test_api import BaseApiTest +from pypy.module.cpyext.test.test_cpyext import AppTestCpythonExtensionBase +from pypy.module.cpyext.bytesobject import new_empty_str, PyStringObject +from pypy.module.cpyext.api import PyObjectP, PyObject, Py_ssize_tP +from pypy.module.cpyext.pyobject import Py_DecRef, from_ref, make_ref + +import py +import sys + +class AppTestStringObject(AppTestCpythonExtensionBase): + def test_stringobject(self): + module = self.import_extension('foo', [ + ("get_hello1", "METH_NOARGS", + """ + return PyString_FromStringAndSize( + 
"Hello world", 11); + """), + ("get_hello2", "METH_NOARGS", + """ + return PyString_FromString("Hello world"); + """), + ("test_Size", "METH_NOARGS", + """ + PyObject* s = PyString_FromString("Hello world"); + int result = 0; + + if(PyString_Size(s) == 11) { + result = 1; + } + if(s->ob_type->tp_basicsize != sizeof(void*)*5) + result = 0; + Py_DECREF(s); + return PyBool_FromLong(result); + """), + ("test_Size_exception", "METH_NOARGS", + """ + PyObject* f = PyFloat_FromDouble(1.0); + Py_ssize_t size = PyString_Size(f); + + Py_DECREF(f); + return NULL; + """), + ("test_is_string", "METH_VARARGS", + """ + return PyBool_FromLong(PyString_Check(PyTuple_GetItem(args, 0))); + """)]) + assert module.get_hello1() == 'Hello world' + assert module.get_hello2() == 'Hello world' + assert module.test_Size() + raises(TypeError, module.test_Size_exception) + + assert module.test_is_string("") + assert not module.test_is_string(()) + + def test_string_buffer_init(self): + module = self.import_extension('foo', [ + ("getstring", "METH_NOARGS", + """ + PyObject *s, *t; + char* c; + Py_ssize_t len; + + s = PyString_FromStringAndSize(NULL, 4); + if (s == NULL) + return NULL; + t = PyString_FromStringAndSize(NULL, 3); + if (t == NULL) + return NULL; + Py_DECREF(t); + c = PyString_AsString(s); + c[0] = 'a'; + c[1] = 'b'; + c[3] = 'c'; + return s; + """), + ]) + s = module.getstring() + assert len(s) == 4 + assert s == 'ab\x00c' + + + + def test_AsString(self): + module = self.import_extension('foo', [ + ("getstring", "METH_NOARGS", + """ + PyObject* s1 = PyString_FromStringAndSize("test", 4); + char* c = PyString_AsString(s1); + PyObject* s2 = PyString_FromStringAndSize(c, 4); + Py_DECREF(s1); + return s2; + """), + ]) + s = module.getstring() + assert s == 'test' + + def test_py_string_as_string(self): + module = self.import_extension('foo', [ + ("string_as_string", "METH_VARARGS", + ''' + return PyString_FromStringAndSize(PyString_AsString( + PyTuple_GetItem(args, 0)), 4); + ''' + )]) 
+ assert module.string_as_string("huheduwe") == "huhe" + + def test_py_string_as_string_None(self): + module = self.import_extension('foo', [ + ("string_None", "METH_VARARGS", + ''' + return PyString_AsString(Py_None); + ''' + )]) + raises(TypeError, module.string_None) + + def test_AsStringAndSize(self): + module = self.import_extension('foo', [ + ("getstring", "METH_NOARGS", + """ + PyObject* s1 = PyString_FromStringAndSize("te\\0st", 5); + char *buf; + Py_ssize_t len; + if (PyString_AsStringAndSize(s1, &buf, &len) < 0) + return NULL; + if (len != 5) { + PyErr_SetString(PyExc_AssertionError, "Bad Length"); + return NULL; + } + if (PyString_AsStringAndSize(s1, &buf, NULL) >= 0) { + PyErr_SetString(PyExc_AssertionError, "Should Have failed"); + return NULL; + } + PyErr_Clear(); + Py_DECREF(s1); + Py_INCREF(Py_None); + return Py_None; + """), + ]) + module.getstring() + + def test_format_v(self): + module = self.import_extension('foo', [ + ("test_string_format_v", "METH_VARARGS", + ''' + return helper("bla %d ble %s\\n", + PyInt_AsLong(PyTuple_GetItem(args, 0)), + PyString_AsString(PyTuple_GetItem(args, 1))); + ''' + ) + ], prologue=''' + PyObject* helper(char* fmt, ...) 
+ { + va_list va; + PyObject* res; + va_start(va, fmt); + res = PyString_FromFormatV(fmt, va); + va_end(va); + return res; + } + ''') + res = module.test_string_format_v(1, "xyz") + assert res == "bla 1 ble xyz\n" + + def test_format(self): + module = self.import_extension('foo', [ + ("test_string_format", "METH_VARARGS", + ''' + return PyString_FromFormat("bla %d ble %s\\n", + PyInt_AsLong(PyTuple_GetItem(args, 0)), + PyString_AsString(PyTuple_GetItem(args, 1))); + ''' + ) + ]) + res = module.test_string_format(1, "xyz") + assert res == "bla 1 ble xyz\n" + + def test_intern_inplace(self): + module = self.import_extension('foo', [ + ("test_intern_inplace", "METH_O", + ''' + PyObject *s = args; + Py_INCREF(s); + PyString_InternInPlace(&s); + return s; + ''' + ) + ]) + # This does not test much, but at least the refcounts are checked. + assert module.test_intern_inplace('s') == 's' + +class TestString(BaseApiTest): + def test_string_resize(self, space, api): + py_str = new_empty_str(space, 10) + ar = lltype.malloc(PyObjectP.TO, 1, flavor='raw') + py_str.c_buffer[0] = 'a' + py_str.c_buffer[1] = 'b' + py_str.c_buffer[2] = 'c' + ar[0] = rffi.cast(PyObject, py_str) + api._PyString_Resize(ar, 3) + py_str = rffi.cast(PyStringObject, ar[0]) + assert py_str.c_size == 3 + assert py_str.c_buffer[1] == 'b' + assert py_str.c_buffer[3] == '\x00' + # the same for growing + ar[0] = rffi.cast(PyObject, py_str) + api._PyString_Resize(ar, 10) + py_str = rffi.cast(PyStringObject, ar[0]) + assert py_str.c_size == 10 + assert py_str.c_buffer[1] == 'b' + assert py_str.c_buffer[10] == '\x00' + Py_DecRef(space, ar[0]) + lltype.free(ar, flavor='raw') + + def test_string_buffer(self, space, api): + py_str = new_empty_str(space, 10) + c_buf = py_str.c_ob_type.c_tp_as_buffer + assert c_buf + py_obj = rffi.cast(PyObject, py_str) + assert c_buf.c_bf_getsegcount(py_obj, lltype.nullptr(Py_ssize_tP.TO)) == 1 + ref = lltype.malloc(Py_ssize_tP.TO, 1, flavor='raw') + assert 
c_buf.c_bf_getsegcount(py_obj, ref) == 1 + assert ref[0] == 10 + lltype.free(ref, flavor='raw') + ref = lltype.malloc(rffi.VOIDPP.TO, 1, flavor='raw') + assert c_buf.c_bf_getreadbuffer(py_obj, 0, ref) == 10 + lltype.free(ref, flavor='raw') + Py_DecRef(space, py_obj) + + def test_Concat(self, space, api): + ref = make_ref(space, space.wrap('abc')) + ptr = lltype.malloc(PyObjectP.TO, 1, flavor='raw') + ptr[0] = ref + prev_refcnt = ref.c_ob_refcnt + api.PyString_Concat(ptr, space.wrap('def')) + assert ref.c_ob_refcnt == prev_refcnt - 1 + assert space.str_w(from_ref(space, ptr[0])) == 'abcdef' + api.PyString_Concat(ptr, space.w_None) + assert not ptr[0] + ptr[0] = lltype.nullptr(PyObject.TO) + api.PyString_Concat(ptr, space.wrap('def')) # should not crash + lltype.free(ptr, flavor='raw') + + def test_ConcatAndDel(self, space, api): + ref1 = make_ref(space, space.wrap('abc')) + ref2 = make_ref(space, space.wrap('def')) + ptr = lltype.malloc(PyObjectP.TO, 1, flavor='raw') + ptr[0] = ref1 + prev_refcnf = ref2.c_ob_refcnt + api.PyString_ConcatAndDel(ptr, ref2) + assert space.str_w(from_ref(space, ptr[0])) == 'abcdef' + assert ref2.c_ob_refcnt == prev_refcnf - 1 + Py_DecRef(space, ptr[0]) + ptr[0] = lltype.nullptr(PyObject.TO) + ref2 = make_ref(space, space.wrap('foo')) + prev_refcnf = ref2.c_ob_refcnt + api.PyString_ConcatAndDel(ptr, ref2) # should not crash + assert ref2.c_ob_refcnt == prev_refcnf - 1 + lltype.free(ptr, flavor='raw') + + def test_format(self, space, api): + assert "1 2" == space.unwrap( + api.PyString_Format(space.wrap('%s %d'), space.wrap((1, 2)))) + + def test_asbuffer(self, space, api): + bufp = lltype.malloc(rffi.CCHARPP.TO, 1, flavor='raw') + lenp = lltype.malloc(Py_ssize_tP.TO, 1, flavor='raw') + + w_text = space.wrap("text") + assert api.PyObject_AsCharBuffer(w_text, bufp, lenp) == 0 + assert lenp[0] == 4 + assert rffi.charp2str(bufp[0]) == 'text' + + lltype.free(bufp, flavor='raw') + lltype.free(lenp, flavor='raw') + + def test_intern(self, space, 
api): + buf = rffi.str2charp("test") + w_s1 = api.PyString_InternFromString(buf) + w_s2 = api.PyString_InternFromString(buf) + rffi.free_charp(buf) + assert w_s1 is w_s2 + + def test_AsEncodedObject(self, space, api): + ptr = space.wrap('abc') + + errors = rffi.str2charp("strict") + + encoding = rffi.str2charp("hex") + res = api.PyString_AsEncodedObject( + ptr, encoding, errors) + assert space.unwrap(res) == "616263" + + res = api.PyString_AsEncodedObject( + ptr, encoding, lltype.nullptr(rffi.CCHARP.TO)) + assert space.unwrap(res) == "616263" + rffi.free_charp(encoding) + + encoding = rffi.str2charp("unknown_encoding") + self.raises(space, api, LookupError, api.PyString_AsEncodedObject, + ptr, encoding, errors) + rffi.free_charp(encoding) + + rffi.free_charp(errors) + + res = api.PyString_AsEncodedObject( + ptr, lltype.nullptr(rffi.CCHARP.TO), lltype.nullptr(rffi.CCHARP.TO)) + assert space.unwrap(res) == "abc" + + self.raises(space, api, TypeError, api.PyString_AsEncodedObject, + space.wrap(2), lltype.nullptr(rffi.CCHARP.TO), lltype.nullptr(rffi.CCHARP.TO) + ) + + def test_AsDecodedObject(self, space, api): + w_str = space.wrap('caf\xe9') + encoding = rffi.str2charp("latin-1") + w_res = api.PyString_AsDecodedObject(w_str, encoding, None) + rffi.free_charp(encoding) + assert space.unwrap(w_res) == u"caf\xe9" + + def test_eq(self, space, api): + assert 1 == api._PyString_Eq(space.wrap("hello"), space.wrap("hello")) + assert 0 == api._PyString_Eq(space.wrap("hello"), space.wrap("world")) + + def test_join(self, space, api): + w_sep = space.wrap('') + w_seq = space.wrap(['a', 'b']) + w_joined = api._PyString_Join(w_sep, w_seq) + assert space.unwrap(w_joined) == 'ab' diff --git a/pypy/module/cpyext/test/test_stringobject.py b/pypy/module/cpyext/test/test_stringobject.py deleted file mode 100644 --- a/pypy/module/cpyext/test/test_stringobject.py +++ /dev/null @@ -1,329 +0,0 @@ -from rpython.rtyper.lltypesystem import rffi, lltype -from pypy.module.cpyext.test.test_api 
import BaseApiTest -from pypy.module.cpyext.test.test_cpyext import AppTestCpythonExtensionBase -from pypy.module.cpyext.stringobject import new_empty_str, PyStringObject -from pypy.module.cpyext.api import PyObjectP, PyObject, Py_ssize_tP -from pypy.module.cpyext.pyobject import Py_DecRef, from_ref, make_ref - -import py -import sys - -class AppTestStringObject(AppTestCpythonExtensionBase): - def test_stringobject(self): - module = self.import_extension('foo', [ - ("get_hello1", "METH_NOARGS", - """ - return PyString_FromStringAndSize( - "Hello world", 11); - """), - ("get_hello2", "METH_NOARGS", - """ - return PyString_FromString("Hello world"); - """), - ("test_Size", "METH_NOARGS", - """ - PyObject* s = PyString_FromString("Hello world"); - int result = 0; - - if(PyString_Size(s) == 11) { - result = 1; - } - if(s->ob_type->tp_basicsize != sizeof(void*)*5) - result = 0; - Py_DECREF(s); - return PyBool_FromLong(result); - """), - ("test_Size_exception", "METH_NOARGS", - """ - PyObject* f = PyFloat_FromDouble(1.0); - Py_ssize_t size = PyString_Size(f); - - Py_DECREF(f); - return NULL; - """), - ("test_is_string", "METH_VARARGS", - """ - return PyBool_FromLong(PyString_Check(PyTuple_GetItem(args, 0))); - """)]) - assert module.get_hello1() == 'Hello world' - assert module.get_hello2() == 'Hello world' - assert module.test_Size() - raises(TypeError, module.test_Size_exception) - - assert module.test_is_string("") - assert not module.test_is_string(()) - - def test_string_buffer_init(self): - module = self.import_extension('foo', [ - ("getstring", "METH_NOARGS", - """ - PyObject *s, *t; - char* c; - Py_ssize_t len; - - s = PyString_FromStringAndSize(NULL, 4); - if (s == NULL) - return NULL; - t = PyString_FromStringAndSize(NULL, 3); - if (t == NULL) - return NULL; - Py_DECREF(t); - c = PyString_AsString(s); - c[0] = 'a'; - c[1] = 'b'; - c[3] = 'c'; - return s; - """), - ]) - s = module.getstring() - assert len(s) == 4 - assert s == 'ab\x00c' - - - - def 
test_AsString(self): - module = self.import_extension('foo', [ - ("getstring", "METH_NOARGS", - """ - PyObject* s1 = PyString_FromStringAndSize("test", 4); - char* c = PyString_AsString(s1); - PyObject* s2 = PyString_FromStringAndSize(c, 4); - Py_DECREF(s1); - return s2; - """), - ]) - s = module.getstring() - assert s == 'test' - - def test_py_string_as_string(self): - module = self.import_extension('foo', [ - ("string_as_string", "METH_VARARGS", - ''' - return PyString_FromStringAndSize(PyString_AsString( - PyTuple_GetItem(args, 0)), 4); - ''' - )]) - assert module.string_as_string("huheduwe") == "huhe" - - def test_py_string_as_string_None(self): - module = self.import_extension('foo', [ - ("string_None", "METH_VARARGS", - ''' - return PyString_AsString(Py_None); - ''' - )]) - raises(TypeError, module.string_None) - - def test_AsStringAndSize(self): - module = self.import_extension('foo', [ - ("getstring", "METH_NOARGS", - """ - PyObject* s1 = PyString_FromStringAndSize("te\\0st", 5); - char *buf; - Py_ssize_t len; - if (PyString_AsStringAndSize(s1, &buf, &len) < 0) - return NULL; - if (len != 5) { - PyErr_SetString(PyExc_AssertionError, "Bad Length"); - return NULL; - } - if (PyString_AsStringAndSize(s1, &buf, NULL) >= 0) { - PyErr_SetString(PyExc_AssertionError, "Should Have failed"); - return NULL; - } - PyErr_Clear(); - Py_DECREF(s1); - Py_INCREF(Py_None); - return Py_None; - """), - ]) - module.getstring() - - def test_format_v(self): - module = self.import_extension('foo', [ - ("test_string_format_v", "METH_VARARGS", - ''' - return helper("bla %d ble %s\\n", - PyInt_AsLong(PyTuple_GetItem(args, 0)), - PyString_AsString(PyTuple_GetItem(args, 1))); - ''' - ) - ], prologue=''' - PyObject* helper(char* fmt, ...) 
- { - va_list va; - PyObject* res; - va_start(va, fmt); - res = PyString_FromFormatV(fmt, va); - va_end(va); - return res; - } - ''') - res = module.test_string_format_v(1, "xyz") - assert res == "bla 1 ble xyz\n" - - def test_format(self): - module = self.import_extension('foo', [ - ("test_string_format", "METH_VARARGS", - ''' - return PyString_FromFormat("bla %d ble %s\\n", - PyInt_AsLong(PyTuple_GetItem(args, 0)), - PyString_AsString(PyTuple_GetItem(args, 1))); - ''' - ) - ]) - res = module.test_string_format(1, "xyz") - assert res == "bla 1 ble xyz\n" - - def test_intern_inplace(self): - module = self.import_extension('foo', [ - ("test_intern_inplace", "METH_O", - ''' - PyObject *s = args; - Py_INCREF(s); - PyString_InternInPlace(&s); - return s; - ''' - ) - ]) - # This does not test much, but at least the refcounts are checked. - assert module.test_intern_inplace('s') == 's' - -class TestString(BaseApiTest): - def test_string_resize(self, space, api): - py_str = new_empty_str(space, 10) - ar = lltype.malloc(PyObjectP.TO, 1, flavor='raw') - py_str.c_buffer[0] = 'a' - py_str.c_buffer[1] = 'b' - py_str.c_buffer[2] = 'c' - ar[0] = rffi.cast(PyObject, py_str) - api._PyString_Resize(ar, 3) - py_str = rffi.cast(PyStringObject, ar[0]) - assert py_str.c_size == 3 - assert py_str.c_buffer[1] == 'b' - assert py_str.c_buffer[3] == '\x00' - # the same for growing - ar[0] = rffi.cast(PyObject, py_str) - api._PyString_Resize(ar, 10) - py_str = rffi.cast(PyStringObject, ar[0]) - assert py_str.c_size == 10 - assert py_str.c_buffer[1] == 'b' - assert py_str.c_buffer[10] == '\x00' - Py_DecRef(space, ar[0]) - lltype.free(ar, flavor='raw') - - def test_string_buffer(self, space, api): - py_str = new_empty_str(space, 10) - c_buf = py_str.c_ob_type.c_tp_as_buffer - assert c_buf - py_obj = rffi.cast(PyObject, py_str) - assert c_buf.c_bf_getsegcount(py_obj, lltype.nullptr(Py_ssize_tP.TO)) == 1 - ref = lltype.malloc(Py_ssize_tP.TO, 1, flavor='raw') - assert 
c_buf.c_bf_getsegcount(py_obj, ref) == 1 - assert ref[0] == 10 - lltype.free(ref, flavor='raw') - ref = lltype.malloc(rffi.VOIDPP.TO, 1, flavor='raw') - assert c_buf.c_bf_getreadbuffer(py_obj, 0, ref) == 10 - lltype.free(ref, flavor='raw') - Py_DecRef(space, py_obj) - - def test_Concat(self, space, api): - ref = make_ref(space, space.wrap('abc')) - ptr = lltype.malloc(PyObjectP.TO, 1, flavor='raw') - ptr[0] = ref - prev_refcnt = ref.c_ob_refcnt - api.PyString_Concat(ptr, space.wrap('def')) - assert ref.c_ob_refcnt == prev_refcnt - 1 - assert space.str_w(from_ref(space, ptr[0])) == 'abcdef' - api.PyString_Concat(ptr, space.w_None) - assert not ptr[0] - ptr[0] = lltype.nullptr(PyObject.TO) - api.PyString_Concat(ptr, space.wrap('def')) # should not crash - lltype.free(ptr, flavor='raw') - - def test_ConcatAndDel(self, space, api): - ref1 = make_ref(space, space.wrap('abc')) - ref2 = make_ref(space, space.wrap('def')) - ptr = lltype.malloc(PyObjectP.TO, 1, flavor='raw') - ptr[0] = ref1 - prev_refcnf = ref2.c_ob_refcnt - api.PyString_ConcatAndDel(ptr, ref2) - assert space.str_w(from_ref(space, ptr[0])) == 'abcdef' - assert ref2.c_ob_refcnt == prev_refcnf - 1 - Py_DecRef(space, ptr[0]) - ptr[0] = lltype.nullptr(PyObject.TO) - ref2 = make_ref(space, space.wrap('foo')) - prev_refcnf = ref2.c_ob_refcnt - api.PyString_ConcatAndDel(ptr, ref2) # should not crash - assert ref2.c_ob_refcnt == prev_refcnf - 1 - lltype.free(ptr, flavor='raw') - - def test_format(self, space, api): - assert "1 2" == space.unwrap( - api.PyString_Format(space.wrap('%s %d'), space.wrap((1, 2)))) - - def test_asbuffer(self, space, api): - bufp = lltype.malloc(rffi.CCHARPP.TO, 1, flavor='raw') - lenp = lltype.malloc(Py_ssize_tP.TO, 1, flavor='raw') - - w_text = space.wrap("text") - assert api.PyObject_AsCharBuffer(w_text, bufp, lenp) == 0 - assert lenp[0] == 4 - assert rffi.charp2str(bufp[0]) == 'text' - - lltype.free(bufp, flavor='raw') - lltype.free(lenp, flavor='raw') - - def test_intern(self, space, 
api): - buf = rffi.str2charp("test") - w_s1 = api.PyString_InternFromString(buf) - w_s2 = api.PyString_InternFromString(buf) - rffi.free_charp(buf) - assert w_s1 is w_s2 - - def test_AsEncodedObject(self, space, api): - ptr = space.wrap('abc') - - errors = rffi.str2charp("strict") - - encoding = rffi.str2charp("hex") - res = api.PyString_AsEncodedObject( - ptr, encoding, errors) - assert space.unwrap(res) == "616263" - - res = api.PyString_AsEncodedObject( - ptr, encoding, lltype.nullptr(rffi.CCHARP.TO)) - assert space.unwrap(res) == "616263" - rffi.free_charp(encoding) - - encoding = rffi.str2charp("unknown_encoding") - self.raises(space, api, LookupError, api.PyString_AsEncodedObject, - ptr, encoding, errors) - rffi.free_charp(encoding) - - rffi.free_charp(errors) - - res = api.PyString_AsEncodedObject( - ptr, lltype.nullptr(rffi.CCHARP.TO), lltype.nullptr(rffi.CCHARP.TO)) - assert space.unwrap(res) == "abc" - - self.raises(space, api, TypeError, api.PyString_AsEncodedObject, - space.wrap(2), lltype.nullptr(rffi.CCHARP.TO), lltype.nullptr(rffi.CCHARP.TO) - ) - - def test_AsDecodedObject(self, space, api): - w_str = space.wrap('caf\xe9') - encoding = rffi.str2charp("latin-1") - w_res = api.PyString_AsDecodedObject(w_str, encoding, None) - rffi.free_charp(encoding) - assert space.unwrap(w_res) == u"caf\xe9" - - def test_eq(self, space, api): - assert 1 == api._PyString_Eq(space.wrap("hello"), space.wrap("hello")) - assert 0 == api._PyString_Eq(space.wrap("hello"), space.wrap("world")) - - def test_join(self, space, api): - w_sep = space.wrap('') - w_seq = space.wrap(['a', 'b']) - w_joined = api._PyString_Join(w_sep, w_seq) - assert space.unwrap(w_joined) == 'ab' diff --git a/pypy/module/cpyext/typeobject.py b/pypy/module/cpyext/typeobject.py --- a/pypy/module/cpyext/typeobject.py +++ b/pypy/module/cpyext/typeobject.py @@ -340,7 +340,7 @@ @cpython_api([PyObject, Py_ssize_t, rffi.VOIDPP], lltype.Signed, header=None, error=-1) def str_getreadbuffer(space, w_str, 
segment, ref): - from pypy.module.cpyext.stringobject import PyString_AsString + from pypy.module.cpyext.bytesobject import PyString_AsString if segment != 0: raise OperationError(space.w_SystemError, space.wrap ("accessing non-existent string segment")) @@ -353,7 +353,7 @@ @cpython_api([PyObject, Py_ssize_t, rffi.CCHARPP], lltype.Signed, header=None, error=-1) def str_getcharbuffer(space, w_str, segment, ref): - from pypy.module.cpyext.stringobject import PyString_AsString + from pypy.module.cpyext.bytesobject import PyString_AsString if segment != 0: raise OperationError(space.w_SystemError, space.wrap ("accessing non-existent string segment")) @@ -463,7 +463,7 @@ w_typename = space.getattr(w_type, space.wrap('__name__')) heaptype = rffi.cast(PyHeapTypeObject, pto) heaptype.c_ht_name = make_ref(space, w_typename) - from pypy.module.cpyext.stringobject import PyString_AsString + from pypy.module.cpyext.bytesobject import PyString_AsString pto.c_tp_name = PyString_AsString(space, heaptype.c_ht_name) else: pto.c_tp_name = rffi.str2charp(w_type.name) diff --git a/pypy/module/cpyext/unicodeobject.py b/pypy/module/cpyext/unicodeobject.py --- a/pypy/module/cpyext/unicodeobject.py +++ b/pypy/module/cpyext/unicodeobject.py @@ -9,7 +9,7 @@ from pypy.module.cpyext.pyobject import ( PyObject, PyObjectP, Py_DecRef, make_ref, from_ref, track_reference, make_typedescr, get_typedescr) -from pypy.module.cpyext.stringobject import PyString_Check +from pypy.module.cpyext.bytesobject import PyString_Check from pypy.module.sys.interp_encoding import setdefaultencoding from pypy.module._codecs.interp_codecs import CodecState from pypy.objspace.std import unicodeobject @@ -17,7 +17,7 @@ from rpython.tool.sourcetools import func_renamer import sys -## See comment in stringobject.py. +## See comment in bytesobject.py. 
PyUnicodeObjectStruct = lltype.ForwardReference() PyUnicodeObject = lltype.Ptr(PyUnicodeObjectStruct) diff --git a/pypy/module/imp/importing.py b/pypy/module/imp/importing.py --- a/pypy/module/imp/importing.py +++ b/pypy/module/imp/importing.py @@ -85,7 +85,7 @@ # The "imp" module does not respect this, and is allowed to find # lone .pyc files. # check the .pyc file - if space.config.objspace.usepycfiles and space.config.objspace.lonepycfiles: + if space.config.objspace.lonepycfiles: pycfile = filepart + ".pyc" if file_exists(pycfile): # existing .pyc file @@ -888,17 +888,11 @@ """ w = space.wrap - if space.config.objspace.usepycfiles: - src_stat = os.fstat(fd) - cpathname = pathname + 'c' - mtime = int(src_stat[stat.ST_MTIME]) - mode = src_stat[stat.ST_MODE] - stream = check_compiled_module(space, cpathname, mtime) - else: - cpathname = None - mtime = 0 - mode = 0 - stream = None + src_stat = os.fstat(fd) + cpathname = pathname + 'c' + mtime = int(src_stat[stat.ST_MTIME]) + mode = src_stat[stat.ST_MODE] + stream = check_compiled_module(space, cpathname, mtime) if stream: # existing and up-to-date .pyc file @@ -913,7 +907,7 @@ else: code_w = parse_source_module(space, pathname, source) - if space.config.objspace.usepycfiles and write_pyc: + if write_pyc: if not space.is_true(space.sys.get('dont_write_bytecode')): write_compiled_module(space, code_w, cpathname, mode, mtime) diff --git a/pypy/module/imp/test/test_import.py b/pypy/module/imp/test/test_import.py --- a/pypy/module/imp/test/test_import.py +++ b/pypy/module/imp/test/test_import.py @@ -98,6 +98,10 @@ 'a=5\nb=6\rc="""hello\r\nworld"""\r', mode='wb') p.join('mod.py').write( 'a=15\nb=16\rc="""foo\r\nbar"""\r', mode='wb') + setuppkg("test_bytecode", + a = '', + b = '', + c = '') # create compiled/x.py and a corresponding pyc file p = setuppkg("compiled", x = "x = 84") @@ -119,7 +123,7 @@ stream.try_to_find_file_descriptor()) finally: stream.close() - if space.config.objspace.usepycfiles: + if not 
space.config.translation.sandbox: # also create a lone .pyc file p.join('lone.pyc').write(p.join('x.pyc').read(mode='rb'), mode='wb') @@ -146,6 +150,8 @@ """) def _teardown(space, w_saved_modules): + p = udir.join('impsubdir') + p.remove() space.appexec([w_saved_modules], """ ((saved_path, saved_modules)): import sys @@ -1342,15 +1348,56 @@ assert isinstance(importer, zipimport.zipimporter) -class AppTestNoPycFile(object): +class AppTestWriteBytecode(object): spaceconfig = { - "objspace.usepycfiles": False, - "objspace.lonepycfiles": False + "translation.sandbox": False } + def setup_class(cls): - usepycfiles = cls.spaceconfig['objspace.usepycfiles'] + cls.saved_modules = _setup(cls.space) + sandbox = cls.spaceconfig['translation.sandbox'] + cls.w_sandbox = cls.space.wrap(sandbox) + + def teardown_class(cls): + _teardown(cls.space, cls.saved_modules) + cls.space.appexec([], """ + (): + import sys + sys.dont_write_bytecode = False + """) + + def test_default(self): + import os.path + from test_bytecode import a + assert a.__file__.endswith('a.py') + assert os.path.exists(a.__file__ + 'c') == (not self.sandbox) + + def test_write_bytecode(self): + import os.path + import sys + sys.dont_write_bytecode = False + from test_bytecode import b + assert b.__file__.endswith('b.py') + assert os.path.exists(b.__file__ + 'c') + + def test_dont_write_bytecode(self): + import os.path + import sys + sys.dont_write_bytecode = True + from test_bytecode import c + assert c.__file__.endswith('c.py') + assert not os.path.exists(c.__file__ + 'c') + + +class AppTestWriteBytecodeSandbox(AppTestWriteBytecode): + spaceconfig = { + "translation.sandbox": True + } + + +class _AppTestLonePycFileBase(object): + def setup_class(cls): lonepycfiles = cls.spaceconfig['objspace.lonepycfiles'] - cls.w_usepycfiles = cls.space.wrap(usepycfiles) cls.w_lonepycfiles = cls.space.wrap(lonepycfiles) cls.saved_modules = _setup(cls.space) @@ -1359,10 +1406,7 @@ def test_import_possibly_from_pyc(self): from 
compiled import x - if self.usepycfiles: - assert x.__file__.endswith('x.pyc') - else: - assert x.__file__.endswith('x.py') + assert x.__file__.endswith('x.pyc') try: from compiled import lone except ImportError: @@ -1371,15 +1415,13 @@ assert self.lonepycfiles, "should not have found 'lone.pyc'" assert lone.__file__.endswith('lone.pyc') -class AppTestNoLonePycFile(AppTestNoPycFile): +class AppTestNoLonePycFile(_AppTestLonePycFileBase): spaceconfig = { - "objspace.usepycfiles": True, "objspace.lonepycfiles": False } -class AppTestLonePycFile(AppTestNoPycFile): +class AppTestLonePycFile(_AppTestLonePycFileBase): spaceconfig = { - "objspace.usepycfiles": True, "objspace.lonepycfiles": True } diff --git a/pypy/module/sys/__init__.py b/pypy/module/sys/__init__.py --- a/pypy/module/sys/__init__.py +++ b/pypy/module/sys/__init__.py @@ -77,7 +77,7 @@ 'meta_path' : 'space.wrap([])', 'path_hooks' : 'space.wrap([])', 'path_importer_cache' : 'space.wrap({})', - 'dont_write_bytecode' : 'space.w_False', + 'dont_write_bytecode' : 'space.wrap(space.config.translation.sandbox)', 'getdefaultencoding' : 'interp_encoding.getdefaultencoding', 'setdefaultencoding' : 'interp_encoding.setdefaultencoding', diff --git a/pypy/module/sys/app.py b/pypy/module/sys/app.py --- a/pypy/module/sys/app.py +++ b/pypy/module/sys/app.py @@ -70,11 +70,11 @@ return None copyright_str = """ -Copyright 2003-2014 PyPy development team. +Copyright 2003-2016 PyPy development team. All Rights Reserved. For further information, see -Portions Copyright (c) 2001-2014 Python Software Foundation. +Portions Copyright (c) 2001-2016 Python Software Foundation. All Rights Reserved. Portions Copyright (c) 2000 BeOpen.com. 
diff --git a/pypy/objspace/std/tupleobject.py b/pypy/objspace/std/tupleobject.py --- a/pypy/objspace/std/tupleobject.py +++ b/pypy/objspace/std/tupleobject.py @@ -30,6 +30,11 @@ contains_jmp = jit.JitDriver(greens = ['tp'], reds = 'auto', name = 'tuple.contains') +hash_driver = jit.JitDriver( + name='tuple.hash', + greens=['w_type'], + reds='auto') + class W_AbstractTupleObject(W_Root): __slots__ = () @@ -262,8 +267,14 @@ def length(self): return len(self.wrappeditems) - @jit.look_inside_iff(lambda self, _1: _unroll_condition(self)) def descr_hash(self, space): + if _unroll_condition(self): + return self._descr_hash_unroll(space) + else: + return self._descr_hash_jitdriver(space) + + @jit.unroll_safe + def _descr_hash_unroll(self, space): mult = 1000003 x = 0x345678 z = len(self.wrappeditems) @@ -275,6 +286,20 @@ x += 97531 return space.wrap(intmask(x)) + def _descr_hash_jitdriver(self, space): + mult = 1000003 + x = 0x345678 + z = len(self.wrappeditems) + w_type = space.type(self.wrappeditems[0]) + for w_item in self.wrappeditems: + hash_driver.jit_merge_point(w_type=w_type) + y = space.hash_w(w_item) + x = (x ^ y) * mult + z -= 1 + mult += 82520 + z + z + x += 97531 + return space.wrap(intmask(x)) + def descr_eq(self, space, w_other): if not isinstance(w_other, W_AbstractTupleObject): return space.w_NotImplemented diff --git a/pypy/tool/test/test_tab.py b/pypy/tool/test/test_tab.py --- a/pypy/tool/test/test_tab.py +++ b/pypy/tool/test/test_tab.py @@ -6,7 +6,7 @@ from pypy.conftest import pypydir ROOT = os.path.abspath(os.path.join(pypydir, '..')) -EXCLUDE = {} +EXCLUDE = {'/virt_test/lib/python2.7/site-packages/setuptools'} def test_no_tabs(): diff --git a/rpython/jit/codewriter/jtransform.py b/rpython/jit/codewriter/jtransform.py --- a/rpython/jit/codewriter/jtransform.py +++ b/rpython/jit/codewriter/jtransform.py @@ -2042,6 +2042,11 @@ self.vable_flags[op.args[0]] = op.args[2].value return [] + def rewrite_op_jit_enter_portal_frame(self, op): + return [op] + 
def rewrite_op_jit_leave_portal_frame(self, op): + return [op] + # --------- # ll_math.sqrt_nonneg() diff --git a/rpython/jit/metainterp/blackhole.py b/rpython/jit/metainterp/blackhole.py --- a/rpython/jit/metainterp/blackhole.py +++ b/rpython/jit/metainterp/blackhole.py @@ -944,6 +944,14 @@ pass @arguments("i") + def bhimpl_jit_enter_portal_frame(x): + pass + + @arguments() + def bhimpl_jit_leave_portal_frame(): + pass + + @arguments("i") From pypy.commits at gmail.com Thu Feb 25 12:25:51 2016 From: pypy.commits at gmail.com (plan_rich) Date: Thu, 25 Feb 2016 09:25:51 -0800 (PST) Subject: [pypy-commit] pypy fix-longevity: (remi, plan_rich) fixed translation Message-ID: <56cf391f.01adc20a.f6add.fffffc8e@mx.google.com> Author: Richard Plangger Branch: fix-longevity Changeset: r82522:7cf8129ae01d Date: 2016-02-25 18:25 +0100 http://bitbucket.org/pypy/pypy/changeset/7cf8129ae01d/ Log: (remi, plan_rich) fixed translation diff --git a/rpython/jit/backend/llsupport/regalloc.py b/rpython/jit/backend/llsupport/regalloc.py --- a/rpython/jit/backend/llsupport/regalloc.py +++ b/rpython/jit/backend/llsupport/regalloc.py @@ -374,8 +374,12 @@ self.save_in_callee_regs = [reg for reg in all_regs if reg not in save_around_call_regs] self._reinit_free_regs() - self.is_callee_lookup = [True] * max( - [r.value + 1 for r in self.all_regs]) + if we_are_translated(): + self.is_callee_lookup = [True] * len(self.all_regs) + else: + # in tests the len of all_regs can change + values = [r.value + 1 for r in self.all_regs] + self.is_callee_lookup = [True] * max(values) for reg in self.save_around_call_regs: self.is_callee_lookup[reg.value] = False diff --git a/rpython/jit/backend/x86/regalloc.py b/rpython/jit/backend/x86/regalloc.py --- a/rpython/jit/backend/x86/regalloc.py +++ b/rpython/jit/backend/x86/regalloc.py @@ -281,8 +281,8 @@ def _update_bindings(self, arglocs, inputargs): # XXX this should probably go to llsupport/regalloc.py - used = set() - used_xmm = set() + used = {} + used_xmm 
= {} i = 0 # manually set the register and frame bindings for # all inputargs (for a bridge) @@ -294,13 +294,13 @@ if isinstance(loc, RegLoc): if arg.type == FLOAT: self.xrm.reg_bindings[arg] = loc - used_xmm.add(loc) + used_xmm[loc] = None else: if loc is ebp: self.rm.bindings_to_frame_reg[arg] = None else: self.rm.reg_bindings[arg] = loc - used.add(loc) + used[loc] = None else: self.fm.bind(arg, loc) # From pypy.commits at gmail.com Thu Feb 25 13:41:13 2016 From: pypy.commits at gmail.com (Raemi) Date: Thu, 25 Feb 2016 10:41:13 -0800 (PST) Subject: [pypy-commit] pypy fix-longevity: (plan_rich, remi) replace is_callee_lookup with attribute on regloc Message-ID: <56cf4ac9.42cbc20a.6455b.155c@mx.google.com> Author: Remi Meier Branch: fix-longevity Changeset: r82523:491569178ceb Date: 2016-02-25 19:40 +0100 http://bitbucket.org/pypy/pypy/changeset/491569178ceb/ Log: (plan_rich,remi) replace is_callee_lookup with attribute on regloc diff --git a/rpython/jit/backend/llsupport/regalloc.py b/rpython/jit/backend/llsupport/regalloc.py --- a/rpython/jit/backend/llsupport/regalloc.py +++ b/rpython/jit/backend/llsupport/regalloc.py @@ -279,7 +279,6 @@ # TODO would be good to keep free_caller_regs sorted (according to the ABI) free_callee_regs = [] free_caller_regs = [] - is_callee_lookup = None def get_lower_byte_free_register(self, reg): # try to return a volatile register first! 
@@ -346,13 +345,13 @@ self.remove_free_register(r) def remove_free_register(self, reg): - if self.is_callee_lookup[reg.value]: + if not reg.save_around_calls: self.free_callee_regs = [fr for fr in self.free_callee_regs if fr is not reg] else: self.free_caller_regs = [fr for fr in self.free_caller_regs if fr is not reg] def put_back_register(self, reg): - if self.is_callee_lookup[reg.value]: + if not reg.save_around_calls: self.free_callee_regs.append(reg) else: self.free_caller_regs.append(reg) @@ -374,14 +373,10 @@ self.save_in_callee_regs = [reg for reg in all_regs if reg not in save_around_call_regs] self._reinit_free_regs() - if we_are_translated(): - self.is_callee_lookup = [True] * len(self.all_regs) - else: - # in tests the len of all_regs can change - values = [r.value + 1 for r in self.all_regs] - self.is_callee_lookup = [True] * max(values) - for reg in self.save_around_call_regs: - self.is_callee_lookup[reg.value] = False + if not we_are_translated(): + # in tests we need to update regloc.save_around_calls + for r in self.all_regs: + r.save_around_calls = r in save_around_call_regs def __init__(self, live_ranges, frame_manager=None, assembler=None): self._change_regs(self.all_regs, self.save_around_call_regs) diff --git a/rpython/jit/backend/x86/regalloc.py b/rpython/jit/backend/x86/regalloc.py --- a/rpython/jit/backend/x86/regalloc.py +++ b/rpython/jit/backend/x86/regalloc.py @@ -39,6 +39,8 @@ no_lower_byte_regs = [esi, edi] save_around_call_regs = [eax, edx, ecx] frame_reg = ebp + for r in all_regs: + r.save_around_calls = r in save_around_call_regs def call_result_location(self, v): return eax @@ -61,6 +63,9 @@ abi_param_regs = [edi, esi, ecx, r8, r9] save_around_call_regs = abi_param_regs + [eax, edx, r10] + for r in all_regs: + r.save_around_calls = r in save_around_call_regs + def get_abi_param_register(self, i): if not IS_X86_32 and 0 <= i < len(self.abi_param_regs): return self.abi_param_regs[i] diff --git a/rpython/jit/backend/x86/regloc.py 
b/rpython/jit/backend/x86/regloc.py --- a/rpython/jit/backend/x86/regloc.py +++ b/rpython/jit/backend/x86/regloc.py @@ -130,6 +130,10 @@ class RegLoc(AssemblerLocation): _immutable_ = True + + # indicates if the register is caller-save: + save_around_calls = True + def __init__(self, regnum, is_xmm): assert regnum >= 0 self.value = regnum From pypy.commits at gmail.com Thu Feb 25 16:26:53 2016 From: pypy.commits at gmail.com (fijal) Date: Thu, 25 Feb 2016 13:26:53 -0800 (PST) Subject: [pypy-commit] pypy cpyext-ext: (ronan, fijal) add stubs for datetime object Message-ID: <56cf719d.c74fc20a.de6b6.4cd8@mx.google.com> Author: fijal Branch: cpyext-ext Changeset: r82524:ea86b9ae3ca6 Date: 2016-02-25 22:26 +0100 http://bitbucket.org/pypy/pypy/changeset/ea86b9ae3ca6/ Log: (ronan, fijal) add stubs for datetime object diff --git a/pypy/module/cpyext/cdatetime.py b/pypy/module/cpyext/cdatetime.py --- a/pypy/module/cpyext/cdatetime.py +++ b/pypy/module/cpyext/cdatetime.py @@ -42,9 +42,15 @@ return datetimeAPI -PyDateTime_Date = PyObject -PyDateTime_Time = PyObject -PyDateTime_DateTime = PyObject +PyDateTime_DateStruct = lltype.ForwardReference() +PyDateTime_TimeStruct = lltype.ForwardReference() +PyDateTime_DateTimeStruct = lltype.ForwardReference() +cpython_struct("PyDateTime_Date", PyObjectFields, PyDateTime_DateStruct) +PyDateTime_Date = lltype.Ptr(PyDateTime_DateStruct) +cpython_struct("PyDateTime_Time", PyObjectFields, PyDateTime_TimeStruct) +PyDateTime_Time = lltype.Ptr(PyDateTime_TimeStruct) +cpython_struct("PyDateTime_DateTime", PyObjectFields, PyDateTime_DateTimeStruct) +PyDateTime_DateTime = lltype.Ptr(PyDateTime_DateTimeStruct) PyDeltaObjectStruct = lltype.ForwardReference() cpython_struct("PyDateTime_Delta", PyObjectFields, PyDeltaObjectStruct) diff --git a/pypy/module/cpyext/include/datetime.h b/pypy/module/cpyext/include/datetime.h --- a/pypy/module/cpyext/include/datetime.h +++ b/pypy/module/cpyext/include/datetime.h @@ -24,6 +24,18 @@ PyObject_HEAD } 
PyDateTime_Delta; +typedef struct { + PyObject_HEAD +} PyDateTime_Date; + +typedef struct { + PyObject_HEAD +} PyDateTime_Time; + +typedef struct { + PyObject_HEAD +} PyDateTime_DateTime; + #ifdef __cplusplus } #endif From pypy.commits at gmail.com Fri Feb 26 03:51:31 2016 From: pypy.commits at gmail.com (fijal) Date: Fri, 26 Feb 2016 00:51:31 -0800 (PST) Subject: [pypy-commit] pypy cpyext-ext: add TODO for cpyext Message-ID: <56d01213.2a6ec20a.d1a92.55eb@mx.google.com> Author: fijal Branch: cpyext-ext Changeset: r82525:0984da240844 Date: 2016-02-26 09:49 +0100 http://bitbucket.org/pypy/pypy/changeset/0984da240844/ Log: add TODO for cpyext diff --git a/TODO b/TODO new file mode 100644 --- /dev/null +++ b/TODO @@ -0,0 +1,5 @@ +* finish PySequence_Fast +* typeobject.py and handling of __float__ prevents us from using pypy +* python setup.py install in numpy does not somehow tell setuptools + it's installed (I bet it's about the py27 tag) +* implement PyFile_AsFile diff --git a/pypy/module/cpyext/sequence.py b/pypy/module/cpyext/sequence.py --- a/pypy/module/cpyext/sequence.py +++ b/pypy/module/cpyext/sequence.py @@ -48,8 +48,6 @@ # make sure we can return a borrowed obj from PySequence_Fast_GET_ITEM w_obj.convert_to_cpy_strategy(space) return w_obj - if isinstance(w_obj, tupleobject.W_TupleObject): - return w_obj try: return listobject.W_ListObject.newlist_cpyext(space, space.listview(w_obj)) except OperationError: diff --git a/pypy/module/cpyext/slotdefs.py b/pypy/module/cpyext/slotdefs.py --- a/pypy/module/cpyext/slotdefs.py +++ b/pypy/module/cpyext/slotdefs.py @@ -11,7 +11,7 @@ getattrfunc, getattrofunc, setattrofunc, lenfunc, ssizeargfunc, inquiry, ssizessizeargfunc, ssizeobjargproc, iternextfunc, initproc, richcmpfunc, cmpfunc, hashfunc, descrgetfunc, descrsetfunc, objobjproc, objobjargproc, - readbufferproc) + readbufferproc, ssizessizeobjargproc) from pypy.module.cpyext.pyobject import from_ref, make_ref, Py_DecRef from pypy.module.cpyext.pyerrors import 
PyErr_Occurred from pypy.module.cpyext.state import State @@ -170,6 +170,15 @@ func_target = rffi.cast(ternaryfunc, func) return generic_cpy_call(space, func_target, w_self, w_args, w_kwds) +def wrap_ssizessizeobjargproc(space, w_self, w_args, func): + func_target = rffi.cast(ssizessizeobjargproc, func) + check_num_args(space, w_args, 3) + args_w = space.fixedview(w_args) + i = space.int_w(space.index(args_w[0])) + j = space.int_w(space.index(args_w[1])) + w_y = args_w[2] + return space.wrap(generic_cpy_call(space, func_target, w_self, i, j, w_y)) + def wrap_lenfunc(space, w_self, w_args, func): func_len = rffi.cast(lenfunc, func) check_num_args(space, w_args, 0) From pypy.commits at gmail.com Fri Feb 26 05:41:32 2016 From: pypy.commits at gmail.com (Raemi) Date: Fri, 26 Feb 2016 02:41:32 -0800 (PST) Subject: [pypy-commit] stmgc default: fix infinite loop in segfault handler Message-ID: <56d02bdc.d22e1c0a.d899a.ffffbccf@mx.google.com> Author: Remi Meier Branch: Changeset: r1983:8c9162341945 Date: 2016-02-26 11:41 +0100 http://bitbucket.org/pypy/stmgc/changeset/8c9162341945/ Log: fix infinite loop in segfault handler diff --git a/c8/stm/setup.c b/c8/stm/setup.c --- a/c8/stm/setup.c +++ b/c8/stm/setup.c @@ -287,13 +287,15 @@ if (addr == NULL) return; stm_thread_local_t *tl = stm_all_thread_locals; - while (tl != NULL) { + if (tl == NULL) + return; + do { char *trap = _shadowstack_trap_page(tl->shadowstack_base); if (trap <= addr && addr <= trap + 4095) { fprintf(stderr, "This is caused by a stack overflow.\n" - "Sorry, proper RuntimeError support is not implemented yet.\n"); + "Sorry, proper RuntimeError support is not implemented yet.\n"); return; } tl = tl->next; - } + } while (tl != stm_all_thread_locals); } From pypy.commits at gmail.com Fri Feb 26 06:17:12 2016 From: pypy.commits at gmail.com (Raemi) Date: Fri, 26 Feb 2016 03:17:12 -0800 (PST) Subject: [pypy-commit] pypy stmgc-c8: fix llop.gc_load_indexed for STM Message-ID: 
<56d03438.890bc30a.2c3bc.0d31@mx.google.com> Author: Remi Meier Branch: stmgc-c8 Changeset: r82526:36a2560a3194 Date: 2016-02-26 12:08 +0100 http://bitbucket.org/pypy/pypy/changeset/36a2560a3194/ Log: fix llop.gc_load_indexed for STM diff --git a/rpython/translator/c/funcgen.py b/rpython/translator/c/funcgen.py --- a/rpython/translator/c/funcgen.py +++ b/rpython/translator/c/funcgen.py @@ -725,15 +725,19 @@ ' (((%(char)s *)%(addr)s) + %(offset)s))[0];' % locals()) def OP_GC_LOAD_INDEXED(self, op): + tlprefix, char = '', 'char' + if (self._is_stm() and isinstance(op.args[0].concretetype, Ptr) + and op.args[0].concretetype.TO._gckind == 'gc'): + tlprefix, char = ' TLPREFIX ', 'rpygcchar_t' addr = self.expr(op.args[0]) index = self.expr(op.args[1]) scale = self.expr(op.args[2]) base_ofs = self.expr(op.args[3]) result = self.expr(op.result) TYPE = op.result.concretetype - typename = cdecl(self.db.gettype(TYPE).replace('@', '*@'), '') + typename = cdecl(self.db.gettype(TYPE).replace('@', tlprefix+'*@'), '') return ( - "%(result)s = ((%(typename)s) (((char *)%(addr)s) + " + "%(result)s = ((%(typename)s) (((%(char)s *)%(addr)s) + " "%(base_ofs)s + %(scale)s * %(index)s))[0];" % locals()) diff --git a/rpython/translator/stm/test/test_ztranslated.py b/rpython/translator/stm/test/test_ztranslated.py --- a/rpython/translator/stm/test/test_ztranslated.py +++ b/rpython/translator/stm/test/test_ztranslated.py @@ -13,6 +13,20 @@ class TestSTMTranslated(CompiledSTMTests): + def test_math(self): + from rpython.rtyper.lltypesystem.module.ll_math import sqrt_nonneg + def entry_point(argv): + lst = [] + for i in range(int(argv[1])): + lst.append(sqrt_nonneg(99)) + print '<', len(lst), '>' + return 0 + # + t, cbuilder = self.compile(entry_point, backendopt=True) + data = cbuilder.cmdexec('5') + assert '< 5 >' in data, "got: %r" % (data,) + + def test_malloc(self): class Foo: pass @@ -431,6 +445,25 @@ data = cbuilder.cmdexec('') assert '< 42 >\n< 84 >\n' in data + def 
test_gc_load_indexed(self): + from rpython.rtyper.annlowlevel import llstr + from rpython.rtyper.lltypesystem.rstr import STR + from rpython.rtyper.lltypesystem import lltype, llmemory, rffi, lloperation + + s = "hillo world" + lls = llstr(s) + base_ofs = (llmemory.offsetof(STR, 'chars') + + llmemory.itemoffsetof(STR.chars, 0)) + scale_factor = llmemory.sizeof(lltype.Char) + + def main(argv): + print int(llop.gc_load_indexed(rffi.SHORT, lls, int(argv[1]), + scale_factor, base_ofs)) + return 0 + t, cbuilder = self.compile(main) + data = cbuilder.cmdexec('1') + assert '105\n' + def test_raw_load_store_on_gc(self): X = lltype.GcStruct('X', ('foo', lltype.Signed)) prebuilt = lltype.malloc(X, immortal=True) From pypy.commits at gmail.com Fri Feb 26 06:17:14 2016 From: pypy.commits at gmail.com (Raemi) Date: Fri, 26 Feb 2016 03:17:14 -0800 (PST) Subject: [pypy-commit] pypy stmgc-c8: import stmgc Message-ID: <56d0343a.aa0ac20a.a021f.04b6@mx.google.com> Author: Remi Meier Branch: stmgc-c8 Changeset: r82527:f1e7aa419f98 Date: 2016-02-26 12:14 +0100 http://bitbucket.org/pypy/pypy/changeset/f1e7aa419f98/ Log: import stmgc diff --git a/rpython/translator/stm/src_stm/revision b/rpython/translator/stm/src_stm/revision --- a/rpython/translator/stm/src_stm/revision +++ b/rpython/translator/stm/src_stm/revision @@ -1,1 +1,1 @@ -d31c9f671775 +8c9162341945 diff --git a/rpython/translator/stm/src_stm/stm/core.c b/rpython/translator/stm/src_stm/stm/core.c --- a/rpython/translator/stm/src_stm/stm/core.c +++ b/rpython/translator/stm/src_stm/stm/core.c @@ -109,164 +109,6 @@ } -/* ############# signal handler ############# */ - -static void copy_bk_objs_in_page_from(int from_segnum, uintptr_t pagenum, - bool only_if_not_modified) -{ - /* looks at all bk copies of objects overlapping page 'pagenum' and - copies the part in 'pagenum' back to the current segment */ - dprintf(("copy_bk_objs_in_page_from(%d, %ld, %d)\n", - from_segnum, (long)pagenum, only_if_not_modified)); - - 
assert(modification_lock_check_rdlock(from_segnum)); - struct list_s *list = get_priv_segment(from_segnum)->modified_old_objects; - struct stm_undo_s *undo = (struct stm_undo_s *)list->items; - struct stm_undo_s *end = (struct stm_undo_s *)(list->items + list->count); - - import_objects(only_if_not_modified ? -2 : -1, - pagenum, undo, end); -} - -static void go_to_the_past(uintptr_t pagenum, - struct stm_commit_log_entry_s *from, - struct stm_commit_log_entry_s *to) -{ - assert(modification_lock_check_wrlock(STM_SEGMENT->segment_num)); - assert(from->rev_num >= to->rev_num); - /* walk BACKWARDS the commit log and update the page 'pagenum', - initially at revision 'from', until we reach the revision 'to'. */ - - /* XXXXXXX Recursive algo for now, fix this! */ - if (from != to) { - struct stm_commit_log_entry_s *cl = to->next; - go_to_the_past(pagenum, from, cl); - - struct stm_undo_s *undo = cl->written; - struct stm_undo_s *end = cl->written + cl->written_count; - - import_objects(-1, pagenum, undo, end); - } -} - - - -static void handle_segfault_in_page(uintptr_t pagenum) -{ - /* assumes page 'pagenum' is ACCESS_NONE, privatizes it, - and validates to newest revision */ - - dprintf(("handle_segfault_in_page(%lu), seg %d\n", pagenum, STM_SEGMENT->segment_num)); - - /* XXX: bad, but no deadlocks: */ - acquire_all_privatization_locks(); - - long i; - int my_segnum = STM_SEGMENT->segment_num; - - assert(get_page_status_in(my_segnum, pagenum) == PAGE_NO_ACCESS); - - /* find who has the most recent revision of our page */ - int copy_from_segnum = -1; - uint64_t most_recent_rev = 0; - for (i = 1; i < NB_SEGMENTS; i++) { - if (i == my_segnum) - continue; - - struct stm_commit_log_entry_s *log_entry; - log_entry = get_priv_segment(i)->last_commit_log_entry; - if (get_page_status_in(i, pagenum) != PAGE_NO_ACCESS - && (copy_from_segnum == -1 || log_entry->rev_num > most_recent_rev)) { - copy_from_segnum = i; - most_recent_rev = log_entry->rev_num; - } - } - 
OPT_ASSERT(copy_from_segnum != my_segnum); - - /* make our page write-ready */ - page_mark_accessible(my_segnum, pagenum); - - /* account for this page now: XXX */ - /* increment_total_allocated(4096); */ - - if (copy_from_segnum == -1) { - /* this page is only accessible in the sharing segment seg0 so far (new - allocation). We can thus simply mark it accessible here. */ - pagecopy(get_virtual_page(my_segnum, pagenum), - get_virtual_page(0, pagenum)); - release_all_privatization_locks(); - return; - } - - /* before copying anything, acquire modification locks from our and - the other segment */ - uint64_t to_lock = (1UL << copy_from_segnum); - acquire_modification_lock_set(to_lock, my_segnum); - pagecopy(get_virtual_page(my_segnum, pagenum), - get_virtual_page(copy_from_segnum, pagenum)); - - /* if there were modifications in the page, revert them. */ - copy_bk_objs_in_page_from(copy_from_segnum, pagenum, false); - - /* we need to go from 'src_version' to 'target_version'. This - might need a walk into the past. 
*/ - struct stm_commit_log_entry_s *src_version, *target_version; - src_version = get_priv_segment(copy_from_segnum)->last_commit_log_entry; - target_version = STM_PSEGMENT->last_commit_log_entry; - - - dprintf(("handle_segfault_in_page: rev %lu to rev %lu\n", - src_version->rev_num, target_version->rev_num)); - /* adapt revision of page to our revision: - if our rev is higher than the page we copy from, everything - is fine as we never read/modified the page anyway - */ - if (src_version->rev_num > target_version->rev_num) - go_to_the_past(pagenum, src_version, target_version); - - release_modification_lock_set(to_lock, my_segnum); - release_all_privatization_locks(); -} - -static void _signal_handler(int sig, siginfo_t *siginfo, void *context) -{ - assert(_stm_segfault_expected > 0); - - int saved_errno = errno; - char *addr = siginfo->si_addr; - dprintf(("si_addr: %p\n", addr)); - if (addr == NULL || addr < stm_object_pages || - addr >= stm_object_pages+TOTAL_MEMORY) { - /* actual segfault, unrelated to stmgc */ - fprintf(stderr, "Segmentation fault: accessing %p\n", addr); - detect_shadowstack_overflow(addr); - abort(); - } - - int segnum = get_segment_of_linear_address(addr); - OPT_ASSERT(segnum != 0); - if (segnum != STM_SEGMENT->segment_num) { - fprintf(stderr, "Segmentation fault: accessing %p (seg %d) from" - " seg %d\n", addr, segnum, STM_SEGMENT->segment_num); - abort(); - } - dprintf(("-> segment: %d\n", segnum)); - - char *seg_base = STM_SEGMENT->segment_base; - uintptr_t pagenum = ((char*)addr - seg_base) / 4096UL; - if (pagenum < END_NURSERY_PAGE) { - fprintf(stderr, "Segmentation fault: accessing %p (seg %d " - "page %lu)\n", addr, segnum, pagenum); - abort(); - } - - DEBUG_EXPECT_SEGFAULT(false); - handle_segfault_in_page(pagenum); - DEBUG_EXPECT_SEGFAULT(true); - - errno = saved_errno; - /* now return and retry */ -} /* ############# commit log ############# */ diff --git a/rpython/translator/stm/src_stm/stm/core.h 
b/rpython/translator/stm/src_stm/stm/core.h --- a/rpython/translator/stm/src_stm/stm/core.h +++ b/rpython/translator/stm/src_stm/stm/core.h @@ -303,6 +303,14 @@ static bool _stm_validate(void); static void _core_commit_transaction(bool external); +static void import_objects( + int from_segnum, /* or -1: from undo->backup, + or -2: from undo->backup if not modified */ + uintptr_t pagenum, /* or -1: "all accessible" */ + struct stm_undo_s *undo, + struct stm_undo_s *end); + + static inline bool was_read_remote(char *base, object_t *obj) { uint8_t other_transaction_read_version = diff --git a/rpython/translator/stm/src_stm/stm/hashtable.c b/rpython/translator/stm/src_stm/stm/hashtable.c --- a/rpython/translator/stm/src_stm/stm/hashtable.c +++ b/rpython/translator/stm/src_stm/stm/hashtable.c @@ -78,6 +78,7 @@ stm_hashtable_table_t *table; stm_hashtable_table_t initial_table; uint64_t additions; + uint64_t pickitem_index; }; @@ -490,6 +491,90 @@ return nresult; } +stm_hashtable_entry_t *stm_hashtable_pickitem(object_t *hobj, + stm_hashtable_t *hashtable) +{ + /* We use hashtable->pickitem_index as a shared index counter (not + initialized, any initial garbage is fine). The goal is + two-folds: + + - This is used to implement popitem(). Like CPython and PyPy's + non-STM dict implementations, the goal is that repeated calls + to pickitem() maintains a roughly O(1) time per call while + returning different items (in the case of popitem(), the + returned items are immediately deleted). + + - Additionally, with STM, if several threads all call + pickitem(), this should give the best effort to distribute + different items to different threads and thus minimize + conflicts. (At least that's the theory; it should be tested + in practice.) + */ + restart:; + uint64_t startindex = VOLATILE_HASHTABLE(hashtable)->pickitem_index; + + /* Get the table. No synchronization is needed: we may miss some + entries that are being added, but they would contain NULL in + this segment anyway. 
*/ + stm_hashtable_table_t *table = VOLATILE_HASHTABLE(hashtable)->table; + + /* Find the first entry with a non-NULL object, starting at + 'index'. */ + uintptr_t mask = table->mask; + uintptr_t count; + stm_hashtable_entry_t *entry; + + for (count = 0; count <= mask; ) { + entry = VOLATILE_TABLE(table)->items[(startindex + count) & mask]; + count++; + if (entry != NULL && entry->object != NULL) { + /* + Found the next entry. Update pickitem_index now. If + it was already changed under our feet, we assume that + it is because another thread just did pickitem() too + and is likely to have got the very same entry. In that + case we start again from scratch to look for the + following entry. + */ + if (!__sync_bool_compare_and_swap(&hashtable->pickitem_index, + startindex, + startindex + count)) + goto restart; + + /* Here we mark the entry as as read and return it. + + Note a difference with notably stm_hashtable_list(): we + only call stm_read() after we checked that + entry->object is not NULL. If we find NULL, we don't + mark the entry as read from this thread at all in this + step---this is fine, as we can return a random + different entry here. + */ + stm_read((object_t *)entry); + return entry; + } + } + + /* Didn't find any entry. We have to be sure that the dictionary + is empty now, in the sense that returning NULL must guarantee + conflicts with a different thread adding items. This is done + by marking both the dict and all entries' read marker. 
*/ + stm_read(hobj); + + /* Reload the table after setting the read marker */ + uintptr_t i; + table = VOLATILE_HASHTABLE(hashtable)->table; + mask = table->mask; + for (i = 0; i <= mask; i++) { + entry = VOLATILE_TABLE(table)->items[i]; + if (entry != NULL) { + stm_read((object_t *)entry); + assert(entry->object == NULL); + } + } + return NULL; +} + static void _stm_compact_hashtable(struct object_s *hobj, stm_hashtable_t *hashtable) { diff --git a/rpython/translator/stm/src_stm/stm/setup.c b/rpython/translator/stm/src_stm/stm/setup.c --- a/rpython/translator/stm/src_stm/stm/setup.c +++ b/rpython/translator/stm/src_stm/stm/setup.c @@ -39,20 +39,6 @@ } -static void setup_signal_handler(void) -{ - struct sigaction act; - memset(&act, 0, sizeof(act)); - - act.sa_sigaction = &_signal_handler; - /* The SA_SIGINFO flag tells sigaction() to use the sa_sigaction field, not sa_handler. */ - act.sa_flags = SA_SIGINFO | SA_NODEFER; - - if (sigaction(SIGSEGV, &act, NULL) < 0) { - perror ("sigaction"); - abort(); - } -} void stm_setup(void) { @@ -301,13 +287,15 @@ if (addr == NULL) return; stm_thread_local_t *tl = stm_all_thread_locals; - while (tl != NULL) { + if (tl == NULL) + return; + do { char *trap = _shadowstack_trap_page(tl->shadowstack_base); if (trap <= addr && addr <= trap + 4095) { fprintf(stderr, "This is caused by a stack overflow.\n" - "Sorry, proper RuntimeError support is not implemented yet.\n"); + "Sorry, proper RuntimeError support is not implemented yet.\n"); return; } tl = tl->next; - } + } while (tl != stm_all_thread_locals); } diff --git a/rpython/translator/stm/src_stm/stm/signal_handler.c b/rpython/translator/stm/src_stm/stm/signal_handler.c new file mode 100644 --- /dev/null +++ b/rpython/translator/stm/src_stm/stm/signal_handler.c @@ -0,0 +1,178 @@ +/* Imported by rpython/translator/stm/import_stmgc.py */ +#ifndef _STM_CORE_H_ +# error "must be compiled via stmgc.c" +#endif + + +static void setup_signal_handler(void) +{ + struct sigaction act; + 
memset(&act, 0, sizeof(act)); + + act.sa_sigaction = &_signal_handler; + /* The SA_SIGINFO flag tells sigaction() to use the sa_sigaction field, not sa_handler. */ + act.sa_flags = SA_SIGINFO | SA_NODEFER; + + if (sigaction(SIGSEGV, &act, NULL) < 0) { + perror ("sigaction"); + abort(); + } +} + + +static void copy_bk_objs_in_page_from(int from_segnum, uintptr_t pagenum, + bool only_if_not_modified) +{ + /* looks at all bk copies of objects overlapping page 'pagenum' and + copies the part in 'pagenum' back to the current segment */ + dprintf(("copy_bk_objs_in_page_from(%d, %ld, %d)\n", + from_segnum, (long)pagenum, only_if_not_modified)); + + assert(modification_lock_check_rdlock(from_segnum)); + struct list_s *list = get_priv_segment(from_segnum)->modified_old_objects; + struct stm_undo_s *undo = (struct stm_undo_s *)list->items; + struct stm_undo_s *end = (struct stm_undo_s *)(list->items + list->count); + + import_objects(only_if_not_modified ? -2 : -1, + pagenum, undo, end); +} + +static void go_to_the_past(uintptr_t pagenum, + struct stm_commit_log_entry_s *from, + struct stm_commit_log_entry_s *to) +{ + assert(modification_lock_check_wrlock(STM_SEGMENT->segment_num)); + assert(from->rev_num >= to->rev_num); + /* walk BACKWARDS the commit log and update the page 'pagenum', + initially at revision 'from', until we reach the revision 'to'. */ + + /* XXXXXXX Recursive algo for now, fix this! 
*/ + if (from != to) { + struct stm_commit_log_entry_s *cl = to->next; + go_to_the_past(pagenum, from, cl); + + struct stm_undo_s *undo = cl->written; + struct stm_undo_s *end = cl->written + cl->written_count; + + import_objects(-1, pagenum, undo, end); + } +} + + + +static void handle_segfault_in_page(uintptr_t pagenum) +{ + /* assumes page 'pagenum' is ACCESS_NONE, privatizes it, + and validates to newest revision */ + + dprintf(("handle_segfault_in_page(%lu), seg %d\n", pagenum, STM_SEGMENT->segment_num)); + + /* XXX: bad, but no deadlocks: */ + acquire_all_privatization_locks(); + + long i; + int my_segnum = STM_SEGMENT->segment_num; + + assert(get_page_status_in(my_segnum, pagenum) == PAGE_NO_ACCESS); + + /* find who has the most recent revision of our page */ + int copy_from_segnum = -1; + uint64_t most_recent_rev = 0; + for (i = 1; i < NB_SEGMENTS; i++) { + if (i == my_segnum) + continue; + + struct stm_commit_log_entry_s *log_entry; + log_entry = get_priv_segment(i)->last_commit_log_entry; + if (get_page_status_in(i, pagenum) != PAGE_NO_ACCESS + && (copy_from_segnum == -1 || log_entry->rev_num > most_recent_rev)) { + copy_from_segnum = i; + most_recent_rev = log_entry->rev_num; + } + } + OPT_ASSERT(copy_from_segnum != my_segnum); + + /* make our page write-ready */ + page_mark_accessible(my_segnum, pagenum); + + /* account for this page now: XXX */ + /* increment_total_allocated(4096); */ + + if (copy_from_segnum == -1) { + /* this page is only accessible in the sharing segment seg0 so far (new + allocation). We can thus simply mark it accessible here. 
*/ + pagecopy(get_virtual_page(my_segnum, pagenum), + get_virtual_page(0, pagenum)); + release_all_privatization_locks(); + return; + } + + /* before copying anything, acquire modification locks from our and + the other segment */ + uint64_t to_lock = (1UL << copy_from_segnum); + acquire_modification_lock_set(to_lock, my_segnum); + pagecopy(get_virtual_page(my_segnum, pagenum), + get_virtual_page(copy_from_segnum, pagenum)); + + /* if there were modifications in the page, revert them. */ + copy_bk_objs_in_page_from(copy_from_segnum, pagenum, false); + + /* we need to go from 'src_version' to 'target_version'. This + might need a walk into the past. */ + struct stm_commit_log_entry_s *src_version, *target_version; + src_version = get_priv_segment(copy_from_segnum)->last_commit_log_entry; + target_version = STM_PSEGMENT->last_commit_log_entry; + + + dprintf(("handle_segfault_in_page: rev %lu to rev %lu\n", + src_version->rev_num, target_version->rev_num)); + /* adapt revision of page to our revision: + if our rev is higher than the page we copy from, everything + is fine as we never read/modified the page anyway + */ + if (src_version->rev_num > target_version->rev_num) + go_to_the_past(pagenum, src_version, target_version); + + release_modification_lock_set(to_lock, my_segnum); + release_all_privatization_locks(); +} + +static void _signal_handler(int sig, siginfo_t *siginfo, void *context) +{ + assert(_stm_segfault_expected > 0); + + int saved_errno = errno; + char *addr = siginfo->si_addr; + dprintf(("si_addr: %p\n", addr)); + if (addr == NULL || addr < stm_object_pages || + addr >= stm_object_pages+TOTAL_MEMORY) { + /* actual segfault, unrelated to stmgc */ + fprintf(stderr, "Segmentation fault: accessing %p\n", addr); + detect_shadowstack_overflow(addr); + abort(); + } + + int segnum = get_segment_of_linear_address(addr); + OPT_ASSERT(segnum != 0); + if (segnum != STM_SEGMENT->segment_num) { + fprintf(stderr, "Segmentation fault: accessing %p (seg %d) from" + " 
seg %d\n", addr, segnum, STM_SEGMENT->segment_num); + abort(); + } + dprintf(("-> segment: %d\n", segnum)); + + char *seg_base = STM_SEGMENT->segment_base; + uintptr_t pagenum = ((char*)addr - seg_base) / 4096UL; + if (pagenum < END_NURSERY_PAGE) { + fprintf(stderr, "Segmentation fault: accessing %p (seg %d " + "page %lu)\n", addr, segnum, pagenum); + abort(); + } + + DEBUG_EXPECT_SEGFAULT(false); + handle_segfault_in_page(pagenum); + DEBUG_EXPECT_SEGFAULT(true); + + errno = saved_errno; + /* now return and retry */ +} diff --git a/rpython/translator/stm/src_stm/stm/signal_handler.h b/rpython/translator/stm/src_stm/stm/signal_handler.h new file mode 100644 --- /dev/null +++ b/rpython/translator/stm/src_stm/stm/signal_handler.h @@ -0,0 +1,8 @@ +/* Imported by rpython/translator/stm/import_stmgc.py */ +static void copy_bk_objs_in_page_from(int from_segnum, uintptr_t pagenum, + bool only_if_not_modified); + +static void handle_segfault_in_page(uintptr_t pagenum); + + +static void setup_signal_handler(void); diff --git a/rpython/translator/stm/src_stm/stmgc.c b/rpython/translator/stm/src_stm/stmgc.c --- a/rpython/translator/stm/src_stm/stmgc.c +++ b/rpython/translator/stm/src_stm/stmgc.c @@ -4,6 +4,7 @@ #include "stm/atomic.h" #include "stm/list.h" #include "stm/smallmalloc.h" +#include "stm/signal_handler.h" #include "stm/core.h" #include "stm/pagecopy.h" #include "stm/pages.h" @@ -36,6 +37,7 @@ #include "stm/forksupport.c" #include "stm/setup.c" #include "stm/hash_id.c" +#include "stm/signal_handler.c" #include "stm/core.c" #include "stm/extra.c" #include "stm/fprintcolor.c" diff --git a/rpython/translator/stm/src_stm/stmgc.h b/rpython/translator/stm/src_stm/stmgc.h --- a/rpython/translator/stm/src_stm/stmgc.h +++ b/rpython/translator/stm/src_stm/stmgc.h @@ -749,6 +749,7 @@ void stm_hashtable_write_entry(object_t *hobj, stm_hashtable_entry_t *entry, object_t *nvalue); long stm_hashtable_length_upper_bound(stm_hashtable_t *); +stm_hashtable_entry_t 
*stm_hashtable_pickitem(object_t *, stm_hashtable_t *); /* WARNING: stm_hashtable_list does not do a stm_write() on the 'results' argument. 'results' may point inside an object. So if 'results' may be From pypy.commits at gmail.com Fri Feb 26 07:15:15 2016 From: pypy.commits at gmail.com (cfbolz) Date: Fri, 26 Feb 2016 04:15:15 -0800 (PST) Subject: [pypy-commit] pypy default: kill some no longer needed indirection Message-ID: <56d041d3.46fac20a.5f8c7.201e@mx.google.com> Author: Carl Friedrich Bolz Branch: Changeset: r82559:d9671adc681a Date: 2016-02-26 13:14 +0100 http://bitbucket.org/pypy/pypy/changeset/d9671adc681a/ Log: kill some no longer needed indirection diff --git a/pypy/objspace/std/mapdict.py b/pypy/objspace/std/mapdict.py --- a/pypy/objspace/std/mapdict.py +++ b/pypy/objspace/std/mapdict.py @@ -65,21 +65,8 @@ def delete(self, obj, name, index): pass + @jit.elidable def find_map_attr(self, name, index): - if jit.we_are_jitted(): - # hack for the jit: - # the _find_map_attr method is pure too, but its argument is never - # constant, because it is always a new tuple - return self._find_map_attr_jit_pure(name, index) - else: - return self._find_map_attr_indirection(name, index) - - @jit.elidable - def _find_map_attr_jit_pure(self, name, index): - return self._find_map_attr_indirection(name, index) - - @jit.dont_look_inside - def _find_map_attr_indirection(self, name, index): if (self.space.config.objspace.std.withmethodcache): return self._find_map_attr_cache(name, index) return self._find_map_attr(name, index) From pypy.commits at gmail.com Fri Feb 26 07:28:38 2016 From: pypy.commits at gmail.com (mjacob) Date: Fri, 26 Feb 2016 04:28:38 -0800 (PST) Subject: [pypy-commit] pypy py3.3: Make sure test_os_wait tests are actually run. 
Message-ID: <56d044f6.8673c20a.950a1.2960@mx.google.com> Author: Manuel Jacob Branch: py3.3 Changeset: r82560:b1039c6babf1 Date: 2016-02-25 22:54 +0100 http://bitbucket.org/pypy/pypy/changeset/b1039c6babf1/ Log: Make sure test_os_wait tests are actually run. diff --git a/pypy/module/test_lib_pypy/test_os_wait.py b/pypy/module/test_lib_pypy/test_os_wait.py --- a/pypy/module/test_lib_pypy/test_os_wait.py +++ b/pypy/module/test_lib_pypy/test_os_wait.py @@ -11,15 +11,14 @@ class AppTestOsWait: spaceconfig = dict(usemodules=('_rawffi', 'fcntl', 'itertools', 'select', - 'signal')) + 'signal', '_posixsubprocess')) def setup_class(cls): if not hasattr(os, "fork"): py.test.skip("Need fork() to test wait3/wait4()") rebuild.rebuild_one('resource.ctc.py') cls.w__pypy_wait = import_lib_pypy( - cls.space, '_pypy_wait', - '_pypy_wait not supported on this platform') + cls.space, '_pypy_wait') def test_os_wait3(self): import os From pypy.commits at gmail.com Fri Feb 26 07:28:43 2016 From: pypy.commits at gmail.com (Raemi) Date: Fri, 26 Feb 2016 04:28:43 -0800 (PST) Subject: [pypy-commit] pypy stmgc-c8: release gil again for these functions Message-ID: <56d044fb.e6ebc20a.a372c.2725@mx.google.com> Author: Remi Meier Branch: stmgc-c8 Changeset: r82562:a736e9684a84 Date: 2016-02-26 13:28 +0100 http://bitbucket.org/pypy/pypy/changeset/a736e9684a84/ Log: release gil again for these functions I don't think this is good anymore with the detached inev tx mode diff --git a/rpython/rlib/rthread.py b/rpython/rlib/rthread.py --- a/rpython/rlib/rthread.py +++ b/rpython/rlib/rthread.py @@ -60,7 +60,8 @@ releasegil=True) # release the GIL c_thread_releaselock = llexternal('RPyThreadReleaseLock', [TLOCKP], lltype.Signed, - _nowrapper=True) # *don't* release the GIL + releasegil=True) # avoid conflicts if possible + #_nowrapper=True) # *don't* release the GIL # another set of functions, this time in versions that don't cause the # GIL to be released. 
Used to be there to handle the GIL lock itself, @@ -71,7 +72,10 @@ c_thread_acquirelock_timed_NOAUTO = llexternal('RPyThreadAcquireLockTimed', [TLOCKP, rffi.LONGLONG, rffi.INT], rffi.INT, _nowrapper=True) -c_thread_releaselock_NOAUTO = c_thread_releaselock +c_thread_releaselock_NOAUTO = llexternal('RPyThreadReleaseLock', [TLOCKP], + lltype.Signed, + _nowrapper=True) # *don't* release the GIL + def allocate_lock(): From pypy.commits at gmail.com Fri Feb 26 07:28:40 2016 From: pypy.commits at gmail.com (mjacob) Date: Fri, 26 Feb 2016 04:28:40 -0800 (PST) Subject: [pypy-commit] pypy py3.3: Preimport module to reduce stack depth. Message-ID: <56d044f8.55031c0a.6fd73.ffffa1ad@mx.google.com> Author: Manuel Jacob Branch: py3.3 Changeset: r82561:0f700cec0352 Date: 2016-02-26 13:28 +0100 http://bitbucket.org/pypy/pypy/changeset/0f700cec0352/ Log: Preimport module to reduce stack depth. This test file passes if run in isolation. However, if run in conjunction with test_greenlet.py, it fails with a "maximum recursion depth exceeded" RuntimeError because using greenlets untranslated on top of CPython results in the recursion counter being off. This turned out to be too hard too fix, so instead I changed the subsequent test to use fewer stack frames. 
diff --git a/pypy/module/test_lib_pypy/test_os_wait.py b/pypy/module/test_lib_pypy/test_os_wait.py --- a/pypy/module/test_lib_pypy/test_os_wait.py +++ b/pypy/module/test_lib_pypy/test_os_wait.py @@ -17,6 +17,7 @@ if not hasattr(os, "fork"): py.test.skip("Need fork() to test wait3/wait4()") rebuild.rebuild_one('resource.ctc.py') + cls.space.appexec([], "(): import ctypes") cls.w__pypy_wait = import_lib_pypy( cls.space, '_pypy_wait') From pypy.commits at gmail.com Fri Feb 26 08:14:33 2016 From: pypy.commits at gmail.com (rlamy) Date: Fri, 26 Feb 2016 05:14:33 -0800 (PST) Subject: [pypy-commit] pypy default: Use skipif in tests instead of manually emulating it Message-ID: <56d04fb9.42711c0a.f6cf2.fffff6bd@mx.google.com> Author: Ronan Lamy Branch: Changeset: r82563:4a5b9f2b35f5 Date: 2016-02-26 14:13 +0100 http://bitbucket.org/pypy/pypy/changeset/4a5b9f2b35f5/ Log: Use skipif in tests instead of manually emulating it diff --git a/rpython/rlib/test/test_posix.py b/rpython/rlib/test/test_posix.py --- a/rpython/rlib/test/test_posix.py +++ b/rpython/rlib/test/test_posix.py @@ -1,4 +1,4 @@ -import py +import py.test from rpython.rtyper.test.tool import BaseRtypingTest from rpython.rtyper.annlowlevel import hlstr from rpython.tool.udir import udir @@ -58,7 +58,7 @@ assert res def test_times(self): - import py; py.test.skip("llinterp does not like tuple returns") + py.test.skip("llinterp does not like tuple returns") from rpython.rtyper.test.test_llinterp import interpret times = interpret(lambda: posix.times(), ()) assert isinstance(times, tuple) @@ -119,21 +119,21 @@ res = self.interpret(f,[fi,20]) assert self.ll_to_string(res) == text - if hasattr(os, 'chown'): - def test_chown(self): - f = open(path, "w") - f.write("xyz") - f.close() - def f(): - try: - posix.chown(path, os.getuid(), os.getgid()) - return 1 - except OSError: - return 2 + @py.test.mark.skipif("not hasattr(os, 'chown')") + def test_chown(self): + f = open(path, "w") + f.write("xyz") + f.close() + def f(): + 
try: + posix.chown(path, os.getuid(), os.getgid()) + return 1 + except OSError: + return 2 - assert self.interpret(f, []) == 1 - os.unlink(path) - assert self.interpret(f, []) == 2 + assert self.interpret(f, []) == 1 + os.unlink(path) + assert self.interpret(f, []) == 2 def test_close(self): def f(fi): @@ -144,70 +144,70 @@ res = self.interpret(f,[fi]) py.test.raises( OSError, os.fstat, fi) - if hasattr(os, 'ftruncate'): - def test_ftruncate(self): - def f(fi,len): - os.ftruncate(fi,len) - fi = os.open(path,os.O_RDWR,0777) - func = self.interpret(f,[fi,6]) - assert os.fstat(fi).st_size == 6 + @py.test.mark.skipif("not hasattr(os, 'ftruncate')") + def test_ftruncate(self): + def f(fi,len): + os.ftruncate(fi,len) + fi = os.open(path,os.O_RDWR,0777) + func = self.interpret(f,[fi,6]) + assert os.fstat(fi).st_size == 6 - if hasattr(os, 'getuid'): - def test_getuid(self): - def f(): - return os.getuid() - assert self.interpret(f, []) == f() + @py.test.mark.skipif("not hasattr(os, 'getuid')") + def test_getuid(self): + def f(): + return os.getuid() + assert self.interpret(f, []) == f() - if hasattr(os, 'getgid'): - def test_getgid(self): - def f(): - return os.getgid() - assert self.interpret(f, []) == f() + @py.test.mark.skipif("not hasattr(os, 'getgid')") + def test_getgid(self): + def f(): + return os.getgid() + assert self.interpret(f, []) == f() - if hasattr(os, 'setuid'): - def test_os_setuid(self): - def f(): - os.setuid(os.getuid()) - return os.getuid() - assert self.interpret(f, []) == f() + @py.test.mark.skipif("not hasattr(os, 'setuid')") + def test_os_setuid(self): + def f(): + os.setuid(os.getuid()) + return os.getuid() + assert self.interpret(f, []) == f() - if hasattr(os, 'sysconf'): - def test_os_sysconf(self): - def f(i): - return os.sysconf(i) - assert self.interpret(f, [13]) == f(13) + @py.test.mark.skipif("not hasattr(os, 'sysconf')") + def test_os_sysconf(self): + def f(i): + return os.sysconf(i) + assert self.interpret(f, [13]) == f(13) - if 
hasattr(os, 'confstr'): - def test_os_confstr(self): - def f(i): - try: - return os.confstr(i) - except OSError: - return "oooops!!" - some_value = os.confstr_names.values()[-1] - res = self.interpret(f, [some_value]) - assert hlstr(res) == f(some_value) - res = self.interpret(f, [94781413]) - assert hlstr(res) == "oooops!!" + @py.test.mark.skipif("not hasattr(os, 'confstr')") + def test_os_confstr(self): + def f(i): + try: + return os.confstr(i) + except OSError: + return "oooops!!" + some_value = os.confstr_names.values()[-1] + res = self.interpret(f, [some_value]) + assert hlstr(res) == f(some_value) + res = self.interpret(f, [94781413]) + assert hlstr(res) == "oooops!!" - if hasattr(os, 'pathconf'): - def test_os_pathconf(self): - def f(i): - return os.pathconf("/tmp", i) - i = os.pathconf_names["PC_NAME_MAX"] - some_value = self.interpret(f, [i]) - assert some_value >= 31 + @py.test.mark.skipif("not hasattr(os, 'pathconf')") + def test_os_pathconf(self): + def f(i): + return os.pathconf("/tmp", i) + i = os.pathconf_names["PC_NAME_MAX"] + some_value = self.interpret(f, [i]) + assert some_value >= 31 - if hasattr(os, 'chroot'): - def test_os_chroot(self): - def f(): - try: - os.chroot('!@$#!#%$#^#@!#!$$#^') - except OSError: - return 1 - return 0 + @py.test.mark.skipif("not hasattr(os, 'chroot')") + def test_os_chroot(self): + def f(): + try: + os.chroot('!@$#!#%$#^#@!#!$$#^') + except OSError: + return 1 + return 0 - assert self.interpret(f, []) == 1 + assert self.interpret(f, []) == 1 def test_os_wstar(self): from rpython.rlib import rposix @@ -221,84 +221,84 @@ res = self.interpret(fun, [value]) assert res == fun(value) - if hasattr(os, 'getgroups'): - def test_getgroups(self): - def f(): - return os.getgroups() - ll_a = self.interpret(f, []) - assert self.ll_to_list(ll_a) == f() + @py.test.mark.skipif("not hasattr(os, 'getgroups')") + def test_getgroups(self): + def f(): + return os.getgroups() + ll_a = self.interpret(f, []) + assert self.ll_to_list(ll_a) == 
f() - if hasattr(os, 'setgroups'): - def test_setgroups(self): - def f(): - try: - os.setgroups(os.getgroups()) - except OSError: - pass - self.interpret(f, []) + @py.test.mark.skipif("not hasattr(os, 'setgroups')") + def test_setgroups(self): + def f(): + try: + os.setgroups(os.getgroups()) + except OSError: + pass + self.interpret(f, []) - if hasattr(os, 'initgroups'): - def test_initgroups(self): - def f(): - try: - os.initgroups('sUJJeumz', 4321) - except OSError: - return 1 - return 0 - res = self.interpret(f, []) - assert res == 1 + @py.test.mark.skipif("not hasattr(os, 'initgroups')") + def test_initgroups(self): + def f(): + try: + os.initgroups('sUJJeumz', 4321) + except OSError: + return 1 + return 0 + res = self.interpret(f, []) + assert res == 1 - if hasattr(os, 'tcgetpgrp'): - def test_tcgetpgrp(self): - def f(fd): - try: - return os.tcgetpgrp(fd) - except OSError: - return 42 - res = self.interpret(f, [9999]) - assert res == 42 + @py.test.mark.skipif("not hasattr(os, 'tcgetpgrp')") + def test_tcgetpgrp(self): + def f(fd): + try: + return os.tcgetpgrp(fd) + except OSError: + return 42 + res = self.interpret(f, [9999]) + assert res == 42 - if hasattr(os, 'tcsetpgrp'): - def test_tcsetpgrp(self): - def f(fd, pgrp): - try: - os.tcsetpgrp(fd, pgrp) - except OSError: - return 1 - return 0 - res = self.interpret(f, [9999, 1]) - assert res == 1 + @py.test.mark.skipif("not hasattr(os, 'tcsetpgrp')") + def test_tcsetpgrp(self): + def f(fd, pgrp): + try: + os.tcsetpgrp(fd, pgrp) + except OSError: + return 1 + return 0 + res = self.interpret(f, [9999, 1]) + assert res == 1 - if hasattr(os, 'getresuid'): - def test_getresuid(self): - def f(): - a, b, c = os.getresuid() - return a + b * 37 + c * 1291 - res = self.interpret(f, []) + @py.test.mark.skipif("not hasattr(os, 'getresuid')") + def test_getresuid(self): + def f(): a, b, c = os.getresuid() - assert res == a + b * 37 + c * 1291 + return a + b * 37 + c * 1291 + res = self.interpret(f, []) + a, b, c = 
os.getresuid() + assert res == a + b * 37 + c * 1291 - if hasattr(os, 'getresgid'): - def test_getresgid(self): - def f(): - a, b, c = os.getresgid() - return a + b * 37 + c * 1291 - res = self.interpret(f, []) + @py.test.mark.skipif("not hasattr(os, 'getresgid')") + def test_getresgid(self): + def f(): a, b, c = os.getresgid() - assert res == a + b * 37 + c * 1291 + return a + b * 37 + c * 1291 + res = self.interpret(f, []) + a, b, c = os.getresgid() + assert res == a + b * 37 + c * 1291 - if hasattr(os, 'setresuid'): - def test_setresuid(self): - def f(): - a, b, c = os.getresuid() - a = (a + 1) - 1 - os.setresuid(a, b, c) - self.interpret(f, []) + @py.test.mark.skipif("not hasattr(os, 'setresuid')") + def test_setresuid(self): + def f(): + a, b, c = os.getresuid() + a = (a + 1) - 1 + os.setresuid(a, b, c) + self.interpret(f, []) - if hasattr(os, 'setresgid'): - def test_setresgid(self): - def f(): - a, b, c = os.getresgid() - a = (a + 1) - 1 - os.setresgid(a, b, c) - self.interpret(f, []) + @py.test.mark.skipif("not hasattr(os, 'setresgid')") + def test_setresgid(self): + def f(): + a, b, c = os.getresgid() + a = (a + 1) - 1 + os.setresgid(a, b, c) + self.interpret(f, []) From pypy.commits at gmail.com Fri Feb 26 08:15:44 2016 From: pypy.commits at gmail.com (cfbolz) Date: Fri, 26 Feb 2016 05:15:44 -0800 (PST) Subject: [pypy-commit] pypy default: (cfbolz, arigo): have a test that we don't import pypy. from rpython/ Message-ID: <56d05000.512f1c0a.eeec3.fffff087@mx.google.com> Author: Carl Friedrich Bolz Branch: Changeset: r82564:96b63fbec213 Date: 2016-02-26 14:14 +0100 http://bitbucket.org/pypy/pypy/changeset/96b63fbec213/ Log: (cfbolz, arigo): have a test that we don't import pypy. 
from rpython/ diff --git a/pypy/tool/test/test_tab.py b/pypy/tool/test/test_tab.py --- a/pypy/tool/test/test_tab.py +++ b/pypy/tool/test/test_tab.py @@ -6,6 +6,7 @@ from pypy.conftest import pypydir ROOT = os.path.abspath(os.path.join(pypydir, '..')) +RPYTHONDIR = os.path.join(ROOT, "rpython") EXCLUDE = {'/virt_test/lib/python2.7/site-packages/setuptools'} @@ -28,3 +29,27 @@ if not entry.startswith('.'): walk('%s/%s' % (reldir, entry)) walk('') + +def test_no_pypy_import_in_rpython(): + def walk(reldir): + print reldir + if reldir: + path = os.path.join(RPYTHONDIR, *reldir.split('/')) + else: + path = RPYTHONDIR + if os.path.isfile(path): + if not path.lower().endswith('.py'): + return + with file(path) as f: + for line in f: + if "import" not in line: + continue + assert "from pypy." not in line + assert "import pypy." not in line + elif os.path.isdir(path) and not os.path.islink(path): + for entry in os.listdir(path): + if not entry.startswith('.'): + walk('%s/%s' % (reldir, entry)) + + walk('') + From pypy.commits at gmail.com Fri Feb 26 08:45:13 2016 From: pypy.commits at gmail.com (rlamy) Date: Fri, 26 Feb 2016 05:45:13 -0800 (PST) Subject: [pypy-commit] pypy py3.3: Fix test so that the truncate() call actually fails Message-ID: <56d056e9.4577c20a.7d582.4198@mx.google.com> Author: Ronan Lamy Branch: py3.3 Changeset: r82565:2d2859fe2901 Date: 2016-02-26 14:44 +0100 http://bitbucket.org/pypy/pypy/changeset/2d2859fe2901/ Log: Fix test so that the truncate() call actually fails diff --git a/pypy/module/posix/test/test_posix2.py b/pypy/module/posix/test/test_posix2.py --- a/pypy/module/posix/test/test_posix2.py +++ b/pypy/module/posix/test/test_posix2.py @@ -996,7 +996,8 @@ # Check invalid inputs mkfile(dest) raises(OSError, posix.truncate, dest, -1) - raises(OSError, posix.truncate, 1, 1) + with open(dest, 'rb') as f: # f is read-only so cannot be truncated + raises(OSError, posix.truncate, f.fileno(), 1) raises(TypeError, posix.truncate, dest, None) 
raises(TypeError, posix.truncate, None, None) From pypy.commits at gmail.com Fri Feb 26 09:23:51 2016 From: pypy.commits at gmail.com (arigo) Date: Fri, 26 Feb 2016 06:23:51 -0800 (PST) Subject: [pypy-commit] pypy cpyext-ext: (fijal, arigo) nb_float Message-ID: <56d05ff7.162f1c0a.d63de.100d@mx.google.com> Author: Armin Rigo Branch: cpyext-ext Changeset: r82566:f112de2bd0fa Date: 2016-02-26 15:22 +0100 http://bitbucket.org/pypy/pypy/changeset/f112de2bd0fa/ Log: (fijal, arigo) nb_float diff --git a/pypy/module/cpyext/slotdefs.py b/pypy/module/cpyext/slotdefs.py --- a/pypy/module/cpyext/slotdefs.py +++ b/pypy/module/cpyext/slotdefs.py @@ -349,6 +349,10 @@ return space.int(w_self) @cpython_api([PyObject], PyObject, header=None) +def slot_nb_float(space, w_self): + return space.float(w_self) + + at cpython_api([PyObject], PyObject, header=None) def slot_tp_iter(space, w_self): return space.iter(w_self) diff --git a/pypy/module/cpyext/test/test_typeobject.py b/pypy/module/cpyext/test/test_typeobject.py --- a/pypy/module/cpyext/test/test_typeobject.py +++ b/pypy/module/cpyext/test/test_typeobject.py @@ -488,6 +488,24 @@ assert module.nb_int(-12.3) == -12 raises(ValueError, module.nb_int, "123") + def test_nb_float(self): + module = self.import_extension('foo', [ + ("nb_float", "METH_O", + ''' + if (!args->ob_type->tp_as_number || + !args->ob_type->tp_as_number->nb_float) + { + PyErr_SetNone(PyExc_ValueError); + return NULL; + } + return args->ob_type->tp_as_number->nb_float(args); + ''' + ) + ]) + assert module.nb_float(10) == 10.0 + assert module.nb_float(-12.3) == -12.3 + raises(ValueError, module.nb_float, "123") + def test_tp_call(self): module = self.import_extension('foo', [ ("tp_call", "METH_VARARGS", From pypy.commits at gmail.com Fri Feb 26 10:50:58 2016 From: pypy.commits at gmail.com (plan_rich) Date: Fri, 26 Feb 2016 07:50:58 -0800 (PST) Subject: [pypy-commit] pypy new-jit-log: added jitlog as a replacement of the current PYPYLOG, does not run yet Message-ID: 
<56d07462.703dc20a.b310.02f9@mx.google.com> Author: Richard Plangger Branch: new-jit-log Changeset: r82567:2e7a4736bf22 Date: 2016-02-26 16:20 +0100 http://bitbucket.org/pypy/pypy/changeset/2e7a4736bf22/ Log: added jitlog as a replacement of the current PYPYLOG, does not run yet diff --git a/rpython/jit/metainterp/jitlog.py b/rpython/jit/metainterp/jitlog.py new file mode 100644 --- /dev/null +++ b/rpython/jit/metainterp/jitlog.py @@ -0,0 +1,22 @@ +from rpython.rlib.rvmprof.rvmprof import cintf + +class VMProfJitLogger(object): + def __init__(self): + self.cintf = cintf.setup() + + def _ensure_init(self): + self.cintf.jitlog_try_init_using_env() + + self.cintf.write_marker(BinaryJitLogger.JIT_META_MARKER) + count = len(resoperation.opname) + assert count < 256 + self.cintf.write_marker(count) + for opnum, opname in resoperation.opname.items(): + self.cintf.write_byte(opnum) + self.cintf.write_string(opnum) + + def log_loop(self, operations): + pass + + def _log_resoperation(self, op): + pass diff --git a/rpython/jit/metainterp/optimizeopt/__init__.py b/rpython/jit/metainterp/optimizeopt/__init__.py --- a/rpython/jit/metainterp/optimizeopt/__init__.py +++ b/rpython/jit/metainterp/optimizeopt/__init__.py @@ -54,6 +54,7 @@ debug_start("jit-optimize") inputargs = compile_data.start_label.getarglist() try: + metainterp.jitlog.log_loop(inputargs, compile_data.operations, memo) metainterp_sd.logger_noopt.log_loop(inputargs, compile_data.operations, memo=memo) diff --git a/rpython/jit/metainterp/pyjitpl.py b/rpython/jit/metainterp/pyjitpl.py --- a/rpython/jit/metainterp/pyjitpl.py +++ b/rpython/jit/metainterp/pyjitpl.py @@ -13,6 +13,7 @@ from rpython.jit.metainterp.logger import Logger from rpython.jit.metainterp.optimizeopt.util import args_dict from rpython.jit.metainterp.resoperation import rop, OpHelpers, GuardResOp +from rpython.jit.metainterp import jitlog from rpython.rlib import nonconst, rstack from rpython.rlib.debug import debug_start, debug_stop, debug_print 
from rpython.rlib.debug import have_debug_prints, make_sure_not_resized @@ -1717,12 +1718,14 @@ class MetaInterpStaticData(object): logger_noopt = None logger_ops = None + jitlog = None def __init__(self, cpu, options, ProfilerClass=EmptyProfiler, warmrunnerdesc=None): self.cpu = cpu self.stats = self.cpu.stats self.options = options + self.jitlog = jitlog.VMProfJitLogger() self.logger_noopt = Logger(self) self.logger_ops = Logger(self, guard_number=True) diff --git a/rpython/rlib/rvmprof/cintf.py b/rpython/rlib/rvmprof/cintf.py --- a/rpython/rlib/rvmprof/cintf.py +++ b/rpython/rlib/rvmprof/cintf.py @@ -51,6 +51,11 @@ [rffi.INT], lltype.Void, compilation_info=eci, _nowrapper=True) + + jitlog_init = rffi.llexternal("jitlog_init", [rffi.INT, rffi.CHARP], + rffi.CHARP, compilation_info=eci, + save_err=rffi.RFFI_SAVE_ERRNO) + return CInterface(locals()) diff --git a/rpython/rlib/rvmprof/rvmprof.py b/rpython/rlib/rvmprof/rvmprof.py --- a/rpython/rlib/rvmprof/rvmprof.py +++ b/rpython/rlib/rvmprof/rvmprof.py @@ -109,12 +109,20 @@ if p_error: raise VMProfError(rffi.charp2str(p_error)) + self.enable_jitlog(fileno, "") + self._gather_all_code_objs() res = self.cintf.vmprof_enable() if res < 0: raise VMProfError(os.strerror(rposix.get_saved_errno())) self.is_enabled = True + def enable_jitlog(self, fileno, regexp): + # initialize the jit log + p_error = self.cintf.jitlog_init(fileno, regexp) + if p_error: + raise VMProfError(rffi.charp2str(p_error)) + def disable(self): """Disable vmprof. Raises VMProfError if something goes wrong. 
diff --git a/rpython/rlib/rvmprof/src/jitlog_main.h b/rpython/rlib/rvmprof/src/jitlog_main.h new file mode 100644 --- /dev/null +++ b/rpython/rlib/rvmprof/src/jitlog_main.h @@ -0,0 +1,96 @@ +#include + +static int jitlog_fd = -1; +static char * jitlog_prefix = NULL; +static int jitlog_ready = 0; + +RPY_EXTERN +void jitlog_try_init_using_env(void) { + if (jitlog_ready) { return; } + + char *filename = getenv("JITLOG"); + + if (filename && filename[0]) { + char *newfilename = NULL, *escape; + char *colon = strchr(filename, ':'); + if (filename[0] == '+') { + filename += 1; + colon = NULL; + } + if (!colon) { + /* JITLOG=+filename (or just 'filename') --- profiling version */ + debug_profile = 1; + pypy_setup_profiling(); + } else { + /* JITLOG=prefix:filename --- conditional logging */ + int n = colon - filename; + debug_prefix = malloc(n + 1); + memcpy(debug_prefix, filename, n); + debug_prefix[n] = '\0'; + filename = colon + 1; + } + escape = strstr(filename, "%d"); + if (escape) { + /* a "%d" in the filename is replaced with the pid */ + newfilename = malloc(strlen(filename) + 32); + if (newfilename != NULL) { + char *p = newfilename; + memcpy(p, filename, escape - filename); + p += escape - filename; + sprintf(p, "%ld", (long)getpid()); + strcat(p, escape + 2); + filename = newfilename; + } + } + if (strcmp(filename, "-") != 0) { + // mode is 775 + mode_t mode = S_IRWXU | S_IRWXG | S_IROTH | S_IXOTH; + jitlog_fd = open(filename, O_WRONLY | O_CREATE, mode); + } + + if (escape) { + free(newfilename); /* if not null */ + /* the env var is kept and passed to subprocesses */ + } else { +#ifndef _WIN32 + unsetenv("JITLOG"); +#else + putenv("JITLOG="); +#endif + } + } + if (!jitlog_fd) { + jitlog_fd = stderr; + // TODO + //if (isatty(2)) + // { + // debug_start_colors_1 = "\033[1m\033[31m"; + // debug_start_colors_2 = "\033[31m"; + // debug_stop_colors = "\033[0m"; + // } + } + + jitlog_ready = 1; +} + +RPY_EXTERN +char *jitlog_init(int fd, char * prefix) +{ + jitlog_fd 
= fd; + jitlog_prefix = strdup(prefix); + return NULL; +} + +RPY_EXTERN +void jitlog_close(int close_fd) +{ + if (jitlog_fd == -1) { + return; + } + if (close_fd) { + close(jitlog_fd); + } + jitlog_fd = -1; + free(jitlog_prefix); +} + diff --git a/rpython/rlib/rvmprof/src/rvmprof.c b/rpython/rlib/rvmprof/src/rvmprof.c --- a/rpython/rlib/rvmprof/src/rvmprof.c +++ b/rpython/rlib/rvmprof/src/rvmprof.c @@ -16,12 +16,12 @@ # include "structdef.h" # include "src/threadlocal.h" # include "rvmprof.h" - #endif #if defined(__unix__) || defined(__APPLE__) #include "vmprof_main.h" +#include "jitlog_main.h" #else #include "vmprof_main_win32.h" #endif diff --git a/rpython/rlib/rvmprof/src/rvmprof.h b/rpython/rlib/rvmprof/src/rvmprof.h --- a/rpython/rlib/rvmprof/src/rvmprof.h +++ b/rpython/rlib/rvmprof/src/rvmprof.h @@ -8,3 +8,6 @@ RPY_EXTERN int vmprof_stack_append(void*, long); RPY_EXTERN long vmprof_stack_pop(void*); RPY_EXTERN void vmprof_stack_free(void*); + +RPY_EXTERN char * jitlog_init(int, char*); +RPY_EXTERN void jitlog_try_init_using_env(void); diff --git a/rpython/rlib/rvmprof/src/vmprof_common.h b/rpython/rlib/rvmprof/src/vmprof_common.h --- a/rpython/rlib/rvmprof/src/vmprof_common.h +++ b/rpython/rlib/rvmprof/src/vmprof_common.h @@ -7,11 +7,7 @@ static long profile_interval_usec = 0; static int opened_profile(char *interp_name); -#define MARKER_STACKTRACE '\x01' -#define MARKER_VIRTUAL_IP '\x02' -#define MARKER_TRAILER '\x03' -#define MARKER_INTERP_NAME '\x04' /* deprecated */ -#define MARKER_HEADER '\x05' +#include "vmprof_markers.h" #define VERSION_BASE '\x00' #define VERSION_THREAD_ID '\x01' diff --git a/rpython/rlib/rvmprof/src/vmprof_markers.h b/rpython/rlib/rvmprof/src/vmprof_markers.h new file mode 100644 --- /dev/null +++ b/rpython/rlib/rvmprof/src/vmprof_markers.h @@ -0,0 +1,10 @@ +#pragma once + +#define MARKER_STACKTRACE '\x01' +#define MARKER_VIRTUAL_IP '\x02' +#define MARKER_TRAILER '\x03' +#define MARKER_INTERP_NAME '\x04' /* deprecated */ +#define 
MARKER_HEADER '\x05' + +#define MARKER_JITLOG_META '\x06' + From pypy.commits at gmail.com Fri Feb 26 11:33:17 2016 From: pypy.commits at gmail.com (mjacob) Date: Fri, 26 Feb 2016 08:33:17 -0800 (PST) Subject: [pypy-commit] pypy default: (ronan, mjacob) Make app-level time.sleep() release the GIL untranslated. Message-ID: <56d07e4d.080a1c0a.4cd87.3da7@mx.google.com> Author: Manuel Jacob Branch: Changeset: r82568:4f2954127738 Date: 2016-02-26 17:28 +0100 http://bitbucket.org/pypy/pypy/changeset/4f2954127738/ Log: (ronan, mjacob) Make app-level time.sleep() release the GIL untranslated. Some app-level tests call time.sleep() to release the GIL. This didn't work reliably without hacks. This changeset also removes one of these hacks. The modified test wouldn't have passed without the interp_time.py change. diff --git a/pypy/module/thread/test/test_thread.py b/pypy/module/thread/test/test_thread.py --- a/pypy/module/thread/test/test_thread.py +++ b/pypy/module/thread/test/test_thread.py @@ -239,14 +239,12 @@ if waiting: thread.interrupt_main() return - print 'tock...', x # <-force the GIL to be released, as - time.sleep(0.1) # time.sleep doesn't do non-translated + time.sleep(0.1) def busy_wait(): waiting.append(None) for x in range(50): - print 'tick...', x # <-force the GIL to be released, as - time.sleep(0.1) # time.sleep doesn't do non-translated + time.sleep(0.1) waiting.pop() # This is normally called by app_main.py diff --git a/pypy/module/time/interp_time.py b/pypy/module/time/interp_time.py --- a/pypy/module/time/interp_time.py +++ b/pypy/module/time/interp_time.py @@ -4,7 +4,7 @@ from pypy.interpreter.gateway import unwrap_spec from rpython.rtyper.lltypesystem import lltype from rpython.rlib.rarithmetic import intmask -from rpython.rlib import rposix +from rpython.rlib import rposix, rtime from rpython.translator.tool.cbuild import ExternalCompilationInfo import os import sys @@ -316,13 +316,13 @@ if secs < 0: raise OperationError(space.w_IOError, 
space.wrap("Invalid argument: negative time in sleep")) - pytime.sleep(secs) + rtime.sleep(secs) else: from rpython.rlib import rwin32 from errno import EINTR def _simple_sleep(space, secs, interruptible): if secs == 0.0 or not interruptible: - pytime.sleep(secs) + rtime.sleep(secs) else: millisecs = int(secs * 1000) interrupt_event = space.fromcache(State).get_interrupt_event() @@ -331,7 +331,7 @@ if rc == rwin32.WAIT_OBJECT_0: # Yield to make sure real Python signal handler # called. - pytime.sleep(0.001) + rtime.sleep(0.001) raise wrap_oserror(space, OSError(EINTR, "sleep() interrupted")) @unwrap_spec(secs=float) From pypy.commits at gmail.com Fri Feb 26 11:33:19 2016 From: pypy.commits at gmail.com (mjacob) Date: Fri, 26 Feb 2016 08:33:19 -0800 (PST) Subject: [pypy-commit] pypy py3k: hg merge default Message-ID: <56d07e4f.12871c0a.2e894.3e7c@mx.google.com> Author: Manuel Jacob Branch: py3k Changeset: r82569:12537e5dacda Date: 2016-02-26 17:30 +0100 http://bitbucket.org/pypy/pypy/changeset/12537e5dacda/ Log: hg merge default diff --git a/pypy/doc/whatsnew-head.rst b/pypy/doc/whatsnew-head.rst --- a/pypy/doc/whatsnew-head.rst +++ b/pypy/doc/whatsnew-head.rst @@ -170,4 +170,17 @@ When creating instances and adding attributes in several different orders depending on some condition, the JIT would create too much code. This is now -fixed. \ No newline at end of file +fixed. + +.. branch: cpyext-gc-support-2 + +Improve CPython C API support, which means lxml now runs unmodified +(after removing pypy hacks, pending pull request) + +.. branch: look-inside-tuple-hash + +Look inside tuple hash, improving mdp benchmark + +.. 
branch: vlen-resume + +Compress resume data, saving 10-20% of memory consumed by the JIT \ No newline at end of file diff --git a/pypy/module/thread/test/test_thread.py b/pypy/module/thread/test/test_thread.py --- a/pypy/module/thread/test/test_thread.py +++ b/pypy/module/thread/test/test_thread.py @@ -239,14 +239,12 @@ if waiting: _thread.interrupt_main() return - print('tock...', x) # <-force the GIL to be released, as - time.sleep(0.1) # time.sleep doesn't do non-translated + time.sleep(0.1) def busy_wait(): waiting.append(None) for x in range(100): - print('tick...', x) # <-force the GIL to be released, as - time.sleep(0.1) # time.sleep doesn't do non-translated + time.sleep(0.1) waiting.pop() # This is normally called by app_main.py diff --git a/pypy/module/time/interp_time.py b/pypy/module/time/interp_time.py --- a/pypy/module/time/interp_time.py +++ b/pypy/module/time/interp_time.py @@ -4,7 +4,7 @@ from pypy.interpreter.gateway import unwrap_spec from rpython.rtyper.lltypesystem import lltype from rpython.rlib.rarithmetic import intmask -from rpython.rlib import rposix +from rpython.rlib import rposix, rtime from rpython.translator.tool.cbuild import ExternalCompilationInfo import os import sys @@ -314,13 +314,13 @@ if secs < 0: raise OperationError(space.w_IOError, space.wrap("Invalid argument: negative time in sleep")) - pytime.sleep(secs) + rtime.sleep(secs) else: from rpython.rlib import rwin32 from errno import EINTR def _simple_sleep(space, secs, interruptible): if secs == 0.0 or not interruptible: - pytime.sleep(secs) + rtime.sleep(secs) else: millisecs = int(secs * 1000) interrupt_event = space.fromcache(State).get_interrupt_event() @@ -329,7 +329,7 @@ if rc == rwin32.WAIT_OBJECT_0: # Yield to make sure real Python signal handler # called. 
- pytime.sleep(0.001) + rtime.sleep(0.001) raise wrap_oserror(space, OSError(EINTR, "sleep() interrupted")) @unwrap_spec(secs=float) diff --git a/pypy/objspace/std/mapdict.py b/pypy/objspace/std/mapdict.py --- a/pypy/objspace/std/mapdict.py +++ b/pypy/objspace/std/mapdict.py @@ -65,21 +65,8 @@ def delete(self, obj, name, index): pass + @jit.elidable def find_map_attr(self, name, index): - if jit.we_are_jitted(): - # hack for the jit: - # the _find_map_attr method is pure too, but its argument is never - # constant, because it is always a new tuple - return self._find_map_attr_jit_pure(name, index) - else: - return self._find_map_attr_indirection(name, index) - - @jit.elidable - def _find_map_attr_jit_pure(self, name, index): - return self._find_map_attr_indirection(name, index) - - @jit.dont_look_inside - def _find_map_attr_indirection(self, name, index): if (self.space.config.objspace.std.withmethodcache): return self._find_map_attr_cache(name, index) return self._find_map_attr(name, index) diff --git a/pypy/tool/test/test_tab.py b/pypy/tool/test/test_tab.py --- a/pypy/tool/test/test_tab.py +++ b/pypy/tool/test/test_tab.py @@ -6,6 +6,7 @@ from pypy.conftest import pypydir ROOT = os.path.abspath(os.path.join(pypydir, '..')) +RPYTHONDIR = os.path.join(ROOT, "rpython") EXCLUDE = {'/virt_test/lib/python2.7/site-packages/setuptools'} @@ -28,3 +29,27 @@ if not entry.startswith('.'): walk('%s/%s' % (reldir, entry)) walk('') + +def test_no_pypy_import_in_rpython(): + def walk(reldir): + print reldir + if reldir: + path = os.path.join(RPYTHONDIR, *reldir.split('/')) + else: + path = RPYTHONDIR + if os.path.isfile(path): + if not path.lower().endswith('.py'): + return + with file(path) as f: + for line in f: + if "import" not in line: + continue + assert "from pypy." not in line + assert "import pypy." 
not in line + elif os.path.isdir(path) and not os.path.islink(path): + for entry in os.listdir(path): + if not entry.startswith('.'): + walk('%s/%s' % (reldir, entry)) + + walk('') + diff --git a/rpython/jit/metainterp/test/test_jitdriver.py b/rpython/jit/metainterp/test/test_jitdriver.py --- a/rpython/jit/metainterp/test/test_jitdriver.py +++ b/rpython/jit/metainterp/test/test_jitdriver.py @@ -227,7 +227,7 @@ i += 1 self.meta_interp(f, [0]) - self.check_resops(enter_portal_frame=1, leave_portal_frame=1) + self.check_simple_loop(enter_portal_frame=1, leave_portal_frame=1) class TestLLtype(MultipleJitDriversTests, LLJitMixin): pass diff --git a/rpython/rlib/test/test_posix.py b/rpython/rlib/test/test_posix.py --- a/rpython/rlib/test/test_posix.py +++ b/rpython/rlib/test/test_posix.py @@ -1,4 +1,4 @@ -import py +import py.test from rpython.rtyper.test.tool import BaseRtypingTest from rpython.rtyper.annlowlevel import hlstr from rpython.tool.udir import udir @@ -58,7 +58,7 @@ assert res def test_times(self): - import py; py.test.skip("llinterp does not like tuple returns") + py.test.skip("llinterp does not like tuple returns") from rpython.rtyper.test.test_llinterp import interpret times = interpret(lambda: posix.times(), ()) assert isinstance(times, tuple) @@ -119,21 +119,21 @@ res = self.interpret(f,[fi,20]) assert self.ll_to_string(res) == text - if hasattr(os, 'chown'): - def test_chown(self): - f = open(path, "w") - f.write("xyz") - f.close() - def f(): - try: - posix.chown(path, os.getuid(), os.getgid()) - return 1 - except OSError: - return 2 + @py.test.mark.skipif("not hasattr(os, 'chown')") + def test_chown(self): + f = open(path, "w") + f.write("xyz") + f.close() + def f(): + try: + posix.chown(path, os.getuid(), os.getgid()) + return 1 + except OSError: + return 2 - assert self.interpret(f, []) == 1 - os.unlink(path) - assert self.interpret(f, []) == 2 + assert self.interpret(f, []) == 1 + os.unlink(path) + assert self.interpret(f, []) == 2 def 
test_close(self): def f(fi): @@ -144,70 +144,70 @@ res = self.interpret(f,[fi]) py.test.raises( OSError, os.fstat, fi) - if hasattr(os, 'ftruncate'): - def test_ftruncate(self): - def f(fi,len): - os.ftruncate(fi,len) - fi = os.open(path,os.O_RDWR,0777) - func = self.interpret(f,[fi,6]) - assert os.fstat(fi).st_size == 6 + @py.test.mark.skipif("not hasattr(os, 'ftruncate')") + def test_ftruncate(self): + def f(fi,len): + os.ftruncate(fi,len) + fi = os.open(path,os.O_RDWR,0777) + func = self.interpret(f,[fi,6]) + assert os.fstat(fi).st_size == 6 - if hasattr(os, 'getuid'): - def test_getuid(self): - def f(): - return os.getuid() - assert self.interpret(f, []) == f() + @py.test.mark.skipif("not hasattr(os, 'getuid')") + def test_getuid(self): + def f(): + return os.getuid() + assert self.interpret(f, []) == f() - if hasattr(os, 'getgid'): - def test_getgid(self): - def f(): - return os.getgid() - assert self.interpret(f, []) == f() + @py.test.mark.skipif("not hasattr(os, 'getgid')") + def test_getgid(self): + def f(): + return os.getgid() + assert self.interpret(f, []) == f() - if hasattr(os, 'setuid'): - def test_os_setuid(self): - def f(): - os.setuid(os.getuid()) - return os.getuid() - assert self.interpret(f, []) == f() + @py.test.mark.skipif("not hasattr(os, 'setuid')") + def test_os_setuid(self): + def f(): + os.setuid(os.getuid()) + return os.getuid() + assert self.interpret(f, []) == f() - if hasattr(os, 'sysconf'): - def test_os_sysconf(self): - def f(i): - return os.sysconf(i) - assert self.interpret(f, [13]) == f(13) + @py.test.mark.skipif("not hasattr(os, 'sysconf')") + def test_os_sysconf(self): + def f(i): + return os.sysconf(i) + assert self.interpret(f, [13]) == f(13) - if hasattr(os, 'confstr'): - def test_os_confstr(self): - def f(i): - try: - return os.confstr(i) - except OSError: - return "oooops!!" 
- some_value = os.confstr_names.values()[-1] - res = self.interpret(f, [some_value]) - assert hlstr(res) == f(some_value) - res = self.interpret(f, [94781413]) - assert hlstr(res) == "oooops!!" + @py.test.mark.skipif("not hasattr(os, 'confstr')") + def test_os_confstr(self): + def f(i): + try: + return os.confstr(i) + except OSError: + return "oooops!!" + some_value = os.confstr_names.values()[-1] + res = self.interpret(f, [some_value]) + assert hlstr(res) == f(some_value) + res = self.interpret(f, [94781413]) + assert hlstr(res) == "oooops!!" - if hasattr(os, 'pathconf'): - def test_os_pathconf(self): - def f(i): - return os.pathconf("/tmp", i) - i = os.pathconf_names["PC_NAME_MAX"] - some_value = self.interpret(f, [i]) - assert some_value >= 31 + @py.test.mark.skipif("not hasattr(os, 'pathconf')") + def test_os_pathconf(self): + def f(i): + return os.pathconf("/tmp", i) + i = os.pathconf_names["PC_NAME_MAX"] + some_value = self.interpret(f, [i]) + assert some_value >= 31 - if hasattr(os, 'chroot'): - def test_os_chroot(self): - def f(): - try: - os.chroot('!@$#!#%$#^#@!#!$$#^') - except OSError: - return 1 - return 0 + @py.test.mark.skipif("not hasattr(os, 'chroot')") + def test_os_chroot(self): + def f(): + try: + os.chroot('!@$#!#%$#^#@!#!$$#^') + except OSError: + return 1 + return 0 - assert self.interpret(f, []) == 1 + assert self.interpret(f, []) == 1 def test_os_wstar(self): from rpython.rlib import rposix @@ -221,84 +221,84 @@ res = self.interpret(fun, [value]) assert res == fun(value) - if hasattr(os, 'getgroups'): - def test_getgroups(self): - def f(): - return os.getgroups() - ll_a = self.interpret(f, []) - assert self.ll_to_list(ll_a) == f() + @py.test.mark.skipif("not hasattr(os, 'getgroups')") + def test_getgroups(self): + def f(): + return os.getgroups() + ll_a = self.interpret(f, []) + assert self.ll_to_list(ll_a) == f() - if hasattr(os, 'setgroups'): - def test_setgroups(self): - def f(): - try: - os.setgroups(os.getgroups()) - except OSError: - 
pass - self.interpret(f, []) + @py.test.mark.skipif("not hasattr(os, 'setgroups')") + def test_setgroups(self): + def f(): + try: + os.setgroups(os.getgroups()) + except OSError: + pass + self.interpret(f, []) - if hasattr(os, 'initgroups'): - def test_initgroups(self): - def f(): - try: - os.initgroups('sUJJeumz', 4321) - except OSError: - return 1 - return 0 - res = self.interpret(f, []) - assert res == 1 + @py.test.mark.skipif("not hasattr(os, 'initgroups')") + def test_initgroups(self): + def f(): + try: + os.initgroups('sUJJeumz', 4321) + except OSError: + return 1 + return 0 + res = self.interpret(f, []) + assert res == 1 - if hasattr(os, 'tcgetpgrp'): - def test_tcgetpgrp(self): - def f(fd): - try: - return os.tcgetpgrp(fd) - except OSError: - return 42 - res = self.interpret(f, [9999]) - assert res == 42 + @py.test.mark.skipif("not hasattr(os, 'tcgetpgrp')") + def test_tcgetpgrp(self): + def f(fd): + try: + return os.tcgetpgrp(fd) + except OSError: + return 42 + res = self.interpret(f, [9999]) + assert res == 42 - if hasattr(os, 'tcsetpgrp'): - def test_tcsetpgrp(self): - def f(fd, pgrp): - try: - os.tcsetpgrp(fd, pgrp) - except OSError: - return 1 - return 0 - res = self.interpret(f, [9999, 1]) - assert res == 1 + @py.test.mark.skipif("not hasattr(os, 'tcsetpgrp')") + def test_tcsetpgrp(self): + def f(fd, pgrp): + try: + os.tcsetpgrp(fd, pgrp) + except OSError: + return 1 + return 0 + res = self.interpret(f, [9999, 1]) + assert res == 1 - if hasattr(os, 'getresuid'): - def test_getresuid(self): - def f(): - a, b, c = os.getresuid() - return a + b * 37 + c * 1291 - res = self.interpret(f, []) + @py.test.mark.skipif("not hasattr(os, 'getresuid')") + def test_getresuid(self): + def f(): a, b, c = os.getresuid() - assert res == a + b * 37 + c * 1291 + return a + b * 37 + c * 1291 + res = self.interpret(f, []) + a, b, c = os.getresuid() + assert res == a + b * 37 + c * 1291 - if hasattr(os, 'getresgid'): - def test_getresgid(self): - def f(): - a, b, c = 
os.getresgid() - return a + b * 37 + c * 1291 - res = self.interpret(f, []) + @py.test.mark.skipif("not hasattr(os, 'getresgid')") + def test_getresgid(self): + def f(): a, b, c = os.getresgid() - assert res == a + b * 37 + c * 1291 + return a + b * 37 + c * 1291 + res = self.interpret(f, []) + a, b, c = os.getresgid() + assert res == a + b * 37 + c * 1291 - if hasattr(os, 'setresuid'): - def test_setresuid(self): - def f(): - a, b, c = os.getresuid() - a = (a + 1) - 1 - os.setresuid(a, b, c) - self.interpret(f, []) + @py.test.mark.skipif("not hasattr(os, 'setresuid')") + def test_setresuid(self): + def f(): + a, b, c = os.getresuid() + a = (a + 1) - 1 + os.setresuid(a, b, c) + self.interpret(f, []) - if hasattr(os, 'setresgid'): - def test_setresgid(self): - def f(): - a, b, c = os.getresgid() - a = (a + 1) - 1 - os.setresgid(a, b, c) - self.interpret(f, []) + @py.test.mark.skipif("not hasattr(os, 'setresgid')") + def test_setresgid(self): + def f(): + a, b, c = os.getresgid() + a = (a + 1) - 1 + os.setresgid(a, b, c) + self.interpret(f, []) From pypy.commits at gmail.com Fri Feb 26 11:33:21 2016 From: pypy.commits at gmail.com (mjacob) Date: Fri, 26 Feb 2016 08:33:21 -0800 (PST) Subject: [pypy-commit] pypy py3k: Remove py3k modification which is not needed since 4f2954127738. Message-ID: <56d07e51.0357c20a.f94a9.7609@mx.google.com> Author: Manuel Jacob Branch: py3k Changeset: r82570:e3a8051cb5b8 Date: 2016-02-26 17:33 +0100 http://bitbucket.org/pypy/pypy/changeset/e3a8051cb5b8/ Log: Remove py3k modification which is not needed since 4f2954127738. 
diff --git a/pypy/module/thread/test/test_thread.py b/pypy/module/thread/test/test_thread.py --- a/pypy/module/thread/test/test_thread.py +++ b/pypy/module/thread/test/test_thread.py @@ -243,14 +243,14 @@ def busy_wait(): waiting.append(None) - for x in range(100): + for x in range(50): time.sleep(0.1) waiting.pop() # This is normally called by app_main.py signal.signal(signal.SIGINT, signal.default_int_handler) - for i in range(10): + for i in range(100): print() print("loop", i) waiting = [] From pypy.commits at gmail.com Fri Feb 26 13:20:39 2016 From: pypy.commits at gmail.com (mjacob) Date: Fri, 26 Feb 2016 10:20:39 -0800 (PST) Subject: [pypy-commit] pypy py3.3: hg merge py3k Message-ID: <56d09777.01adc20a.f6add.ffffaa05@mx.google.com> Author: Manuel Jacob Branch: py3.3 Changeset: r82573:e484ea33125b Date: 2016-02-26 19:20 +0100 http://bitbucket.org/pypy/pypy/changeset/e484ea33125b/ Log: hg merge py3k diff --git a/pypy/doc/whatsnew-head.rst b/pypy/doc/whatsnew-head.rst --- a/pypy/doc/whatsnew-head.rst +++ b/pypy/doc/whatsnew-head.rst @@ -170,4 +170,17 @@ When creating instances and adding attributes in several different orders depending on some condition, the JIT would create too much code. This is now -fixed. \ No newline at end of file +fixed. + +.. branch: cpyext-gc-support-2 + +Improve CPython C API support, which means lxml now runs unmodified +(after removing pypy hacks, pending pull request) + +.. branch: look-inside-tuple-hash + +Look inside tuple hash, improving mdp benchmark + +.. 
branch: vlen-resume + +Compress resume data, saving 10-20% of memory consumed by the JIT \ No newline at end of file diff --git a/pypy/module/imp/test/test_import.py b/pypy/module/imp/test/test_import.py --- a/pypy/module/imp/test/test_import.py +++ b/pypy/module/imp/test/test_import.py @@ -65,7 +65,7 @@ test_reload = "def test():\n raise ValueError\n", infinite_reload = "import infinite_reload, imp; imp.reload(infinite_reload)", del_sys_module = "import sys\ndel sys.modules['del_sys_module']\n", - itertools = "hello_world = 42\n", + _md5 = "hello_world = 42\n", gc = "should_never_be_seen = 42\n", ) root.ensure("packagenamespace", dir=1) # empty, no __init__.py @@ -202,7 +202,7 @@ class AppTestImport(BaseImportTest): spaceconfig = { - "usemodules": ['time', 'struct'], + "usemodules": ['_md5', 'time', 'struct'], } def setup_class(cls): @@ -696,13 +696,15 @@ def test_shadow_extension_1(self): if self.runappdirect: skip("hard to test: module is already imported") + # 'import _md5' is supposed to find _md5.py if there is + # one in sys.path. import sys - sys.modules.pop('itertools', None) - import itertools - assert hasattr(itertools, 'hello_world') - assert not hasattr(itertools, 'count') - assert '(built-in)' not in repr(itertools) - del sys.modules['itertools'] + assert '_md5' not in sys.modules + import _md5 + assert hasattr(_md5, 'hello_world') + assert not hasattr(_md5, 'md5') + assert '(built-in)' not in repr(_md5) + del sys.modules['_md5'] def test_shadow_extension_2(self): if self.runappdirect: skip("hard to test: module is already imported") @@ -712,16 +714,16 @@ # there is one in lib_pypy/_md5.py, which should not be seen # either; hence the (built-in) test below.) 
import sys - sys.modules.pop('itertools', None) + assert '_md5' not in sys.modules sys.path.append(sys.path.pop(0)) try: - import itertools - assert not hasattr(itertools, 'hello_world') - assert hasattr(itertools, 'islice') - assert '(built-in)' in repr(itertools) + import _md5 + assert not hasattr(_md5, 'hello_world') + assert hasattr(_md5, 'md5') + assert '(built-in)' in repr(_md5) finally: sys.path.insert(0, sys.path.pop()) - del sys.modules['itertools'] + del sys.modules['_md5'] def test_invalid_pathname(self): skip("This test fails on CPython 3.3, but passes on CPython 3.4+") diff --git a/pypy/module/thread/test/test_thread.py b/pypy/module/thread/test/test_thread.py --- a/pypy/module/thread/test/test_thread.py +++ b/pypy/module/thread/test/test_thread.py @@ -239,20 +239,18 @@ if waiting: _thread.interrupt_main() return - print('tock...', x) # <-force the GIL to be released, as - time.sleep(0.1) # time.sleep doesn't do non-translated + time.sleep(0.1) def busy_wait(): waiting.append(None) - for x in range(100): - print('tick...', x) # <-force the GIL to be released, as - time.sleep(0.1) # time.sleep doesn't do non-translated + for x in range(50): + time.sleep(0.1) waiting.pop() # This is normally called by app_main.py signal.signal(signal.SIGINT, signal.default_int_handler) - for i in range(10): + for i in range(100): print() print("loop", i) waiting = [] diff --git a/pypy/module/time/interp_time.py b/pypy/module/time/interp_time.py --- a/pypy/module/time/interp_time.py +++ b/pypy/module/time/interp_time.py @@ -5,7 +5,7 @@ from rpython.rtyper.lltypesystem import lltype from rpython.rlib.rarithmetic import intmask from rpython.rlib.rtime import win_perf_counter -from rpython.rlib import rposix +from rpython.rlib import rposix, rtime from rpython.translator.tool.cbuild import ExternalCompilationInfo import math import os @@ -339,13 +339,13 @@ if secs < 0: raise OperationError(space.w_ValueError, space.wrap("sleep length must be non-negative")) - 
pytime.sleep(secs) + rtime.sleep(secs) else: from rpython.rlib import rwin32 from errno import EINTR def _simple_sleep(space, secs, interruptible): if secs == 0.0 or not interruptible: - pytime.sleep(secs) + rtime.sleep(secs) else: millisecs = int(secs * 1000) interrupt_event = space.fromcache(State).get_interrupt_event() @@ -354,7 +354,7 @@ if rc == rwin32.WAIT_OBJECT_0: # Yield to make sure real Python signal handler # called. - pytime.sleep(0.001) + rtime.sleep(0.001) raise wrap_oserror(space, OSError(EINTR, "sleep() interrupted")) @unwrap_spec(secs=float) diff --git a/pypy/objspace/std/mapdict.py b/pypy/objspace/std/mapdict.py --- a/pypy/objspace/std/mapdict.py +++ b/pypy/objspace/std/mapdict.py @@ -65,21 +65,8 @@ def delete(self, obj, name, index): pass + @jit.elidable def find_map_attr(self, name, index): - if jit.we_are_jitted(): - # hack for the jit: - # the _find_map_attr method is pure too, but its argument is never - # constant, because it is always a new tuple - return self._find_map_attr_jit_pure(name, index) - else: - return self._find_map_attr_indirection(name, index) - - @jit.elidable - def _find_map_attr_jit_pure(self, name, index): - return self._find_map_attr_indirection(name, index) - - @jit.dont_look_inside - def _find_map_attr_indirection(self, name, index): if (self.space.config.objspace.std.withmethodcache): return self._find_map_attr_cache(name, index) return self._find_map_attr(name, index) diff --git a/pypy/tool/test/test_tab.py b/pypy/tool/test/test_tab.py --- a/pypy/tool/test/test_tab.py +++ b/pypy/tool/test/test_tab.py @@ -6,6 +6,7 @@ from pypy.conftest import pypydir ROOT = os.path.abspath(os.path.join(pypydir, '..')) +RPYTHONDIR = os.path.join(ROOT, "rpython") EXCLUDE = {'/virt_test/lib/python2.7/site-packages/setuptools'} @@ -28,3 +29,27 @@ if not entry.startswith('.'): walk('%s/%s' % (reldir, entry)) walk('') + +def test_no_pypy_import_in_rpython(): + def walk(reldir): + print reldir + if reldir: + path = os.path.join(RPYTHONDIR, 
*reldir.split('/')) + else: + path = RPYTHONDIR + if os.path.isfile(path): + if not path.lower().endswith('.py'): + return + with file(path) as f: + for line in f: + if "import" not in line: + continue + assert "from pypy." not in line + assert "import pypy." not in line + elif os.path.isdir(path) and not os.path.islink(path): + for entry in os.listdir(path): + if not entry.startswith('.'): + walk('%s/%s' % (reldir, entry)) + + walk('') + diff --git a/rpython/jit/metainterp/test/test_jitdriver.py b/rpython/jit/metainterp/test/test_jitdriver.py --- a/rpython/jit/metainterp/test/test_jitdriver.py +++ b/rpython/jit/metainterp/test/test_jitdriver.py @@ -227,7 +227,7 @@ i += 1 self.meta_interp(f, [0]) - self.check_resops(enter_portal_frame=1, leave_portal_frame=1) + self.check_simple_loop(enter_portal_frame=1, leave_portal_frame=1) class TestLLtype(MultipleJitDriversTests, LLJitMixin): pass diff --git a/rpython/rlib/test/test_posix.py b/rpython/rlib/test/test_posix.py --- a/rpython/rlib/test/test_posix.py +++ b/rpython/rlib/test/test_posix.py @@ -1,4 +1,4 @@ -import py +import py.test from rpython.rtyper.test.tool import BaseRtypingTest from rpython.rtyper.annlowlevel import hlstr from rpython.tool.udir import udir @@ -58,7 +58,7 @@ assert res def test_times(self): - import py; py.test.skip("llinterp does not like tuple returns") + py.test.skip("llinterp does not like tuple returns") from rpython.rtyper.test.test_llinterp import interpret times = interpret(lambda: posix.times(), ()) assert isinstance(times, tuple) @@ -119,21 +119,21 @@ res = self.interpret(f,[fi,20]) assert self.ll_to_string(res) == text - if hasattr(os, 'chown'): - def test_chown(self): - f = open(path, "w") - f.write("xyz") - f.close() - def f(): - try: - posix.chown(path, os.getuid(), os.getgid()) - return 1 - except OSError: - return 2 + @py.test.mark.skipif("not hasattr(os, 'chown')") + def test_chown(self): + f = open(path, "w") + f.write("xyz") + f.close() + def f(): + try: + posix.chown(path, 
os.getuid(), os.getgid()) + return 1 + except OSError: + return 2 - assert self.interpret(f, []) == 1 - os.unlink(path) - assert self.interpret(f, []) == 2 + assert self.interpret(f, []) == 1 + os.unlink(path) + assert self.interpret(f, []) == 2 def test_close(self): def f(fi): @@ -144,70 +144,70 @@ res = self.interpret(f,[fi]) py.test.raises( OSError, os.fstat, fi) - if hasattr(os, 'ftruncate'): - def test_ftruncate(self): - def f(fi,len): - os.ftruncate(fi,len) - fi = os.open(path,os.O_RDWR,0777) - func = self.interpret(f,[fi,6]) - assert os.fstat(fi).st_size == 6 + @py.test.mark.skipif("not hasattr(os, 'ftruncate')") + def test_ftruncate(self): + def f(fi,len): + os.ftruncate(fi,len) + fi = os.open(path,os.O_RDWR,0777) + func = self.interpret(f,[fi,6]) + assert os.fstat(fi).st_size == 6 - if hasattr(os, 'getuid'): - def test_getuid(self): - def f(): - return os.getuid() - assert self.interpret(f, []) == f() + @py.test.mark.skipif("not hasattr(os, 'getuid')") + def test_getuid(self): + def f(): + return os.getuid() + assert self.interpret(f, []) == f() - if hasattr(os, 'getgid'): - def test_getgid(self): - def f(): - return os.getgid() - assert self.interpret(f, []) == f() + @py.test.mark.skipif("not hasattr(os, 'getgid')") + def test_getgid(self): + def f(): + return os.getgid() + assert self.interpret(f, []) == f() - if hasattr(os, 'setuid'): - def test_os_setuid(self): - def f(): - os.setuid(os.getuid()) - return os.getuid() - assert self.interpret(f, []) == f() + @py.test.mark.skipif("not hasattr(os, 'setuid')") + def test_os_setuid(self): + def f(): + os.setuid(os.getuid()) + return os.getuid() + assert self.interpret(f, []) == f() - if hasattr(os, 'sysconf'): - def test_os_sysconf(self): - def f(i): - return os.sysconf(i) - assert self.interpret(f, [13]) == f(13) + @py.test.mark.skipif("not hasattr(os, 'sysconf')") + def test_os_sysconf(self): + def f(i): + return os.sysconf(i) + assert self.interpret(f, [13]) == f(13) - if hasattr(os, 'confstr'): - def 
test_os_confstr(self): - def f(i): - try: - return os.confstr(i) - except OSError: - return "oooops!!" - some_value = os.confstr_names.values()[-1] - res = self.interpret(f, [some_value]) - assert hlstr(res) == f(some_value) - res = self.interpret(f, [94781413]) - assert hlstr(res) == "oooops!!" + @py.test.mark.skipif("not hasattr(os, 'confstr')") + def test_os_confstr(self): + def f(i): + try: + return os.confstr(i) + except OSError: + return "oooops!!" + some_value = os.confstr_names.values()[-1] + res = self.interpret(f, [some_value]) + assert hlstr(res) == f(some_value) + res = self.interpret(f, [94781413]) + assert hlstr(res) == "oooops!!" - if hasattr(os, 'pathconf'): - def test_os_pathconf(self): - def f(i): - return os.pathconf("/tmp", i) - i = os.pathconf_names["PC_NAME_MAX"] - some_value = self.interpret(f, [i]) - assert some_value >= 31 + @py.test.mark.skipif("not hasattr(os, 'pathconf')") + def test_os_pathconf(self): + def f(i): + return os.pathconf("/tmp", i) + i = os.pathconf_names["PC_NAME_MAX"] + some_value = self.interpret(f, [i]) + assert some_value >= 31 - if hasattr(os, 'chroot'): - def test_os_chroot(self): - def f(): - try: - os.chroot('!@$#!#%$#^#@!#!$$#^') - except OSError: - return 1 - return 0 + @py.test.mark.skipif("not hasattr(os, 'chroot')") + def test_os_chroot(self): + def f(): + try: + os.chroot('!@$#!#%$#^#@!#!$$#^') + except OSError: + return 1 + return 0 - assert self.interpret(f, []) == 1 + assert self.interpret(f, []) == 1 def test_os_wstar(self): from rpython.rlib import rposix @@ -221,84 +221,84 @@ res = self.interpret(fun, [value]) assert res == fun(value) - if hasattr(os, 'getgroups'): - def test_getgroups(self): - def f(): - return os.getgroups() - ll_a = self.interpret(f, []) - assert self.ll_to_list(ll_a) == f() + @py.test.mark.skipif("not hasattr(os, 'getgroups')") + def test_getgroups(self): + def f(): + return os.getgroups() + ll_a = self.interpret(f, []) + assert self.ll_to_list(ll_a) == f() - if hasattr(os, 
'setgroups'): - def test_setgroups(self): - def f(): - try: - os.setgroups(os.getgroups()) - except OSError: - pass - self.interpret(f, []) + @py.test.mark.skipif("not hasattr(os, 'setgroups')") + def test_setgroups(self): + def f(): + try: + os.setgroups(os.getgroups()) + except OSError: + pass + self.interpret(f, []) - if hasattr(os, 'initgroups'): - def test_initgroups(self): - def f(): - try: - os.initgroups('sUJJeumz', 4321) - except OSError: - return 1 - return 0 - res = self.interpret(f, []) - assert res == 1 + @py.test.mark.skipif("not hasattr(os, 'initgroups')") + def test_initgroups(self): + def f(): + try: + os.initgroups('sUJJeumz', 4321) + except OSError: + return 1 + return 0 + res = self.interpret(f, []) + assert res == 1 - if hasattr(os, 'tcgetpgrp'): - def test_tcgetpgrp(self): - def f(fd): - try: - return os.tcgetpgrp(fd) - except OSError: - return 42 - res = self.interpret(f, [9999]) - assert res == 42 + @py.test.mark.skipif("not hasattr(os, 'tcgetpgrp')") + def test_tcgetpgrp(self): + def f(fd): + try: + return os.tcgetpgrp(fd) + except OSError: + return 42 + res = self.interpret(f, [9999]) + assert res == 42 - if hasattr(os, 'tcsetpgrp'): - def test_tcsetpgrp(self): - def f(fd, pgrp): - try: - os.tcsetpgrp(fd, pgrp) - except OSError: - return 1 - return 0 - res = self.interpret(f, [9999, 1]) - assert res == 1 + @py.test.mark.skipif("not hasattr(os, 'tcsetpgrp')") + def test_tcsetpgrp(self): + def f(fd, pgrp): + try: + os.tcsetpgrp(fd, pgrp) + except OSError: + return 1 + return 0 + res = self.interpret(f, [9999, 1]) + assert res == 1 - if hasattr(os, 'getresuid'): - def test_getresuid(self): - def f(): - a, b, c = os.getresuid() - return a + b * 37 + c * 1291 - res = self.interpret(f, []) + @py.test.mark.skipif("not hasattr(os, 'getresuid')") + def test_getresuid(self): + def f(): a, b, c = os.getresuid() - assert res == a + b * 37 + c * 1291 + return a + b * 37 + c * 1291 + res = self.interpret(f, []) + a, b, c = os.getresuid() + assert res == 
a + b * 37 + c * 1291 - if hasattr(os, 'getresgid'): - def test_getresgid(self): - def f(): - a, b, c = os.getresgid() - return a + b * 37 + c * 1291 - res = self.interpret(f, []) + @py.test.mark.skipif("not hasattr(os, 'getresgid')") + def test_getresgid(self): + def f(): a, b, c = os.getresgid() - assert res == a + b * 37 + c * 1291 + return a + b * 37 + c * 1291 + res = self.interpret(f, []) + a, b, c = os.getresgid() + assert res == a + b * 37 + c * 1291 - if hasattr(os, 'setresuid'): - def test_setresuid(self): - def f(): - a, b, c = os.getresuid() - a = (a + 1) - 1 - os.setresuid(a, b, c) - self.interpret(f, []) + @py.test.mark.skipif("not hasattr(os, 'setresuid')") + def test_setresuid(self): + def f(): + a, b, c = os.getresuid() + a = (a + 1) - 1 + os.setresuid(a, b, c) + self.interpret(f, []) - if hasattr(os, 'setresgid'): - def test_setresgid(self): - def f(): - a, b, c = os.getresgid() - a = (a + 1) - 1 - os.setresgid(a, b, c) - self.interpret(f, []) + @py.test.mark.skipif("not hasattr(os, 'setresgid')") + def test_setresgid(self): + def f(): + a, b, c = os.getresgid() + a = (a + 1) - 1 + os.setresgid(a, b, c) + self.interpret(f, []) From pypy.commits at gmail.com Fri Feb 26 13:20:37 2016 From: pypy.commits at gmail.com (mjacob) Date: Fri, 26 Feb 2016 10:20:37 -0800 (PST) Subject: [pypy-commit] pypy py3k: hg merge default Message-ID: <56d09775.034cc20a.d4ccb.ffffa8da@mx.google.com> Author: Manuel Jacob Branch: py3k Changeset: r82572:f564a192eb18 Date: 2016-02-26 19:13 +0100 http://bitbucket.org/pypy/pypy/changeset/f564a192eb18/ Log: hg merge default Also, bring both test_shadow_extension_* tests closer to default. 
diff --git a/pypy/module/imp/test/test_import.py b/pypy/module/imp/test/test_import.py --- a/pypy/module/imp/test/test_import.py +++ b/pypy/module/imp/test/test_import.py @@ -41,7 +41,7 @@ test_reload = "def test():\n raise ValueError\n", infinite_reload = "import infinite_reload, imp; imp.reload(infinite_reload)", del_sys_module = "import sys\ndel sys.modules['del_sys_module']\n", - itertools = "hello_world = 42\n", + _md5 = "hello_world = 42\n", gc = "should_never_be_seen = 42\n", ) root.ensure("notapackage", dir=1) # empty, no __init__.py @@ -209,7 +209,7 @@ class AppTestImport(BaseImportTest): spaceconfig = { - "usemodules": ['time', 'struct'], + "usemodules": ['_md5', 'time', 'struct'], } def setup_class(cls): @@ -732,13 +732,15 @@ def test_shadow_extension_1(self): if self.runappdirect: skip("hard to test: module is already imported") + # 'import _md5' is supposed to find _md5.py if there is + # one in sys.path. import sys - sys.modules.pop('itertools', None) - import itertools - assert hasattr(itertools, 'hello_world') - assert not hasattr(itertools, 'count') - assert '(built-in)' not in repr(itertools) - del sys.modules['itertools'] + assert '_md5' not in sys.modules + import _md5 + assert hasattr(_md5, 'hello_world') + assert not hasattr(_md5, 'md5') + assert '(built-in)' not in repr(_md5) + del sys.modules['_md5'] def test_shadow_extension_2(self): if self.runappdirect: skip("hard to test: module is already imported") @@ -748,16 +750,16 @@ # there is one in lib_pypy/_md5.py, which should not be seen # either; hence the (built-in) test below.) 
import sys - sys.modules.pop('itertools', None) + assert '_md5' not in sys.modules sys.path.append(sys.path.pop(0)) try: - import itertools - assert not hasattr(itertools, 'hello_world') - assert hasattr(itertools, 'islice') - assert '(built-in)' in repr(itertools) + import _md5 + assert not hasattr(_md5, 'hello_world') + assert hasattr(_md5, 'md5') + assert '(built-in)' in repr(_md5) finally: sys.path.insert(0, sys.path.pop()) - del sys.modules['itertools'] + del sys.modules['_md5'] def test_invalid_pathname(self): import imp From pypy.commits at gmail.com Fri Feb 26 16:25:26 2016 From: pypy.commits at gmail.com (mjacob) Date: Fri, 26 Feb 2016 13:25:26 -0800 (PST) Subject: [pypy-commit] pypy default: Use try / finally to remove module even in case of an exception. Message-ID: <56d0c2c6.84b61c0a.2e19f.ffff9c8d@mx.google.com> Author: Manuel Jacob Branch: Changeset: r82574:b9f8538d63ca Date: 2016-02-26 22:21 +0100 http://bitbucket.org/pypy/pypy/changeset/b9f8538d63ca/ Log: Use try / finally to remove module even in case of an exception. diff --git a/pypy/module/imp/test/test_import.py b/pypy/module/imp/test/test_import.py --- a/pypy/module/imp/test/test_import.py +++ b/pypy/module/imp/test/test_import.py @@ -652,11 +652,13 @@ # one in sys.path. 
import sys assert '_md5' not in sys.modules - import _md5 - assert hasattr(_md5, 'hello_world') - assert not hasattr(_md5, 'digest_size') - assert '(built-in)' not in repr(_md5) - del sys.modules['_md5'] + try: + import _md5 + assert hasattr(_md5, 'hello_world') + assert not hasattr(_md5, 'digest_size') + assert '(built-in)' not in repr(_md5) + finally: + sys.modules.pop('_md5', None) def test_shadow_extension_2(self): if self.runappdirect: skip("hard to test: module is already imported") @@ -675,7 +677,7 @@ assert '(built-in)' in repr(_md5) finally: sys.path.insert(0, sys.path.pop()) - del sys.modules['_md5'] + sys.modules.pop('_md5', None) def test_invalid_pathname(self): import imp From pypy.commits at gmail.com Fri Feb 26 16:25:28 2016 From: pypy.commits at gmail.com (mjacob) Date: Fri, 26 Feb 2016 13:25:28 -0800 (PST) Subject: [pypy-commit] pypy py3k: hg merge default Message-ID: <56d0c2c8.080a1c0a.4cd87.ffffa0e1@mx.google.com> Author: Manuel Jacob Branch: py3k Changeset: r82575:6dfc20f28921 Date: 2016-02-26 22:24 +0100 http://bitbucket.org/pypy/pypy/changeset/6dfc20f28921/ Log: hg merge default diff --git a/pypy/module/imp/test/test_import.py b/pypy/module/imp/test/test_import.py --- a/pypy/module/imp/test/test_import.py +++ b/pypy/module/imp/test/test_import.py @@ -736,11 +736,13 @@ # one in sys.path. 
import sys assert '_md5' not in sys.modules - import _md5 - assert hasattr(_md5, 'hello_world') - assert not hasattr(_md5, 'md5') - assert '(built-in)' not in repr(_md5) - del sys.modules['_md5'] + try: + import _md5 + assert hasattr(_md5, 'hello_world') + assert not hasattr(_md5, 'md5') + assert '(built-in)' not in repr(_md5) + finally: + sys.modules.pop('_md5', None) def test_shadow_extension_2(self): if self.runappdirect: skip("hard to test: module is already imported") @@ -759,7 +761,7 @@ assert '(built-in)' in repr(_md5) finally: sys.path.insert(0, sys.path.pop()) - del sys.modules['_md5'] + sys.modules.pop('_md5', None) def test_invalid_pathname(self): import imp From pypy.commits at gmail.com Fri Feb 26 16:25:30 2016 From: pypy.commits at gmail.com (mjacob) Date: Fri, 26 Feb 2016 13:25:30 -0800 (PST) Subject: [pypy-commit] pypy py3.3: hg merge py3k Message-ID: <56d0c2ca.44e01c0a.84179.ffffa134@mx.google.com> Author: Manuel Jacob Branch: py3.3 Changeset: r82576:48dfb592dfb3 Date: 2016-02-26 22:24 +0100 http://bitbucket.org/pypy/pypy/changeset/48dfb592dfb3/ Log: hg merge py3k diff --git a/pypy/module/imp/test/test_import.py b/pypy/module/imp/test/test_import.py --- a/pypy/module/imp/test/test_import.py +++ b/pypy/module/imp/test/test_import.py @@ -700,11 +700,13 @@ # one in sys.path. 
import sys assert '_md5' not in sys.modules - import _md5 - assert hasattr(_md5, 'hello_world') - assert not hasattr(_md5, 'md5') - assert '(built-in)' not in repr(_md5) - del sys.modules['_md5'] + try: + import _md5 + assert hasattr(_md5, 'hello_world') + assert not hasattr(_md5, 'md5') + assert '(built-in)' not in repr(_md5) + finally: + sys.modules.pop('_md5', None) def test_shadow_extension_2(self): if self.runappdirect: skip("hard to test: module is already imported") @@ -723,7 +725,7 @@ assert '(built-in)' in repr(_md5) finally: sys.path.insert(0, sys.path.pop()) - del sys.modules['_md5'] + sys.modules.pop('_md5', None) def test_invalid_pathname(self): skip("This test fails on CPython 3.3, but passes on CPython 3.4+") From pypy.commits at gmail.com Fri Feb 26 18:18:39 2016 From: pypy.commits at gmail.com (mjacob) Date: Fri, 26 Feb 2016 15:18:39 -0800 (PST) Subject: [pypy-commit] pypy py3k: Skip test_bad_data(). See comment for details. Message-ID: <56d0dd4f.44e21c0a.35ba1.ffff8d00@mx.google.com> Author: Manuel Jacob Branch: py3k Changeset: r82577:cb3ee53b1885 Date: 2016-02-26 23:52 +0100 http://bitbucket.org/pypy/pypy/changeset/cb3ee53b1885/ Log: Skip test_bad_data(). See comment for details. diff --git a/pypy/module/marshal/test/test_marshal.py b/pypy/module/marshal/test/test_marshal.py --- a/pypy/module/marshal/test/test_marshal.py +++ b/pypy/module/marshal/test/test_marshal.py @@ -200,6 +200,12 @@ assert str(exc.value) == "bad marshal data (unknown type code)" def test_bad_data(self): + # If you have sufficiently little memory, the line at the end of the + # test will fail immediately. If not, the test will consume high + # amounts of memory and make your system unstable. CPython (I tried + # 3.3 and 3.5) shows the same behaviour on my computers (4 GB and 12 GB). 
+ skip("takes too much memory") + import marshal # Yes, there is code that depends on this :-( raises(EOFError, marshal.loads, b'') From pypy.commits at gmail.com Fri Feb 26 18:18:41 2016 From: pypy.commits at gmail.com (mjacob) Date: Fri, 26 Feb 2016 15:18:41 -0800 (PST) Subject: [pypy-commit] pypy default: Rewrite _vmprof test slightly to make it work on py3k. Message-ID: <56d0dd51.55031c0a.6fd73.7b5d@mx.google.com> Author: Manuel Jacob Branch: Changeset: r82578:de33a380d179 Date: 2016-02-27 00:18 +0100 http://bitbucket.org/pypy/pypy/changeset/de33a380d179/ Log: Rewrite _vmprof test slightly to make it work on py3k. The problem with the previous way it was written is that in the py3k branch the test is executed in a separate process, where the file descriptor opened in the setup method is not available anymore. diff --git a/pypy/module/_vmprof/test/test__vmprof.py b/pypy/module/_vmprof/test/test__vmprof.py --- a/pypy/module/_vmprof/test/test__vmprof.py +++ b/pypy/module/_vmprof/test/test__vmprof.py @@ -5,14 +5,15 @@ class AppTestVMProf(object): def setup_class(cls): cls.space = gettestobjspace(usemodules=['_vmprof', 'struct']) - cls.tmpfile = udir.join('test__vmprof.1').open('wb') - cls.w_tmpfileno = cls.space.wrap(cls.tmpfile.fileno()) - cls.w_tmpfilename = cls.space.wrap(cls.tmpfile.name) - cls.tmpfile2 = udir.join('test__vmprof.2').open('wb') - cls.w_tmpfileno2 = cls.space.wrap(cls.tmpfile2.fileno()) - cls.w_tmpfilename2 = cls.space.wrap(cls.tmpfile2.name) + cls.w_tmpfilename = cls.space.wrap(str(udir.join('test__vmprof.1'))) + cls.w_tmpfilename2 = cls.space.wrap(str(udir.join('test__vmprof.2'))) def test_import_vmprof(self): + tmpfile = open(self.tmpfilename, 'wb') + tmpfileno = tmpfile.fileno() + tmpfile2 = open(self.tmpfilename2, 'wb') + tmpfileno2 = tmpfile2.fileno() + import struct, sys WORD = struct.calcsize('l') @@ -45,7 +46,7 @@ return count import _vmprof - _vmprof.enable(self.tmpfileno, 0.01) + _vmprof.enable(tmpfileno, 0.01) _vmprof.disable() s = 
open(self.tmpfilename, 'rb').read() no_of_codes = count(s) @@ -56,7 +57,7 @@ pass """ in d - _vmprof.enable(self.tmpfileno2, 0.01) + _vmprof.enable(tmpfileno2, 0.01) exec """def foo2(): pass From pypy.commits at gmail.com Fri Feb 26 19:39:09 2016 From: pypy.commits at gmail.com (mjacob) Date: Fri, 26 Feb 2016 16:39:09 -0800 (PST) Subject: [pypy-commit] pypy py3k: Port failing test from eea8f92e03. Message-ID: <56d0f02d.05e41c0a.304f6.ffffc3ad@mx.google.com> Author: Manuel Jacob Branch: py3k Changeset: r82579:a3d2806acab8 Date: 2016-02-27 01:23 +0100 http://bitbucket.org/pypy/pypy/changeset/a3d2806acab8/ Log: Port failing test from eea8f92e03. eea8f92e03 should have been done in py3k. I'm going to apply the rpython changes to default to reduce differences between branches. After that I'll port the changes to fix the test. diff --git a/pypy/module/_socket/test/test_sock_app.py b/pypy/module/_socket/test/test_sock_app.py --- a/pypy/module/_socket/test/test_sock_app.py +++ b/pypy/module/_socket/test/test_sock_app.py @@ -614,7 +614,7 @@ import _socket self.foo = _socket.socket() - def test_subclass(self): + def test_subclass_init(self): # Socket is not created in __new__, but in __init__. 
import socket class Socket_IPV6(socket.socket): @@ -622,6 +622,18 @@ socket.socket.__init__(self, family=socket.AF_INET6) assert Socket_IPV6().family == socket.AF_INET6 + def test_subclass_noinit(self): + from _socket import socket + class MySock(socket): + def __init__(self, *args): + pass # don't call super + s = MySock() + assert s.type == 0 + assert s.proto == 0 + assert s.family == 0 + assert s.fileno() < 0 + raises(OSError, s.bind, ('localhost', 0)) + def test_dealloc_warn(self): import _socket import gc From pypy.commits at gmail.com Fri Feb 26 19:39:13 2016 From: pypy.commits at gmail.com (mjacob) Date: Fri, 26 Feb 2016 16:39:13 -0800 (PST) Subject: [pypy-commit] pypy py3k: hg merge default Message-ID: <56d0f031.865a1c0a.804ad.ffffc8fc@mx.google.com> Author: Manuel Jacob Branch: py3k Changeset: r82581:d5c477e0d86f Date: 2016-02-27 01:27 +0100 http://bitbucket.org/pypy/pypy/changeset/d5c477e0d86f/ Log: hg merge default diff --git a/pypy/module/_vmprof/test/test__vmprof.py b/pypy/module/_vmprof/test/test__vmprof.py --- a/pypy/module/_vmprof/test/test__vmprof.py +++ b/pypy/module/_vmprof/test/test__vmprof.py @@ -5,14 +5,15 @@ class AppTestVMProf(object): def setup_class(cls): cls.space = gettestobjspace(usemodules=['_vmprof', 'struct']) - cls.tmpfile = udir.join('test__vmprof.1').open('wb') - cls.w_tmpfileno = cls.space.wrap(cls.tmpfile.fileno()) - cls.w_tmpfilename = cls.space.wrap(cls.tmpfile.name) - cls.tmpfile2 = udir.join('test__vmprof.2').open('wb') - cls.w_tmpfileno2 = cls.space.wrap(cls.tmpfile2.fileno()) - cls.w_tmpfilename2 = cls.space.wrap(cls.tmpfile2.name) + cls.w_tmpfilename = cls.space.wrap(str(udir.join('test__vmprof.1'))) + cls.w_tmpfilename2 = cls.space.wrap(str(udir.join('test__vmprof.2'))) def test_import_vmprof(self): + tmpfile = open(self.tmpfilename, 'wb') + tmpfileno = tmpfile.fileno() + tmpfile2 = open(self.tmpfilename2, 'wb') + tmpfileno2 = tmpfile2.fileno() + import struct, sys WORD = struct.calcsize('l') @@ -45,7 +46,7 @@ return 
count import _vmprof - _vmprof.enable(self.tmpfileno, 0.01) + _vmprof.enable(tmpfileno, 0.01) _vmprof.disable() s = open(self.tmpfilename, 'rb').read() no_of_codes = count(s) @@ -59,7 +60,7 @@ pass """, d) - _vmprof.enable(self.tmpfileno2, 0.01) + _vmprof.enable(tmpfileno2, 0.01) exec_("""def foo2(): pass diff --git a/rpython/rlib/rsocket.py b/rpython/rlib/rsocket.py --- a/rpython/rlib/rsocket.py +++ b/rpython/rlib/rsocket.py @@ -516,6 +516,10 @@ """RPython-level socket object. """ fd = _c.INVALID_SOCKET + family = 0 + type = 0 + proto = 0 + timeout = -1.0 def __init__(self, family=AF_INET, type=SOCK_STREAM, proto=0, fd=_c.INVALID_SOCKET): @@ -531,6 +535,11 @@ self.proto = proto self.timeout = defaults.timeout + @staticmethod + def empty_rsocket(): + rsocket = instantiate(RSocket) + return rsocket + @rgc.must_be_light_finalizer def __del__(self): fd = self.fd From pypy.commits at gmail.com Fri Feb 26 19:39:11 2016 From: pypy.commits at gmail.com (mjacob) Date: Fri, 26 Feb 2016 16:39:11 -0800 (PST) Subject: [pypy-commit] pypy default: Port rpython changes from eea8f92e03. Message-ID: <56d0f02f.080a1c0a.4cd87.ffffc899@mx.google.com> Author: Manuel Jacob Branch: Changeset: r82580:71df1c9619fa Date: 2016-02-27 01:25 +0100 http://bitbucket.org/pypy/pypy/changeset/71df1c9619fa/ Log: Port rpython changes from eea8f92e03. diff --git a/rpython/rlib/rsocket.py b/rpython/rlib/rsocket.py --- a/rpython/rlib/rsocket.py +++ b/rpython/rlib/rsocket.py @@ -516,6 +516,10 @@ """RPython-level socket object. 
""" fd = _c.INVALID_SOCKET + family = 0 + type = 0 + proto = 0 + timeout = -1.0 def __init__(self, family=AF_INET, type=SOCK_STREAM, proto=0, fd=_c.INVALID_SOCKET): @@ -531,6 +535,11 @@ self.proto = proto self.timeout = defaults.timeout + @staticmethod + def empty_rsocket(): + rsocket = instantiate(RSocket) + return rsocket + @rgc.must_be_light_finalizer def __del__(self): fd = self.fd From pypy.commits at gmail.com Fri Feb 26 19:39:15 2016 From: pypy.commits at gmail.com (mjacob) Date: Fri, 26 Feb 2016 16:39:15 -0800 (PST) Subject: [pypy-commit] pypy py3k: Port remaining changes from eea8f92e03 to py3k. Message-ID: <56d0f033.030f1c0a.9d77d.ffffd0be@mx.google.com> Author: Manuel Jacob Branch: py3k Changeset: r82582:4c5a11012b5e Date: 2016-02-27 01:35 +0100 http://bitbucket.org/pypy/pypy/changeset/4c5a11012b5e/ Log: Port remaining changes from eea8f92e03 to py3k. diff --git a/pypy/module/_socket/interp_socket.py b/pypy/module/_socket/interp_socket.py --- a/pypy/module/_socket/interp_socket.py +++ b/pypy/module/_socket/interp_socket.py @@ -166,6 +166,7 @@ def descr_new(space, w_subtype, __args__): sock = space.allocate_instance(W_Socket, w_subtype) + W_Socket.__init__(sock, space, RSocket.empty_rsocket()) return space.wrap(sock) @unwrap_spec(family=int, type=int, proto=int, diff --git a/pypy/module/_socket/test/test_sock_app.py b/pypy/module/_socket/test/test_sock_app.py --- a/pypy/module/_socket/test/test_sock_app.py +++ b/pypy/module/_socket/test/test_sock_app.py @@ -623,7 +623,7 @@ assert Socket_IPV6().family == socket.AF_INET6 def test_subclass_noinit(self): - from _socket import socket + from _socket import socket, error class MySock(socket): def __init__(self, *args): pass # don't call super @@ -632,7 +632,7 @@ assert s.proto == 0 assert s.family == 0 assert s.fileno() < 0 - raises(OSError, s.bind, ('localhost', 0)) + raises(error, s.bind, ('localhost', 0)) def test_dealloc_warn(self): import _socket From pypy.commits at gmail.com Fri Feb 26 19:39:17 2016 From: 
pypy.commits at gmail.com (mjacob) Date: Fri, 26 Feb 2016 16:39:17 -0800 (PST) Subject: [pypy-commit] pypy py3.3: hg merge py3k Message-ID: <56d0f035.0775c20a.81e6.0451@mx.google.com> Author: Manuel Jacob Branch: py3.3 Changeset: r82583:01bf93ab834d Date: 2016-02-27 01:38 +0100 http://bitbucket.org/pypy/pypy/changeset/01bf93ab834d/ Log: hg merge py3k diff --git a/pypy/module/_vmprof/test/test__vmprof.py b/pypy/module/_vmprof/test/test__vmprof.py --- a/pypy/module/_vmprof/test/test__vmprof.py +++ b/pypy/module/_vmprof/test/test__vmprof.py @@ -5,14 +5,15 @@ class AppTestVMProf(object): def setup_class(cls): cls.space = gettestobjspace(usemodules=['_vmprof', 'struct']) - cls.tmpfile = udir.join('test__vmprof.1').open('wb') - cls.w_tmpfileno = cls.space.wrap(cls.tmpfile.fileno()) - cls.w_tmpfilename = cls.space.wrap(cls.tmpfile.name) - cls.tmpfile2 = udir.join('test__vmprof.2').open('wb') - cls.w_tmpfileno2 = cls.space.wrap(cls.tmpfile2.fileno()) - cls.w_tmpfilename2 = cls.space.wrap(cls.tmpfile2.name) + cls.w_tmpfilename = cls.space.wrap(str(udir.join('test__vmprof.1'))) + cls.w_tmpfilename2 = cls.space.wrap(str(udir.join('test__vmprof.2'))) def test_import_vmprof(self): + tmpfile = open(self.tmpfilename, 'wb') + tmpfileno = tmpfile.fileno() + tmpfile2 = open(self.tmpfilename2, 'wb') + tmpfileno2 = tmpfile2.fileno() + import struct, sys WORD = struct.calcsize('l') @@ -45,7 +46,7 @@ return count import _vmprof - _vmprof.enable(self.tmpfileno, 0.01) + _vmprof.enable(tmpfileno, 0.01) _vmprof.disable() s = open(self.tmpfilename, 'rb').read() no_of_codes = count(s) @@ -59,7 +60,7 @@ pass """, d) - _vmprof.enable(self.tmpfileno2, 0.01) + _vmprof.enable(tmpfileno2, 0.01) exec_("""def foo2(): pass diff --git a/pypy/module/marshal/test/test_marshal.py b/pypy/module/marshal/test/test_marshal.py --- a/pypy/module/marshal/test/test_marshal.py +++ b/pypy/module/marshal/test/test_marshal.py @@ -200,6 +200,12 @@ assert str(exc.value) == "bad marshal data (unknown type code)" def 
test_bad_data(self): + # If you have sufficiently little memory, the line at the end of the + # test will fail immediately. If not, the test will consume high + # amounts of memory and make your system unstable. CPython (I tried + # 3.3 and 3.5) shows the same behaviour on my computers (4 GB and 12 GB). + skip("takes too much memory") + import marshal # Yes, there is code that depends on this :-( raises(EOFError, marshal.loads, b'') From pypy.commits at gmail.com Sat Feb 27 03:47:18 2016 From: pypy.commits at gmail.com (plan_rich) Date: Sat, 27 Feb 2016 00:47:18 -0800 (PST) Subject: [pypy-commit] pypy new-jit-log: finished a first (not yet compiling) version that both writes loops and bridges in a more condensed format Message-ID: <56d16296.82561c0a.34b61.1f6a@mx.google.com> Author: Richard Plangger Branch: new-jit-log Changeset: r82584:ed87960a6c98 Date: 2016-02-26 19:13 +0100 http://bitbucket.org/pypy/pypy/changeset/ed87960a6c98/ Log: finished a first (not yet compiling) version that both writes loops and bridges in a more condensed format diff --git a/rpython/jit/backend/x86/assembler.py b/rpython/jit/backend/x86/assembler.py --- a/rpython/jit/backend/x86/assembler.py +++ b/rpython/jit/backend/x86/assembler.py @@ -532,8 +532,8 @@ looptoken._x86_ops_offset = ops_offset looptoken._ll_function_addr = rawstart if logger: - logger.log_loop(inputargs, operations, 0, "rewritten", - name=loopname, ops_offset=ops_offset) + logger.log_trace(logger.MARK_ASM, inputargs, operations, + ops_offset=ops_offset) self.fixup_target_tokens(rawstart) self.teardown() @@ -587,8 +587,8 @@ frame_depth = max(self.current_clt.frame_info.jfi_frame_depth, frame_depth_no_fixed_size + JITFRAME_FIXED_SIZE) if logger: - logger.log_bridge(inputargs, operations, "rewritten", faildescr, - ops_offset=ops_offset) + logger.log_trace(logger.MARK_ASM, inputargs, operations, + faildescr=faildescr, ops_offset=ops_offset) self.fixup_target_tokens(rawstart) self.update_frame_depth(frame_depth) 
self.teardown() diff --git a/rpython/jit/metainterp/jitlog.py b/rpython/jit/metainterp/jitlog.py --- a/rpython/jit/metainterp/jitlog.py +++ b/rpython/jit/metainterp/jitlog.py @@ -1,22 +1,46 @@ from rpython.rlib.rvmprof.rvmprof import cintf class VMProfJitLogger(object): + + MARK_BLOCK_ASM = 0x10 + + MARK_INPUT_ARGS = 0x11 + MARK_RESOP = 0x12 + + MARK_RESOP_META = 0x13 + def __init__(self): self.cintf = cintf.setup() - def _ensure_init(self): + def setup_once(self): self.cintf.jitlog_try_init_using_env() + if self.cintf.jitlog_filter(0x0): + return + self.cintf.jitlog_write_marker(MARK_RESOP_META); + count = len(resoperation.opname) + self.cintf.jitlog_write_int(count) + for opnum, opname in resoperation.opname.items(): + self.cintf.write_marker(opnum) + self.cintf.write_string(opname) - self.cintf.write_marker(BinaryJitLogger.JIT_META_MARKER) - count = len(resoperation.opname) - assert count < 256 - self.cintf.write_marker(count) - for opnum, opname in resoperation.opname.items(): - self.cintf.write_byte(opnum) - self.cintf.write_string(opnum) + def log_trace(self, tag, args, ops, + faildescr=None, ops_offset={}): + if self.cintf.jitlog_filter(tag): + return + assert isinstance(tag, int) + self.cintf.jitlog_write_marker(tag); - def log_loop(self, operations): - pass + # input args + self.cintf.jitlog_write_marker(MARK_INPUT_ARGS); + str_args = [arg.repr_short(arg._repr_memo) for arg in args] + self.cintf.jitlog_write_string(','.join(str_args)) - def _log_resoperation(self, op): - pass + self.cintf.jitlog_write_int(len(ops)) + for i,op in enumerate(ops): + self.cintf.jitlog_write_marker(MARK_RESOP) + self.cintf.jitlog_write_marker(op.getopnum()) + str_args = [arg.repr_short(arg._repr_memo) for arg in op.getarglist()] + descr = op.getdescr() + if descr: + str_args += ['descr='+descr] + self.cintf.jitlog_write_string(','.join(str_args)) diff --git a/rpython/jit/metainterp/optimizeopt/__init__.py b/rpython/jit/metainterp/optimizeopt/__init__.py --- 
a/rpython/jit/metainterp/optimizeopt/__init__.py +++ b/rpython/jit/metainterp/optimizeopt/__init__.py @@ -54,7 +54,6 @@ debug_start("jit-optimize") inputargs = compile_data.start_label.getarglist() try: - metainterp.jitlog.log_loop(inputargs, compile_data.operations, memo) metainterp_sd.logger_noopt.log_loop(inputargs, compile_data.operations, memo=memo) diff --git a/rpython/rlib/rvmprof/cintf.py b/rpython/rlib/rvmprof/cintf.py --- a/rpython/rlib/rvmprof/cintf.py +++ b/rpython/rlib/rvmprof/cintf.py @@ -55,6 +55,9 @@ jitlog_init = rffi.llexternal("jitlog_init", [rffi.INT, rffi.CHARP], rffi.CHARP, compilation_info=eci, save_err=rffi.RFFI_SAVE_ERRNO) + jitlog_init = rffi.llexternal("jitlog_write_marker", [rffi.INT, rffi.CHARP], + rffi.CHARP, compilation_info=eci, + save_err=rffi.RFFI_SAVE_ERRNO) return CInterface(locals()) diff --git a/rpython/rlib/rvmprof/src/jitlog_main.h b/rpython/rlib/rvmprof/src/jitlog_main.h --- a/rpython/rlib/rvmprof/src/jitlog_main.h +++ b/rpython/rlib/rvmprof/src/jitlog_main.h @@ -5,6 +5,12 @@ static int jitlog_ready = 0; RPY_EXTERN +int jitlog_filter(int tag) +{ + return 0; // TODO +} + +RPY_EXTERN void jitlog_try_init_using_env(void) { if (jitlog_ready) { return; } diff --git a/rpython/rlib/rvmprof/src/rvmprof.h b/rpython/rlib/rvmprof/src/rvmprof.h --- a/rpython/rlib/rvmprof/src/rvmprof.h +++ b/rpython/rlib/rvmprof/src/rvmprof.h @@ -11,3 +11,4 @@ RPY_EXTERN char * jitlog_init(int, char*); RPY_EXTERN void jitlog_try_init_using_env(void); +RPY_EXTERN int jitlog_filter(int tag); From pypy.commits at gmail.com Sat Feb 27 03:47:20 2016 From: pypy.commits at gmail.com (plan_rich) Date: Sat, 27 Feb 2016 00:47:20 -0800 (PST) Subject: [pypy-commit] pypy new-jit-log: remove some errors in the c code of the jitlog file Message-ID: <56d16298.865a1c0a.804ad.1f08@mx.google.com> Author: Richard Plangger Branch: new-jit-log Changeset: r82585:e3c8440d6583 Date: 2016-02-27 09:46 +0100 http://bitbucket.org/pypy/pypy/changeset/e3c8440d6583/ Log: remove some 
errors in the c code of the jitlog file diff --git a/rpython/rlib/rvmprof/src/jitlog_main.h b/rpython/rlib/rvmprof/src/jitlog_main.h --- a/rpython/rlib/rvmprof/src/jitlog_main.h +++ b/rpython/rlib/rvmprof/src/jitlog_main.h @@ -1,4 +1,8 @@ #include +#include +#include +#include +#include static int jitlog_fd = -1; static char * jitlog_prefix = NULL; @@ -30,8 +34,8 @@ } else { /* JITLOG=prefix:filename --- conditional logging */ int n = colon - filename; - debug_prefix = malloc(n + 1); - memcpy(debug_prefix, filename, n); + jitlog_prefix = malloc(n + 1); + memcpy(jitlog_prefix, filename, n); debug_prefix[n] = '\0'; filename = colon + 1; } From pypy.commits at gmail.com Sat Feb 27 04:01:55 2016 From: pypy.commits at gmail.com (mjacob) Date: Sat, 27 Feb 2016 01:01:55 -0800 (PST) Subject: [pypy-commit] pypy py3k: Skip all _continuation zpickle tests instead of one. Message-ID: <56d16603.d4e41c0a.41084.274a@mx.google.com> Author: Manuel Jacob Branch: py3k Changeset: r82586:09cd2a01ee8d Date: 2016-02-27 10:01 +0100 http://bitbucket.org/pypy/pypy/changeset/09cd2a01ee8d/ Log: Skip all _continuation zpickle tests instead of one. Running these tests on top of CPython has always been fragile. 
diff --git a/pypy/module/_continuation/test/test_zpickle.py b/pypy/module/_continuation/test/test_zpickle.py --- a/pypy/module/_continuation/test/test_zpickle.py +++ b/pypy/module/_continuation/test/test_zpickle.py @@ -1,6 +1,9 @@ import py +py.test.skip("XXX: crashes: https://bitbucket.org/pypy/pypy/issue/1773") + + class AppTestCopy: spaceconfig = dict(usemodules=['_continuation'], continuation=True) @@ -215,7 +218,6 @@ ''', mod.__dict__) def test_pickle_continulet_real_subclass(self): - skip("XXX: triggers a crash: https://bitbucket.org/pypy/pypy/issue/1773") import types, sys mod = types.ModuleType('test_pickle_continulet_real_subclass') sys.modules['test_pickle_continulet_real_subclass'] = mod From pypy.commits at gmail.com Sat Feb 27 04:11:12 2016 From: pypy.commits at gmail.com (mjacob) Date: Sat, 27 Feb 2016 01:11:12 -0800 (PST) Subject: [pypy-commit] pypy py3k: Re-add lone pyc file support in py3k. Message-ID: <56d16830.e853c20a.e2a2a.6ae6@mx.google.com> Author: Manuel Jacob Branch: py3k Changeset: r82587:a3620d381009 Date: 2016-02-27 10:11 +0100 http://bitbucket.org/pypy/pypy/changeset/a3620d381009/ Log: Re-add lone pyc file support in py3k. This was accidentally removed in 299018381016. 
diff --git a/pypy/module/imp/importing.py b/pypy/module/imp/importing.py --- a/pypy/module/imp/importing.py +++ b/pypy/module/imp/importing.py @@ -76,6 +76,12 @@ if file_exists(pyfile): return PY_SOURCE, ".pyw", "U" + # The .py file does not exist, check the .pyc file + pycfile = filepart + ".pyc" + if file_exists(pycfile): + # existing .pyc file + return PY_COMPILED, ".pyc", "rb" + if has_so_extension(space): so_extension = get_so_extension(space) pydfile = filepart + so_extension From pypy.commits at gmail.com Sat Feb 27 05:59:38 2016 From: pypy.commits at gmail.com (plan_rich) Date: Sat, 27 Feb 2016 02:59:38 -0800 (PST) Subject: [pypy-commit] pypy s390x-backend: minor changes while going through the code with arigato Message-ID: <56d1819a.07811c0a.daec5.4416@mx.google.com> Author: Richard Plangger Branch: s390x-backend Changeset: r82588:22b3fac614dd Date: 2016-02-27 11:58 +0100 http://bitbucket.org/pypy/pypy/changeset/22b3fac614dd/ Log: minor changes while going through the code with arigato diff --git a/rpython/jit/backend/zarch/opassembler.py b/rpython/jit/backend/zarch/opassembler.py --- a/rpython/jit/backend/zarch/opassembler.py +++ b/rpython/jit/backend/zarch/opassembler.py @@ -534,24 +534,9 @@ if not is_frame: mc.LGR(r.r0, loc_base) # unusual argument location - if len(arglocs) > 1: - loc_index = arglocs[1] - # loc_index may be in r2 to r5. - # the wb_slow_path may trash these registers - - if loc_index.is_reg() and loc_index.value < 6: - mc.LAY(r.SP, l.addr(-WORD, r.SP)) - mc.STG(loc_index, l.addr(STD_FRAME_SIZE_IN_BYTES, r.SP)) - mc.load_imm(r.r14, self.wb_slowpath[helper_num]) mc.BASR(r.r14, r.r14) - if len(arglocs) > 1: - loc_index = arglocs[1] - if loc_index.is_reg() and loc_index.value < 6: - mc.LG(loc_index, l.addr(STD_FRAME_SIZE_IN_BYTES, r.SP)) - mc.LAY(r.SP, l.addr(WORD, r.SP)) - if card_marking_mask: # The helper ends again with a check of the flag in the object. 
# So here, we can simply write again a beq, which will be @@ -576,7 +561,7 @@ # compute in tmp_loc the byte offset: # tmp_loc = ~(index >> (card_page_shift + 3)) - mc.SRAG(tmp_loc, loc_index, l.addr(n+3)) + mc.SRLG(tmp_loc, loc_index, l.addr(n+3)) # invert the bits of tmp_loc # compute in SCRATCH the index of the bit inside the byte: @@ -584,8 +569,6 @@ # 0x80 sets zero flag. will store 0 into all not selected bits mc.RISBGN(r.SCRATCH, loc_index, l.imm(61), l.imm(0x80 | 63), l.imm(64-n)) mc.XG(tmp_loc, l.pool(self.pool.constant_64_ones)) - #mc.SRAG(r.SCRATCH, loc_index, l.addr(n)) - #mc.NILL(r.SCRATCH, l.imm(0x7)) # set SCRATCH2 to 1 << r1 mc.LGHI(r.SCRATCH2, l.imm(1)) @@ -596,13 +579,13 @@ addr = l.addr(0, loc_base, tmp_loc) mc.LLGC(r.SCRATCH, addr) mc.OGRK(r.SCRATCH, r.SCRATCH, r.SCRATCH2) - mc.STC(r.SCRATCH, addr) + mc.STCY(r.SCRATCH, addr) # done else: byte_index = loc_index.value >> descr.jit_wb_card_page_shift byte_ofs = ~(byte_index >> 3) byte_val = 1 << (byte_index & 7) - assert check_imm_value(byte_ofs) + assert check_imm_value(byte_ofs, lower_bound=-2**19, upper_bound=2**19-1) addr = l.addr(byte_ofs, loc_base) mc.LLGC(r.SCRATCH, addr) From pypy.commits at gmail.com Sat Feb 27 10:19:52 2016 From: pypy.commits at gmail.com (rlamy) Date: Sat, 27 Feb 2016 07:19:52 -0800 (PST) Subject: [pypy-commit] pypy default: Avoid using deprecated pytest features (i.e. yielding test cases and using test methods instead of test functions) Message-ID: <56d1be98.cf0b1c0a.dfe07.ffff9926@mx.google.com> Author: Ronan Lamy Branch: Changeset: r82589:96f67d31a0af Date: 2016-02-27 16:18 +0100 http://bitbucket.org/pypy/pypy/changeset/96f67d31a0af/ Log: Avoid using deprecated pytest features (i.e. 
yielding test cases and using test methods instead of test functions) diff --git a/rpython/rtyper/test/test_rdict.py b/rpython/rtyper/test/test_rdict.py --- a/rpython/rtyper/test/test_rdict.py +++ b/rpython/rtyper/test/test_rdict.py @@ -1182,118 +1182,113 @@ count_frees += 1 assert count_frees >= 3 -class TestStress: - def test_stress(self): - from rpython.annotator.dictdef import DictKey, DictValue - from rpython.annotator import model as annmodel - dictrepr = rdict.DictRepr(None, rint.signed_repr, rint.signed_repr, - DictKey(None, annmodel.SomeInteger()), - DictValue(None, annmodel.SomeInteger())) - dictrepr.setup() - l_dict = rdict.ll_newdict(dictrepr.DICT) - referencetable = [None] * 400 - referencelength = 0 - value = 0 +def test_stress(): + from rpython.annotator.dictdef import DictKey, DictValue + from rpython.annotator import model as annmodel + dictrepr = rdict.DictRepr(None, rint.signed_repr, rint.signed_repr, + DictKey(None, annmodel.SomeInteger()), + DictValue(None, annmodel.SomeInteger())) + dictrepr.setup() + l_dict = rdict.ll_newdict(dictrepr.DICT) + referencetable = [None] * 400 + referencelength = 0 + value = 0 - def complete_check(): - for n, refvalue in zip(range(len(referencetable)), referencetable): - try: - gotvalue = rdict.ll_dict_getitem(l_dict, n) - except KeyError: - assert refvalue is None - else: - assert gotvalue == refvalue + def complete_check(): + for n, refvalue in zip(range(len(referencetable)), referencetable): + try: + gotvalue = rdict.ll_dict_getitem(l_dict, n) + except KeyError: + assert refvalue is None + else: + assert gotvalue == refvalue - for x in not_really_random(): - n = int(x*100.0) # 0 <= x < 400 - op = repr(x)[-1] - if op <= '2' and referencetable[n] is not None: - rdict.ll_dict_delitem(l_dict, n) - referencetable[n] = None - referencelength -= 1 - elif op <= '6': - rdict.ll_dict_setitem(l_dict, n, value) - if referencetable[n] is None: - referencelength += 1 - referencetable[n] = value - value += 1 + for x in 
not_really_random(): + n = int(x*100.0) # 0 <= x < 400 + op = repr(x)[-1] + if op <= '2' and referencetable[n] is not None: + rdict.ll_dict_delitem(l_dict, n) + referencetable[n] = None + referencelength -= 1 + elif op <= '6': + rdict.ll_dict_setitem(l_dict, n, value) + if referencetable[n] is None: + referencelength += 1 + referencetable[n] = value + value += 1 + else: + try: + gotvalue = rdict.ll_dict_getitem(l_dict, n) + except KeyError: + assert referencetable[n] is None else: - try: - gotvalue = rdict.ll_dict_getitem(l_dict, n) - except KeyError: - assert referencetable[n] is None - else: - assert gotvalue == referencetable[n] - if 1.38 <= x <= 1.39: - complete_check() - print 'current dict length:', referencelength - assert l_dict.num_items == referencelength - complete_check() + assert gotvalue == referencetable[n] + if 1.38 <= x <= 1.39: + complete_check() + print 'current dict length:', referencelength + assert l_dict.num_items == referencelength + complete_check() - def test_stress_2(self): - yield self.stress_combination, True, False - yield self.stress_combination, False, True - yield self.stress_combination, False, False - yield self.stress_combination, True, True + at py.test.mark.parametrize('key_can_be_none', [True, False]) + at py.test.mark.parametrize('value_can_be_none', [True, False]) +def test_stress_2(key_can_be_none, value_can_be_none): + from rpython.rtyper.lltypesystem.rstr import string_repr + from rpython.annotator.dictdef import DictKey, DictValue + from rpython.annotator import model as annmodel - def stress_combination(self, key_can_be_none, value_can_be_none): - from rpython.rtyper.lltypesystem.rstr import string_repr - from rpython.annotator.dictdef import DictKey, DictValue - from rpython.annotator import model as annmodel + print + print "Testing combination with can_be_None: keys %s, values %s" % ( + key_can_be_none, value_can_be_none) - print - print "Testing combination with can_be_None: keys %s, values %s" % ( - 
key_can_be_none, value_can_be_none) + class PseudoRTyper: + cache_dummy_values = {} + dictrepr = rdict.DictRepr(PseudoRTyper(), string_repr, string_repr, + DictKey(None, annmodel.SomeString(key_can_be_none)), + DictValue(None, annmodel.SomeString(value_can_be_none))) + dictrepr.setup() + print dictrepr.lowleveltype + for key, value in dictrepr.DICTENTRY._adtmeths.items(): + print ' %s = %s' % (key, value) + l_dict = rdict.ll_newdict(dictrepr.DICT) + referencetable = [None] * 400 + referencelength = 0 + values = not_really_random() + keytable = [string_repr.convert_const("foo%d" % n) + for n in range(len(referencetable))] - class PseudoRTyper: - cache_dummy_values = {} - dictrepr = rdict.DictRepr(PseudoRTyper(), string_repr, string_repr, - DictKey(None, annmodel.SomeString(key_can_be_none)), - DictValue(None, annmodel.SomeString(value_can_be_none))) - dictrepr.setup() - print dictrepr.lowleveltype - for key, value in dictrepr.DICTENTRY._adtmeths.items(): - print ' %s = %s' % (key, value) - l_dict = rdict.ll_newdict(dictrepr.DICT) - referencetable = [None] * 400 - referencelength = 0 - values = not_really_random() - keytable = [string_repr.convert_const("foo%d" % n) - for n in range(len(referencetable))] + def complete_check(): + for n, refvalue in zip(range(len(referencetable)), referencetable): + try: + gotvalue = rdict.ll_dict_getitem(l_dict, keytable[n]) + except KeyError: + assert refvalue is None + else: + assert gotvalue == refvalue - def complete_check(): - for n, refvalue in zip(range(len(referencetable)), referencetable): - try: - gotvalue = rdict.ll_dict_getitem(l_dict, keytable[n]) - except KeyError: - assert refvalue is None - else: - assert gotvalue == refvalue + for x in not_really_random(): + n = int(x*100.0) # 0 <= x < 400 + op = repr(x)[-1] + if op <= '2' and referencetable[n] is not None: + rdict.ll_dict_delitem(l_dict, keytable[n]) + referencetable[n] = None + referencelength -= 1 + elif op <= '6': + ll_value = 
string_repr.convert_const(str(values.next())) + rdict.ll_dict_setitem(l_dict, keytable[n], ll_value) + if referencetable[n] is None: + referencelength += 1 + referencetable[n] = ll_value + else: + try: + gotvalue = rdict.ll_dict_getitem(l_dict, keytable[n]) + except KeyError: + assert referencetable[n] is None + else: + assert gotvalue == referencetable[n] + if 1.38 <= x <= 1.39: + complete_check() + print 'current dict length:', referencelength + assert l_dict.num_items == referencelength + complete_check() - for x in not_really_random(): - n = int(x*100.0) # 0 <= x < 400 - op = repr(x)[-1] - if op <= '2' and referencetable[n] is not None: - rdict.ll_dict_delitem(l_dict, keytable[n]) - referencetable[n] = None - referencelength -= 1 - elif op <= '6': - ll_value = string_repr.convert_const(str(values.next())) - rdict.ll_dict_setitem(l_dict, keytable[n], ll_value) - if referencetable[n] is None: - referencelength += 1 - referencetable[n] = ll_value - else: - try: - gotvalue = rdict.ll_dict_getitem(l_dict, keytable[n]) - except KeyError: - assert referencetable[n] is None - else: - assert gotvalue == referencetable[n] - if 1.38 <= x <= 1.39: - complete_check() - print 'current dict length:', referencelength - assert l_dict.num_items == referencelength - complete_check() - From pypy.commits at gmail.com Sat Feb 27 10:52:57 2016 From: pypy.commits at gmail.com (arigo) Date: Sat, 27 Feb 2016 07:52:57 -0800 (PST) Subject: [pypy-commit] pypy cpyext-ext: Add an XXX about PyDict_GetItem() and IntMutableCells Message-ID: <56d1c659.c1b3c20a.186d4.ffffe1ee@mx.google.com> Author: Armin Rigo Branch: cpyext-ext Changeset: r82590:e2c600e30aa7 Date: 2016-02-27 16:52 +0100 http://bitbucket.org/pypy/pypy/changeset/e2c600e30aa7/ Log: Add an XXX about PyDict_GetItem() and IntMutableCells diff --git a/pypy/module/cpyext/dictobject.py b/pypy/module/cpyext/dictobject.py --- a/pypy/module/cpyext/dictobject.py +++ b/pypy/module/cpyext/dictobject.py @@ -23,6 +23,7 @@ # NOTE: this works so 
far because all our dict strategies store # *values* as full objects, which stay alive as long as the dict is # alive and not modified. So we can return a borrowed ref. + # XXX this is wrong with IntMutableCell. Hope it works... return w_res @cpython_api([PyObject, PyObject, PyObject], rffi.INT_real, error=-1) @@ -62,6 +63,7 @@ # NOTE: this works so far because all our dict strategies store # *values* as full objects, which stay alive as long as the dict is # alive and not modified. So we can return a borrowed ref. + # XXX this is wrong with IntMutableCell. Hope it works... return w_res @cpython_api([PyObject, CONST_STRING], rffi.INT_real, error=-1) From pypy.commits at gmail.com Sat Feb 27 13:16:36 2016 From: pypy.commits at gmail.com (plan_rich) Date: Sat, 27 Feb 2016 10:16:36 -0800 (PST) Subject: [pypy-commit] pypy s390x-backend: added new module gcstress that compiles a very minimal language in a zrpy test and feed it with input from hypothesis (work in progress) Message-ID: <56d1e804.e83cc20a.2a8a.0e9d@mx.google.com> Author: Richard Plangger Branch: s390x-backend Changeset: r82591:a2ccecb333f0 Date: 2016-02-27 19:15 +0100 http://bitbucket.org/pypy/pypy/changeset/a2ccecb333f0/ Log: added new module gcstress that compiles a very minimal language in a zrpy test and feed it with input from hypothesis (work in progress) diff --git a/rpython/jit/backend/llsupport/gcstress/__init__.py b/rpython/jit/backend/llsupport/gcstress/__init__.py new file mode 100644 diff --git a/rpython/jit/backend/llsupport/gcstress/code.py b/rpython/jit/backend/llsupport/gcstress/code.py new file mode 100644 --- /dev/null +++ b/rpython/jit/backend/llsupport/gcstress/code.py @@ -0,0 +1,160 @@ + +import struct + +class ByteCode(object): + def encode(self, ctx): + ctx.append_byte(self.BYTE_CODE) + +_c = 0 + +LIST_TYP = 'l' +INT_TYP = 'i' +OBJ_TYP = 'o' +STR_TYP = 's' +VAL_TYP = 'v' # either one of the earlier + +def unique_code(): + global _c + v = _c + _c = v + 1 + return v + +class 
Context(object): + def __init__(self): + self.consts = {} + self.const_idx = 0 + self.bytecode = [] + + def append_byte(self, byte): + self.bytecode.append(('b', byte)) + + def get_byte(self, i): + typ, byte = self.bytecode[i] + assert typ == 'b' + return byte + + def get_short(self, i): + typ, int = self.bytecode[i] + assert typ == 'h' + return int + + def append_short(self, byte): + self.bytecode.append(('h', byte)) + + def append_int(self, byte): + self.bytecode.append(('i', byte)) + + def const_str(self, str): + self.consts[self.const_idx] = str + self.append_short(self.const_idx) + self.const_idx += 1 + + def to_string(self): + code = [] + for typ, nmr in self.bytecode: + code.append(struct.pack(typ, nmr)) + return ''.join(code) + +def requires_stack(*types): + def method(clazz): + clazz._stack_types = tuple(types) + return clazz + + return method + + at requires_stack() +class CondJump(ByteCode): + BYTE_CODE = unique_code() + + COND_EQ = 0 + COND_LT = 1 + COND_GT = 2 + COND_LE = 3 + COND_GE = 4 + + def __init__(self, cond): + self.cond = cond + def encode(self, ctx): + ctx.append_byte(self.BYTE_CODE) + ctx.append_byte(self.cond) + + at requires_stack() +class Jump(ByteCode): + BYTE_CODE = unique_code() + def __init__(self): + pass + + at requires_stack() +class LoadStr(ByteCode): + BYTE_CODE = unique_code() + def __init__(self, string): + self.string = string + def encode(self, ctx): + ctx.append_byte(self.BYTE_CODE) + ctx.const_str(self.string) + + at requires_stack(STR_TYP, STR_TYP) +class AddStr(ByteCode): + BYTE_CODE = unique_code() + def __init__(self): + pass + + at requires_stack(LIST_TYP, LIST_TYP) +class AddList(ByteCode): + BYTE_CODE = unique_code() + def __init__(self): + pass + + at requires_stack() +class CreateList(ByteCode): + BYTE_CODE = unique_code() + def __init__(self, size=8): + self.size = size + def encode(self, ctx): + ctx.append_byte(self.BYTE_CODE) + ctx.append_short(self.size) + + at requires_stack() +class PutInt(ByteCode): + 
BYTE_CODE = unique_code() + def __init__(self, value): + self.integral = value + def encode(self, ctx): + ctx.append_byte(self.BYTE_CODE) + ctx.append_short(self.integral) + + at requires_stack(LIST_TYP, INT_TYP, VAL_TYP) +class InsertList(ByteCode): + BYTE_CODE = unique_code() + def __init__(self, index): + self.index = index + def encode(self, ctx): + ctx.append_byte(self.BYTE_CODE) + ctx.append_int(self.index) + + at requires_stack(LIST_TYP, INT_TYP) +class DelList(ByteCode): + BYTE_CODE = unique_code() + def __init__(self, index): + self.index = index + def encode(self, ctx): + ctx.append_byte(self.BYTE_CODE) + ctx.append_int(self.index) + + at requires_stack(LIST_TYP, INT_TYP, VAL_TYP) +class AppendList(ByteCode): + BYTE_CODE = unique_code() + def __init__(self): + pass + + at requires_stack(LIST_TYP) +class LenList(ByteCode): + BYTE_CODE = unique_code() + def __init__(self): + self.required_stack('l') + + at requires_stack(INT_TYP, INT_TYP) +class CompareInt(ByteCode): + BYTE_CODE = unique_code() + def __init__(self): + pass diff --git a/rpython/jit/backend/llsupport/gcstress/interp.py b/rpython/jit/backend/llsupport/gcstress/interp.py new file mode 100644 --- /dev/null +++ b/rpython/jit/backend/llsupport/gcstress/interp.py @@ -0,0 +1,23 @@ +class W_Root(object): + pass + +class W_ListObject(W_Root): + def __init__(self): + self.items = [] + +def entry_point(argv): + pass + #bytecode = argv[0] + #pc = 0 + #end = len(bytecode) + #stack = Stack(512) + #while i < end: + # opcode = ord(bytecode[i]) + # if opcode == 0x0: + # stack.push(space.new_list()) + # elif opcode == 0x1: + # w_elem = stack.pop() + # w_list = stack.pick(0) + # space.list_append(w_list, w_elem) + # i += 1 + #return 0 diff --git a/rpython/jit/backend/llsupport/gcstress/stack.py b/rpython/jit/backend/llsupport/gcstress/stack.py new file mode 100644 --- /dev/null +++ b/rpython/jit/backend/llsupport/gcstress/stack.py @@ -0,0 +1,55 @@ +from rpython.rlib.jit import JitDriver, hint, dont_look_inside, 
promote + +class Stack(object): + _virtualizable_ = ['stackpos', 'stack[*]'] + + def __init__(self, size): + self = hint(self, access_directly=True, fresh_virtualizable=True) + self.stack = [0] * size + self.stackpos = 0 # always store a known-nonneg integer here + + def append(self, elem): + self.stack[self.stackpos] = elem + self.stackpos += 1 + + def pop(self): + stackpos = self.stackpos - 1 + if stackpos < 0: + raise IndexError + self.stackpos = stackpos # always store a known-nonneg integer here + return self.stack[stackpos] + + def pick(self, i): + n = self.stackpos - i - 1 + assert n >= 0 + self.append(self.stack[n]) + + def put(self, i): + elem = self.pop() + n = self.stackpos - i - 1 + assert n >= 0 + self.stack[n] = elem + + @dont_look_inside + def roll(self, r): + if r < -1: + i = self.stackpos + r + if i < 0: + raise IndexError + n = self.stackpos - 1 + assert n >= 0 + elem = self.stack[n] + for j in range(self.stackpos - 2, i - 1, -1): + assert j >= 0 + self.stack[j + 1] = self.stack[j] + self.stack[i] = elem + elif r > 1: + i = self.stackpos - r + if i < 0: + raise IndexError + elem = self.stack[i] + for j in range(i, self.stackpos - 1): + self.stack[j] = self.stack[j + 1] + n = self.stackpos - 1 + assert n >= 0 + self.stack[n] = elem diff --git a/rpython/jit/backend/llsupport/gcstress/test/__init__.py b/rpython/jit/backend/llsupport/gcstress/test/__init__.py new file mode 100644 diff --git a/rpython/jit/backend/llsupport/gcstress/test/test_interp.py b/rpython/jit/backend/llsupport/gcstress/test/test_interp.py new file mode 100644 --- /dev/null +++ b/rpython/jit/backend/llsupport/gcstress/test/test_interp.py @@ -0,0 +1,22 @@ + +from rpython.jit.backend.llsupport.gcstress import code + +class TestByteCode(object): + def test_load_str(self): + c = code.Context() + code.LoadStr("hello world").encode(c) + assert c.consts[0] == "hello world" + assert c.get_byte(0) == code.LoadStr.BYTE_CODE + assert c.get_short(1) == 0 + + def test_str_add(self): + c = 
code.Context() + code.LoadStr("hello").encode(c) + code.LoadStr("world").encode(c) + code.AddStr().encode(c) + assert len(c.consts) == 2 + assert c.get_byte(4) == code.AddStr.BYTE_CODE + assert c.get_short(3) == 1 + +class TestInterp(object): + pass diff --git a/rpython/jit/backend/llsupport/gcstress/test/zrpy_gc_hypo_test.py b/rpython/jit/backend/llsupport/gcstress/test/zrpy_gc_hypo_test.py new file mode 100644 --- /dev/null +++ b/rpython/jit/backend/llsupport/gcstress/test/zrpy_gc_hypo_test.py @@ -0,0 +1,33 @@ +from rpython.jit.backend.detect_cpu import getcpuclass +from rpython.jit.tool.oparser import parse +from rpython.jit.metainterp.history import JitCellToken, NoStats +from rpython.jit.metainterp.history import BasicFinalDescr, BasicFailDescr +from rpython.jit.metainterp.gc import get_description +from rpython.jit.metainterp.optimize import SpeculativeError +from rpython.annotator.listdef import s_list_of_strings +from rpython.rtyper.lltypesystem import lltype, llmemory, rffi +from rpython.rtyper.rclass import getclassrepr, getinstancerepr +from rpython.translator.unsimplify import call_initial_function +from rpython.translator.translator import TranslationContext +from rpython.translator.c import genc +from rpython.jit.backend.llsupport.gcstress import interp + +class GCHypothesis(object): + def setup_class(self): + t = TranslationContext() + t.config.translation.gc = "incminimark" + t.config.translation.gcremovetypeptr = True + ann = t.buildannotator() + ann.build_types(interp.entry_point, [s_list_of_strings], main_entry_point=True) + rtyper = t.buildrtyper() + rtyper.specialize() + + cbuilder = genc.CStandaloneBuilder(t, f, t.config) + cbuilder.generate_source(defines=cbuilder.DEBUG_DEFINES) + cbuilder.compile() + + import pdb; pdb.set_trace() + + + def test_void(self): + pass diff --git a/rpython/jit/backend/zarch/test/test_rpy_gc.py b/rpython/jit/backend/zarch/test/test_rpy_gc.py new file mode 100644 diff --git 
a/rpython/jit/backend/zarch/test/test_zrpy_gc_hypo.py b/rpython/jit/backend/zarch/test/test_zrpy_gc_hypo.py new file mode 100644 --- /dev/null +++ b/rpython/jit/backend/zarch/test/test_zrpy_gc_hypo.py @@ -0,0 +1,7 @@ +from rpython.jit.backend.llsupport.gcstress.test.zrpy_gc_hypo_test import GCHypothesis + + +class TestGCHypothesis(GCHypothesis): + # runs ../../llsupport/gcstress/test/zrpy_gc_hypo_test.py + gcrootfinder = "shadowstack" + gc = "incminimark" From pypy.commits at gmail.com Sat Feb 27 14:52:36 2016 From: pypy.commits at gmail.com (rlamy) Date: Sat, 27 Feb 2016 11:52:36 -0800 (PST) Subject: [pypy-commit] pypy default: Simplify tests by using a dict as the reference. Message-ID: <56d1fe84.4577c20a.7d582.2e44@mx.google.com> Author: Ronan Lamy Branch: Changeset: r82592:56afe29ed2bf Date: 2016-02-27 16:35 +0100 http://bitbucket.org/pypy/pypy/changeset/56afe29ed2bf/ Log: Simplify tests by using a dict as the reference. diff --git a/rpython/rtyper/test/test_rdict.py b/rpython/rtyper/test/test_rdict.py --- a/rpython/rtyper/test/test_rdict.py +++ b/rpython/rtyper/test/test_rdict.py @@ -1,5 +1,8 @@ from rpython.translator.translator import TranslationContext +from rpython.annotator import model as annmodel +from rpython.annotator.dictdef import DictKey, DictValue from rpython.rtyper.lltypesystem import lltype, rffi +from rpython.rtyper.lltypesystem.rstr import string_repr from rpython.rtyper import rint from rpython.rtyper.lltypesystem import rdict, rstr from rpython.rtyper.test.tool import BaseRtypingTest @@ -1182,113 +1185,88 @@ count_frees += 1 assert count_frees >= 3 +N_KEYS = 400 def test_stress(): - from rpython.annotator.dictdef import DictKey, DictValue - from rpython.annotator import model as annmodel dictrepr = rdict.DictRepr(None, rint.signed_repr, rint.signed_repr, DictKey(None, annmodel.SomeInteger()), DictValue(None, annmodel.SomeInteger())) dictrepr.setup() l_dict = rdict.ll_newdict(dictrepr.DICT) - referencetable = [None] * 400 - referencelength = 
0 + reference = {} value = 0 + def check_value(n): + try: + gotvalue = rdict.ll_dict_getitem(l_dict, n) + except KeyError: + n not in reference + else: + assert gotvalue == reference[n] + def complete_check(): - for n, refvalue in zip(range(len(referencetable)), referencetable): - try: - gotvalue = rdict.ll_dict_getitem(l_dict, n) - except KeyError: - assert refvalue is None - else: - assert gotvalue == refvalue + for n in range(N_KEYS): + check_value(n) for x in not_really_random(): n = int(x*100.0) # 0 <= x < 400 op = repr(x)[-1] - if op <= '2' and referencetable[n] is not None: + if op <= '2' and n in reference: rdict.ll_dict_delitem(l_dict, n) - referencetable[n] = None - referencelength -= 1 + del reference[n] elif op <= '6': rdict.ll_dict_setitem(l_dict, n, value) - if referencetable[n] is None: - referencelength += 1 - referencetable[n] = value + reference[n] = value value += 1 else: - try: - gotvalue = rdict.ll_dict_getitem(l_dict, n) - except KeyError: - assert referencetable[n] is None - else: - assert gotvalue == referencetable[n] + check_value(n) if 1.38 <= x <= 1.39: complete_check() - print 'current dict length:', referencelength - assert l_dict.num_items == referencelength + print 'current dict length:', len(reference) + assert l_dict.num_items == len(reference) complete_check() + @py.test.mark.parametrize('key_can_be_none', [True, False]) @py.test.mark.parametrize('value_can_be_none', [True, False]) def test_stress_2(key_can_be_none, value_can_be_none): - from rpython.rtyper.lltypesystem.rstr import string_repr - from rpython.annotator.dictdef import DictKey, DictValue - from rpython.annotator import model as annmodel - - print - print "Testing combination with can_be_None: keys %s, values %s" % ( - key_can_be_none, value_can_be_none) - class PseudoRTyper: cache_dummy_values = {} dictrepr = rdict.DictRepr(PseudoRTyper(), string_repr, string_repr, DictKey(None, annmodel.SomeString(key_can_be_none)), DictValue(None, 
annmodel.SomeString(value_can_be_none))) dictrepr.setup() - print dictrepr.lowleveltype - for key, value in dictrepr.DICTENTRY._adtmeths.items(): - print ' %s = %s' % (key, value) l_dict = rdict.ll_newdict(dictrepr.DICT) - referencetable = [None] * 400 - referencelength = 0 + reference = {} values = not_really_random() - keytable = [string_repr.convert_const("foo%d" % n) - for n in range(len(referencetable))] + keytable = [string_repr.convert_const("foo%d" % n) for n in range(N_KEYS)] + + def check_value(n): + try: + gotvalue = rdict.ll_dict_getitem(l_dict, keytable[n]) + except KeyError: + assert n not in reference + else: + assert gotvalue == reference[n] def complete_check(): - for n, refvalue in zip(range(len(referencetable)), referencetable): - try: - gotvalue = rdict.ll_dict_getitem(l_dict, keytable[n]) - except KeyError: - assert refvalue is None - else: - assert gotvalue == refvalue + for n in range(N_KEYS): + check_value(n) for x in not_really_random(): n = int(x*100.0) # 0 <= x < 400 op = repr(x)[-1] - if op <= '2' and referencetable[n] is not None: + if op <= '2' and n in reference: rdict.ll_dict_delitem(l_dict, keytable[n]) - referencetable[n] = None - referencelength -= 1 + del reference[n] elif op <= '6': ll_value = string_repr.convert_const(str(values.next())) rdict.ll_dict_setitem(l_dict, keytable[n], ll_value) - if referencetable[n] is None: - referencelength += 1 - referencetable[n] = ll_value + reference[n] = ll_value else: - try: - gotvalue = rdict.ll_dict_getitem(l_dict, keytable[n]) - except KeyError: - assert referencetable[n] is None - else: - assert gotvalue == referencetable[n] + check_value(n) if 1.38 <= x <= 1.39: complete_check() - print 'current dict length:', referencelength - assert l_dict.num_items == referencelength + print 'current dict length:', len(reference) + assert l_dict.num_items == len(reference) complete_check() - From pypy.commits at gmail.com Sat Feb 27 23:18:50 2016 From: pypy.commits at gmail.com (mattip) Date: Sat, 
27 Feb 2016 20:18:50 -0800 (PST) Subject: [pypy-commit] pypy cpyext-ext: remove docstring test, skip non-inheritance test until app-level test implemented Message-ID: <56d2752a.05e41c0a.304f6.4c0c@mx.google.com> Author: mattip Branch: cpyext-ext Changeset: r82593:4b4ad3294135 Date: 2016-02-25 18:42 +0100 http://bitbucket.org/pypy/pypy/changeset/4b4ad3294135/ Log: remove docstring test, skip non-inheritance test until app-level test implemented diff --git a/pypy/module/cpyext/api.py b/pypy/module/cpyext/api.py --- a/pypy/module/cpyext/api.py +++ b/pypy/module/cpyext/api.py @@ -123,7 +123,7 @@ constant_names = """ Py_TPFLAGS_READY Py_TPFLAGS_READYING Py_TPFLAGS_HAVE_GETCHARBUFFER -METH_COEXIST METH_STATIC METH_CLASS +METH_COEXIST METH_STATIC METH_CLASS Py_TPFLAGS_BASETYPE METH_NOARGS METH_VARARGS METH_KEYWORDS METH_O Py_TPFLAGS_HEAPTYPE Py_TPFLAGS_HAVE_CLASS Py_LT Py_LE Py_EQ Py_NE Py_GT Py_GE Py_TPFLAGS_CHECKTYPES diff --git a/pypy/module/cpyext/test/foo.c b/pypy/module/cpyext/test/foo.c --- a/pypy/module/cpyext/test/foo.c +++ b/pypy/module/cpyext/test/foo.c @@ -629,126 +629,6 @@ (destructor)custom_dealloc, /*tp_dealloc*/ }; -static PyObject * cmp_docstring(PyObject * self, PyObject * args) -{ - PyObject *obj; - PyObject *str; - char *docstr, *attr_as_str; - static char *msg = "has no docstring"; - PyObject *tp_dict = footype.tp_dict; - PyObject *myobj; - static PyTypeObject *PyMemberDescr_TypePtr = NULL; /* a PyMemberDescr_Type* */ - static PyTypeObject *PyGetSetDescr_TypePtr = NULL; /* a PyGetSetDescr_Type* */ - static PyTypeObject *PyMethodDescr_TypePtr = NULL; /* a PyClassMethodDescr_Type* */ - - if (PyGetSetDescr_TypePtr == NULL) { - myobj = PyDict_GetItemString(tp_dict, "name"); - if (myobj != NULL) { - PyGetSetDescr_TypePtr = Py_TYPE(myobj); - } - } - if (PyMemberDescr_TypePtr == NULL) { - myobj = PyDict_GetItemString(tp_dict, "int_member"); - if (myobj != NULL) { - PyMemberDescr_TypePtr = Py_TYPE(myobj); - } - } - if (PyMethodDescr_TypePtr == NULL) { - myobj 
= PyDict_GetItemString(tp_dict, "classmeth"); - if (myobj != NULL) { - PyMethodDescr_TypePtr = Py_TYPE(myobj); - } - } - if (!PyArg_ParseTuple(args, "OO!", &obj, &PyString_Type, &str)) { - return NULL; - } - if (PyMethodDescr_TypePtr == PyMemberDescr_TypePtr || - PyMethodDescr_TypePtr == PyGetSetDescr_TypePtr || - PyMemberDescr_TypePtr == PyGetSetDescr_TypePtr) - { - PyErr_Format(PyExc_RuntimeError, - "at least two of the 'Py{Method,Member,GetSet}Descr_Type's are the same\n" - "(in cmp_docstring %s %d)", __FILE__, __LINE__); - return NULL; - } - docstr = PyString_AS_STRING(str); -#define _TESTDOC1(typebase) (Py_TYPE(obj) == &Py##typebase##_Type) -#define _TESTDOC2(typebase) (Py_TYPE(obj) == Py##typebase##_TypePtr) -#define _CMPDOC(typebase, doc, name) do { \ - Py##typebase##Object *new = (Py##typebase##Object *)obj; \ - if (!(doc)) { \ - PyErr_Format(PyExc_RuntimeError, "%s method %s", name, msg); \ - return NULL; \ - } \ - else { \ - if (strcmp(doc, docstr) != 0) \ - { \ - PyErr_Format(PyExc_RuntimeError, \ - "%s method's docstring '%s' is not '%s'", \ - name, doc, docstr); \ - return NULL; \ - } \ - } \ - } while (0) - - if (_TESTDOC1(CFunction)) { - _CMPDOC(CFunction, new->m_ml->ml_doc, new->m_ml->ml_name); - } - else if (_TESTDOC1(Type)) { - PyTypeObject *new = (PyTypeObject *)obj; - if (!(new->tp_doc)) { - PyErr_Format(PyExc_RuntimeError, "Type '%s' %s", new->tp_name, msg); - return NULL; - } - else { - if (strcmp(new->tp_doc, docstr) != 0) - { - PyErr_Format(PyExc_RuntimeError, - "%s's docstring '%s' is not '%s'", - new->tp_name, new->tp_doc, docstr); - return NULL; - } - } - } - else if (_TESTDOC2(MemberDescr)) { - _CMPDOC(MemberDescr, new->d_member->doc, new->d_member->name); - } - else if (_TESTDOC2(GetSetDescr)) { - _CMPDOC(GetSetDescr, new->d_getset->doc, new->d_getset->name); - } - else if (_TESTDOC2(MethodDescr)) { - _CMPDOC(MethodDescr, new->d_method->ml_doc, new->d_method->ml_name); - } - else { - PyObject *doc_attr; - - doc_attr = 
PyObject_GetAttrString(obj, "__doc__"); - if (doc_attr == NULL || doc_attr == Py_None) { - PyErr_Format(PyExc_RuntimeError, "object %s", msg); - Py_XDECREF(doc_attr); - return NULL; - } - - attr_as_str = PyString_AS_STRING(doc_attr); - if (strcmp(attr_as_str, docstr) != 0) - { - PyErr_Format(PyExc_RuntimeError, - "objects's docstring '%s' is not '%s'", - attr_as_str, docstr); - Py_XDECREF(doc_attr); - return NULL; - } - Py_XDECREF(doc_attr); - Py_RETURN_NONE; - } - -#undef _TESTDOC1 -#undef _TESTDOC2 -#undef _ADDDOC - - Py_RETURN_NONE; -} - static PyObject *size_of_instances(PyObject *self, PyObject *t) { return PyInt_FromLong(((PyTypeObject *)t)->tp_basicsize); @@ -759,7 +639,6 @@ static PyMethodDef foo_functions[] = { {"new", (PyCFunction)foo_new, METH_NOARGS, NULL}, {"newCustom", (PyCFunction)newCustom, METH_NOARGS, NULL}, - {"cmp_docstring", (PyCFunction)cmp_docstring, METH_VARARGS, NULL}, {"size_of_instances", (PyCFunction)size_of_instances, METH_O, NULL}, {NULL, NULL} /* Sentinel */ }; diff --git a/pypy/module/cpyext/test/test_typeobject.py b/pypy/module/cpyext/test/test_typeobject.py --- a/pypy/module/cpyext/test/test_typeobject.py +++ b/pypy/module/cpyext/test/test_typeobject.py @@ -55,13 +55,6 @@ raises(SystemError, "obj.broken_member = 42") assert module.fooType.broken_member.__doc__ is None assert module.fooType.object_member.__doc__ == "A Python object." 
- for m in dir(module.fooType): - obj = getattr(module.fooType, m) - docstring = obj.__doc__ - if not docstring: - raises(RuntimeError, module.cmp_docstring, obj, 'xxxrandomxxx') - else: - module.cmp_docstring(obj, docstring) assert str(type(module.fooType.int_member)) == "" def test_typeobject_object_member(self): @@ -190,13 +183,17 @@ module = self.import_module(name='foo') assert module.MetaType.__mro__ == (module.MetaType, type, object) assert type(module.fooType).__mro__ == (type, object) - # XXX FIX - must raise since fooType does not have flag Py_TPFLAGS_BASETYPE - raises(TypeError, module.MetaType, 'other', (module.fooType,), {}) - y = module.MetaType('other', (module.fooType,), {}) + y = module.MetaType('other', (module.MetaType,), {}) assert isinstance(y, module.MetaType) - x = y() + x = y('something', (type(y),), {}) del x, y + def test_metaclass_compatible2(self): + skip('type.__new__ does not check acceptable_as_base_class') + # XXX FIX - must raise since fooType (which is a base type) + # does not have flag Py_TPFLAGS_BASETYPE + module = self.import_module(name='foo') + raises(TypeError, module.MetaType, 'other', (module.fooType,), {}) def test_sre(self): import sys for m in ['_sre', 'sre_compile', 'sre_constants', 'sre_parse', 're']: @@ -803,6 +800,12 @@ class bar(module.fooType, module.UnicodeSubtype): pass except TypeError as e: - assert str(e) == 'instance layout conflicts in multiple inheritance' + import sys + if '__pypy__' in sys.builtin_module_names: + assert str(e) == 'instance layout conflicts in multiple inheritance' + + else: + assert str(e) == ('Error when calling the metaclass bases\n' + ' multiple bases have instance lay-out conflict') else: raise AssertionError("did not get TypeError!") diff --git a/pypy/module/cpyext/typeobject.py b/pypy/module/cpyext/typeobject.py --- a/pypy/module/cpyext/typeobject.py +++ b/pypy/module/cpyext/typeobject.py @@ -17,7 +17,7 @@ generic_cpy_call, Py_TPFLAGS_READY, Py_TPFLAGS_READYING, 
Py_TPFLAGS_HEAPTYPE, METH_VARARGS, METH_KEYWORDS, CANNOT_FAIL, Py_TPFLAGS_HAVE_GETCHARBUFFER, build_type_checkers, StaticObjectBuilder, - PyObjectFields) + PyObjectFields, Py_TPFLAGS_BASETYPE) from pypy.module.cpyext.methodobject import ( PyDescr_NewWrapper, PyCFunction_NewEx, PyCFunction_typedef) from pypy.module.cpyext.modsupport import convert_method_defs From pypy.commits at gmail.com Sat Feb 27 23:18:52 2016 From: pypy.commits at gmail.com (mattip) Date: Sat, 27 Feb 2016 20:18:52 -0800 (PST) Subject: [pypy-commit] pypy cpyext-ext: add a failing test Message-ID: <56d2752c.28acc20a.20e03.ffffa8c4@mx.google.com> Author: mattip Branch: cpyext-ext Changeset: r82594:fc01e33f1288 Date: 2016-02-27 20:56 -0500 http://bitbucket.org/pypy/pypy/changeset/fc01e33f1288/ Log: add a failing test diff --git a/pypy/module/cpyext/test/test_pyerrors.py b/pypy/module/cpyext/test/test_pyerrors.py --- a/pypy/module/cpyext/test/test_pyerrors.py +++ b/pypy/module/cpyext/test/test_pyerrors.py @@ -102,7 +102,15 @@ assert api.PyExceptionInstance_Class(instance) is space.w_ValueError def test_interrupt_occurred(self, space, api): - assert False # XXX test PyOS_InterruptOccurred + assert not api.PyOS_InterruptOccurred() + import signal, os + recieved = [] + def default_int_handler(*args): + recieved.append('ok') + signal.signal(signal.SIGINT, default_int_handler) + os.kill(os.getpid(), signal.SIGINT) + assert recieved == ['ok'] + assert api.PyOS_InterruptOccurred() class AppTestFetch(AppTestCpythonExtensionBase): From pypy.commits at gmail.com Sat Feb 27 23:18:56 2016 From: pypy.commits at gmail.com (mattip) Date: Sat, 27 Feb 2016 20:18:56 -0800 (PST) Subject: [pypy-commit] pypy cpyext-ext: add NotImplemented getsiice, setslice Message-ID: <56d27530.05e41c0a.304f6.4c0e@mx.google.com> Author: mattip Branch: cpyext-ext Changeset: r82595:67b8df2d7d8d Date: 2016-02-27 20:58 -0500 http://bitbucket.org/pypy/pypy/changeset/67b8df2d7d8d/ Log: add NotImplemented getsiice, setslice diff --git 
a/pypy/module/cpyext/sequence.py b/pypy/module/cpyext/sequence.py --- a/pypy/module/cpyext/sequence.py +++ b/pypy/module/cpyext/sequence.py @@ -254,6 +254,17 @@ storage = self.unerase(w_list.lstorage) return storage._elems + def getslice(self, w_list, start, stop, step, length): + #storage = self.unerase(w_list.lstorage) + raise oefmt(w_list.space.w_NotImplementedError, + "settting a slice of a PySequence_Fast is not supported") + + def setslice(self, w_list, start, stop, step, length): + #storage = self.unerase(w_list.lstorage) + raise oefmt(w_list.space.w_NotImplementedError, + "settting a slice of a PySequence_Fast is not supported") + + PyObjectList = lltype.Ptr(lltype.Array(PyObject, hints={'nolength': True})) class CPyListStorage(object): From pypy.commits at gmail.com Sat Feb 27 23:25:37 2016 From: pypy.commits at gmail.com (mattip) Date: Sat, 27 Feb 2016 20:25:37 -0800 (PST) Subject: [pypy-commit] buildbot default: add test runner, path-to-test to bottom of failure long representation Message-ID: <56d276c1.6507c20a.db17a.ffff91f1@mx.google.com> Author: mattip Branch: Changeset: r990:535a6e83f514 Date: 2016-02-26 07:43 +0100 http://bitbucket.org/pypy/buildbot/changeset/535a6e83f514/ Log: add test runner, path-to-test to bottom of failure long representation diff --git a/bot2/pypybuildbot/summary.py b/bot2/pypybuildbot/summary.py --- a/bot2/pypybuildbot/summary.py +++ b/bot2/pypybuildbot/summary.py @@ -602,6 +602,9 @@ return html.div([html.h2(self.getTitle(request)), html.pre(longrepr), + html.pre('builder: ' + builderName, + style='border-top:1px solid;'), + html.pre('test: ' + self.getTitle(request).replace('.','/')), py.xml.raw("" % outcome_set_cache.stats()) ]).unicode() diff --git a/bot2/pypybuildbot/test/log.txt b/bot2/pypybuildbot/test/log.txt new file mode 100644 --- /dev/null +++ b/bot2/pypybuildbot/test/log.txt @@ -0,0 +1,626 @@ +. pypy/module/pypyjit/test_pypy_c/test_shift.py::TestShift::()::test_revert_shift_allcases::[94] +. 
pypy/module/pypyjit/test_pypy_c/test_shift.py::TestShift::()::test_revert_shift_allcases::[95] +F pypy/module/pypyjit/test_pypy_c/test_string.py::TestString::()::test_lookup_default_encoding + self = + + def test_lookup_default_encoding(self): + def main(n): + import string + i = 0 + letters = string.letters + uletters = unicode(string.letters) + while i < n: + i += letters[i % len(letters)] == uletters[i % len(letters)] + return i + + log = self.run(main, [300], import_site=True) + assert log.result == 300 + loop, = log.loops_by_filename(self.filepath) + assert loop.match(""" + i14 = int_lt(i6, i9) + guard_true(i14, descr=...) + guard_not_invalidated(descr=...) + i16 = int_eq(i6, %d) + guard_false(i16, descr=...) + i15 = int_mod(i6, i10) + i17 = int_rshift(i15, %d) + i18 = int_and(i10, i17) + i19 = int_add(i15, i18) + i21 = int_lt(i19, 0) + guard_false(i21, descr=...) + i22 = int_ge(i19, i10) + guard_false(i22, descr=...) + i23 = strgetitem(p11, i19) + i24 = int_ge(i19, i12) + guard_false(i24, descr=...) + i25 = unicodegetitem(p13, i19) + p27 = newstr(1) + strsetitem(p27, 0, i23) + p30 = call_r(ConstClass(ll_str2unicode__rpy_stringPtr), p27, descr=...) + guard_no_exception(descr=...) + i32 = call_i(ConstClass(_ll_2_str_eq_checknull_char__rpy_unicodePtr_UniChar), p30, i25, descr=...) + guard_true(i32, descr=...) + i34 = int_add(i6, 1) + --TICK-- + jump(..., descr=...) + > """ % (-sys.maxint-1, SHIFT)) + + pypy/module/pypyjit/test_pypy_c/test_string.py:53: + _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ + + self = [[i2 = getfield_gc_i(p0, descr=), guard_not_invalidated(descr=), i74 = int_eq(i68, -9223372036854775808), i75 = int_mod(i68, i42), i76 = int_rshift(i75, 63), ...] 
+ matcher = + + def match(self, expected_src, **kwds): + ops = self.allops() + matcher = OpMatcher(ops) + > return matcher.match(expected_src, **kwds) + + pypy/module/pypyjit/test_pypy_c/model.py:186: + _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ + + self = + expected_src = '\n i14 = int_lt(i6, i9)\n guard_true(i14, descr=...)\n guard_not_invalidated(descr=....lt(ticker0, 0)\n guard_false(ticker_cond0, descr=...)\n \n jump(..., descr=...)\n ' + ignore_ops = [] + + def match(self, expected_src, ignore_ops=[]): + def format(src, opindex=None): + if src is None: + return '' + text = str(py.code.Source(src).deindent().indent()) + lines = text.splitlines(True) + if opindex is not None and 0 <= opindex <= len(lines): + lines.insert(opindex, '\n\t===== HERE =====\n') + return ''.join(lines) + # + expected_src = self.preprocess_expected_src(expected_src) + expected_ops = self.parse_ops(expected_src) + try: + > self.match_loop(expected_ops, ignore_ops) + + pypy/module/pypyjit/test_pypy_c/model.py:527: + _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ + + self = + expected_ops = [('int_lt', 'i14', ['i6', 'i9'], None, True), ('guard_true', None, ['i14'], '...', True), ('guard_not_invalidated', No...775808'], None, True), ('guard_false', None, ['i16'], '...', True), ('int_mod', 'i15', ['i6', 'i10'], None, True), ...] + ignore_ops = [] + + def match_loop(self, expected_ops, ignore_ops): + """ + A note about partial matching: the '...' operator is non-greedy, + i.e. it matches all the operations until it finds one that matches + what is after the '...'. The '{{{' and '}}}' operators mark a + group of lines that can match in any order. + """ + iter_exp_ops = iter(expected_ops) + iter_ops = RevertableIterator(self.ops) + for exp_op in iter_exp_ops: + try: + if exp_op == '...': + # loop until we find an operation which matches + try: + exp_op = iter_exp_ops.next() + except StopIteration: + # the ... 
is the last line in the expected_ops, so we just + # return because it matches everything until the end + return + op = self.match_until(exp_op, iter_ops) + elif exp_op == '{{{': + self.match_any_order(iter_exp_ops, iter_ops, ignore_ops) + continue + else: + op = self._next_op(iter_ops, ignore_ops=ignore_ops) + try: + > self.match_op(op, exp_op) + + pypy/module/pypyjit/test_pypy_c/model.py:503: + _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ + + self = + op = i75 = int_mod(i68, i42), .2 = ('guard_false', None, ['i16'], '...', True) + + def match_op(self, op, (exp_opname, exp_res, exp_args, exp_descr, _)): + if exp_opname == '--end--': + self._assert(op == '--end--', 'got more ops than expected') + return + self._assert(op != '--end--', 'got less ops than expected') + > self._assert(op.name == exp_opname, "operation mismatch") + + pypy/module/pypyjit/test_pypy_c/model.py:412: + _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ + + self = + cond = False, message = 'operation mismatch' + + def _assert(self, cond, message): + if not cond: + > raise InvalidMatch(message, frame=sys._getframe(1)) + E InvalidMatch: operation mismatch + + pypy/module/pypyjit/test_pypy_c/model.py:405: InvalidMatch + ---------- Captured stdout ---------- + @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@ + Loops don't match + ================= + loop id = None + ('operation mismatch',) + assert 'int_mod' == 'guard_false' + - int_mod + + guard_false + + Ignore ops: [] + Got: + i73 = int_lt(i68, i37) + guard_true(i73, descr=) + guard_not_invalidated(descr=) + i74 = int_eq(i68, -9223372036854775808) + + ===== HERE ===== + i75 = int_mod(i68, i42) + i76 = int_rshift(i75, 63) + i77 = int_and(i42, i76) + i78 = int_add(i75, i77) + i79 = int_lt(i78, 0) + guard_false(i79, descr=) + i80 = int_ge(i78, i42) + guard_false(i80, descr=) + i81 = strgetitem(p41, i78) + i82 = int_ge(i78, i57) + guard_false(i82, descr=) + i83 = unicodegetitem(p56, i78) + 
p85 = newstr(1) + strsetitem(p85, 0, i81) + p86 = call_r(ConstClass(ll_str2unicode__rpy_stringPtr), p85, descr=) + guard_no_exception(descr=) + i88 = call_i(ConstClass(_ll_2_str_eq_checknull_char__rpy_unicodePtr_UniChar), p86, i83, descr=) + guard_true(i88, descr=) + i89 = int_add(i68, 1) + i90 = getfield_raw_i(139918906561408, descr=) + i91 = int_lt(i90, 0) + guard_false(i91, descr=) + jump(p0, p1, p6, p7, p10, p12, i89, p16, p18, i37, i42, p41, i57, p56, descr=TargetToken(139918786249504)) + + Expected: + + i14 = int_lt(i6, i9) + guard_true(i14, descr=...) + guard_not_invalidated(descr=...) + i16 = int_eq(i6, -9223372036854775808) + guard_false(i16, descr=...) + i15 = int_mod(i6, i10) + i17 = int_rshift(i15, 63) + i18 = int_and(i10, i17) + i19 = int_add(i15, i18) + i21 = int_lt(i19, 0) + guard_false(i21, descr=...) + i22 = int_ge(i19, i10) + guard_false(i22, descr=...) + i23 = strgetitem(p11, i19) + i24 = int_ge(i19, i12) + guard_false(i24, descr=...) + i25 = unicodegetitem(p13, i19) + p27 = newstr(1) + strsetitem(p27, 0, i23) + p30 = call_r(ConstClass(ll_str2unicode__rpy_stringPtr), p27, descr=...) + guard_no_exception(descr=...) + i32 = call_i(ConstClass(_ll_2_str_eq_checknull_char__rpy_unicodePtr_UniChar), p30, i25, descr=...) + guard_true(i32, descr=...) + i34 = int_add(i6, 1) + + guard_not_invalidated? + ticker0 = getfield_raw_i(#, descr=) + ticker_cond0 = int_lt(ticker0, 0) + guard_false(ticker_cond0, descr=...) + + jump(..., descr=...) +F pypy/module/pypyjit/test_pypy_c/test_string.py::TestString::()::test_long + self = + + def test_long(self): + def main(n): + import string + i = 1 + while i < n: + i += int(long(string.digits[i % len(string.digits)], 16)) + return i + + log = self.run(main, [1100], import_site=True) + assert log.result == main(1100) + loop, = log.loops_by_filename(self.filepath) + assert loop.match(""" + i11 = int_lt(i6, i7) + guard_true(i11, descr=...) + guard_not_invalidated(descr=...) 
+ i13 = int_eq(i6, %d) # value provided below + guard_false(i13, descr=...) + i15 = int_mod(i6, 10) + i17 = int_rshift(i15, %d) # value provided below + i18 = int_and(10, i17) + i19 = int_add(i15, i18) + i21 = int_lt(i19, 0) + guard_false(i21, descr=...) + i22 = int_ge(i19, 10) + guard_false(i22, descr=...) + i23 = strgetitem(p10, i19) + p25 = newstr(1) + strsetitem(p25, 0, i23) + p93 = call_r(ConstClass(fromstr), p25, 16, descr=) + guard_no_exception(descr=...) + i95 = getfield_gc_i(p93, descr=) + i96 = int_gt(i95, #) + guard_false(i96, descr=...) + i94 = call_i(ConstClass(rbigint._toint_helper), p93, descr=) + guard_no_exception(descr=...) + i95 = int_add_ovf(i6, i94) + guard_no_overflow(descr=...) + --TICK-- + jump(..., descr=...) + > """ % (-sys.maxint-1, SHIFT)) + + pypy/module/pypyjit/test_pypy_c/test_string.py:94: + _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ + + self = [[i2 = getfield_gc_i(p0, descr=), guard_not_invalidated(descr=), i75 = int_eq(i69, -9223372036854775808), i76 = int_mod(i69, 10), i77 = int_rshift(i76, 63), ...] 
+ matcher = + + def match(self, expected_src, **kwds): + ops = self.allops() + matcher = OpMatcher(ops) + > return matcher.match(expected_src, **kwds) + + pypy/module/pypyjit/test_pypy_c/model.py:186: + _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ + + self = + expected_src = '\n i11 = int_lt(i6, i7)\n guard_true(i11, descr=...)\n guard_not_invalidated(descr=....lt(ticker0, 0)\n guard_false(ticker_cond0, descr=...)\n \n jump(..., descr=...)\n ' + ignore_ops = [] + + def match(self, expected_src, ignore_ops=[]): + def format(src, opindex=None): + if src is None: + return '' + text = str(py.code.Source(src).deindent().indent()) + lines = text.splitlines(True) + if opindex is not None and 0 <= opindex <= len(lines): + lines.insert(opindex, '\n\t===== HERE =====\n') + return ''.join(lines) + # + expected_src = self.preprocess_expected_src(expected_src) + expected_ops = self.parse_ops(expected_src) + try: + > self.match_loop(expected_ops, ignore_ops) + + pypy/module/pypyjit/test_pypy_c/model.py:527: + _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ + + self = + expected_ops = [('int_lt', 'i11', ['i6', 'i7'], None, True), ('guard_true', None, ['i11'], '...', True), ('guard_not_invalidated', No...4775808'], None, True), ('guard_false', None, ['i13'], '...', True), ('int_mod', 'i15', ['i6', '10'], None, True), ...] + ignore_ops = [] + + def match_loop(self, expected_ops, ignore_ops): + """ + A note about partial matching: the '...' operator is non-greedy, + i.e. it matches all the operations until it finds one that matches + what is after the '...'. The '{{{' and '}}}' operators mark a + group of lines that can match in any order. + """ + iter_exp_ops = iter(expected_ops) + iter_ops = RevertableIterator(self.ops) + for exp_op in iter_exp_ops: + try: + if exp_op == '...': + # loop until we find an operation which matches + try: + exp_op = iter_exp_ops.next() + except StopIteration: + # the ... 
is the last line in the expected_ops, so we just + # return because it matches everything until the end + return + op = self.match_until(exp_op, iter_ops) + elif exp_op == '{{{': + self.match_any_order(iter_exp_ops, iter_ops, ignore_ops) + continue + else: + op = self._next_op(iter_ops, ignore_ops=ignore_ops) + try: + > self.match_op(op, exp_op) + + pypy/module/pypyjit/test_pypy_c/model.py:503: + _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ + + self = + op = i76 = int_mod(i69, 10), .2 = ('guard_false', None, ['i13'], '...', True) + + def match_op(self, op, (exp_opname, exp_res, exp_args, exp_descr, _)): + if exp_opname == '--end--': + self._assert(op == '--end--', 'got more ops than expected') + return + self._assert(op != '--end--', 'got less ops than expected') + > self._assert(op.name == exp_opname, "operation mismatch") + + pypy/module/pypyjit/test_pypy_c/model.py:412: + _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ + + self = + cond = False, message = 'operation mismatch' + + def _assert(self, cond, message): + if not cond: + > raise InvalidMatch(message, frame=sys._getframe(1)) + E InvalidMatch: operation mismatch + + pypy/module/pypyjit/test_pypy_c/model.py:405: InvalidMatch + ---------- Captured stdout ---------- + @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@ + Loops don't match + ================= + loop id = None + ('operation mismatch',) + assert 'int_mod' == 'guard_false' + - int_mod + + guard_false + + Ignore ops: [] + Got: + i74 = int_lt(i69, i35) + guard_true(i74, descr=) + guard_not_invalidated(descr=) + i75 = int_eq(i69, -9223372036854775808) + + ===== HERE ===== + i76 = int_mod(i69, 10) + i77 = int_rshift(i76, 63) + i78 = int_and(10, i77) + i79 = int_add(i76, i78) + i80 = int_lt(i79, 0) + guard_false(i80, descr=) + i81 = int_ge(i79, 10) + guard_false(i81, descr=) + i82 = strgetitem(ConstPtr(ptr55), i79) + p84 = newstr(1) + strsetitem(p84, 0, i82) + p85 = call_r(ConstClass(fromstr), 
p84, 16, descr=) + guard_no_exception(descr=) + i86 = getfield_gc_i(p85, descr=) + i87 = int_gt(i86, 2) + guard_false(i87, descr=) + i88 = call_i(ConstClass(rbigint._toint_helper), p85, descr=) + guard_no_exception(descr=) + i89 = int_add_ovf(i69, i88) + guard_no_overflow(descr=) + i90 = getfield_raw_i(139865325286272, descr=) + i91 = int_lt(i90, 0) + guard_false(i91, descr=) + jump(p0, p1, p6, p7, p10, p12, i89, i35, descr=TargetToken(139865204882848)) + + Expected: + + i11 = int_lt(i6, i7) + guard_true(i11, descr=...) + guard_not_invalidated(descr=...) + i13 = int_eq(i6, -9223372036854775808) # value provided below + guard_false(i13, descr=...) + i15 = int_mod(i6, 10) + i17 = int_rshift(i15, 63) # value provided below + i18 = int_and(10, i17) + i19 = int_add(i15, i18) + i21 = int_lt(i19, 0) + guard_false(i21, descr=...) + i22 = int_ge(i19, 10) + guard_false(i22, descr=...) + i23 = strgetitem(p10, i19) + p25 = newstr(1) + strsetitem(p25, 0, i23) + p93 = call_r(ConstClass(fromstr), p25, 16, descr=) + guard_no_exception(descr=...) + i95 = getfield_gc_i(p93, descr=) + i96 = int_gt(i95, #) + guard_false(i96, descr=...) + i94 = call_i(ConstClass(rbigint._toint_helper), p93, descr=) + guard_no_exception(descr=...) + i95 = int_add_ovf(i6, i94) + guard_no_overflow(descr=...) + + guard_not_invalidated? + ticker0 = getfield_raw_i(#, descr=) + ticker_cond0 = int_lt(ticker0, 0) + guard_false(ticker_cond0, descr=...) + + jump(..., descr=...) +. 
pypy/module/pypyjit/test_pypy_c/test_string.py::TestString::()::test_str_mod +F pypy/module/pypyjit/test_pypy_c/test_string.py::TestString::()::test_getattr_promote + self = + + def test_getattr_promote(self): + def main(n): + class A(object): + def meth_a(self): + return 1 + def meth_b(self): + return 2 + a = A() + + l = ['a', 'b'] + s = 0 + for i in range(n): + name = 'meth_' + l[i & 1] + meth = getattr(a, name) # ID: getattr + s += meth() + return s + + log = self.run(main, [1000]) + assert log.result == main(1000) + loops = log.loops_by_filename(self.filepath) + assert len(loops) == 1 + for loop in loops: + > assert loop.match_by_id('getattr',''' + guard_not_invalidated? + i32 = strlen(p31) + i34 = int_add(5, i32) + p35 = newstr(i34) + strsetitem(p35, 0, 109) + strsetitem(p35, 1, 101) + strsetitem(p35, 2, 116) + strsetitem(p35, 3, 104) + strsetitem(p35, 4, 95) + copystrcontent(p31, p35, 0, 5, i32) + i49 = call_i(ConstClass(_ll_2_str_eq_nonnull__rpy_stringPtr_rpy_stringPtr), p35, ConstPtr(ptr48), descr=) + guard_value(i49, 1, descr=...) + ''') + + pypy/module/pypyjit/test_pypy_c/test_string.py:169: + _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ + + self = [[i2 = getfield_gc_i(p0, descr=), i121 = strlen(p120), i123 = int_add(5, i121), p124 = newstr(i123), strsetitem(p124, 0, 109), strsetitem(p124, 1, 101), ...] 
+ matcher = + + def match_by_id(self, id, expected_src, ignore_ops=[], **kwds): + ops = list(self.ops_by_id(id, **kwds)) + matcher = OpMatcher(ops, id) + > return matcher.match(expected_src, ignore_ops=ignore_ops) + + pypy/module/pypyjit/test_pypy_c/model.py:191: + _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ + + self = + expected_src = '\n guard_not_invalidated?\n i32 = strlen(p31)\n i34 = int_add(5, i32)\n p...tr), p35, ConstPtr(ptr48), descr=)\n guard_value(i49, 1, descr=...)\n ' + ignore_ops = [] + + def match(self, expected_src, ignore_ops=[]): + def format(src, opindex=None): + if src is None: + return '' + text = str(py.code.Source(src).deindent().indent()) + lines = text.splitlines(True) + if opindex is not None and 0 <= opindex <= len(lines): + lines.insert(opindex, '\n\t===== HERE =====\n') + return ''.join(lines) + # + expected_src = self.preprocess_expected_src(expected_src) + expected_ops = self.parse_ops(expected_src) + try: + > self.match_loop(expected_ops, ignore_ops) + + pypy/module/pypyjit/test_pypy_c/model.py:527: + _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ + + self = + expected_ops = [('guard_not_invalidated', None, [], '...', False), ('strlen', 'i32', ['p31'], None, True), ('int_add', 'i34', ['5', '...ue), ('strsetitem', None, ['p35', '0', '109'], None, True), ('strsetitem', None, ['p35', '1', '101'], None, True), ...] + ignore_ops = [] + + def match_loop(self, expected_ops, ignore_ops): + """ + A note about partial matching: the '...' operator is non-greedy, + i.e. it matches all the operations until it finds one that matches + what is after the '...'. The '{{{' and '}}}' operators mark a + group of lines that can match in any order. 
+ """ + iter_exp_ops = iter(expected_ops) + iter_ops = RevertableIterator(self.ops) + for exp_op in iter_exp_ops: + try: + if exp_op == '...': + # loop until we find an operation which matches + try: + exp_op = iter_exp_ops.next() + except StopIteration: + # the ... is the last line in the expected_ops, so we just + # return because it matches everything until the end + return + op = self.match_until(exp_op, iter_ops) + elif exp_op == '{{{': + self.match_any_order(iter_exp_ops, iter_ops, ignore_ops) + continue + else: + op = self._next_op(iter_ops, ignore_ops=ignore_ops) + try: + > self.match_op(op, exp_op) + + pypy/module/pypyjit/test_pypy_c/model.py:503: + _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ + + self = + op = p134 = call_r(ConstClass(W_TypeObject._pure_lookup_where_with_method_cache), ConstPtr(ptr80), p124, ConstPtr(ptr81), descr=) + .2 = ('call_i', 'i49', ['ConstClass(_ll_2_str_eq_nonnull__rpy_stringPtr_rpy_stringPtr)', 'p35', 'ConstPtr(ptr48)'], '', True) + + def match_op(self, op, (exp_opname, exp_res, exp_args, exp_descr, _)): + if exp_opname == '--end--': + self._assert(op == '--end--', 'got more ops than expected') + return + self._assert(op != '--end--', 'got less ops than expected') + > self._assert(op.name == exp_opname, "operation mismatch") + + pypy/module/pypyjit/test_pypy_c/model.py:412: + _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ + + self = + cond = False, message = 'operation mismatch' + + def _assert(self, cond, message): + if not cond: + > raise InvalidMatch(message, frame=sys._getframe(1)) + E InvalidMatch: operation mismatch + + pypy/module/pypyjit/test_pypy_c/model.py:405: InvalidMatch + ---------- Captured stdout ---------- + @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@ + Loops don't match + ================= + loop id = 'getattr' + ('operation mismatch',) + assert 'call_r' == 'call_i' + - call_r + ? ^ + + call_i + ? 
^ + + Ignore ops: [] + Got: + guard_not_invalidated(descr=) + i121 = strlen(p120) + i123 = int_add(5, i121) + p124 = newstr(i123) + strsetitem(p124, 0, 109) + strsetitem(p124, 1, 101) + strsetitem(p124, 2, 116) + strsetitem(p124, 3, 104) + strsetitem(p124, 4, 95) + copystrcontent(p120, p124, 0, 5, i121) + + ===== HERE ===== + p134 = call_r(ConstClass(W_TypeObject._pure_lookup_where_with_method_cache), ConstPtr(ptr80), p124, ConstPtr(ptr81), descr=) + guard_no_exception(descr=) + p135 = getfield_gc_r(p134, descr=) + p136 = getfield_gc_r(p134, descr=) + guard_nonnull_class(p136, ConstClass(Function), descr=) + p137 = call_r(ConstClass(AbstractAttribute._find_map_attr_jit_pure), ConstPtr(ptr63), p124, 0, descr=) + guard_isnull(p137, descr=) + + Expected: + + guard_not_invalidated? + i32 = strlen(p31) + i34 = int_add(5, i32) + p35 = newstr(i34) + strsetitem(p35, 0, 109) + strsetitem(p35, 1, 101) + strsetitem(p35, 2, 116) + strsetitem(p35, 3, 104) + strsetitem(p35, 4, 95) + copystrcontent(p31, p35, 0, 5, i32) + i49 = call_i(ConstClass(_ll_2_str_eq_nonnull__rpy_stringPtr_rpy_stringPtr), p35, ConstPtr(ptr48), descr=) + guard_value(i49, 1, descr=...) +. 
pypy/module/pypyjit/test_pypy_c/test_string.py::TestString::()::test_remove_duplicate_method_calls diff --git a/bot2/pypybuildbot/test/test_summary.py b/bot2/pypybuildbot/test/test_summary.py --- a/bot2/pypybuildbot/test/test_summary.py +++ b/bot2/pypybuildbot/test/test_summary.py @@ -5,7 +5,7 @@ from buildbot.process import factory as process_factory from pypybuildbot import summary from StringIO import StringIO -import re, time +import re, time, os class TestOutcomes(object): @@ -855,3 +855,28 @@ assert '(03 Dec..05 Dec)' in out # pruning of builds older than 7 days assert '(29 Nov)' not in out + + def test_fail_body(self): + builder = status_builder.BuilderStatus('builder0', None, self.master, '') + with open(os.path.dirname(__file__) + '/log.txt') as fid: + log = fid.read() + add_builds(builder, [(60000, log)]) + #fail = list(rev_outcome_set.failed)[0] + + + req = FakeRequest([builder], { + 'builder': ['builder0'], + 'build': [0], + 'mod': [0], + }) + longrepr = summary.LongRepr() + outcome_set = summary.outcome_set_cache.get( + longrepr.getStatus(req), + ('builder0', 0)) + key = list(outcome_set.failed)[0] + req.args['mod'] = [key[0]] + req.args['testname'] = [key[1]] + out = longrepr.body(req) + print out + assert False + From pypy.commits at gmail.com Sun Feb 28 05:37:17 2016 From: pypy.commits at gmail.com (arigo) Date: Sun, 28 Feb 2016 02:37:17 -0800 (PST) Subject: [pypy-commit] pypy.org extradoc: update the values Message-ID: <56d2cddd.c711c30a.329b7.ffffe6af@mx.google.com> Author: Armin Rigo Branch: extradoc Changeset: r710:8da4f4c42178 Date: 2016-02-28 11:37 +0100 http://bitbucket.org/pypy/pypy.org/changeset/8da4f4c42178/ Log: update the values diff --git a/don1.html b/don1.html --- a/don1.html +++ b/don1.html @@ -15,7 +15,7 @@ - $62926 of $105000 (59.9%) + $62936 of $105000 (59.9%)
    @@ -23,7 +23,7 @@
  • From pypy.commits at gmail.com Sun Feb 28 09:24:40 2016 From: pypy.commits at gmail.com (mjacob) Date: Sun, 28 Feb 2016 06:24:40 -0800 (PST) Subject: [pypy-commit] pypy llvm-translation-backend: Kill GCPolicy's get_gc_fields_lltype() / get_gc_fields() methods. Message-ID: <56d30328.c3e01c0a.40ffb.ffffe76f@mx.google.com> Author: Manuel Jacob Branch: llvm-translation-backend Changeset: r82596:2ebe1593da04 Date: 2016-02-28 13:16 +0100 http://bitbucket.org/pypy/pypy/changeset/2ebe1593da04/ Log: Kill GCPolicy's get_gc_fields_lltype() / get_gc_fields() methods. diff --git a/rpython/translator/llvm/genllvm.py b/rpython/translator/llvm/genllvm.py --- a/rpython/translator/llvm/genllvm.py +++ b/rpython/translator/llvm/genllvm.py @@ -453,7 +453,8 @@ self.needs_gc_header = needs_gc_header fields = list(fields) if needs_gc_header: - fields = database.genllvm.gcpolicy.get_gc_fields() + fields + header_type = database.genllvm.gcpolicy.gctransformer.HDR + fields.insert(0, (database.get_type(header_type), '_gc_header')) elif all(t is LLVMVoid for t, f in fields): fields.append((LLVMSigned, '_fill')) self.fields = fields @@ -799,8 +800,8 @@ self.types[type] = ret = class_() ret.setup_from_lltype(self, type) if ret.needs_gc_header: - _llvm_needs_header[type] = database.genllvm.gcpolicy \ - .get_gc_fields_lltype() # hint for ll2ctypes + gctransformer = database.genllvm.gcpolicy.gctransformer + _llvm_needs_header[type] = [(gctransformer.HDR, '_gc_header')] ret.lltype = type return ret @@ -1645,12 +1646,6 @@ def get_setup_ptr(self): return None - def get_gc_fields_lltype(self): - return [(self.gctransformer.HDR, '_gc_header')] - - def get_gc_fields(self): - return [(database.get_type(self.gctransformer.HDR), '_gc_header')] - def finish(self): genllvm = self.genllvm while self.delayed_ptrs: From pypy.commits at gmail.com Sun Feb 28 09:24:44 2016 From: pypy.commits at gmail.com (mjacob) Date: Sun, 28 Feb 2016 06:24:44 -0800 (PST) Subject: [pypy-commit] pypy llvm-translation-backend: 
hg merge default Message-ID: <56d3032c.c96cc20a.f363c.1f33@mx.google.com> Author: Manuel Jacob Branch: llvm-translation-backend Changeset: r82598:5efe50af086f Date: 2016-02-28 13:51 +0100 http://bitbucket.org/pypy/pypy/changeset/5efe50af086f/ Log: hg merge default diff --git a/pypy/config/pypyoption.py b/pypy/config/pypyoption.py --- a/pypy/config/pypyoption.py +++ b/pypy/config/pypyoption.py @@ -170,12 +170,8 @@ cmdline="--translationmodules", suggests=[("objspace.allworkingmodules", False)]), - BoolOption("usepycfiles", "Write and read pyc files when importing", - default=True), - BoolOption("lonepycfiles", "Import pyc files with no matching py file", - default=False, - requires=[("objspace.usepycfiles", True)]), + default=False), StrOption("soabi", "Tag to differentiate extension modules built for different Python interpreters", diff --git a/pypy/doc/how-to-release.rst b/pypy/doc/how-to-release.rst --- a/pypy/doc/how-to-release.rst +++ b/pypy/doc/how-to-release.rst @@ -1,5 +1,20 @@ -Making a PyPy Release -===================== +The PyPy Release Process +======================== + +Release Policy +++++++++++++++ + +We try to create a stable release a few times a year. These are released on +a branch named like release-2.x or release-4.x, and each release is tagged, +for instance release-4.0.1. + +After release, inevitably there are bug fixes. It is the responsibility of +the commiter who fixes a bug to make sure this fix is on the release branch, +so that we can then create a tagged bug-fix release, which will hopefully +happen more often than stable releases. + +How to Create a PyPy Release +++++++++++++++++++++++++++++ Overview -------- diff --git a/pypy/doc/whatsnew-head.rst b/pypy/doc/whatsnew-head.rst --- a/pypy/doc/whatsnew-head.rst +++ b/pypy/doc/whatsnew-head.rst @@ -170,4 +170,17 @@ When creating instances and adding attributes in several different orders depending on some condition, the JIT would create too much code. This is now -fixed. 
\ No newline at end of file +fixed. + +.. branch: cpyext-gc-support-2 + +Improve CPython C API support, which means lxml now runs unmodified +(after removing pypy hacks, pending pull request) + +.. branch: look-inside-tuple-hash + +Look inside tuple hash, improving mdp benchmark + +.. branch: vlen-resume + +Compress resume data, saving 10-20% of memory consumed by the JIT \ No newline at end of file diff --git a/pypy/goal/targetpypystandalone.py b/pypy/goal/targetpypystandalone.py --- a/pypy/goal/targetpypystandalone.py +++ b/pypy/goal/targetpypystandalone.py @@ -277,7 +277,6 @@ if config.translation.sandbox: config.objspace.lonepycfiles = False - config.objspace.usepycfiles = False config.translating = True diff --git a/pypy/module/_vmprof/test/test__vmprof.py b/pypy/module/_vmprof/test/test__vmprof.py --- a/pypy/module/_vmprof/test/test__vmprof.py +++ b/pypy/module/_vmprof/test/test__vmprof.py @@ -5,14 +5,15 @@ class AppTestVMProf(object): def setup_class(cls): cls.space = gettestobjspace(usemodules=['_vmprof', 'struct']) - cls.tmpfile = udir.join('test__vmprof.1').open('wb') - cls.w_tmpfileno = cls.space.wrap(cls.tmpfile.fileno()) - cls.w_tmpfilename = cls.space.wrap(cls.tmpfile.name) - cls.tmpfile2 = udir.join('test__vmprof.2').open('wb') - cls.w_tmpfileno2 = cls.space.wrap(cls.tmpfile2.fileno()) - cls.w_tmpfilename2 = cls.space.wrap(cls.tmpfile2.name) + cls.w_tmpfilename = cls.space.wrap(str(udir.join('test__vmprof.1'))) + cls.w_tmpfilename2 = cls.space.wrap(str(udir.join('test__vmprof.2'))) def test_import_vmprof(self): + tmpfile = open(self.tmpfilename, 'wb') + tmpfileno = tmpfile.fileno() + tmpfile2 = open(self.tmpfilename2, 'wb') + tmpfileno2 = tmpfile2.fileno() + import struct, sys WORD = struct.calcsize('l') @@ -45,7 +46,7 @@ return count import _vmprof - _vmprof.enable(self.tmpfileno, 0.01) + _vmprof.enable(tmpfileno, 0.01) _vmprof.disable() s = open(self.tmpfilename, 'rb').read() no_of_codes = count(s) @@ -56,7 +57,7 @@ pass """ in d - 
_vmprof.enable(self.tmpfileno2, 0.01) + _vmprof.enable(tmpfileno2, 0.01) exec """def foo2(): pass diff --git a/pypy/module/imp/importing.py b/pypy/module/imp/importing.py --- a/pypy/module/imp/importing.py +++ b/pypy/module/imp/importing.py @@ -85,7 +85,7 @@ # The "imp" module does not respect this, and is allowed to find # lone .pyc files. # check the .pyc file - if space.config.objspace.usepycfiles and space.config.objspace.lonepycfiles: + if space.config.objspace.lonepycfiles: pycfile = filepart + ".pyc" if file_exists(pycfile): # existing .pyc file @@ -888,17 +888,11 @@ """ w = space.wrap - if space.config.objspace.usepycfiles: - src_stat = os.fstat(fd) - cpathname = pathname + 'c' - mtime = int(src_stat[stat.ST_MTIME]) - mode = src_stat[stat.ST_MODE] - stream = check_compiled_module(space, cpathname, mtime) - else: - cpathname = None - mtime = 0 - mode = 0 - stream = None + src_stat = os.fstat(fd) + cpathname = pathname + 'c' + mtime = int(src_stat[stat.ST_MTIME]) + mode = src_stat[stat.ST_MODE] + stream = check_compiled_module(space, cpathname, mtime) if stream: # existing and up-to-date .pyc file @@ -913,7 +907,7 @@ else: code_w = parse_source_module(space, pathname, source) - if space.config.objspace.usepycfiles and write_pyc: + if write_pyc: if not space.is_true(space.sys.get('dont_write_bytecode')): write_compiled_module(space, code_w, cpathname, mode, mtime) diff --git a/pypy/module/imp/test/test_import.py b/pypy/module/imp/test/test_import.py --- a/pypy/module/imp/test/test_import.py +++ b/pypy/module/imp/test/test_import.py @@ -98,6 +98,10 @@ 'a=5\nb=6\rc="""hello\r\nworld"""\r', mode='wb') p.join('mod.py').write( 'a=15\nb=16\rc="""foo\r\nbar"""\r', mode='wb') + setuppkg("test_bytecode", + a = '', + b = '', + c = '') # create compiled/x.py and a corresponding pyc file p = setuppkg("compiled", x = "x = 84") @@ -119,7 +123,7 @@ stream.try_to_find_file_descriptor()) finally: stream.close() - if space.config.objspace.usepycfiles: + if not 
space.config.translation.sandbox: # also create a lone .pyc file p.join('lone.pyc').write(p.join('x.pyc').read(mode='rb'), mode='wb') @@ -146,6 +150,8 @@ """) def _teardown(space, w_saved_modules): + p = udir.join('impsubdir') + p.remove() space.appexec([w_saved_modules], """ ((saved_path, saved_modules)): import sys @@ -646,11 +652,13 @@ # one in sys.path. import sys assert '_md5' not in sys.modules - import _md5 - assert hasattr(_md5, 'hello_world') - assert not hasattr(_md5, 'count') - assert '(built-in)' not in repr(_md5) - del sys.modules['_md5'] + try: + import _md5 + assert hasattr(_md5, 'hello_world') + assert not hasattr(_md5, 'digest_size') + assert '(built-in)' not in repr(_md5) + finally: + sys.modules.pop('_md5', None) def test_shadow_extension_2(self): if self.runappdirect: skip("hard to test: module is already imported") @@ -669,7 +677,7 @@ assert '(built-in)' in repr(_md5) finally: sys.path.insert(0, sys.path.pop()) - del sys.modules['_md5'] + sys.modules.pop('_md5', None) def test_invalid_pathname(self): import imp @@ -1342,15 +1350,56 @@ assert isinstance(importer, zipimport.zipimporter) -class AppTestNoPycFile(object): +class AppTestWriteBytecode(object): spaceconfig = { - "objspace.usepycfiles": False, - "objspace.lonepycfiles": False + "translation.sandbox": False } + def setup_class(cls): - usepycfiles = cls.spaceconfig['objspace.usepycfiles'] + cls.saved_modules = _setup(cls.space) + sandbox = cls.spaceconfig['translation.sandbox'] + cls.w_sandbox = cls.space.wrap(sandbox) + + def teardown_class(cls): + _teardown(cls.space, cls.saved_modules) + cls.space.appexec([], """ + (): + import sys + sys.dont_write_bytecode = False + """) + + def test_default(self): + import os.path + from test_bytecode import a + assert a.__file__.endswith('a.py') + assert os.path.exists(a.__file__ + 'c') == (not self.sandbox) + + def test_write_bytecode(self): + import os.path + import sys + sys.dont_write_bytecode = False + from test_bytecode import b + assert 
b.__file__.endswith('b.py') + assert os.path.exists(b.__file__ + 'c') + + def test_dont_write_bytecode(self): + import os.path + import sys + sys.dont_write_bytecode = True + from test_bytecode import c + assert c.__file__.endswith('c.py') + assert not os.path.exists(c.__file__ + 'c') + + +class AppTestWriteBytecodeSandbox(AppTestWriteBytecode): + spaceconfig = { + "translation.sandbox": True + } + + +class _AppTestLonePycFileBase(object): + def setup_class(cls): lonepycfiles = cls.spaceconfig['objspace.lonepycfiles'] - cls.w_usepycfiles = cls.space.wrap(usepycfiles) cls.w_lonepycfiles = cls.space.wrap(lonepycfiles) cls.saved_modules = _setup(cls.space) @@ -1359,10 +1408,7 @@ def test_import_possibly_from_pyc(self): from compiled import x - if self.usepycfiles: - assert x.__file__.endswith('x.pyc') - else: - assert x.__file__.endswith('x.py') + assert x.__file__.endswith('x.pyc') try: from compiled import lone except ImportError: @@ -1371,15 +1417,13 @@ assert self.lonepycfiles, "should not have found 'lone.pyc'" assert lone.__file__.endswith('lone.pyc') -class AppTestNoLonePycFile(AppTestNoPycFile): +class AppTestNoLonePycFile(_AppTestLonePycFileBase): spaceconfig = { - "objspace.usepycfiles": True, "objspace.lonepycfiles": False } -class AppTestLonePycFile(AppTestNoPycFile): +class AppTestLonePycFile(_AppTestLonePycFileBase): spaceconfig = { - "objspace.usepycfiles": True, "objspace.lonepycfiles": True } diff --git a/pypy/module/sys/__init__.py b/pypy/module/sys/__init__.py --- a/pypy/module/sys/__init__.py +++ b/pypy/module/sys/__init__.py @@ -77,7 +77,7 @@ 'meta_path' : 'space.wrap([])', 'path_hooks' : 'space.wrap([])', 'path_importer_cache' : 'space.wrap({})', - 'dont_write_bytecode' : 'space.w_False', + 'dont_write_bytecode' : 'space.wrap(space.config.translation.sandbox)', 'getdefaultencoding' : 'interp_encoding.getdefaultencoding', 'setdefaultencoding' : 'interp_encoding.setdefaultencoding', diff --git a/pypy/module/thread/test/test_thread.py 
b/pypy/module/thread/test/test_thread.py --- a/pypy/module/thread/test/test_thread.py +++ b/pypy/module/thread/test/test_thread.py @@ -239,14 +239,12 @@ if waiting: thread.interrupt_main() return - print 'tock...', x # <-force the GIL to be released, as - time.sleep(0.1) # time.sleep doesn't do non-translated + time.sleep(0.1) def busy_wait(): waiting.append(None) for x in range(50): - print 'tick...', x # <-force the GIL to be released, as - time.sleep(0.1) # time.sleep doesn't do non-translated + time.sleep(0.1) waiting.pop() # This is normally called by app_main.py diff --git a/pypy/module/time/interp_time.py b/pypy/module/time/interp_time.py --- a/pypy/module/time/interp_time.py +++ b/pypy/module/time/interp_time.py @@ -4,7 +4,7 @@ from pypy.interpreter.gateway import unwrap_spec from rpython.rtyper.lltypesystem import lltype from rpython.rlib.rarithmetic import intmask -from rpython.rlib import rposix +from rpython.rlib import rposix, rtime from rpython.translator.tool.cbuild import ExternalCompilationInfo import os import sys @@ -316,13 +316,13 @@ if secs < 0: raise OperationError(space.w_IOError, space.wrap("Invalid argument: negative time in sleep")) - pytime.sleep(secs) + rtime.sleep(secs) else: from rpython.rlib import rwin32 from errno import EINTR def _simple_sleep(space, secs, interruptible): if secs == 0.0 or not interruptible: - pytime.sleep(secs) + rtime.sleep(secs) else: millisecs = int(secs * 1000) interrupt_event = space.fromcache(State).get_interrupt_event() @@ -331,7 +331,7 @@ if rc == rwin32.WAIT_OBJECT_0: # Yield to make sure real Python signal handler # called. 
- pytime.sleep(0.001) + rtime.sleep(0.001) raise wrap_oserror(space, OSError(EINTR, "sleep() interrupted")) @unwrap_spec(secs=float) diff --git a/pypy/objspace/std/mapdict.py b/pypy/objspace/std/mapdict.py --- a/pypy/objspace/std/mapdict.py +++ b/pypy/objspace/std/mapdict.py @@ -65,21 +65,8 @@ def delete(self, obj, name, index): pass + @jit.elidable def find_map_attr(self, name, index): - if jit.we_are_jitted(): - # hack for the jit: - # the _find_map_attr method is pure too, but its argument is never - # constant, because it is always a new tuple - return self._find_map_attr_jit_pure(name, index) - else: - return self._find_map_attr_indirection(name, index) - - @jit.elidable - def _find_map_attr_jit_pure(self, name, index): - return self._find_map_attr_indirection(name, index) - - @jit.dont_look_inside - def _find_map_attr_indirection(self, name, index): if (self.space.config.objspace.std.withmethodcache): return self._find_map_attr_cache(name, index) return self._find_map_attr(name, index) diff --git a/pypy/tool/test/test_tab.py b/pypy/tool/test/test_tab.py --- a/pypy/tool/test/test_tab.py +++ b/pypy/tool/test/test_tab.py @@ -6,6 +6,7 @@ from pypy.conftest import pypydir ROOT = os.path.abspath(os.path.join(pypydir, '..')) +RPYTHONDIR = os.path.join(ROOT, "rpython") EXCLUDE = {'/virt_test/lib/python2.7/site-packages/setuptools'} @@ -28,3 +29,27 @@ if not entry.startswith('.'): walk('%s/%s' % (reldir, entry)) walk('') + +def test_no_pypy_import_in_rpython(): + def walk(reldir): + print reldir + if reldir: + path = os.path.join(RPYTHONDIR, *reldir.split('/')) + else: + path = RPYTHONDIR + if os.path.isfile(path): + if not path.lower().endswith('.py'): + return + with file(path) as f: + for line in f: + if "import" not in line: + continue + assert "from pypy." not in line + assert "import pypy." 
not in line + elif os.path.isdir(path) and not os.path.islink(path): + for entry in os.listdir(path): + if not entry.startswith('.'): + walk('%s/%s' % (reldir, entry)) + + walk('') + diff --git a/rpython/jit/metainterp/test/test_jitdriver.py b/rpython/jit/metainterp/test/test_jitdriver.py --- a/rpython/jit/metainterp/test/test_jitdriver.py +++ b/rpython/jit/metainterp/test/test_jitdriver.py @@ -227,7 +227,7 @@ i += 1 self.meta_interp(f, [0]) - self.check_resops(enter_portal_frame=1, leave_portal_frame=1) + self.check_simple_loop(enter_portal_frame=1, leave_portal_frame=1) class TestLLtype(MultipleJitDriversTests, LLJitMixin): pass diff --git a/rpython/memory/gctransform/boehm.py b/rpython/memory/gctransform/boehm.py --- a/rpython/memory/gctransform/boehm.py +++ b/rpython/memory/gctransform/boehm.py @@ -156,9 +156,9 @@ resulttype = lltype.Signed) hop.genop('int_invert', [v_int], resultvar=hop.spaceop.result) - def gcheader_initdata(self, defnode): + def gcheader_initdata(self, obj): hdr = lltype.malloc(self.HDR, immortal=True) - hdr.hash = lltype.identityhash_nocache(defnode.obj._as_ptr()) + hdr.hash = lltype.identityhash_nocache(obj._as_ptr()) return hdr._obj diff --git a/rpython/memory/gctransform/framework.py b/rpython/memory/gctransform/framework.py --- a/rpython/memory/gctransform/framework.py +++ b/rpython/memory/gctransform/framework.py @@ -1479,8 +1479,8 @@ resulttype=llmemory.Address) llops.genop('raw_memclear', [v_adr, v_totalsize]) - def gcheader_initdata(self, defnode): - o = lltype.top_container(defnode.obj) + def gcheader_initdata(self, obj): + o = lltype.top_container(obj) needs_hash = self.get_prebuilt_hash(o) is not None hdr = self.gc_header_for(o, needs_hash) return hdr._obj diff --git a/rpython/memory/gctransform/refcounting.py b/rpython/memory/gctransform/refcounting.py --- a/rpython/memory/gctransform/refcounting.py +++ b/rpython/memory/gctransform/refcounting.py @@ -286,8 +286,8 @@ hop.genop("direct_call", [self.identityhash_ptr, v_adr], 
resultvar=hop.spaceop.result) - def gcheader_initdata(self, defnode): - top = lltype.top_container(defnode.obj) + def gcheader_initdata(self, obj): + top = lltype.top_container(obj) return self.gcheaderbuilder.header_of_object(top)._obj def gct_zero_gc_pointers_inside(self, hop): diff --git a/rpython/rlib/rsocket.py b/rpython/rlib/rsocket.py --- a/rpython/rlib/rsocket.py +++ b/rpython/rlib/rsocket.py @@ -516,6 +516,10 @@ """RPython-level socket object. """ fd = _c.INVALID_SOCKET + family = 0 + type = 0 + proto = 0 + timeout = -1.0 def __init__(self, family=AF_INET, type=SOCK_STREAM, proto=0, fd=_c.INVALID_SOCKET): @@ -531,6 +535,11 @@ self.proto = proto self.timeout = defaults.timeout + @staticmethod + def empty_rsocket(): + rsocket = instantiate(RSocket) + return rsocket + @rgc.must_be_light_finalizer def __del__(self): fd = self.fd diff --git a/rpython/rlib/test/test_posix.py b/rpython/rlib/test/test_posix.py --- a/rpython/rlib/test/test_posix.py +++ b/rpython/rlib/test/test_posix.py @@ -1,4 +1,4 @@ -import py +import py.test from rpython.rtyper.test.tool import BaseRtypingTest from rpython.rtyper.annlowlevel import hlstr from rpython.tool.udir import udir @@ -58,7 +58,7 @@ assert res def test_times(self): - import py; py.test.skip("llinterp does not like tuple returns") + py.test.skip("llinterp does not like tuple returns") from rpython.rtyper.test.test_llinterp import interpret times = interpret(lambda: posix.times(), ()) assert isinstance(times, tuple) @@ -119,21 +119,21 @@ res = self.interpret(f,[fi,20]) assert self.ll_to_string(res) == text - if hasattr(os, 'chown'): - def test_chown(self): - f = open(path, "w") - f.write("xyz") - f.close() - def f(): - try: - posix.chown(path, os.getuid(), os.getgid()) - return 1 - except OSError: - return 2 + @py.test.mark.skipif("not hasattr(os, 'chown')") + def test_chown(self): + f = open(path, "w") + f.write("xyz") + f.close() + def f(): + try: + posix.chown(path, os.getuid(), os.getgid()) + return 1 + except OSError: 
+ return 2 - assert self.interpret(f, []) == 1 - os.unlink(path) - assert self.interpret(f, []) == 2 + assert self.interpret(f, []) == 1 + os.unlink(path) + assert self.interpret(f, []) == 2 def test_close(self): def f(fi): @@ -144,70 +144,70 @@ res = self.interpret(f,[fi]) py.test.raises( OSError, os.fstat, fi) - if hasattr(os, 'ftruncate'): - def test_ftruncate(self): - def f(fi,len): - os.ftruncate(fi,len) - fi = os.open(path,os.O_RDWR,0777) - func = self.interpret(f,[fi,6]) - assert os.fstat(fi).st_size == 6 + @py.test.mark.skipif("not hasattr(os, 'ftruncate')") + def test_ftruncate(self): + def f(fi,len): + os.ftruncate(fi,len) + fi = os.open(path,os.O_RDWR,0777) + func = self.interpret(f,[fi,6]) + assert os.fstat(fi).st_size == 6 - if hasattr(os, 'getuid'): - def test_getuid(self): - def f(): - return os.getuid() - assert self.interpret(f, []) == f() + @py.test.mark.skipif("not hasattr(os, 'getuid')") + def test_getuid(self): + def f(): + return os.getuid() + assert self.interpret(f, []) == f() - if hasattr(os, 'getgid'): - def test_getgid(self): - def f(): - return os.getgid() - assert self.interpret(f, []) == f() + @py.test.mark.skipif("not hasattr(os, 'getgid')") + def test_getgid(self): + def f(): + return os.getgid() + assert self.interpret(f, []) == f() - if hasattr(os, 'setuid'): - def test_os_setuid(self): - def f(): - os.setuid(os.getuid()) - return os.getuid() - assert self.interpret(f, []) == f() + @py.test.mark.skipif("not hasattr(os, 'setuid')") + def test_os_setuid(self): + def f(): + os.setuid(os.getuid()) + return os.getuid() + assert self.interpret(f, []) == f() - if hasattr(os, 'sysconf'): - def test_os_sysconf(self): - def f(i): - return os.sysconf(i) - assert self.interpret(f, [13]) == f(13) + @py.test.mark.skipif("not hasattr(os, 'sysconf')") + def test_os_sysconf(self): + def f(i): + return os.sysconf(i) + assert self.interpret(f, [13]) == f(13) - if hasattr(os, 'confstr'): - def test_os_confstr(self): - def f(i): - try: - return 
os.confstr(i) - except OSError: - return "oooops!!" - some_value = os.confstr_names.values()[-1] - res = self.interpret(f, [some_value]) - assert hlstr(res) == f(some_value) - res = self.interpret(f, [94781413]) - assert hlstr(res) == "oooops!!" + @py.test.mark.skipif("not hasattr(os, 'confstr')") + def test_os_confstr(self): + def f(i): + try: + return os.confstr(i) + except OSError: + return "oooops!!" + some_value = os.confstr_names.values()[-1] + res = self.interpret(f, [some_value]) + assert hlstr(res) == f(some_value) + res = self.interpret(f, [94781413]) + assert hlstr(res) == "oooops!!" - if hasattr(os, 'pathconf'): - def test_os_pathconf(self): - def f(i): - return os.pathconf("/tmp", i) - i = os.pathconf_names["PC_NAME_MAX"] - some_value = self.interpret(f, [i]) - assert some_value >= 31 + @py.test.mark.skipif("not hasattr(os, 'pathconf')") + def test_os_pathconf(self): + def f(i): + return os.pathconf("/tmp", i) + i = os.pathconf_names["PC_NAME_MAX"] + some_value = self.interpret(f, [i]) + assert some_value >= 31 - if hasattr(os, 'chroot'): - def test_os_chroot(self): - def f(): - try: - os.chroot('!@$#!#%$#^#@!#!$$#^') - except OSError: - return 1 - return 0 + @py.test.mark.skipif("not hasattr(os, 'chroot')") + def test_os_chroot(self): + def f(): + try: + os.chroot('!@$#!#%$#^#@!#!$$#^') + except OSError: + return 1 + return 0 - assert self.interpret(f, []) == 1 + assert self.interpret(f, []) == 1 def test_os_wstar(self): from rpython.rlib import rposix @@ -221,84 +221,84 @@ res = self.interpret(fun, [value]) assert res == fun(value) - if hasattr(os, 'getgroups'): - def test_getgroups(self): - def f(): - return os.getgroups() - ll_a = self.interpret(f, []) - assert self.ll_to_list(ll_a) == f() + @py.test.mark.skipif("not hasattr(os, 'getgroups')") + def test_getgroups(self): + def f(): + return os.getgroups() + ll_a = self.interpret(f, []) + assert self.ll_to_list(ll_a) == f() - if hasattr(os, 'setgroups'): - def test_setgroups(self): - def f(): - try: 
- os.setgroups(os.getgroups()) - except OSError: - pass - self.interpret(f, []) + @py.test.mark.skipif("not hasattr(os, 'setgroups')") + def test_setgroups(self): + def f(): + try: + os.setgroups(os.getgroups()) + except OSError: + pass + self.interpret(f, []) - if hasattr(os, 'initgroups'): - def test_initgroups(self): - def f(): - try: - os.initgroups('sUJJeumz', 4321) - except OSError: - return 1 - return 0 - res = self.interpret(f, []) - assert res == 1 + @py.test.mark.skipif("not hasattr(os, 'initgroups')") + def test_initgroups(self): + def f(): + try: + os.initgroups('sUJJeumz', 4321) + except OSError: + return 1 + return 0 + res = self.interpret(f, []) + assert res == 1 - if hasattr(os, 'tcgetpgrp'): - def test_tcgetpgrp(self): - def f(fd): - try: - return os.tcgetpgrp(fd) - except OSError: - return 42 - res = self.interpret(f, [9999]) - assert res == 42 + @py.test.mark.skipif("not hasattr(os, 'tcgetpgrp')") + def test_tcgetpgrp(self): + def f(fd): + try: + return os.tcgetpgrp(fd) + except OSError: + return 42 + res = self.interpret(f, [9999]) + assert res == 42 - if hasattr(os, 'tcsetpgrp'): - def test_tcsetpgrp(self): - def f(fd, pgrp): - try: - os.tcsetpgrp(fd, pgrp) - except OSError: - return 1 - return 0 - res = self.interpret(f, [9999, 1]) - assert res == 1 + @py.test.mark.skipif("not hasattr(os, 'tcsetpgrp')") + def test_tcsetpgrp(self): + def f(fd, pgrp): + try: + os.tcsetpgrp(fd, pgrp) + except OSError: + return 1 + return 0 + res = self.interpret(f, [9999, 1]) + assert res == 1 - if hasattr(os, 'getresuid'): - def test_getresuid(self): - def f(): - a, b, c = os.getresuid() - return a + b * 37 + c * 1291 - res = self.interpret(f, []) + @py.test.mark.skipif("not hasattr(os, 'getresuid')") + def test_getresuid(self): + def f(): a, b, c = os.getresuid() - assert res == a + b * 37 + c * 1291 + return a + b * 37 + c * 1291 + res = self.interpret(f, []) + a, b, c = os.getresuid() + assert res == a + b * 37 + c * 1291 - if hasattr(os, 'getresgid'): - def 
test_getresgid(self): - def f(): - a, b, c = os.getresgid() - return a + b * 37 + c * 1291 - res = self.interpret(f, []) + @py.test.mark.skipif("not hasattr(os, 'getresgid')") + def test_getresgid(self): + def f(): a, b, c = os.getresgid() - assert res == a + b * 37 + c * 1291 + return a + b * 37 + c * 1291 + res = self.interpret(f, []) + a, b, c = os.getresgid() + assert res == a + b * 37 + c * 1291 - if hasattr(os, 'setresuid'): - def test_setresuid(self): - def f(): - a, b, c = os.getresuid() - a = (a + 1) - 1 - os.setresuid(a, b, c) - self.interpret(f, []) + @py.test.mark.skipif("not hasattr(os, 'setresuid')") + def test_setresuid(self): + def f(): + a, b, c = os.getresuid() + a = (a + 1) - 1 + os.setresuid(a, b, c) + self.interpret(f, []) - if hasattr(os, 'setresgid'): - def test_setresgid(self): - def f(): - a, b, c = os.getresgid() - a = (a + 1) - 1 - os.setresgid(a, b, c) - self.interpret(f, []) + @py.test.mark.skipif("not hasattr(os, 'setresgid')") + def test_setresgid(self): + def f(): + a, b, c = os.getresgid() + a = (a + 1) - 1 + os.setresgid(a, b, c) + self.interpret(f, []) diff --git a/rpython/rtyper/test/test_rdict.py b/rpython/rtyper/test/test_rdict.py --- a/rpython/rtyper/test/test_rdict.py +++ b/rpython/rtyper/test/test_rdict.py @@ -1,5 +1,8 @@ from rpython.translator.translator import TranslationContext +from rpython.annotator import model as annmodel +from rpython.annotator.dictdef import DictKey, DictValue from rpython.rtyper.lltypesystem import lltype, rffi +from rpython.rtyper.lltypesystem.rstr import string_repr from rpython.rtyper import rint from rpython.rtyper.lltypesystem import rdict, rstr from rpython.rtyper.test.tool import BaseRtypingTest @@ -1182,118 +1185,88 @@ count_frees += 1 assert count_frees >= 3 -class TestStress: +N_KEYS = 400 - def test_stress(self): - from rpython.annotator.dictdef import DictKey, DictValue - from rpython.annotator import model as annmodel - dictrepr = rdict.DictRepr(None, rint.signed_repr, 
rint.signed_repr, - DictKey(None, annmodel.SomeInteger()), - DictValue(None, annmodel.SomeInteger())) - dictrepr.setup() - l_dict = rdict.ll_newdict(dictrepr.DICT) - referencetable = [None] * 400 - referencelength = 0 - value = 0 +def test_stress(): + dictrepr = rdict.DictRepr(None, rint.signed_repr, rint.signed_repr, + DictKey(None, annmodel.SomeInteger()), + DictValue(None, annmodel.SomeInteger())) + dictrepr.setup() + l_dict = rdict.ll_newdict(dictrepr.DICT) + reference = {} + value = 0 - def complete_check(): - for n, refvalue in zip(range(len(referencetable)), referencetable): - try: - gotvalue = rdict.ll_dict_getitem(l_dict, n) - except KeyError: - assert refvalue is None - else: - assert gotvalue == refvalue + def check_value(n): + try: + gotvalue = rdict.ll_dict_getitem(l_dict, n) + except KeyError: + n not in reference + else: + assert gotvalue == reference[n] - for x in not_really_random(): - n = int(x*100.0) # 0 <= x < 400 - op = repr(x)[-1] - if op <= '2' and referencetable[n] is not None: - rdict.ll_dict_delitem(l_dict, n) - referencetable[n] = None - referencelength -= 1 - elif op <= '6': - rdict.ll_dict_setitem(l_dict, n, value) - if referencetable[n] is None: - referencelength += 1 - referencetable[n] = value - value += 1 - else: - try: - gotvalue = rdict.ll_dict_getitem(l_dict, n) - except KeyError: - assert referencetable[n] is None - else: - assert gotvalue == referencetable[n] - if 1.38 <= x <= 1.39: - complete_check() - print 'current dict length:', referencelength - assert l_dict.num_items == referencelength - complete_check() + def complete_check(): + for n in range(N_KEYS): + check_value(n) - def test_stress_2(self): - yield self.stress_combination, True, False - yield self.stress_combination, False, True - yield self.stress_combination, False, False - yield self.stress_combination, True, True + for x in not_really_random(): + n = int(x*100.0) # 0 <= x < 400 + op = repr(x)[-1] + if op <= '2' and n in reference: + 
rdict.ll_dict_delitem(l_dict, n) + del reference[n] + elif op <= '6': + rdict.ll_dict_setitem(l_dict, n, value) + reference[n] = value + value += 1 + else: + check_value(n) + if 1.38 <= x <= 1.39: + complete_check() + print 'current dict length:', len(reference) + assert l_dict.num_items == len(reference) + complete_check() - def stress_combination(self, key_can_be_none, value_can_be_none): - from rpython.rtyper.lltypesystem.rstr import string_repr - from rpython.annotator.dictdef import DictKey, DictValue - from rpython.annotator import model as annmodel - print - print "Testing combination with can_be_None: keys %s, values %s" % ( - key_can_be_none, value_can_be_none) + at py.test.mark.parametrize('key_can_be_none', [True, False]) + at py.test.mark.parametrize('value_can_be_none', [True, False]) +def test_stress_2(key_can_be_none, value_can_be_none): + class PseudoRTyper: + cache_dummy_values = {} + dictrepr = rdict.DictRepr(PseudoRTyper(), string_repr, string_repr, + DictKey(None, annmodel.SomeString(key_can_be_none)), + DictValue(None, annmodel.SomeString(value_can_be_none))) + dictrepr.setup() + l_dict = rdict.ll_newdict(dictrepr.DICT) + reference = {} + values = not_really_random() + keytable = [string_repr.convert_const("foo%d" % n) for n in range(N_KEYS)] - class PseudoRTyper: - cache_dummy_values = {} - dictrepr = rdict.DictRepr(PseudoRTyper(), string_repr, string_repr, - DictKey(None, annmodel.SomeString(key_can_be_none)), - DictValue(None, annmodel.SomeString(value_can_be_none))) - dictrepr.setup() - print dictrepr.lowleveltype - for key, value in dictrepr.DICTENTRY._adtmeths.items(): - print ' %s = %s' % (key, value) - l_dict = rdict.ll_newdict(dictrepr.DICT) - referencetable = [None] * 400 - referencelength = 0 - values = not_really_random() - keytable = [string_repr.convert_const("foo%d" % n) - for n in range(len(referencetable))] + def check_value(n): + try: + gotvalue = rdict.ll_dict_getitem(l_dict, keytable[n]) + except KeyError: + assert n not in 
reference + else: + assert gotvalue == reference[n] - def complete_check(): - for n, refvalue in zip(range(len(referencetable)), referencetable): - try: - gotvalue = rdict.ll_dict_getitem(l_dict, keytable[n]) - except KeyError: - assert refvalue is None - else: - assert gotvalue == refvalue + def complete_check(): + for n in range(N_KEYS): + check_value(n) - for x in not_really_random(): - n = int(x*100.0) # 0 <= x < 400 - op = repr(x)[-1] - if op <= '2' and referencetable[n] is not None: - rdict.ll_dict_delitem(l_dict, keytable[n]) - referencetable[n] = None - referencelength -= 1 - elif op <= '6': - ll_value = string_repr.convert_const(str(values.next())) - rdict.ll_dict_setitem(l_dict, keytable[n], ll_value) - if referencetable[n] is None: - referencelength += 1 - referencetable[n] = ll_value - else: - try: - gotvalue = rdict.ll_dict_getitem(l_dict, keytable[n]) - except KeyError: - assert referencetable[n] is None - else: - assert gotvalue == referencetable[n] - if 1.38 <= x <= 1.39: - complete_check() - print 'current dict length:', referencelength - assert l_dict.num_items == referencelength - complete_check() - + for x in not_really_random(): + n = int(x*100.0) # 0 <= x < 400 + op = repr(x)[-1] + if op <= '2' and n in reference: + rdict.ll_dict_delitem(l_dict, keytable[n]) + del reference[n] + elif op <= '6': + ll_value = string_repr.convert_const(str(values.next())) + rdict.ll_dict_setitem(l_dict, keytable[n], ll_value) + reference[n] = ll_value + else: + check_value(n) + if 1.38 <= x <= 1.39: + complete_check() + print 'current dict length:', len(reference) + assert l_dict.num_items == len(reference) + complete_check() diff --git a/rpython/translator/c/node.py b/rpython/translator/c/node.py --- a/rpython/translator/c/node.py +++ b/rpython/translator/c/node.py @@ -546,7 +546,7 @@ if needs_gcheader(T): gct = self.db.gctransformer if gct is not None: - self.gc_init = gct.gcheader_initdata(self) + self.gc_init = gct.gcheader_initdata(self.obj) 
db.getcontainernode(self.gc_init) else: self.gc_init = None @@ -677,7 +677,7 @@ if needs_gcheader(T): gct = self.db.gctransformer if gct is not None: - self.gc_init = gct.gcheader_initdata(self) + self.gc_init = gct.gcheader_initdata(self.obj) db.getcontainernode(self.gc_init) else: self.gc_init = None From pypy.commits at gmail.com Sun Feb 28 09:24:46 2016 From: pypy.commits at gmail.com (mjacob) Date: Sun, 28 Feb 2016 06:24:46 -0800 (PST) Subject: [pypy-commit] pypy llvm-translation-backend: Kill get_gc_field_values() methods of various GCPolicy subclasses. Message-ID: <56d3032e.83561c0a.79e98.1f61@mx.google.com> Author: Manuel Jacob Branch: llvm-translation-backend Changeset: r82599:2486e9444a13 Date: 2016-02-28 13:54 +0100 http://bitbucket.org/pypy/pypy/changeset/2486e9444a13/ Log: Kill get_gc_field_values() methods of various GCPolicy subclasses. diff --git a/rpython/translator/llvm/genllvm.py b/rpython/translator/llvm/genllvm.py --- a/rpython/translator/llvm/genllvm.py +++ b/rpython/translator/llvm/genllvm.py @@ -511,7 +511,8 @@ if self.is_zero(value): return 'zeroinitializer' if self.needs_gc_header: - data = database.genllvm.gcpolicy.get_gc_field_values(value) + gctransformer = database.genllvm.gcpolicy.gctransformer + data = [gctransformer.gcheader_initdata(value)] data.extend(getattr(value, fn) for _, fn in self.fields[1:]) else: data = [getattr(value, fn) for _, fn in self.fields] @@ -1684,12 +1685,6 @@ def get_setup_ptr(self): return self.gctransformer.frameworkgc_setup_ptr.value - def get_gc_field_values(self, obj): - obj = lltype.top_container(obj) - needs_hash = self.gctransformer.get_prebuilt_hash(obj) is not None - hdr = self.gctransformer.gc_header_for(obj, needs_hash) - return [hdr._obj] - class RefcountGCPolicy(GCPolicy): class RttiType(FuncType): @@ -1707,10 +1702,6 @@ GCPolicy.__init__(self, genllvm) self.gctransformer = RefcountingGCTransformer(genllvm.translator) - def get_gc_field_values(self, obj): - obj = lltype.top_container(obj) - 
return [self.gctransformer.gcheaderbuilder.header_of_object(obj)._obj] - def extfunc(name, args, result, compilation_info): func_type = lltype.FuncType(args, result) From pypy.commits at gmail.com Sun Feb 28 09:24:42 2016 From: pypy.commits at gmail.com (mjacob) Date: Sun, 28 Feb 2016 06:24:42 -0800 (PST) Subject: [pypy-commit] pypy default: Pass object instead of defnode to GCPolicy's gcheader_initdata(). Message-ID: <56d3032a.45631c0a.f3f57.ffffec19@mx.google.com> Author: Manuel Jacob Branch: Changeset: r82597:c8c1757077e6 Date: 2016-02-28 13:49 +0100 http://bitbucket.org/pypy/pypy/changeset/c8c1757077e6/ Log: Pass object instead of defnode to GCPolicy's gcheader_initdata(). diff --git a/rpython/memory/gctransform/boehm.py b/rpython/memory/gctransform/boehm.py --- a/rpython/memory/gctransform/boehm.py +++ b/rpython/memory/gctransform/boehm.py @@ -156,9 +156,9 @@ resulttype = lltype.Signed) hop.genop('int_invert', [v_int], resultvar=hop.spaceop.result) - def gcheader_initdata(self, defnode): + def gcheader_initdata(self, obj): hdr = lltype.malloc(self.HDR, immortal=True) - hdr.hash = lltype.identityhash_nocache(defnode.obj._as_ptr()) + hdr.hash = lltype.identityhash_nocache(obj._as_ptr()) return hdr._obj diff --git a/rpython/memory/gctransform/framework.py b/rpython/memory/gctransform/framework.py --- a/rpython/memory/gctransform/framework.py +++ b/rpython/memory/gctransform/framework.py @@ -1479,8 +1479,8 @@ resulttype=llmemory.Address) llops.genop('raw_memclear', [v_adr, v_totalsize]) - def gcheader_initdata(self, defnode): - o = lltype.top_container(defnode.obj) + def gcheader_initdata(self, obj): + o = lltype.top_container(obj) needs_hash = self.get_prebuilt_hash(o) is not None hdr = self.gc_header_for(o, needs_hash) return hdr._obj diff --git a/rpython/memory/gctransform/refcounting.py b/rpython/memory/gctransform/refcounting.py --- a/rpython/memory/gctransform/refcounting.py +++ b/rpython/memory/gctransform/refcounting.py @@ -286,6 +286,6 @@ 
hop.genop("direct_call", [self.identityhash_ptr, v_adr], resultvar=hop.spaceop.result) - def gcheader_initdata(self, defnode): - top = lltype.top_container(defnode.obj) + def gcheader_initdata(self, obj): + top = lltype.top_container(obj) return self.gcheaderbuilder.header_of_object(top)._obj diff --git a/rpython/translator/c/node.py b/rpython/translator/c/node.py --- a/rpython/translator/c/node.py +++ b/rpython/translator/c/node.py @@ -546,7 +546,7 @@ if needs_gcheader(T): gct = self.db.gctransformer if gct is not None: - self.gc_init = gct.gcheader_initdata(self) + self.gc_init = gct.gcheader_initdata(self.obj) db.getcontainernode(self.gc_init) else: self.gc_init = None @@ -677,7 +677,7 @@ if needs_gcheader(T): gct = self.db.gctransformer if gct is not None: - self.gc_init = gct.gcheader_initdata(self) + self.gc_init = gct.gcheader_initdata(self.obj) db.getcontainernode(self.gc_init) else: self.gc_init = None From pypy.commits at gmail.com Sun Feb 28 12:42:59 2016 From: pypy.commits at gmail.com (rlamy) Date: Sun, 28 Feb 2016 09:42:59 -0800 (PST) Subject: [pypy-commit] pypy default: Create stateful hypothesis test for rdict. Message-ID: <56d331a3.0bdf1c0a.a5e6.242c@mx.google.com> Author: Ronan Lamy Branch: Changeset: r82600:9053529ba3ba Date: 2016-02-28 17:24 +0000 http://bitbucket.org/pypy/pypy/changeset/9053529ba3ba/ Log: Create stateful hypothesis test for rdict. 
This will eventually replace test_stress() and test_stress_2() diff --git a/rpython/rtyper/test/test_rdict.py b/rpython/rtyper/test/test_rdict.py --- a/rpython/rtyper/test/test_rdict.py +++ b/rpython/rtyper/test/test_rdict.py @@ -1270,3 +1270,72 @@ print 'current dict length:', len(reference) assert l_dict.num_items == len(reference) complete_check() + +from hypothesis.strategies import builds, sampled_from, binary, just + +class Action(object): + pass + +class SetItem(Action): + def __init__(self, key, value): + self.key = key + self.value = value + + def __repr__(self): + return 'SetItem(%r, %r)' % (self.key, self.value) + +class DelItem(Action): + def __init__(self, key): + self.key = key + + def __repr__(self): + return 'DelItem(%r)' % (self.key) + +class CompleteCheck(Action): + pass + +st_keys = binary() +st_values = binary() +st_setitem = builds(SetItem, st_keys, st_values) + +def st_delitem(keys): + return builds(DelItem, sampled_from(keys)) + +from hypothesis.stateful import GenericStateMachine + +_ll = string_repr.convert_const + +class StressTest(GenericStateMachine): + def __init__(self): + class PseudoRTyper: + cache_dummy_values = {} + dictrepr = rdict.DictRepr(PseudoRTyper(), string_repr, string_repr, + DictKey(None, annmodel.SomeString(False)), + DictValue(None, annmodel.SomeString(False))) + dictrepr.setup() + self.l_dict = rdict.ll_newdict(dictrepr.DICT) + self.reference = {} + + def steps(self): + return (st_setitem | st_delitem(self.reference) | just(CompleteCheck())) if self.reference else (st_setitem | just(CompleteCheck())) + + def execute_step(self, action): + if isinstance(action, SetItem): + ll_key = string_repr.convert_const(action.key) + ll_value = string_repr.convert_const(action.value) + rdict.ll_dict_setitem(self.l_dict, ll_key, ll_value) + self.reference[action.key] = action.value + assert rdict.ll_contains(self.l_dict, ll_key) + elif isinstance(action, DelItem): + ll_key = string_repr.convert_const(action.key) + 
rdict.ll_dict_delitem(self.l_dict, ll_key) + del self.reference[action.key] + assert not rdict.ll_contains(self.l_dict, ll_key) + elif isinstance(action, CompleteCheck): + assert self.l_dict.num_items == len(self.reference) + for key, value in self.reference.iteritems(): + assert rdict.ll_dict_getitem(self.l_dict, _ll(key)) == _ll(value) + + +TestHyp = StressTest.TestCase + From pypy.commits at gmail.com Sun Feb 28 12:43:01 2016 From: pypy.commits at gmail.com (rlamy) Date: Sun, 28 Feb 2016 09:43:01 -0800 (PST) Subject: [pypy-commit] pypy default: merge heads Message-ID: <56d331a5.455e1c0a.7b411.22bc@mx.google.com> Author: Ronan Lamy Branch: Changeset: r82601:fd41d042c08e Date: 2016-02-28 17:42 +0000 http://bitbucket.org/pypy/pypy/changeset/fd41d042c08e/ Log: merge heads diff --git a/rpython/memory/gctransform/boehm.py b/rpython/memory/gctransform/boehm.py --- a/rpython/memory/gctransform/boehm.py +++ b/rpython/memory/gctransform/boehm.py @@ -156,9 +156,9 @@ resulttype = lltype.Signed) hop.genop('int_invert', [v_int], resultvar=hop.spaceop.result) - def gcheader_initdata(self, defnode): + def gcheader_initdata(self, obj): hdr = lltype.malloc(self.HDR, immortal=True) - hdr.hash = lltype.identityhash_nocache(defnode.obj._as_ptr()) + hdr.hash = lltype.identityhash_nocache(obj._as_ptr()) return hdr._obj diff --git a/rpython/memory/gctransform/framework.py b/rpython/memory/gctransform/framework.py --- a/rpython/memory/gctransform/framework.py +++ b/rpython/memory/gctransform/framework.py @@ -1479,8 +1479,8 @@ resulttype=llmemory.Address) llops.genop('raw_memclear', [v_adr, v_totalsize]) - def gcheader_initdata(self, defnode): - o = lltype.top_container(defnode.obj) + def gcheader_initdata(self, obj): + o = lltype.top_container(obj) needs_hash = self.get_prebuilt_hash(o) is not None hdr = self.gc_header_for(o, needs_hash) return hdr._obj diff --git a/rpython/memory/gctransform/refcounting.py b/rpython/memory/gctransform/refcounting.py --- 
a/rpython/memory/gctransform/refcounting.py +++ b/rpython/memory/gctransform/refcounting.py @@ -286,6 +286,6 @@ hop.genop("direct_call", [self.identityhash_ptr, v_adr], resultvar=hop.spaceop.result) - def gcheader_initdata(self, defnode): - top = lltype.top_container(defnode.obj) + def gcheader_initdata(self, obj): + top = lltype.top_container(obj) return self.gcheaderbuilder.header_of_object(top)._obj diff --git a/rpython/translator/c/node.py b/rpython/translator/c/node.py --- a/rpython/translator/c/node.py +++ b/rpython/translator/c/node.py @@ -546,7 +546,7 @@ if needs_gcheader(T): gct = self.db.gctransformer if gct is not None: - self.gc_init = gct.gcheader_initdata(self) + self.gc_init = gct.gcheader_initdata(self.obj) db.getcontainernode(self.gc_init) else: self.gc_init = None @@ -677,7 +677,7 @@ if needs_gcheader(T): gct = self.db.gctransformer if gct is not None: - self.gc_init = gct.gcheader_initdata(self) + self.gc_init = gct.gcheader_initdata(self.obj) db.getcontainernode(self.gc_init) else: self.gc_init = None From pypy.commits at gmail.com Sun Feb 28 18:47:49 2016 From: pypy.commits at gmail.com (marky1991) Date: Sun, 28 Feb 2016 15:47:49 -0800 (PST) Subject: [pypy-commit] pypy fix_magic_reload: Add new parm to save_module_content_for_reload to allow explicit calls to save the entire current module.w_dict into initialdict. (Fixes test_magic) Message-ID: <56d38725.29cec20a.7641f.ffffd28b@mx.google.com> Author: Mark Young Branch: fix_magic_reload Changeset: r82602:486605b16836 Date: 2016-02-23 00:09 -0500 http://bitbucket.org/pypy/pypy/changeset/486605b16836/ Log: Add new parm to save_module_content_for_reload to allow explicit calls to save the entire current module.w_dict into initialdict. 
(Fixes test_magic) diff --git a/pypy/interpreter/mixedmodule.py b/pypy/interpreter/mixedmodule.py --- a/pypy/interpreter/mixedmodule.py +++ b/pypy/interpreter/mixedmodule.py @@ -57,7 +57,7 @@ if not self.lazy and self.w_initialdict is None: self.save_module_content_for_future_reload() - def save_module_content_for_future_reload(self): + def save_module_content_for_future_reload(self, force_override=False): # Because setdictvalue is unable to immediately load all attributes # (due to an importlib bootstrapping problem), this method needs to be # able to support saving the content of a module's dict without @@ -69,7 +69,7 @@ w_items = self.space.iteriterable(self.space.call_method(self.w_dict,'items')) for w_item in w_items: w_key, w_value = self.space.fixedview(w_item, expected_length=2) - if not self.space.is_true(self.space.contains(self.w_initialdict, w_key)): + if force_override or not self.space.is_true(self.space.contains(self.w_initialdict, w_key)): self.space.setitem(self.w_initialdict, w_key, w_value) else: self.w_initialdict = self.space.call_method(self.w_dict, 'copy') diff --git a/pypy/module/__pypy__/interp_magic.py b/pypy/module/__pypy__/interp_magic.py --- a/pypy/module/__pypy__/interp_magic.py +++ b/pypy/module/__pypy__/interp_magic.py @@ -130,7 +130,7 @@ @unwrap_spec(w_module=MixedModule) def save_module_content_for_future_reload(space, w_module): - w_module.save_module_content_for_future_reload() + w_module.save_module_content_for_future_reload(force_override=True) def set_code_callback(space, w_callable): cache = space.fromcache(CodeHookCache) From pypy.commits at gmail.com Sun Feb 28 18:47:56 2016 From: pypy.commits at gmail.com (marky1991) Date: Sun, 28 Feb 2016 15:47:56 -0800 (PST) Subject: [pypy-commit] pypy fix_magic_reload: Skip all that checking when we're just trying to replace the initialdict with the current dict. 
Message-ID: <56d3872c.28acc20a.20e03.ffffe149@mx.google.com> Author: Mark Young Branch: fix_magic_reload Changeset: r82605:f573ee8d305a Date: 2016-02-28 18:42 -0500 http://bitbucket.org/pypy/pypy/changeset/f573ee8d305a/ Log: Skip all that checking when we're just trying to replace the initialdict with the current dict. diff --git a/pypy/interpreter/mixedmodule.py b/pypy/interpreter/mixedmodule.py --- a/pypy/interpreter/mixedmodule.py +++ b/pypy/interpreter/mixedmodule.py @@ -65,14 +65,14 @@ # properly, when updating the dict, we must be careful to never # overwrite the value of a key already in w_initialdict. (So as to avoid # overriding the builtin value with a user-provided value) - if not self.space.is_none(self.w_initialdict): + if self.space.is_none(self.w_initialdict) or save_all: + self.w_initialdict = self.space.call_method(self.w_dict, 'copy') + else: w_items = self.space.call_method(self.w_dict, 'items') for w_item in self.space.iteriterable(w_items): w_key, w_value = self.space.fixedview(w_item, expected_length=2) - if save_all or not self.space.contains_w(self.w_initialdict, w_key): + if not self.space.contains_w(self.w_initialdict, w_key): self.space.setitem(self.w_initialdict, w_key, w_value) - else: - self.w_initialdict = self.space.call_method(self.w_dict, 'copy') def get_applevel_name(cls): """ NOT_RPYTHON """ From pypy.commits at gmail.com Sun Feb 28 18:47:51 2016 From: pypy.commits at gmail.com (marky1991) Date: Sun, 28 Feb 2016 15:47:51 -0800 (PST) Subject: [pypy-commit] pypy fix_magic_reload: Rename the parameter to something (hopefully) more clear. Message-ID: <56d38727.86e31c0a.2d41d.ffff8ceb@mx.google.com> Author: Mark Young Branch: fix_magic_reload Changeset: r82603:a63321cb4f25 Date: 2016-02-27 09:08 -0500 http://bitbucket.org/pypy/pypy/changeset/a63321cb4f25/ Log: Rename the parameter to something (hopefully) more clear. 
diff --git a/pypy/interpreter/mixedmodule.py b/pypy/interpreter/mixedmodule.py --- a/pypy/interpreter/mixedmodule.py +++ b/pypy/interpreter/mixedmodule.py @@ -57,7 +57,7 @@ if not self.lazy and self.w_initialdict is None: self.save_module_content_for_future_reload() - def save_module_content_for_future_reload(self, force_override=False): + def save_module_content_for_future_reload(self, save_all=False): # Because setdictvalue is unable to immediately load all attributes # (due to an importlib bootstrapping problem), this method needs to be # able to support saving the content of a module's dict without @@ -69,7 +69,9 @@ w_items = self.space.iteriterable(self.space.call_method(self.w_dict,'items')) for w_item in w_items: w_key, w_value = self.space.fixedview(w_item, expected_length=2) - if force_override or not self.space.is_true(self.space.contains(self.w_initialdict, w_key)): + if save_all or not self.space.is_true( + self.space.contains(self.w_initialdict, + w_key)): self.space.setitem(self.w_initialdict, w_key, w_value) else: self.w_initialdict = self.space.call_method(self.w_dict, 'copy') diff --git a/pypy/module/__pypy__/interp_magic.py b/pypy/module/__pypy__/interp_magic.py --- a/pypy/module/__pypy__/interp_magic.py +++ b/pypy/module/__pypy__/interp_magic.py @@ -130,7 +130,7 @@ @unwrap_spec(w_module=MixedModule) def save_module_content_for_future_reload(space, w_module): - w_module.save_module_content_for_future_reload(force_override=True) + w_module.save_module_content_for_future_reload(save_all=True) def set_code_callback(space, w_callable): cache = space.fromcache(CodeHookCache) From pypy.commits at gmail.com Sun Feb 28 18:47:57 2016 From: pypy.commits at gmail.com (mjacob) Date: Sun, 28 Feb 2016 15:47:57 -0800 (PST) Subject: [pypy-commit] pypy py3.3: Merged in marky1991/pypy_new/fix_magic_reload (pull request #405) Message-ID: <56d3872d.c13fc20a.046a.ffffc9f4@mx.google.com> Author: Manuel Jacob Branch: py3.3 Changeset: r82606:e0e2bb1b3050 Date: 2016-02-29 
00:46 +0100 http://bitbucket.org/pypy/pypy/changeset/e0e2bb1b3050/ Log: Merged in marky1991/pypy_new/fix_magic_reload (pull request #405) 3.3: Fix_magic_reload diff --git a/pypy/interpreter/mixedmodule.py b/pypy/interpreter/mixedmodule.py --- a/pypy/interpreter/mixedmodule.py +++ b/pypy/interpreter/mixedmodule.py @@ -57,7 +57,7 @@ if not self.lazy and self.w_initialdict is None: self.save_module_content_for_future_reload() - def save_module_content_for_future_reload(self): + def save_module_content_for_future_reload(self, save_all=False): # Because setdictvalue is unable to immediately load all attributes # (due to an importlib bootstrapping problem), this method needs to be # able to support saving the content of a module's dict without @@ -65,14 +65,14 @@ # properly, when updating the dict, we must be careful to never # overwrite the value of a key already in w_initialdict. (So as to avoid # overriding the builtin value with a user-provided value) - if not self.space.is_none(self.w_initialdict): + if self.space.is_none(self.w_initialdict) or save_all: + self.w_initialdict = self.space.call_method(self.w_dict, 'copy') + else: w_items = self.space.call_method(self.w_dict, 'items') for w_item in self.space.iteriterable(w_items): w_key, w_value = self.space.fixedview(w_item, expected_length=2) if not self.space.contains_w(self.w_initialdict, w_key): self.space.setitem(self.w_initialdict, w_key, w_value) - else: - self.w_initialdict = self.space.call_method(self.w_dict, 'copy') def get_applevel_name(cls): """ NOT_RPYTHON """ diff --git a/pypy/module/__pypy__/interp_magic.py b/pypy/module/__pypy__/interp_magic.py --- a/pypy/module/__pypy__/interp_magic.py +++ b/pypy/module/__pypy__/interp_magic.py @@ -130,7 +130,7 @@ @unwrap_spec(w_module=MixedModule) def save_module_content_for_future_reload(space, w_module): - w_module.save_module_content_for_future_reload() + w_module.save_module_content_for_future_reload(save_all=True) def set_code_callback(space, w_callable): 
cache = space.fromcache(CodeHookCache) From pypy.commits at gmail.com Sun Feb 28 18:47:54 2016 From: pypy.commits at gmail.com (marky1991) Date: Sun, 28 Feb 2016 15:47:54 -0800 (PST) Subject: [pypy-commit] pypy fix_magic_reload: Merge py3.3 in Message-ID: <56d3872a.016b1c0a.5e28e.3411@mx.google.com> Author: Mark Young Branch: fix_magic_reload Changeset: r82604:8b81a227cb96 Date: 2016-02-27 19:32 -0500 http://bitbucket.org/pypy/pypy/changeset/8b81a227cb96/ Log: Merge py3.3 in diff too long, truncating to 2000 out of 16442 lines diff --git a/.hgignore b/.hgignore --- a/.hgignore +++ b/.hgignore @@ -22,6 +22,7 @@ ^pypy/module/cpyext/test/.+\.obj$ ^pypy/module/cpyext/test/.+\.manifest$ ^pypy/module/test_lib_pypy/ctypes_tests/.+\.o$ +^pypy/module/test_lib_pypy/ctypes_tests/_ctypes_test\.o$ ^pypy/module/cppyy/src/.+\.o$ ^pypy/module/cppyy/bench/.+\.so$ ^pypy/module/cppyy/bench/.+\.root$ @@ -35,7 +36,6 @@ ^pypy/module/test_lib_pypy/cffi_tests/__pycache__.+$ ^pypy/doc/.+\.html$ ^pypy/doc/config/.+\.rst$ -^pypy/doc/basicblock\.asc$ ^pypy/doc/.+\.svninfo$ ^rpython/translator/c/src/libffi_msvc/.+\.obj$ ^rpython/translator/c/src/libffi_msvc/.+\.dll$ @@ -45,53 +45,33 @@ ^rpython/translator/c/src/cjkcodecs/.+\.obj$ ^rpython/translator/c/src/stacklet/.+\.o$ ^rpython/translator/c/src/.+\.o$ -^rpython/translator/jvm/\.project$ -^rpython/translator/jvm/\.classpath$ -^rpython/translator/jvm/eclipse-bin$ -^rpython/translator/jvm/src/pypy/.+\.class$ -^rpython/translator/benchmark/docutils$ -^rpython/translator/benchmark/templess$ -^rpython/translator/benchmark/gadfly$ -^rpython/translator/benchmark/mako$ -^rpython/translator/benchmark/bench-custom\.benchmark_result$ -^rpython/translator/benchmark/shootout_benchmarks$ +^rpython/translator/llvm/.+\.so$ ^rpython/translator/goal/target.+-c$ ^rpython/translator/goal/.+\.exe$ ^rpython/translator/goal/.+\.dll$ ^pypy/goal/pypy-translation-snapshot$ ^pypy/goal/pypy-c -^pypy/goal/pypy-jvm -^pypy/goal/pypy-jvm.jar ^pypy/goal/.+\.exe$ 
^pypy/goal/.+\.dll$ ^pypy/goal/.+\.lib$ ^pypy/_cache$ -^pypy/doc/statistic/.+\.html$ -^pypy/doc/statistic/.+\.eps$ -^pypy/doc/statistic/.+\.pdf$ -^rpython/translator/cli/src/pypylib\.dll$ -^rpython/translator/cli/src/query\.exe$ -^rpython/translator/cli/src/main\.exe$ +^lib-python/2.7/lib2to3/.+\.pickle$ ^lib_pypy/__pycache__$ ^lib_pypy/ctypes_config_cache/_.+_cache\.py$ ^lib_pypy/ctypes_config_cache/_.+_.+_\.py$ ^lib_pypy/_libmpdec/.+.o$ -^rpython/translator/cli/query-descriptions$ ^pypy/doc/discussion/.+\.html$ ^include/.+\.h$ ^include/.+\.inl$ ^pypy/doc/_build/.*$ ^pypy/doc/config/.+\.html$ ^pypy/doc/config/style\.css$ -^pypy/doc/jit/.+\.html$ -^pypy/doc/jit/style\.css$ ^pypy/doc/image/lattice1\.png$ ^pypy/doc/image/lattice2\.png$ ^pypy/doc/image/lattice3\.png$ ^pypy/doc/image/stackless_informal\.png$ ^pypy/doc/image/parsing_example.+\.png$ ^rpython/doc/_build/.*$ -^pypy/module/test_lib_pypy/ctypes_tests/_ctypes_test\.o$ ^compiled ^.git/ ^release/ diff --git a/LICENSE b/LICENSE --- a/LICENSE +++ b/LICENSE @@ -41,29 +41,29 @@ Amaury Forgeot d'Arc Antonio Cuni Samuele Pedroni + Matti Picus Alex Gaynor Brian Kearns - Matti Picus Philip Jenvey Michael Hudson + Ronan Lamy David Schneider + Manuel Jacob Holger Krekel Christian Tismer Hakan Ardo - Manuel Jacob - Ronan Lamy Benjamin Peterson + Richard Plangger Anders Chrigstrom Eric van Riet Paap Wim Lavrijsen - Richard Plangger Richard Emslie Alexander Schremmer Dan Villiom Podlaski Christiansen + Remi Meier Lukas Diekmann Sven Hager Anders Lehmann - Remi Meier Aurelien Campeas Niklaus Haldimann Camillo Bruni @@ -72,8 +72,8 @@ Romain Guillebert Leonardo Santagada Seo Sanghyeon + Ronny Pfannschmidt Justin Peel - Ronny Pfannschmidt David Edelsohn Anders Hammarquist Jakub Gustak @@ -95,6 +95,7 @@ Tyler Wade Michael Foord Stephan Diehl + Vincent Legoll Stefan Schwarzer Valentino Volonghi Tomek Meka @@ -105,9 +106,9 @@ Jean-Paul Calderone Timo Paulssen Squeaky + Marius Gedminas Alexandre Fayolle Simon Burton - Marius 
Gedminas Martin Matusiak Konstantin Lopuhin Wenzhu Man @@ -116,16 +117,20 @@ Ivan Sichmann Freitas Greg Price Dario Bertini + Stefano Rivera Mark Pearse Simon Cross Andreas Stührk - Stefano Rivera + Edd Barrett Jean-Philippe St. Pierre Guido van Rossum Pavel Vinogradov + Jeremy Thurgood Paweł Piotr Przeradowski + Spenser Bauman Paul deGrandis Ilya Osadchiy + marky1991 Tobias Oberstein Adrian Kuhn Boris Feigin @@ -134,14 +139,12 @@ Georg Brandl Bert Freudenberg Stian Andreassen - Edd Barrett + Tobias Pape Wanja Saatkamp Gerald Klix Mike Blume - Tobias Pape Oscar Nierstrasz Stefan H. Muller - Jeremy Thurgood Rami Chowdhury Eugene Oden Henry Mason @@ -153,6 +156,8 @@ Lukas Renggli Guenter Jantzen Ned Batchelder + Tim Felgentreff + Anton Gulenko Amit Regmi Ben Young Nicolas Chauvat @@ -162,12 +167,12 @@ Nicholas Riley Jason Chu Igor Trindade Oliveira - Tim Felgentreff + Yichao Yu Rocco Moretti Gintautas Miliauskas Michael Twomey Lucian Branescu Mihaila - Yichao Yu + Devin Jeanpierre Gabriel Lavoie Olivier Dormond Jared Grubb @@ -191,33 +196,33 @@ Stanislaw Halik Mikael Schönenberg Berkin Ilbeyi - Elmo M?ntynen + Elmo Mäntynen + Faye Zhao Jonathan David Riehl Anders Qvist Corbin Simpson Chirag Jadwani Beatrice During Alex Perry - Vincent Legoll + Vaibhav Sood Alan McIntyre - Spenser Bauman + William Leslie Alexander Sedov Attila Gobi + Jasper.Schulz Christopher Pope - Devin Jeanpierre - Vaibhav Sood Christian Tismer Marc Abramowitz Dan Stromberg Arjun Naik Valentina Mukhamedzhanova Stefano Parmesan + Mark Young Alexis Daboville Jens-Uwe Mager Carl Meyer Karl Ramm Pieter Zieschang - Anton Gulenko Gabriel Lukas Vacek Andrew Dalke @@ -225,6 +230,7 @@ Jakub Stasiak Nathan Taylor Vladimir Kryachko + Omer Katz Jacek Generowicz Alejandro J. 
Cura Jacob Oscarson @@ -239,6 +245,7 @@ Lars Wassermann Philipp Rustemeuer Henrik Vendelbo + Richard Lancaster Dan Buch Miguel de Val Borro Artur Lisiecki @@ -250,18 +257,18 @@ Tomo Cocoa Kim Jin Su Toni Mattis + Amber Brown Lucas Stadler Julian Berman Markus Holtermann roberto at goyle Yury V. Zaytsev Anna Katrina Dominguez - William Leslie Bobby Impollonia - Faye Zhao timo at eistee.fritz.box Andrew Thompson Yusei Tahara + Aaron Tubbs Ben Darnell Roberto De Ioris Juan Francisco Cantero Hurtado @@ -273,6 +280,7 @@ Christopher Armstrong Michael Hudson-Doyle Anders Sigfridsson + Nikolay Zinov Yasir Suhail Jason Michalski rafalgalczynski at gmail.com @@ -282,6 +290,7 @@ Gustavo Niemeyer Stephan Busemann Rafał Gałczyński + Matt Bogosian Christian Muirhead Berker Peksag James Lan @@ -316,9 +325,9 @@ Stefan Marr jiaaro Mads Kiilerich - Richard Lancaster opassembler.py Antony Lee + Jason Madden Yaroslav Fedevych Jim Hunziker Markus Unterwaditzer @@ -327,6 +336,7 @@ squeaky Zearin soareschen + Jonas Pfannschmidt Kurt Griffiths Mike Bayer Matthew Miller diff --git a/dotviewer/drawgraph.py b/dotviewer/drawgraph.py --- a/dotviewer/drawgraph.py +++ b/dotviewer/drawgraph.py @@ -14,12 +14,661 @@ FONT = os.path.join(this_dir, 'font', 'DroidSans.ttf') FIXEDFONT = os.path.join(this_dir, 'font', 'DroidSansMono.ttf') COLOR = { - 'black': (0,0,0), - 'white': (255,255,255), - 'red': (255,0,0), - 'green': (0,255,0), - 'blue': (0,0,255), - 'yellow': (255,255,0), + 'aliceblue': (240, 248, 255), + 'antiquewhite': (250, 235, 215), + 'antiquewhite1': (255, 239, 219), + 'antiquewhite2': (238, 223, 204), + 'antiquewhite3': (205, 192, 176), + 'antiquewhite4': (139, 131, 120), + 'aquamarine': (127, 255, 212), + 'aquamarine1': (127, 255, 212), + 'aquamarine2': (118, 238, 198), + 'aquamarine3': (102, 205, 170), + 'aquamarine4': (69, 139, 116), + 'azure': (240, 255, 255), + 'azure1': (240, 255, 255), + 'azure2': (224, 238, 238), + 'azure3': (193, 205, 205), + 'azure4': (131, 139, 139), + 'beige': 
(245, 245, 220), + 'bisque': (255, 228, 196), + 'bisque1': (255, 228, 196), + 'bisque2': (238, 213, 183), + 'bisque3': (205, 183, 158), + 'bisque4': (139, 125, 107), + 'black': (0, 0, 0), + 'blanchedalmond': (255, 235, 205), + 'blue': (0, 0, 255), + 'blue1': (0, 0, 255), + 'blue2': (0, 0, 238), + 'blue3': (0, 0, 205), + 'blue4': (0, 0, 139), + 'blueviolet': (138, 43, 226), + 'brown': (165, 42, 42), + 'brown1': (255, 64, 64), + 'brown2': (238, 59, 59), + 'brown3': (205, 51, 51), + 'brown4': (139, 35, 35), + 'burlywood': (222, 184, 135), + 'burlywood1': (255, 211, 155), + 'burlywood2': (238, 197, 145), + 'burlywood3': (205, 170, 125), + 'burlywood4': (139, 115, 85), + 'cadetblue': (95, 158, 160), + 'cadetblue1': (152, 245, 255), + 'cadetblue2': (142, 229, 238), + 'cadetblue3': (122, 197, 205), + 'cadetblue4': (83, 134, 139), + 'chartreuse': (127, 255, 0), + 'chartreuse1': (127, 255, 0), + 'chartreuse2': (118, 238, 0), + 'chartreuse3': (102, 205, 0), + 'chartreuse4': (69, 139, 0), + 'chocolate': (210, 105, 30), + 'chocolate1': (255, 127, 36), + 'chocolate2': (238, 118, 33), + 'chocolate3': (205, 102, 29), + 'chocolate4': (139, 69, 19), + 'coral': (255, 127, 80), + 'coral1': (255, 114, 86), + 'coral2': (238, 106, 80), + 'coral3': (205, 91, 69), + 'coral4': (139, 62, 47), + 'cornflowerblue': (100, 149, 237), + 'cornsilk': (255, 248, 220), + 'cornsilk1': (255, 248, 220), + 'cornsilk2': (238, 232, 205), + 'cornsilk3': (205, 200, 177), + 'cornsilk4': (139, 136, 120), + 'crimson': (220, 20, 60), + 'cyan': (0, 255, 255), + 'cyan1': (0, 255, 255), + 'cyan2': (0, 238, 238), + 'cyan3': (0, 205, 205), + 'cyan4': (0, 139, 139), + 'darkgoldenrod': (184, 134, 11), + 'darkgoldenrod1': (255, 185, 15), + 'darkgoldenrod2': (238, 173, 14), + 'darkgoldenrod3': (205, 149, 12), + 'darkgoldenrod4': (139, 101, 8), + 'darkgreen': (0, 100, 0), + 'darkkhaki': (189, 183, 107), + 'darkolivegreen': (85, 107, 47), + 'darkolivegreen1': (202, 255, 112), + 'darkolivegreen2': (188, 238, 104), + 
'darkolivegreen3': (162, 205, 90), + 'darkolivegreen4': (110, 139, 61), + 'darkorange': (255, 140, 0), + 'darkorange1': (255, 127, 0), + 'darkorange2': (238, 118, 0), + 'darkorange3': (205, 102, 0), + 'darkorange4': (139, 69, 0), + 'darkorchid': (153, 50, 204), + 'darkorchid1': (191, 62, 255), + 'darkorchid2': (178, 58, 238), + 'darkorchid3': (154, 50, 205), + 'darkorchid4': (104, 34, 139), + 'darksalmon': (233, 150, 122), + 'darkseagreen': (143, 188, 143), + 'darkseagreen1': (193, 255, 193), + 'darkseagreen2': (180, 238, 180), + 'darkseagreen3': (155, 205, 155), + 'darkseagreen4': (105, 139, 105), + 'darkslateblue': (72, 61, 139), + 'darkslategray': (47, 79, 79), + 'darkslategray1': (151, 255, 255), + 'darkslategray2': (141, 238, 238), + 'darkslategray3': (121, 205, 205), + 'darkslategray4': (82, 139, 139), + 'darkslategrey': (47, 79, 79), + 'darkturquoise': (0, 206, 209), + 'darkviolet': (148, 0, 211), + 'deeppink': (255, 20, 147), + 'deeppink1': (255, 20, 147), + 'deeppink2': (238, 18, 137), + 'deeppink3': (205, 16, 118), + 'deeppink4': (139, 10, 80), + 'deepskyblue': (0, 191, 255), + 'deepskyblue1': (0, 191, 255), + 'deepskyblue2': (0, 178, 238), + 'deepskyblue3': (0, 154, 205), + 'deepskyblue4': (0, 104, 139), + 'dimgray': (105, 105, 105), + 'dimgrey': (105, 105, 105), + 'dodgerblue': (30, 144, 255), + 'dodgerblue1': (30, 144, 255), + 'dodgerblue2': (28, 134, 238), + 'dodgerblue3': (24, 116, 205), + 'dodgerblue4': (16, 78, 139), + 'firebrick': (178, 34, 34), + 'firebrick1': (255, 48, 48), + 'firebrick2': (238, 44, 44), + 'firebrick3': (205, 38, 38), + 'firebrick4': (139, 26, 26), + 'floralwhite': (255, 250, 240), + 'forestgreen': (34, 139, 34), + 'gainsboro': (220, 220, 220), + 'ghostwhite': (248, 248, 255), + 'gold': (255, 215, 0), + 'gold1': (255, 215, 0), + 'gold2': (238, 201, 0), + 'gold3': (205, 173, 0), + 'gold4': (139, 117, 0), + 'goldenrod': (218, 165, 32), + 'goldenrod1': (255, 193, 37), + 'goldenrod2': (238, 180, 34), + 'goldenrod3': (205, 155, 29), 
+ 'goldenrod4': (139, 105, 20), + 'gray': (192, 192, 192), + 'gray0': (0, 0, 0), + 'gray1': (3, 3, 3), + 'gray10': (26, 26, 26), + 'gray100': (255, 255, 255), + 'gray11': (28, 28, 28), + 'gray12': (31, 31, 31), + 'gray13': (33, 33, 33), + 'gray14': (36, 36, 36), + 'gray15': (38, 38, 38), + 'gray16': (41, 41, 41), + 'gray17': (43, 43, 43), + 'gray18': (46, 46, 46), + 'gray19': (48, 48, 48), + 'gray2': (5, 5, 5), + 'gray20': (51, 51, 51), + 'gray21': (54, 54, 54), + 'gray22': (56, 56, 56), + 'gray23': (59, 59, 59), + 'gray24': (61, 61, 61), + 'gray25': (64, 64, 64), + 'gray26': (66, 66, 66), + 'gray27': (69, 69, 69), + 'gray28': (71, 71, 71), + 'gray29': (74, 74, 74), + 'gray3': (8, 8, 8), + 'gray30': (77, 77, 77), + 'gray31': (79, 79, 79), + 'gray32': (82, 82, 82), + 'gray33': (84, 84, 84), + 'gray34': (87, 87, 87), + 'gray35': (89, 89, 89), + 'gray36': (92, 92, 92), + 'gray37': (94, 94, 94), + 'gray38': (97, 97, 97), + 'gray39': (99, 99, 99), + 'gray4': (10, 10, 10), + 'gray40': (102, 102, 102), + 'gray41': (105, 105, 105), + 'gray42': (107, 107, 107), + 'gray43': (110, 110, 110), + 'gray44': (112, 112, 112), + 'gray45': (115, 115, 115), + 'gray46': (117, 117, 117), + 'gray47': (120, 120, 120), + 'gray48': (122, 122, 122), + 'gray49': (125, 125, 125), + 'gray5': (13, 13, 13), + 'gray50': (127, 127, 127), + 'gray51': (130, 130, 130), + 'gray52': (133, 133, 133), + 'gray53': (135, 135, 135), + 'gray54': (138, 138, 138), + 'gray55': (140, 140, 140), + 'gray56': (143, 143, 143), + 'gray57': (145, 145, 145), + 'gray58': (148, 148, 148), + 'gray59': (150, 150, 150), + 'gray6': (15, 15, 15), + 'gray60': (153, 153, 153), + 'gray61': (156, 156, 156), + 'gray62': (158, 158, 158), + 'gray63': (161, 161, 161), + 'gray64': (163, 163, 163), + 'gray65': (166, 166, 166), + 'gray66': (168, 168, 168), + 'gray67': (171, 171, 171), + 'gray68': (173, 173, 173), + 'gray69': (176, 176, 176), + 'gray7': (18, 18, 18), + 'gray70': (179, 179, 179), + 'gray71': (181, 181, 181), + 'gray72': 
(184, 184, 184), + 'gray73': (186, 186, 186), + 'gray74': (189, 189, 189), + 'gray75': (191, 191, 191), + 'gray76': (194, 194, 194), + 'gray77': (196, 196, 196), + 'gray78': (199, 199, 199), + 'gray79': (201, 201, 201), + 'gray8': (20, 20, 20), + 'gray80': (204, 204, 204), + 'gray81': (207, 207, 207), + 'gray82': (209, 209, 209), + 'gray83': (212, 212, 212), + 'gray84': (214, 214, 214), + 'gray85': (217, 217, 217), + 'gray86': (219, 219, 219), + 'gray87': (222, 222, 222), + 'gray88': (224, 224, 224), + 'gray89': (227, 227, 227), + 'gray9': (23, 23, 23), + 'gray90': (229, 229, 229), + 'gray91': (232, 232, 232), + 'gray92': (235, 235, 235), + 'gray93': (237, 237, 237), + 'gray94': (240, 240, 240), + 'gray95': (242, 242, 242), + 'gray96': (245, 245, 245), + 'gray97': (247, 247, 247), + 'gray98': (250, 250, 250), + 'gray99': (252, 252, 252), + 'green': (0, 255, 0), + 'green1': (0, 255, 0), + 'green2': (0, 238, 0), + 'green3': (0, 205, 0), + 'green4': (0, 139, 0), + 'greenyellow': (173, 255, 47), + 'grey': (192, 192, 192), + 'grey0': (0, 0, 0), + 'grey1': (3, 3, 3), + 'grey10': (26, 26, 26), + 'grey100': (255, 255, 255), + 'grey11': (28, 28, 28), + 'grey12': (31, 31, 31), + 'grey13': (33, 33, 33), + 'grey14': (36, 36, 36), + 'grey15': (38, 38, 38), + 'grey16': (41, 41, 41), + 'grey17': (43, 43, 43), + 'grey18': (46, 46, 46), + 'grey19': (48, 48, 48), + 'grey2': (5, 5, 5), + 'grey20': (51, 51, 51), + 'grey21': (54, 54, 54), + 'grey22': (56, 56, 56), + 'grey23': (59, 59, 59), + 'grey24': (61, 61, 61), + 'grey25': (64, 64, 64), + 'grey26': (66, 66, 66), + 'grey27': (69, 69, 69), + 'grey28': (71, 71, 71), + 'grey29': (74, 74, 74), + 'grey3': (8, 8, 8), + 'grey30': (77, 77, 77), + 'grey31': (79, 79, 79), + 'grey32': (82, 82, 82), + 'grey33': (84, 84, 84), + 'grey34': (87, 87, 87), + 'grey35': (89, 89, 89), + 'grey36': (92, 92, 92), + 'grey37': (94, 94, 94), + 'grey38': (97, 97, 97), + 'grey39': (99, 99, 99), + 'grey4': (10, 10, 10), + 'grey40': (102, 102, 102), + 'grey41': 
(105, 105, 105), + 'grey42': (107, 107, 107), + 'grey43': (110, 110, 110), + 'grey44': (112, 112, 112), + 'grey45': (115, 115, 115), + 'grey46': (117, 117, 117), + 'grey47': (120, 120, 120), + 'grey48': (122, 122, 122), + 'grey49': (125, 125, 125), + 'grey5': (13, 13, 13), + 'grey50': (127, 127, 127), + 'grey51': (130, 130, 130), + 'grey52': (133, 133, 133), + 'grey53': (135, 135, 135), + 'grey54': (138, 138, 138), + 'grey55': (140, 140, 140), + 'grey56': (143, 143, 143), + 'grey57': (145, 145, 145), + 'grey58': (148, 148, 148), + 'grey59': (150, 150, 150), + 'grey6': (15, 15, 15), + 'grey60': (153, 153, 153), + 'grey61': (156, 156, 156), + 'grey62': (158, 158, 158), + 'grey63': (161, 161, 161), + 'grey64': (163, 163, 163), + 'grey65': (166, 166, 166), + 'grey66': (168, 168, 168), + 'grey67': (171, 171, 171), + 'grey68': (173, 173, 173), + 'grey69': (176, 176, 176), + 'grey7': (18, 18, 18), + 'grey70': (179, 179, 179), + 'grey71': (181, 181, 181), + 'grey72': (184, 184, 184), + 'grey73': (186, 186, 186), + 'grey74': (189, 189, 189), + 'grey75': (191, 191, 191), + 'grey76': (194, 194, 194), + 'grey77': (196, 196, 196), + 'grey78': (199, 199, 199), + 'grey79': (201, 201, 201), + 'grey8': (20, 20, 20), + 'grey80': (204, 204, 204), + 'grey81': (207, 207, 207), + 'grey82': (209, 209, 209), + 'grey83': (212, 212, 212), + 'grey84': (214, 214, 214), + 'grey85': (217, 217, 217), + 'grey86': (219, 219, 219), + 'grey87': (222, 222, 222), + 'grey88': (224, 224, 224), + 'grey89': (227, 227, 227), + 'grey9': (23, 23, 23), + 'grey90': (229, 229, 229), + 'grey91': (232, 232, 232), + 'grey92': (235, 235, 235), + 'grey93': (237, 237, 237), + 'grey94': (240, 240, 240), + 'grey95': (242, 242, 242), + 'grey96': (245, 245, 245), + 'grey97': (247, 247, 247), + 'grey98': (250, 250, 250), + 'grey99': (252, 252, 252), + 'honeydew': (240, 255, 240), + 'honeydew1': (240, 255, 240), + 'honeydew2': (224, 238, 224), + 'honeydew3': (193, 205, 193), + 'honeydew4': (131, 139, 131), + 'hotpink': 
(255, 105, 180), + 'hotpink1': (255, 110, 180), + 'hotpink2': (238, 106, 167), + 'hotpink3': (205, 96, 144), + 'hotpink4': (139, 58, 98), + 'indianred': (205, 92, 92), + 'indianred1': (255, 106, 106), + 'indianred2': (238, 99, 99), + 'indianred3': (205, 85, 85), + 'indianred4': (139, 58, 58), + 'indigo': (75, 0, 130), + 'invis': (255, 255, 254), + 'ivory': (255, 255, 240), + 'ivory1': (255, 255, 240), + 'ivory2': (238, 238, 224), + 'ivory3': (205, 205, 193), + 'ivory4': (139, 139, 131), + 'khaki': (240, 230, 140), + 'khaki1': (255, 246, 143), + 'khaki2': (238, 230, 133), + 'khaki3': (205, 198, 115), + 'khaki4': (139, 134, 78), + 'lavender': (230, 230, 250), + 'lavenderblush': (255, 240, 245), + 'lavenderblush1': (255, 240, 245), + 'lavenderblush2': (238, 224, 229), + 'lavenderblush3': (205, 193, 197), + 'lavenderblush4': (139, 131, 134), + 'lawngreen': (124, 252, 0), + 'lemonchiffon': (255, 250, 205), + 'lemonchiffon1': (255, 250, 205), + 'lemonchiffon2': (238, 233, 191), + 'lemonchiffon3': (205, 201, 165), + 'lemonchiffon4': (139, 137, 112), + 'lightblue': (173, 216, 230), + 'lightblue1': (191, 239, 255), + 'lightblue2': (178, 223, 238), + 'lightblue3': (154, 192, 205), + 'lightblue4': (104, 131, 139), + 'lightcoral': (240, 128, 128), + 'lightcyan': (224, 255, 255), + 'lightcyan1': (224, 255, 255), + 'lightcyan2': (209, 238, 238), + 'lightcyan3': (180, 205, 205), + 'lightcyan4': (122, 139, 139), + 'lightgoldenrod': (238, 221, 130), + 'lightgoldenrod1': (255, 236, 139), + 'lightgoldenrod2': (238, 220, 130), + 'lightgoldenrod3': (205, 190, 112), + 'lightgoldenrod4': (139, 129, 76), + 'lightgoldenrodyellow': (250, 250, 210), + 'lightgray': (211, 211, 211), + 'lightgrey': (211, 211, 211), + 'lightpink': (255, 182, 193), + 'lightpink1': (255, 174, 185), + 'lightpink2': (238, 162, 173), + 'lightpink3': (205, 140, 149), + 'lightpink4': (139, 95, 101), + 'lightsalmon': (255, 160, 122), + 'lightsalmon1': (255, 160, 122), + 'lightsalmon2': (238, 149, 114), + 'lightsalmon3': 
(205, 129, 98), + 'lightsalmon4': (139, 87, 66), + 'lightseagreen': (32, 178, 170), + 'lightskyblue': (135, 206, 250), + 'lightskyblue1': (176, 226, 255), + 'lightskyblue2': (164, 211, 238), + 'lightskyblue3': (141, 182, 205), + 'lightskyblue4': (96, 123, 139), + 'lightslateblue': (132, 112, 255), + 'lightslategray': (119, 136, 153), + 'lightslategrey': (119, 136, 153), + 'lightsteelblue': (176, 196, 222), + 'lightsteelblue1': (202, 225, 255), + 'lightsteelblue2': (188, 210, 238), + 'lightsteelblue3': (162, 181, 205), + 'lightsteelblue4': (110, 123, 139), + 'lightyellow': (255, 255, 224), + 'lightyellow1': (255, 255, 224), + 'lightyellow2': (238, 238, 209), + 'lightyellow3': (205, 205, 180), + 'lightyellow4': (139, 139, 122), + 'limegreen': (50, 205, 50), + 'linen': (250, 240, 230), + 'magenta': (255, 0, 255), + 'magenta1': (255, 0, 255), + 'magenta2': (238, 0, 238), + 'magenta3': (205, 0, 205), + 'magenta4': (139, 0, 139), + 'maroon': (176, 48, 96), + 'maroon1': (255, 52, 179), + 'maroon2': (238, 48, 167), + 'maroon3': (205, 41, 144), + 'maroon4': (139, 28, 98), + 'mediumaquamarine': (102, 205, 170), + 'mediumblue': (0, 0, 205), + 'mediumorchid': (186, 85, 211), + 'mediumorchid1': (224, 102, 255), + 'mediumorchid2': (209, 95, 238), + 'mediumorchid3': (180, 82, 205), + 'mediumorchid4': (122, 55, 139), + 'mediumpurple': (147, 112, 219), + 'mediumpurple1': (171, 130, 255), + 'mediumpurple2': (159, 121, 238), + 'mediumpurple3': (137, 104, 205), + 'mediumpurple4': (93, 71, 139), + 'mediumseagreen': (60, 179, 113), + 'mediumslateblue': (123, 104, 238), + 'mediumspringgreen': (0, 250, 154), + 'mediumturquoise': (72, 209, 204), + 'mediumvioletred': (199, 21, 133), + 'midnightblue': (25, 25, 112), + 'mintcream': (245, 255, 250), + 'mistyrose': (255, 228, 225), + 'mistyrose1': (255, 228, 225), + 'mistyrose2': (238, 213, 210), + 'mistyrose3': (205, 183, 181), + 'mistyrose4': (139, 125, 123), + 'moccasin': (255, 228, 181), + 'navajowhite': (255, 222, 173), + 'navajowhite1': 
(255, 222, 173), + 'navajowhite2': (238, 207, 161), + 'navajowhite3': (205, 179, 139), + 'navajowhite4': (139, 121, 94), + 'navy': (0, 0, 128), + 'navyblue': (0, 0, 128), + 'none': (255, 255, 254), + 'oldlace': (253, 245, 230), + 'olivedrab': (107, 142, 35), + 'olivedrab1': (192, 255, 62), + 'olivedrab2': (179, 238, 58), + 'olivedrab3': (154, 205, 50), + 'olivedrab4': (105, 139, 34), + 'orange': (255, 165, 0), + 'orange1': (255, 165, 0), + 'orange2': (238, 154, 0), + 'orange3': (205, 133, 0), + 'orange4': (139, 90, 0), + 'orangered': (255, 69, 0), + 'orangered1': (255, 69, 0), + 'orangered2': (238, 64, 0), + 'orangered3': (205, 55, 0), + 'orangered4': (139, 37, 0), + 'orchid': (218, 112, 214), + 'orchid1': (255, 131, 250), + 'orchid2': (238, 122, 233), + 'orchid3': (205, 105, 201), + 'orchid4': (139, 71, 137), + 'palegoldenrod': (238, 232, 170), + 'palegreen': (152, 251, 152), + 'palegreen1': (154, 255, 154), + 'palegreen2': (144, 238, 144), + 'palegreen3': (124, 205, 124), + 'palegreen4': (84, 139, 84), + 'paleturquoise': (175, 238, 238), + 'paleturquoise1': (187, 255, 255), + 'paleturquoise2': (174, 238, 238), + 'paleturquoise3': (150, 205, 205), + 'paleturquoise4': (102, 139, 139), + 'palevioletred': (219, 112, 147), + 'palevioletred1': (255, 130, 171), + 'palevioletred2': (238, 121, 159), + 'palevioletred3': (205, 104, 137), + 'palevioletred4': (139, 71, 93), + 'papayawhip': (255, 239, 213), + 'peachpuff': (255, 218, 185), + 'peachpuff1': (255, 218, 185), + 'peachpuff2': (238, 203, 173), + 'peachpuff3': (205, 175, 149), + 'peachpuff4': (139, 119, 101), + 'peru': (205, 133, 63), + 'pink': (255, 192, 203), + 'pink1': (255, 181, 197), + 'pink2': (238, 169, 184), + 'pink3': (205, 145, 158), + 'pink4': (139, 99, 108), + 'plum': (221, 160, 221), + 'plum1': (255, 187, 255), + 'plum2': (238, 174, 238), + 'plum3': (205, 150, 205), + 'plum4': (139, 102, 139), + 'powderblue': (176, 224, 230), + 'purple': (160, 32, 240), + 'purple1': (155, 48, 255), + 'purple2': (145, 44, 
238), + 'purple3': (125, 38, 205), + 'purple4': (85, 26, 139), + 'red': (255, 0, 0), + 'red1': (255, 0, 0), + 'red2': (238, 0, 0), + 'red3': (205, 0, 0), + 'red4': (139, 0, 0), + 'rosybrown': (188, 143, 143), + 'rosybrown1': (255, 193, 193), + 'rosybrown2': (238, 180, 180), + 'rosybrown3': (205, 155, 155), + 'rosybrown4': (139, 105, 105), + 'royalblue': (65, 105, 225), + 'royalblue1': (72, 118, 255), + 'royalblue2': (67, 110, 238), + 'royalblue3': (58, 95, 205), + 'royalblue4': (39, 64, 139), + 'saddlebrown': (139, 69, 19), + 'salmon': (250, 128, 114), + 'salmon1': (255, 140, 105), + 'salmon2': (238, 130, 98), + 'salmon3': (205, 112, 84), + 'salmon4': (139, 76, 57), + 'sandybrown': (244, 164, 96), + 'seagreen': (46, 139, 87), + 'seagreen1': (84, 255, 159), + 'seagreen2': (78, 238, 148), + 'seagreen3': (67, 205, 128), + 'seagreen4': (46, 139, 87), + 'seashell': (255, 245, 238), + 'seashell1': (255, 245, 238), + 'seashell2': (238, 229, 222), + 'seashell3': (205, 197, 191), + 'seashell4': (139, 134, 130), + 'sienna': (160, 82, 45), + 'sienna1': (255, 130, 71), + 'sienna2': (238, 121, 66), + 'sienna3': (205, 104, 57), + 'sienna4': (139, 71, 38), + 'skyblue': (135, 206, 235), + 'skyblue1': (135, 206, 255), + 'skyblue2': (126, 192, 238), + 'skyblue3': (108, 166, 205), + 'skyblue4': (74, 112, 139), + 'slateblue': (106, 90, 205), + 'slateblue1': (131, 111, 255), + 'slateblue2': (122, 103, 238), + 'slateblue3': (105, 89, 205), + 'slateblue4': (71, 60, 139), + 'slategray': (112, 128, 144), + 'slategray1': (198, 226, 255), + 'slategray2': (185, 211, 238), + 'slategray3': (159, 182, 205), + 'slategray4': (108, 123, 139), + 'slategrey': (112, 128, 144), + 'snow': (255, 250, 250), + 'snow1': (255, 250, 250), + 'snow2': (238, 233, 233), + 'snow3': (205, 201, 201), + 'snow4': (139, 137, 137), + 'springgreen': (0, 255, 127), + 'springgreen1': (0, 255, 127), + 'springgreen2': (0, 238, 118), + 'springgreen3': (0, 205, 102), + 'springgreen4': (0, 139, 69), + 'steelblue': (70, 130, 
180), + 'steelblue1': (99, 184, 255), + 'steelblue2': (92, 172, 238), + 'steelblue3': (79, 148, 205), + 'steelblue4': (54, 100, 139), + 'tan': (210, 180, 140), + 'tan1': (255, 165, 79), + 'tan2': (238, 154, 73), + 'tan3': (205, 133, 63), + 'tan4': (139, 90, 43), + 'thistle': (216, 191, 216), + 'thistle1': (255, 225, 255), + 'thistle2': (238, 210, 238), + 'thistle3': (205, 181, 205), + 'thistle4': (139, 123, 139), + 'tomato': (255, 99, 71), + 'tomato1': (255, 99, 71), + 'tomato2': (238, 92, 66), + 'tomato3': (205, 79, 57), + 'tomato4': (139, 54, 38), + 'transparent': (255, 255, 254), + 'turquoise': (64, 224, 208), + 'turquoise1': (0, 245, 255), + 'turquoise2': (0, 229, 238), + 'turquoise3': (0, 197, 205), + 'turquoise4': (0, 134, 139), + 'violet': (238, 130, 238), + 'violetred': (208, 32, 144), + 'violetred1': (255, 62, 150), + 'violetred2': (238, 58, 140), + 'violetred3': (205, 50, 120), + 'violetred4': (139, 34, 82), + 'wheat': (245, 222, 179), + 'wheat1': (255, 231, 186), + 'wheat2': (238, 216, 174), + 'wheat3': (205, 186, 150), + 'wheat4': (139, 126, 102), + 'white': (255, 255, 255), + 'whitesmoke': (245, 245, 245), + 'yellow': (255, 255, 0), + 'yellow1': (255, 255, 0), + 'yellow2': (238, 238, 0), + 'yellow3': (205, 205, 0), + 'yellow4': (139, 139, 0), + 'yellowgreen': (154, 205, 50), } re_nonword=re.compile(r'([^0-9a-zA-Z_.]+)') re_linewidth=re.compile(r'setlinewidth\((\d+(\.\d*)?|\.\d+)\)') diff --git a/lib-python/2.7/distutils/command/build_ext.py b/lib-python/2.7/distutils/command/build_ext.py --- a/lib-python/2.7/distutils/command/build_ext.py +++ b/lib-python/2.7/distutils/command/build_ext.py @@ -188,7 +188,7 @@ # the 'libs' directory is for binary installs - we assume that # must be the *native* platform. But we don't really support # cross-compiling via a binary install anyway, so we let it go. 
- self.library_dirs.append(os.path.join(sys.exec_prefix, 'include')) + self.library_dirs.append(os.path.join(sys.exec_prefix, 'libs')) if self.debug: self.build_temp = os.path.join(self.build_temp, "Debug") else: diff --git a/lib-python/3/importlib/_bootstrap.py b/lib-python/3/importlib/_bootstrap.py --- a/lib-python/3/importlib/_bootstrap.py +++ b/lib-python/3/importlib/_bootstrap.py @@ -1496,7 +1496,7 @@ raise TypeError("module name must be str, not {}".format(type(name))) if level < 0: raise ValueError('level must be >= 0') - if package: + if level > 0: if not isinstance(package, str): raise TypeError("__package__ not set to a string") elif package not in sys.modules: diff --git a/lib-python/3/test/test_importlib/import_/test_relative_imports.py b/lib-python/3/test/test_importlib/import_/test_relative_imports.py --- a/lib-python/3/test/test_importlib/import_/test_relative_imports.py +++ b/lib-python/3/test/test_importlib/import_/test_relative_imports.py @@ -208,6 +208,11 @@ with self.assertRaises(KeyError): import_util.import_('sys', level=1) + def test_relative_import_no_package_exists_absolute(self): + with self.assertRaises(SystemError): + self.__import__('sys', {'__package__': '', '__spec__': None}, + level=1) + def test_main(): from test.support import run_unittest diff --git a/lib_pypy/_pypy_testcapi.py b/lib_pypy/_pypy_testcapi.py --- a/lib_pypy/_pypy_testcapi.py +++ b/lib_pypy/_pypy_testcapi.py @@ -8,6 +8,7 @@ content = fid.read() # from cffi's Verifier() key = '\x00'.join([sys.version[:3], content]) + key += 'cpyext-gc-support-2' # this branch requires recompilation! 
if sys.version_info >= (3,): key = key.encode('utf-8') k1 = hex(binascii.crc32(key[0::2]) & 0xffffffff) @@ -62,7 +63,7 @@ if sys.platform == 'win32': # XXX pyconfig.h uses a pragma to link to the import library, # which is currently python3.lib - library = os.path.join(thisdir, '..', 'include', 'python32') + library = os.path.join(thisdir, '..', 'libs', 'python32') if not os.path.exists(library + '.lib'): # For a local translation or nightly build library = os.path.join(thisdir, '..', 'pypy', 'goal', 'python32') diff --git a/lib_pypy/cffi.egg-info/PKG-INFO b/lib_pypy/cffi.egg-info/PKG-INFO --- a/lib_pypy/cffi.egg-info/PKG-INFO +++ b/lib_pypy/cffi.egg-info/PKG-INFO @@ -1,6 +1,6 @@ Metadata-Version: 1.1 Name: cffi -Version: 1.5.0 +Version: 1.5.2 Summary: Foreign Function Interface for Python calling C code. Home-page: http://cffi.readthedocs.org Author: Armin Rigo, Maciej Fijalkowski diff --git a/lib_pypy/cffi/__init__.py b/lib_pypy/cffi/__init__.py --- a/lib_pypy/cffi/__init__.py +++ b/lib_pypy/cffi/__init__.py @@ -4,8 +4,8 @@ from .api import FFI, CDefError, FFIError from .ffiplatform import VerificationError, VerificationMissing -__version__ = "1.5.0" -__version_info__ = (1, 5, 0) +__version__ = "1.5.2" +__version_info__ = (1, 5, 2) # The verifier module file names are based on the CRC32 of a string that # contains the following version number. 
It may be older than __version__ diff --git a/lib_pypy/cffi/_cffi_include.h b/lib_pypy/cffi/_cffi_include.h --- a/lib_pypy/cffi/_cffi_include.h +++ b/lib_pypy/cffi/_cffi_include.h @@ -231,6 +231,12 @@ ((got_nonpos) == (expected <= 0) && \ (got) == (unsigned long long)expected) +#ifdef MS_WIN32 +# define _cffi_stdcall __stdcall +#else +# define _cffi_stdcall /* nothing */ +#endif + #ifdef __cplusplus } #endif diff --git a/lib_pypy/cffi/_embedding.h b/lib_pypy/cffi/_embedding.h new file mode 100644 --- /dev/null +++ b/lib_pypy/cffi/_embedding.h @@ -0,0 +1,517 @@ + +/***** Support code for embedding *****/ + +#if defined(_MSC_VER) +# define CFFI_DLLEXPORT __declspec(dllexport) +#elif defined(__GNUC__) +# define CFFI_DLLEXPORT __attribute__((visibility("default"))) +#else +# define CFFI_DLLEXPORT /* nothing */ +#endif + + +/* There are two global variables of type _cffi_call_python_fnptr: + + * _cffi_call_python, which we declare just below, is the one called + by ``extern "Python"`` implementations. + + * _cffi_call_python_org, which on CPython is actually part of the + _cffi_exports[] array, is the function pointer copied from + _cffi_backend. + + After initialization is complete, both are equal. However, the + first one remains equal to &_cffi_start_and_call_python until the + very end of initialization, when we are (or should be) sure that + concurrent threads also see a completely initialized world, and + only then is it changed. 
+*/ +#undef _cffi_call_python +typedef void (*_cffi_call_python_fnptr)(struct _cffi_externpy_s *, char *); +static void _cffi_start_and_call_python(struct _cffi_externpy_s *, char *); +static _cffi_call_python_fnptr _cffi_call_python = &_cffi_start_and_call_python; + + +#ifndef _MSC_VER + /* --- Assuming a GCC not infinitely old --- */ +# define cffi_compare_and_swap(l,o,n) __sync_bool_compare_and_swap(l,o,n) +# define cffi_write_barrier() __sync_synchronize() +# if !defined(__amd64__) && !defined(__x86_64__) && \ + !defined(__i386__) && !defined(__i386) +# define cffi_read_barrier() __sync_synchronize() +# else +# define cffi_read_barrier() (void)0 +# endif +#else + /* --- Windows threads version --- */ +# include +# define cffi_compare_and_swap(l,o,n) \ + (InterlockedCompareExchangePointer(l,n,o) == (o)) +# define cffi_write_barrier() InterlockedCompareExchange(&_cffi_dummy,0,0) +# define cffi_read_barrier() (void)0 +static volatile LONG _cffi_dummy; +#endif + +#ifdef WITH_THREAD +# ifndef _MSC_VER +# include + static pthread_mutex_t _cffi_embed_startup_lock; +# else + static CRITICAL_SECTION _cffi_embed_startup_lock; +# endif + static char _cffi_embed_startup_lock_ready = 0; +#endif + +static void _cffi_acquire_reentrant_mutex(void) +{ + static void *volatile lock = NULL; + + while (!cffi_compare_and_swap(&lock, NULL, (void *)1)) { + /* should ideally do a spin loop instruction here, but + hard to do it portably and doesn't really matter I + think: pthread_mutex_init() should be very fast, and + this is only run at start-up anyway. 
*/ + } + +#ifdef WITH_THREAD + if (!_cffi_embed_startup_lock_ready) { +# ifndef _MSC_VER + pthread_mutexattr_t attr; + pthread_mutexattr_init(&attr); + pthread_mutexattr_settype(&attr, PTHREAD_MUTEX_RECURSIVE); + pthread_mutex_init(&_cffi_embed_startup_lock, &attr); +# else + InitializeCriticalSection(&_cffi_embed_startup_lock); +# endif + _cffi_embed_startup_lock_ready = 1; + } +#endif + + while (!cffi_compare_and_swap(&lock, (void *)1, NULL)) + ; + +#ifndef _MSC_VER + pthread_mutex_lock(&_cffi_embed_startup_lock); +#else + EnterCriticalSection(&_cffi_embed_startup_lock); +#endif +} + +static void _cffi_release_reentrant_mutex(void) +{ +#ifndef _MSC_VER + pthread_mutex_unlock(&_cffi_embed_startup_lock); +#else + LeaveCriticalSection(&_cffi_embed_startup_lock); +#endif +} + + +/********** CPython-specific section **********/ +#ifndef PYPY_VERSION + + +#define _cffi_call_python_org _cffi_exports[_CFFI_CPIDX] + +PyMODINIT_FUNC _CFFI_PYTHON_STARTUP_FUNC(void); /* forward */ + +static void _cffi_py_initialize(void) +{ + /* XXX use initsigs=0, which "skips initialization registration of + signal handlers, which might be useful when Python is + embedded" according to the Python docs. But review and think + if it should be a user-controllable setting. + + XXX we should also give a way to write errors to a buffer + instead of to stderr. + + XXX if importing 'site' fails, CPython (any version) calls + exit(). Should we try to work around this behavior here? + */ + Py_InitializeEx(0); +} + +static int _cffi_initialize_python(void) +{ + /* This initializes Python, imports _cffi_backend, and then the + present .dll/.so is set up as a CPython C extension module. + */ + int result; + PyGILState_STATE state; + PyObject *pycode=NULL, *global_dict=NULL, *x; + +#if PY_MAJOR_VERSION >= 3 + /* see comments in _cffi_carefully_make_gil() about the + Python2/Python3 difference + */ +#else + /* Acquire the GIL. We have no threadstate here. 
If Python is + already initialized, it is possible that there is already one + existing for this thread, but it is not made current now. + */ + PyEval_AcquireLock(); + + _cffi_py_initialize(); + + /* The Py_InitializeEx() sometimes made a threadstate for us, but + not always. Indeed Py_InitializeEx() could be called and do + nothing. So do we have a threadstate, or not? We don't know, + but we can replace it with NULL in all cases. + */ + (void)PyThreadState_Swap(NULL); + + /* Now we can release the GIL and re-acquire immediately using the + logic of PyGILState(), which handles making or installing the + correct threadstate. + */ + PyEval_ReleaseLock(); +#endif + state = PyGILState_Ensure(); + + /* Call the initxxx() function from the present module. It will + create and initialize us as a CPython extension module, instead + of letting the startup Python code do it---it might reimport + the same .dll/.so and get maybe confused on some platforms. + It might also have troubles locating the .dll/.so again for all + I know. + */ + (void)_CFFI_PYTHON_STARTUP_FUNC(); + if (PyErr_Occurred()) + goto error; + + /* Now run the Python code provided to ffi.embedding_init_code(). + */ + pycode = Py_CompileString(_CFFI_PYTHON_STARTUP_CODE, + "", + Py_file_input); + if (pycode == NULL) + goto error; + global_dict = PyDict_New(); + if (global_dict == NULL) + goto error; + if (PyDict_SetItemString(global_dict, "__builtins__", + PyThreadState_GET()->interp->builtins) < 0) + goto error; + x = PyEval_EvalCode( +#if PY_MAJOR_VERSION < 3 + (PyCodeObject *) +#endif + pycode, global_dict, global_dict); + if (x == NULL) + goto error; + Py_DECREF(x); + + /* Done! Now if we've been called from + _cffi_start_and_call_python() in an ``extern "Python"``, we can + only hope that the Python code did correctly set up the + corresponding @ffi.def_extern() function. 
Otherwise, the + general logic of ``extern "Python"`` functions (inside the + _cffi_backend module) will find that the reference is still + missing and print an error. + */ + result = 0; + done: + Py_XDECREF(pycode); + Py_XDECREF(global_dict); + PyGILState_Release(state); + return result; + + error:; + { + /* Print as much information as potentially useful. + Debugging load-time failures with embedding is not fun + */ + PyObject *exception, *v, *tb, *f, *modules, *mod; + PyErr_Fetch(&exception, &v, &tb); + if (exception != NULL) { + PyErr_NormalizeException(&exception, &v, &tb); + PyErr_Display(exception, v, tb); + } + Py_XDECREF(exception); + Py_XDECREF(v); + Py_XDECREF(tb); + + f = PySys_GetObject((char *)"stderr"); + if (f != NULL && f != Py_None) { + PyFile_WriteString("\nFrom: " _CFFI_MODULE_NAME + "\ncompiled with cffi version: 1.5.2" + "\n_cffi_backend module: ", f); + modules = PyImport_GetModuleDict(); + mod = PyDict_GetItemString(modules, "_cffi_backend"); + if (mod == NULL) { + PyFile_WriteString("not loaded", f); + } + else { + v = PyObject_GetAttrString(mod, "__file__"); + PyFile_WriteObject(v, f, 0); + Py_XDECREF(v); + } + PyFile_WriteString("\nsys.path: ", f); + PyFile_WriteObject(PySys_GetObject((char *)"path"), f, 0); + PyFile_WriteString("\n\n", f); + } + } + result = -1; + goto done; +} + +PyAPI_DATA(char *) _PyParser_TokenNames[]; /* from CPython */ + +static int _cffi_carefully_make_gil(void) +{ + /* This does the basic initialization of Python. It can be called + completely concurrently from unrelated threads. It assumes + that we don't hold the GIL before (if it exists), and we don't + hold it afterwards. + + What it really does is completely different in Python 2 and + Python 3. + + Python 2 + ======== + + Initialize the GIL, without initializing the rest of Python, + by calling PyEval_InitThreads(). + + PyEval_InitThreads() must not be called concurrently at all. + So we use a global variable as a simple spin lock. 
This global + variable must be from 'libpythonX.Y.so', not from this + cffi-based extension module, because it must be shared from + different cffi-based extension modules. We choose + _PyParser_TokenNames[0] as a completely arbitrary pointer value + that is never written to. The default is to point to the + string "ENDMARKER". We change it temporarily to point to the + next character in that string. (Yes, I know it's REALLY + obscure.) + + Python 3 + ======== + + In Python 3, PyEval_InitThreads() cannot be called before + Py_InitializeEx() any more. So this function calls + Py_InitializeEx() first. It uses the same obscure logic to + make sure we never call it concurrently. + + Arguably, this is less good on the spinlock, because + Py_InitializeEx() takes much longer to run than + PyEval_InitThreads(). But I didn't find a way around it. + */ + +#ifdef WITH_THREAD + char *volatile *lock = (char *volatile *)_PyParser_TokenNames; + char *old_value; + + while (1) { /* spin loop */ + old_value = *lock; + if (old_value[0] == 'E') { + assert(old_value[1] == 'N'); + if (cffi_compare_and_swap(lock, old_value, old_value + 1)) + break; + } + else { + assert(old_value[0] == 'N'); + /* should ideally do a spin loop instruction here, but + hard to do it portably and doesn't really matter I + think: PyEval_InitThreads() should be very fast, and + this is only run at start-up anyway. 
*/ + } + } +#endif + +#if PY_MAJOR_VERSION >= 3 + /* Python 3: call Py_InitializeEx() */ + { + PyGILState_STATE state = PyGILState_UNLOCKED; + if (!Py_IsInitialized()) + _cffi_py_initialize(); + else + state = PyGILState_Ensure(); + + PyEval_InitThreads(); + PyGILState_Release(state); + } +#else + /* Python 2: call PyEval_InitThreads() */ +# ifdef WITH_THREAD + if (!PyEval_ThreadsInitialized()) { + PyEval_InitThreads(); /* makes the GIL */ + PyEval_ReleaseLock(); /* then release it */ + } + /* else: there is already a GIL, but we still needed to do the + spinlock dance to make sure that we see it as fully ready */ +# endif +#endif + +#ifdef WITH_THREAD + /* release the lock */ + while (!cffi_compare_and_swap(lock, old_value + 1, old_value)) + ; +#endif + + return 0; +} + +/********** end CPython-specific section **********/ + + +#else + + +/********** PyPy-specific section **********/ + +PyMODINIT_FUNC _CFFI_PYTHON_STARTUP_FUNC(const void *[]); /* forward */ + +static struct _cffi_pypy_init_s { + const char *name; + void (*func)(const void *[]); + const char *code; +} _cffi_pypy_init = { + _CFFI_MODULE_NAME, + _CFFI_PYTHON_STARTUP_FUNC, + _CFFI_PYTHON_STARTUP_CODE, +}; + +extern int pypy_carefully_make_gil(const char *); +extern int pypy_init_embedded_cffi_module(int, struct _cffi_pypy_init_s *); + +static int _cffi_carefully_make_gil(void) +{ + return pypy_carefully_make_gil(_CFFI_MODULE_NAME); +} + +static int _cffi_initialize_python(void) +{ + return pypy_init_embedded_cffi_module(0xB011, &_cffi_pypy_init); +} + +/********** end PyPy-specific section **********/ + + +#endif + + +#ifdef __GNUC__ +__attribute__((noinline)) +#endif +static _cffi_call_python_fnptr _cffi_start_python(void) +{ + /* Delicate logic to initialize Python. This function can be + called multiple times concurrently, e.g. when the process calls + its first ``extern "Python"`` functions in multiple threads at + once. It can also be called recursively, in which case we must + ignore it. 
We also have to consider what occurs if several + different cffi-based extensions reach this code in parallel + threads---it is a different copy of the code, then, and we + can't have any shared global variable unless it comes from + 'libpythonX.Y.so'. + + Idea: + + * _cffi_carefully_make_gil(): "carefully" call + PyEval_InitThreads() (possibly with Py_InitializeEx() first). + + * then we use a (local) custom lock to make sure that a call to this + cffi-based extension will wait if another call to the *same* + extension is running the initialization in another thread. + It is reentrant, so that a recursive call will not block, but + only one from a different thread. + + * then we grab the GIL and (Python 2) we call Py_InitializeEx(). + At this point, concurrent calls to Py_InitializeEx() are not + possible: we have the GIL. + + * do the rest of the specific initialization, which may + temporarily release the GIL but not the custom lock. + Only release the custom lock when we are done. + */ + static char called = 0; + + if (_cffi_carefully_make_gil() != 0) + return NULL; + + _cffi_acquire_reentrant_mutex(); + + /* Here the GIL exists, but we don't have it. We're only protected + from concurrency by the reentrant mutex. */ + + /* This file only initializes the embedded module once, the first + time this is called, even if there are subinterpreters. */ + if (!called) { + called = 1; /* invoke _cffi_initialize_python() only once, + but don't set '_cffi_call_python' right now, + otherwise concurrent threads won't call + this function at all (we need them to wait) */ + if (_cffi_initialize_python() == 0) { + /* now initialization is finished. Switch to the fast-path. */ + + /* We would like nobody to see the new value of + '_cffi_call_python' without also seeing the rest of the + data initialized. However, this is not possible. But + the new value of '_cffi_call_python' is the function + 'cffi_call_python()' from _cffi_backend. 
So: */ + cffi_write_barrier(); + /* ^^^ we put a write barrier here, and a corresponding + read barrier at the start of cffi_call_python(). This + ensures that after that read barrier, we see everything + done here before the write barrier. + */ + + assert(_cffi_call_python_org != NULL); + _cffi_call_python = (_cffi_call_python_fnptr)_cffi_call_python_org; + } + else { + /* initialization failed. Reset this to NULL, even if it was + already set to some other value. Future calls to + _cffi_start_python() are still forced to occur, and will + always return NULL from now on. */ + _cffi_call_python_org = NULL; + } + } + + _cffi_release_reentrant_mutex(); + + return (_cffi_call_python_fnptr)_cffi_call_python_org; +} + +static +void _cffi_start_and_call_python(struct _cffi_externpy_s *externpy, char *args) +{ + _cffi_call_python_fnptr fnptr; + int current_err = errno; +#ifdef _MSC_VER + int current_lasterr = GetLastError(); +#endif + fnptr = _cffi_start_python(); + if (fnptr == NULL) { + fprintf(stderr, "function %s() called, but initialization code " + "failed. Returning 0.\n", externpy->name); + memset(args, 0, externpy->size_of_result); + } +#ifdef _MSC_VER + SetLastError(current_lasterr); +#endif + errno = current_err; + + if (fnptr != NULL) + fnptr(externpy, args); +} + + +/* The cffi_start_python() function makes sure Python is initialized + and our cffi module is set up. It can be called manually from the + user C code. The same effect is obtained automatically from any + dll-exported ``extern "Python"`` function. This function returns + -1 if initialization failed, 0 if all is OK. 
*/ +_CFFI_UNUSED_FN +static int cffi_start_python(void) +{ + if (_cffi_call_python == &_cffi_start_and_call_python) { + if (_cffi_start_python() == NULL) + return -1; + } + cffi_read_barrier(); + return 0; +} + +#undef cffi_compare_and_swap +#undef cffi_write_barrier +#undef cffi_read_barrier diff --git a/lib_pypy/cffi/api.py b/lib_pypy/cffi/api.py --- a/lib_pypy/cffi/api.py +++ b/lib_pypy/cffi/api.py @@ -544,28 +544,50 @@ def _apply_embedding_fix(self, kwds): # must include an argument like "-lpython2.7" for the compiler + def ensure(key, value): + lst = kwds.setdefault(key, []) + if value not in lst: + lst.append(value) + # if '__pypy__' in sys.builtin_module_names: + import os + if sys.platform == "win32": + # we need 'libpypy-c.lib'. Current distributions of + # pypy (>= 4.1) contain it as 'libs/python27.lib'. + pythonlib = "python27" + if hasattr(sys, 'prefix'): + ensure('library_dirs', os.path.join(sys.prefix, 'libs')) + else: + # we need 'libpypy-c.{so,dylib}', which should be by + # default located in 'sys.prefix/bin' for installed + # systems. + pythonlib = "pypy-c" + if hasattr(sys, 'prefix'): + ensure('library_dirs', os.path.join(sys.prefix, 'bin')) + # On uninstalled pypy's, the libpypy-c is typically found in + # .../pypy/goal/. 
if hasattr(sys, 'prefix'): - import os - libdir = os.path.join(sys.prefix, 'bin') - dirs = kwds.setdefault('library_dirs', []) - if libdir not in dirs: - dirs.append(libdir) - pythonlib = "pypy-c" + ensure('library_dirs', os.path.join(sys.prefix, 'pypy', 'goal')) else: if sys.platform == "win32": template = "python%d%d" - if sys.flags.debug: - template = template + '_d' + if hasattr(sys, 'gettotalrefcount'): + template += '_d' else: + try: + import sysconfig + except ImportError: # 2.6 + from distutils import sysconfig template = "python%d.%d" + if sysconfig.get_config_var('DEBUG_EXT'): + template += sysconfig.get_config_var('DEBUG_EXT') pythonlib = (template % (sys.hexversion >> 24, (sys.hexversion >> 16) & 0xff)) if hasattr(sys, 'abiflags'): pythonlib += sys.abiflags - libraries = kwds.setdefault('libraries', []) - if pythonlib not in libraries: - libraries.append(pythonlib) + ensure('libraries', pythonlib) + if sys.platform == "win32": + ensure('extra_link_args', '/MANIFEST') def set_source(self, module_name, source, source_extension='.c', **kwds): if hasattr(self, '_assigned_source'): @@ -631,7 +653,7 @@ compiled DLL. Use '*' to force distutils' choice, suitable for regular CPython C API modules. Use a file name ending in '.*' to ask for the system's default extension for dynamic libraries - (.so/.dll). + (.so/.dll/.dylib). The default is '*' when building a non-embedded C API extension, and (module_name + '.*') when building an embedded library. 
@@ -695,6 +717,10 @@ # self._embedding = pysource + def def_extern(self, *args, **kwds): + raise ValueError("ffi.def_extern() is only available on API-mode FFI " + "objects") + def _load_backend_lib(backend, name, flags): if name is None: diff --git a/lib_pypy/cffi/cparser.py b/lib_pypy/cffi/cparser.py --- a/lib_pypy/cffi/cparser.py +++ b/lib_pypy/cffi/cparser.py @@ -220,7 +220,7 @@ self._included_declarations = set() self._anonymous_counter = 0 self._structnode2type = weakref.WeakKeyDictionary() - self._options = None + self._options = {} self._int_constants = {} self._recomplete = [] self._uses_new_feature = None @@ -374,7 +374,7 @@ def _declare_function(self, tp, quals, decl): tp = self._get_type_pointer(tp, quals) - if self._options['dllexport']: + if self._options.get('dllexport'): tag = 'dllexport_python ' elif self._inside_extern_python: tag = 'extern_python ' @@ -450,7 +450,7 @@ prevobj, prevquals = self._declarations[name] if prevobj is obj and prevquals == quals: return - if not self._options['override']: + if not self._options.get('override'): raise api.FFIError( "multiple declarations of %s (for interactive usage, " "try cdef(xx, override=True))" % (name,)) @@ -729,7 +729,7 @@ if isinstance(tp, model.StructType) and tp.partial: raise NotImplementedError("%s: using both bitfields and '...;'" % (tp,)) - tp.packed = self._options['packed'] + tp.packed = self._options.get('packed') if tp.completed: # must be re-completed: it is not opaque any more tp.completed = 0 self._recomplete.append(tp) diff --git a/lib_pypy/cffi/ffiplatform.py b/lib_pypy/cffi/ffiplatform.py --- a/lib_pypy/cffi/ffiplatform.py +++ b/lib_pypy/cffi/ffiplatform.py @@ -21,14 +21,12 @@ allsources.append(os.path.normpath(src)) return Extension(name=modname, sources=allsources, **kwds) -def compile(tmpdir, ext, compiler_verbose=0, target_extension=None, - embedding=False): +def compile(tmpdir, ext, compiler_verbose=0): """Compile a C extension module using distutils.""" saved_environ = 
os.environ.copy() try: - outputfilename = _build(tmpdir, ext, compiler_verbose, - target_extension, embedding) + outputfilename = _build(tmpdir, ext, compiler_verbose) outputfilename = os.path.abspath(outputfilename) finally: # workaround for a distutils bugs where some env vars can @@ -38,32 +36,7 @@ os.environ[key] = value return outputfilename -def _save_val(name): - import distutils.sysconfig - config_vars = distutils.sysconfig.get_config_vars() - return config_vars.get(name, Ellipsis) - -def _restore_val(name, value): - import distutils.sysconfig - config_vars = distutils.sysconfig.get_config_vars() - config_vars[name] = value - if value is Ellipsis: - del config_vars[name] - -def _win32_hack_for_embedding(): - from distutils.msvc9compiler import MSVCCompiler - if not hasattr(MSVCCompiler, '_remove_visual_c_ref_CFFI_BAK'): - MSVCCompiler._remove_visual_c_ref_CFFI_BAK = \ - MSVCCompiler._remove_visual_c_ref - MSVCCompiler._remove_visual_c_ref = lambda self,manifest_file: manifest_file - -def _win32_unhack_for_embedding(): - from distutils.msvc9compiler import MSVCCompiler - MSVCCompiler._remove_visual_c_ref = \ - MSVCCompiler._remove_visual_c_ref_CFFI_BAK - -def _build(tmpdir, ext, compiler_verbose=0, target_extension=None, - embedding=False): +def _build(tmpdir, ext, compiler_verbose=0): # XXX compact but horrible :-( from distutils.core import Distribution import distutils.errors, distutils.log @@ -76,25 +49,14 @@ options['build_temp'] = ('ffiplatform', tmpdir) # try: - if sys.platform == 'win32' and embedding: - _win32_hack_for_embedding() old_level = distutils.log.set_threshold(0) or 0 - old_SO = _save_val('SO') - old_EXT_SUFFIX = _save_val('EXT_SUFFIX') try: - if target_extension is not None: - _restore_val('SO', target_extension) - _restore_val('EXT_SUFFIX', target_extension) distutils.log.set_verbosity(compiler_verbose) dist.run_command('build_ext') cmd_obj = dist.get_command_obj('build_ext') [soname] = cmd_obj.get_outputs() finally: 
distutils.log.set_threshold(old_level) - _restore_val('SO', old_SO) - _restore_val('EXT_SUFFIX', old_EXT_SUFFIX) - if sys.platform == 'win32' and embedding: - _win32_unhack_for_embedding() except (distutils.errors.CompileError, distutils.errors.LinkError) as e: raise VerificationError('%s: %s' % (e.__class__.__name__, e)) diff --git a/lib_pypy/cffi/recompiler.py b/lib_pypy/cffi/recompiler.py --- a/lib_pypy/cffi/recompiler.py +++ b/lib_pypy/cffi/recompiler.py @@ -1170,6 +1170,8 @@ repr_arguments = ', '.join(arguments) repr_arguments = repr_arguments or 'void' name_and_arguments = '%s(%s)' % (name, repr_arguments) + if tp.abi == "__stdcall": + name_and_arguments = '_cffi_stdcall ' + name_and_arguments # def may_need_128_bits(tp): return (isinstance(tp, model.PrimitiveType) and @@ -1357,6 +1359,58 @@ parts[-1] += extension return os.path.join(outputdir, *parts), parts + +# Aaargh. Distutils is not tested at all for the purpose of compiling +# DLLs that are not extension modules. Here are some hacks to work +# around that, in the _patch_for_*() functions... + +def _patch_meth(patchlist, cls, name, new_meth): + old = getattr(cls, name) + patchlist.append((cls, name, old)) + setattr(cls, name, new_meth) + return old + +def _unpatch_meths(patchlist): + for cls, name, old_meth in reversed(patchlist): + setattr(cls, name, old_meth) + +def _patch_for_embedding(patchlist): + if sys.platform == 'win32': + # we must not remove the manifest when building for embedding! 
+ from distutils.msvc9compiler import MSVCCompiler + _patch_meth(patchlist, MSVCCompiler, '_remove_visual_c_ref', + lambda self, manifest_file: manifest_file) + + if sys.platform == 'darwin': + # we must not make a '-bundle', but a '-dynamiclib' instead + from distutils.ccompiler import CCompiler + def my_link_shared_object(self, *args, **kwds): + if '-bundle' in self.linker_so: + self.linker_so = list(self.linker_so) + i = self.linker_so.index('-bundle') + self.linker_so[i] = '-dynamiclib' + return old_link_shared_object(self, *args, **kwds) + old_link_shared_object = _patch_meth(patchlist, CCompiler, + 'link_shared_object', + my_link_shared_object) + +def _patch_for_target(patchlist, target): + from distutils.command.build_ext import build_ext + # if 'target' is different from '*', we need to patch some internal + # method to just return this 'target' value, instead of having it + # built from module_name + if target.endswith('.*'): + target = target[:-2] + if sys.platform == 'win32': + target += '.dll' + elif sys.platform == 'darwin': + target += '.dylib' + else: + target += '.so' + _patch_meth(patchlist, build_ext, 'get_ext_filename', + lambda self, ext_name: target) + + def recompile(ffi, module_name, preamble, tmpdir='.', call_c_compiler=True, c_file=None, source_extension='.c', extradir=None, compiler_verbose=1, target=None, **kwds): @@ -1382,36 +1436,22 @@ target = '%s.*' % module_name else: target = '*' - if target == '*': - target_module_name = module_name - target_extension = None # use default - else: - if target.endswith('.*'): - target = target[:-2] - if sys.platform == 'win32': - target += '.dll' - else: - target += '.so' - # split along the first '.' 
(not the last one, otherwise the - # preceeding dots are interpreted as splitting package names) - index = target.find('.') - if index < 0: - raise ValueError("target argument %r should be a file name " - "containing a '.'" % (target,)) - target_module_name = target[:index] - target_extension = target[index:] # - ext = ffiplatform.get_extension(ext_c_file, target_module_name, **kwds) + ext = ffiplatform.get_extension(ext_c_file, module_name, **kwds) updated = make_c_source(ffi, module_name, preamble, c_file) if call_c_compiler: + patchlist = [] cwd = os.getcwd() try: + if embedding: + _patch_for_embedding(patchlist) + if target != '*': + _patch_for_target(patchlist, target) os.chdir(tmpdir) - outputfilename = ffiplatform.compile('.', ext, compiler_verbose, - target_extension, - embedding=embedding) + outputfilename = ffiplatform.compile('.', ext, compiler_verbose) finally: os.chdir(cwd) + _unpatch_meths(patchlist) return outputfilename else: return ext, updated diff --git a/lib_pypy/cffi/vengine_cpy.py b/lib_pypy/cffi/vengine_cpy.py --- a/lib_pypy/cffi/vengine_cpy.py +++ b/lib_pypy/cffi/vengine_cpy.py @@ -1,3 +1,6 @@ +# +# DEPRECATED: implementation for ffi.verify() +# import sys, imp from . import model, ffiplatform diff --git a/lib_pypy/cffi/vengine_gen.py b/lib_pypy/cffi/vengine_gen.py --- a/lib_pypy/cffi/vengine_gen.py +++ b/lib_pypy/cffi/vengine_gen.py @@ -1,3 +1,6 @@ +# +# DEPRECATED: implementation for ffi.verify() +# import sys, os import types diff --git a/lib_pypy/cffi/verifier.py b/lib_pypy/cffi/verifier.py --- a/lib_pypy/cffi/verifier.py +++ b/lib_pypy/cffi/verifier.py @@ -1,3 +1,6 @@ +# +# DEPRECATED: implementation for ffi.verify() +# import sys, os, binascii, shutil, io from . import __version_verifier_modules__ from . 
import ffiplatform diff --git a/pypy/config/pypyoption.py b/pypy/config/pypyoption.py --- a/pypy/config/pypyoption.py +++ b/pypy/config/pypyoption.py @@ -41,10 +41,13 @@ "faulthandler", ]) -if ((sys.platform.startswith('linux') or sys.platform == 'darwin') - and os.uname()[4] == 'x86_64' and sys.maxint > 2**32): - # it's not enough that we get x86_64 - working_modules.add('_vmprof') +from rpython.jit.backend import detect_cpu +try: + if detect_cpu.autodetect().startswith('x86'): + working_modules.add('_vmprof') +except detect_cpu.ProcessorAutodetectError: + pass + translation_modules = default_modules.copy() translation_modules.update([ @@ -171,9 +174,6 @@ cmdline="--translationmodules", suggests=[("objspace.allworkingmodules", False)]), - BoolOption("usepycfiles", "Write and read pyc files when importing", - default=True), - StrOption("soabi", "Tag to differentiate extension modules built for different Python interpreters", cmdline="--soabi", diff --git a/pypy/doc/discussion/rawrefcount.rst b/pypy/doc/discussion/rawrefcount.rst new file mode 100644 --- /dev/null +++ b/pypy/doc/discussion/rawrefcount.rst @@ -0,0 +1,158 @@ +====================== +Rawrefcount and the GC +====================== + + From pypy.commits at gmail.com Sun Feb 28 20:27:45 2016 From: pypy.commits at gmail.com (mjacob) Date: Sun, 28 Feb 2016 17:27:45 -0800 (PST) Subject: [pypy-commit] pypy py3k: Fix C API test. See comment for details. Message-ID: <56d39e91.13821c0a.f325a.ffff9d65@mx.google.com> Author: Manuel Jacob Branch: py3k Changeset: r82607:088e4a9efe72 Date: 2016-02-29 02:24 +0100 http://bitbucket.org/pypy/pypy/changeset/088e4a9efe72/ Log: Fix C API test. See comment for details. Thanks to Ronan and Armin for discussion and the suggestion to mask out bits. It would also be possible to mask out bits in the Py_REFCNT macro. There is a discussion whether this should be done (also on default). In this case this changeset wouldn't be needed and should be backed out. 
diff --git a/lib-python/3/test/test_capi.py b/lib-python/3/test/test_capi.py --- a/lib-python/3/test/test_capi.py +++ b/lib-python/3/test/test_capi.py @@ -201,7 +201,10 @@ # Bug #6012 class Test6012(unittest.TestCase): def test(self): - self.assertEqual(_testcapi.argparsing("Hello", "World"), 1) + # PyPy change: Mask out higher bits of reference count. PyPy increases + # the reference count by a high number if the object is linked to a + # PyPy object. + self.assertEqual(_testcapi.argparsing("Hello", "World") & 0xfffffff, 1) class EmbeddingTest(unittest.TestCase): From pypy.commits at gmail.com Sun Feb 28 20:27:47 2016 From: pypy.commits at gmail.com (mjacob) Date: Sun, 28 Feb 2016 17:27:47 -0800 (PST) Subject: [pypy-commit] pypy py3.3: hg merge py3k Message-ID: <56d39e93.0bdf1c0a.a5e6.ffff9d9b@mx.google.com> Author: Manuel Jacob Branch: py3.3 Changeset: r82608:f4302aeafbb3 Date: 2016-02-29 02:26 +0100 http://bitbucket.org/pypy/pypy/changeset/f4302aeafbb3/ Log: hg merge py3k diff --git a/lib-python/3/test/test_capi.py b/lib-python/3/test/test_capi.py --- a/lib-python/3/test/test_capi.py +++ b/lib-python/3/test/test_capi.py @@ -213,7 +213,10 @@ # Bug #6012 class Test6012(unittest.TestCase): def test(self): - self.assertEqual(_testcapi.argparsing("Hello", "World"), 1) + # PyPy change: Mask out higher bits of reference count. PyPy increases + # the reference count by a high number if the object is linked to a + # PyPy object. 
+ self.assertEqual(_testcapi.argparsing("Hello", "World") & 0xfffffff, 1) class EmbeddingTest(unittest.TestCase): diff --git a/pypy/module/_continuation/test/test_zpickle.py b/pypy/module/_continuation/test/test_zpickle.py --- a/pypy/module/_continuation/test/test_zpickle.py +++ b/pypy/module/_continuation/test/test_zpickle.py @@ -1,6 +1,9 @@ import py +py.test.skip("XXX: crashes: https://bitbucket.org/pypy/pypy/issue/1773") + + class AppTestCopy: spaceconfig = dict(usemodules=['_continuation'], continuation=True) @@ -215,7 +218,6 @@ ''', mod.__dict__) def test_pickle_continulet_real_subclass(self): - skip("XXX: triggers a crash: https://bitbucket.org/pypy/pypy/issue/1773") import types, sys mod = types.ModuleType('test_pickle_continulet_real_subclass') sys.modules['test_pickle_continulet_real_subclass'] = mod From pypy.commits at gmail.com Sun Feb 28 23:45:51 2016 From: pypy.commits at gmail.com (mattip) Date: Sun, 28 Feb 2016 20:45:51 -0800 (PST) Subject: [pypy-commit] pypy cpyext-ext: add test, passes -A, fails untranslated Message-ID: <56d3ccff.a151c20a.cf0a8.fffffa18@mx.google.com> Author: mattip Branch: cpyext-ext Changeset: r82609:22fa8dfd0a2d Date: 2016-02-28 23:43 -0500 http://bitbucket.org/pypy/pypy/changeset/22fa8dfd0a2d/ Log: add test, passes -A, fails untranslated diff --git a/pypy/module/cpyext/test/test_stringobject.py b/pypy/module/cpyext/test/test_stringobject.py --- a/pypy/module/cpyext/test/test_stringobject.py +++ b/pypy/module/cpyext/test/test_stringobject.py @@ -89,7 +89,26 @@ assert len(s) == 4 assert s == 'ab\x00c' - + def test_string_tp_alloc(self): + module = self.import_extension('foo', [ + ("getstring", "METH_NOARGS", + """ + PyObject *base; + PyTypeObject * type; + PyStringObject *obj; + char * p_str; + base = PyString_FromString("test"); + type = base->ob_type; + obj = (PyStringObject*)type->tp_alloc(type, 10); + if (PyString_GET_SIZE(obj) == 0) + return PyLong_FromLong(-1); + memcpy(PyString_AS_STRING(obj), "works", 6); + 
Py_INCREF(obj); + return (PyObject*)obj; + """), + ]) + s = module.getstring() + assert s == 'works\x00\x00\x00\x00\x00' def test_AsString(self): module = self.import_extension('foo', [ From pypy.commits at gmail.com Mon Feb 29 04:52:06 2016 From: pypy.commits at gmail.com (fijal) Date: Mon, 29 Feb 2016 01:52:06 -0800 (PST) Subject: [pypy-commit] extradoc extradoc: add the result of discussions from leysin (unedited) Message-ID: <56d414c6.465ec20a.98f88.2707@mx.google.com> Author: fijal Branch: extradoc Changeset: r5612:de191b0da0b9 Date: 2016-02-29 10:51 +0100 http://bitbucket.org/pypy/extradoc/changeset/de191b0da0b9/ Log: add the result of discussions from leysin (unedited) diff --git a/planning/sprint-leysin-2016-notes.rst b/planning/sprint-leysin-2016-notes.rst new file mode 100644 --- /dev/null +++ b/planning/sprint-leysin-2016-notes.rst @@ -0,0 +1,323 @@ +Tasks +===== + +- mercurial benchmarks on PyPy runner exists, some benchmarks +- mercurial porting C extensions to cffi MORE PROGRESS (fijal) +- fix multiple inheritance resolution in cpyext (arigo, cfbolz around) +- py3k work FIXING EVEN MORE TESTS, MERGED DEFAULT (AGAIN) (manuel, ronan) +- register allocator, more information is now available, FIRST PROTOTYPE (remi, richard if remi has time), created an issue +- clean up project lists (ronan, fijal) +- test optimizeopt chain with hypothesis (cfbolz, fijal to discuss) +- try fix speed center (richard, fijal to get him access), created issue +- go skiing (marmoute) +- go shopping +- turn won't manage into issues (all) +- start with new binary jit-log-opt (richard, fijal) +- fixing stm (remi) +- fix buffer API (arigo, fijal) + + + + +won't manage +-------------------- + +- VMProf on OS X, fix bugs (can't reproduce) +- jit leaner frontend +- live ranges in JIT viewer +- fix logging to not take time MESS +- continuing to refactoring annotator +- add warmup time to VMProf +- use probablistic data structures for guard_value, WE HAVE A PLAN +- single-run branch 
progress +- update setup.py and upload rpython to pip + + +done: +--------- +- dict-proxy with cpyext DONE +- fix bug in FixedSizeArray DONE +- compress resume data more, play with hypothesis (cfbolz, arigo, fijal) DONE +- maps reordering DONE +- take funding calls off the website, write blog post DONE +- fix lxml on cpyext-gc-support-2 (ronan, arigo) DONE, MERGED +- apply vmprof to a non pypy lang (cfbolz, fijal around) DONE +- talk benchmark statistics (cfbolz, mattip, ronan) DONE +- merging default into stm (remi) LESS MESS, MERGING DONE +- cpyext-gc-support-2 blog post (mattip, arigo) DONE +- get data about test duration DONE +- start a bit of a dev process document +- merging cpyext-ext, numpy-on-cpyext NEXT NEXT SEGFAULT, IMPORTS NUMPY WITH ONE HACK +- fix tests +- a script to upload to bitbucket IN PROGRESS +- have a test in rpython that checks against imports from pypy (cfbolz) +- make snowperson (cfbolz, fijal) +- general wizardry (cfbolz, arigo, samuele not around) + + +too many bridges +------------------------- + +Problems: + - pypy py.test is slow + - most bridges come from guard_value(map) (then guard_class) + +Steps: + - detect the situation (cardinality estimation) + - trace a general version + - look at all promotes in pypy, to see whether the general version is good + - in particular, we need to general version for maps + - make maps give up if the object is too big + +Research: + - how to deal with method invocations of the same method on different classes + - + + +Python3 +========= + - add more rposix features, use less replacements of os.XXX + - merge py3.3 -> py3k and create py3.5 + - solve the speed issue + - utf8 & unicode problems + - list of things we suspect are slow on pypy3k: + * unicode & utf8 strings and dictionaries of those strings, potential solution + is not to use rpython unicode type + * itertools stuff is slower than python 3 + - manuel & ronan go and work and SFC + - what to do with crowdfunding + + +Idea around Mercurial 
+================== +(notes about "new" feature that could be useful in pypy + +- clone bundle, +- share.pool, +- people version, + + + +summer of code +============= + +- volunteers from the pypy side: fijal, ronan, richard, remi, backup: armin +- looking for students: richard, remi +- unicode stuff as project + + + +cpyext+numpy +============ + +- two approaches: + - micronumpy: basically works, but no story for cpyext, bit of a dead end + - using numpy code with cpyext, with hooks into micronumpy + +- safe (but maybe slow) default, everything just works +- hard part: hijack some of the functionality and replace it with micronumpy code +- ––> Bucharest? + + + +tooling +======= + +technical problems: +- too many tools (vmprof, jitviewer, stmlog) +- too many output formats (vmprof, jit-log-opt, stmlog*2) +- jit-log-opt output format is brittle +- parsing debug_merge_point is brittle +- not good fallbacks +- a lot of pypy-specific +- identifying traces is not unique + + +consolidation goals: +- better format for jit-log-opt (keeping a way to show the old ascii output) +- having a programmatic way to turn on trace dumps +- combining vmprof/jitviewer +- documentation/tutorial + +future cool features: +- memory +- warmup time +- extensible events +- web app changes respectively +- navigation in jitviewer +- way to compare runs +- rpython functions where ops are coming from +- threading and forking support + + +volunteers: +- Maciek +- Matti +- Richard +- Sebastian + + +steps: +- collect interesting examples +- embed jit-log-opt into vmprof-file +- web stuff + +buildbot: +- script/url to start/stop master +- account for matti + + + + +unstucking benchmarking +==================== + +problems: +- py3k what benchmarks are there, where would we run them (and store the results) +- split benchmark running +- comparisons are broken (javascript exception) +- old version with custom hacks that are not backed up?? 
+- access to raw data +- store all the raw data +- benchmarks are too quick on jit / too slow on interp +- non consistent approach to warmup +- we don't have errors +- what to do with historical data +- what to do with branch data + +simple steps to improve the situation: +- revive single run branch +- fix comparison (simple if you know JS) +- add an api to get the data +- upload json files to buildbot + +harder steps to improve the situation: +- idea: tooling sprint +- move to new machine +- rerun benchmarks +- upgrade benchmarks (particularly the libraries) +- larger bechmarks +- make unreliable benchmarks reliable +- automatic slowdown reporting + + +volunteer: +- fijal?, cfbolz?, arigo? +- start a bit during the sprint (Thursday) + + +code quality & failing tests +===================== + +problems: +- tests fail for too long +- general instability of recent releases (mostly the fault of unrolling) +- some non-modular impenetrable code: + - ll2ctypes + - unroll + - cpyext + - structure of the jit optimizing chain +- tests are slow + +solutions: +- ll2ctypes: use cffi (see other discussion) +- unroll: reducing features is the only idea we currently have +- on the process level: + - release candidates + - RC PPAs? + - don't merge default into release branch + - be more principled about bugfix releases + - do the bugfix also on the latest release branch + - reduce the overhead of doing bugfix releases: + - look into automated bitbucket uploading +- use hypothesis more! +- run our own tests on pypy! 
+- run tests in parallel + +Bitbucket related questions +==================== +Bitbucket: +- "We are not unhappy with bitbucket; Much better than anything we have before" +- API to upload binary [question asked] +- limited bandwidth to upload +- limited bandwidth to download +- push speed +- clone speed [cloning under a minute on the way] +- email notification "not usable": [improvement planned] + - a mail per push (not per commit) + - format → trimmed log message//trimmed diff. + - "From committer" wanted. +- blocking --force (prevent multiple heads) [on their roadmap] +- Comment on random commit/pull request. + +rffi discussion +========== + +what do we want in the end: +1) an interface like cffi used at the interpreter level and in rpython/*. +2) rtyping, gctransformer use lltype objects + +problems: + +- interface & implementation of ll2ctypes +- difference between translated pypy and test env +- deprecated api (rawffi, rawffi_alt) +- no special support for rffi in the annotator?? (seems unclear) + +how do we go forward: + +- create small examples (e.g. crypt module) that use cffi for testing and at the later point in see + how we can support full translation. + rffi.llexternal -> variants that release the gil, some don't. how do we readd the possibility + of doing the same using cffi? + +Example: + +sandbox_safe: where do we put the flag so that the annotator understands that? +- preprocess step in cdef + common agreement to use pragma to define this flags (e.g + #pragma sandboxmode on -> off + +volunteer for the first small module: +maybe scope for gsoc? 
manuel after merging py3.3 + + +Numpy Hijacking +------------------------ + +Start over with a different module that uses the multiarray type from numpy instead of W_NDimArray +Make it use indexing for the first step, start copying methods from micronumpy +use raw_virtual + + +a fast trace hook +------------------------ + + + +support virtualenvs natively +--------------------------------------- + * needed to implement venv module on Python 3.3+ + * consider backporting to 2.7 to help virtualenv + * poke Donal to mail rationale to pypy-dev + + +STM +-------- + +Problems: + - performance + - what kind of conflicts are reasonable? + - how many conflicts are still ok? + - very slow warmup + - too many major collections + - what's the overhead of tiny transactions? + - need more data! + - maybe shorter transactions? + - measure, measure, measure + - talk to Intel + +ideas: + - find an application that we can speed up + - write a framework for that + - try to find a real-world something From pypy.commits at gmail.com Mon Feb 29 06:32:14 2016 From: pypy.commits at gmail.com (arigo) Date: Mon, 29 Feb 2016 03:32:14 -0800 (PST) Subject: [pypy-commit] pypy.org extradoc: update the values Message-ID: <56d42c3e.6672c20a.1077d.ffff8297@mx.google.com> Author: Armin Rigo Branch: extradoc Changeset: r711:74d29efbfbe9 Date: 2016-02-29 12:32 +0100 http://bitbucket.org/pypy/pypy.org/changeset/74d29efbfbe9/ Log: update the values diff --git a/don1.html b/don1.html --- a/don1.html +++ b/don1.html @@ -9,13 +9,13 @@ - $62936 of $105000 (59.9%) + $62984 of $105000 (60.0%)
    @@ -23,7 +23,7 @@
  • diff --git a/don4.html b/don4.html --- a/don4.html +++ b/don4.html @@ -17,7 +17,7 @@ 2nd call: - $30393 of $80000 (38.0%) + $30403 of $80000 (38.0%)
    @@ -25,7 +25,7 @@
  • From pypy.commits at gmail.com Mon Feb 29 08:31:27 2016 From: pypy.commits at gmail.com (plan_rich) Date: Mon, 29 Feb 2016 05:31:27 -0800 (PST) Subject: [pypy-commit] pypy s390x-backend: removed some old files that where moved in the last commit Message-ID: <56d4482f.d30e1c0a.e0020.79ff@mx.google.com> Author: Richard Plangger Branch: s390x-backend Changeset: r82611:4560bc0454eb Date: 2016-02-29 14:30 +0100 http://bitbucket.org/pypy/pypy/changeset/4560bc0454eb/ Log: removed some old files that where moved in the last commit diff --git a/rpython/jit/backend/llsupport/gcstress/__init__.py b/rpython/jit/backend/llsupport/gcstress/__init__.py deleted file mode 100644 diff --git a/rpython/jit/backend/llsupport/gcstress/code.py b/rpython/jit/backend/llsupport/gcstress/code.py deleted file mode 100644 --- a/rpython/jit/backend/llsupport/gcstress/code.py +++ /dev/null @@ -1,160 +0,0 @@ - -import struct - -class ByteCode(object): - def encode(self, ctx): - ctx.append_byte(self.BYTE_CODE) - -_c = 0 - -LIST_TYP = 'l' -INT_TYP = 'i' -OBJ_TYP = 'o' -STR_TYP = 's' -VAL_TYP = 'v' # either one of the earlier - -def unique_code(): - global _c - v = _c - _c = v + 1 - return v - -class Context(object): - def __init__(self): - self.consts = {} - self.const_idx = 0 - self.bytecode = [] - - def append_byte(self, byte): - self.bytecode.append(('b', byte)) - - def get_byte(self, i): - typ, byte = self.bytecode[i] - assert typ == 'b' - return byte - - def get_short(self, i): - typ, int = self.bytecode[i] - assert typ == 'h' - return int - - def append_short(self, byte): - self.bytecode.append(('h', byte)) - - def append_int(self, byte): - self.bytecode.append(('i', byte)) - - def const_str(self, str): - self.consts[self.const_idx] = str - self.append_short(self.const_idx) - self.const_idx += 1 - - def to_string(self): - code = [] - for typ, nmr in self.bytecode: - code.append(struct.pack(typ, nmr)) - return ''.join(code) - -def requires_stack(*types): - def method(clazz): - 
clazz._stack_types = tuple(types) - return clazz - - return method - - at requires_stack() -class CondJump(ByteCode): - BYTE_CODE = unique_code() - - COND_EQ = 0 - COND_LT = 1 - COND_GT = 2 - COND_LE = 3 - COND_GE = 4 - - def __init__(self, cond): - self.cond = cond - def encode(self, ctx): - ctx.append_byte(self.BYTE_CODE) - ctx.append_byte(self.cond) - - at requires_stack() -class Jump(ByteCode): - BYTE_CODE = unique_code() - def __init__(self): - pass - - at requires_stack() -class LoadStr(ByteCode): - BYTE_CODE = unique_code() - def __init__(self, string): - self.string = string - def encode(self, ctx): - ctx.append_byte(self.BYTE_CODE) - ctx.const_str(self.string) - - at requires_stack(STR_TYP, STR_TYP) -class AddStr(ByteCode): - BYTE_CODE = unique_code() - def __init__(self): - pass - - at requires_stack(LIST_TYP, LIST_TYP) -class AddList(ByteCode): - BYTE_CODE = unique_code() - def __init__(self): - pass - - at requires_stack() -class CreateList(ByteCode): - BYTE_CODE = unique_code() - def __init__(self, size=8): - self.size = size - def encode(self, ctx): - ctx.append_byte(self.BYTE_CODE) - ctx.append_short(self.size) - - at requires_stack() -class PutInt(ByteCode): - BYTE_CODE = unique_code() - def __init__(self, value): - self.integral = value - def encode(self, ctx): - ctx.append_byte(self.BYTE_CODE) - ctx.append_short(self.integral) - - at requires_stack(LIST_TYP, INT_TYP, VAL_TYP) -class InsertList(ByteCode): - BYTE_CODE = unique_code() - def __init__(self, index): - self.index = index - def encode(self, ctx): - ctx.append_byte(self.BYTE_CODE) - ctx.append_int(self.index) - - at requires_stack(LIST_TYP, INT_TYP) -class DelList(ByteCode): - BYTE_CODE = unique_code() - def __init__(self, index): - self.index = index - def encode(self, ctx): - ctx.append_byte(self.BYTE_CODE) - ctx.append_int(self.index) - - at requires_stack(LIST_TYP, INT_TYP, VAL_TYP) -class AppendList(ByteCode): - BYTE_CODE = unique_code() - def __init__(self): - pass - - at 
requires_stack(LIST_TYP) -class LenList(ByteCode): - BYTE_CODE = unique_code() - def __init__(self): - self.required_stack('l') - - at requires_stack(INT_TYP, INT_TYP) -class CompareInt(ByteCode): - BYTE_CODE = unique_code() - def __init__(self): - pass diff --git a/rpython/jit/backend/llsupport/gcstress/interp.py b/rpython/jit/backend/llsupport/gcstress/interp.py deleted file mode 100644 --- a/rpython/jit/backend/llsupport/gcstress/interp.py +++ /dev/null @@ -1,23 +0,0 @@ -class W_Root(object): - pass - -class W_ListObject(W_Root): - def __init__(self): - self.items = [] - -def entry_point(argv): - pass - #bytecode = argv[0] - #pc = 0 - #end = len(bytecode) - #stack = Stack(512) - #while i < end: - # opcode = ord(bytecode[i]) - # if opcode == 0x0: - # stack.push(space.new_list()) - # elif opcode == 0x1: - # w_elem = stack.pop() - # w_list = stack.pick(0) - # space.list_append(w_list, w_elem) - # i += 1 - #return 0 diff --git a/rpython/jit/backend/llsupport/gcstress/stack.py b/rpython/jit/backend/llsupport/gcstress/stack.py deleted file mode 100644 --- a/rpython/jit/backend/llsupport/gcstress/stack.py +++ /dev/null @@ -1,55 +0,0 @@ -from rpython.rlib.jit import JitDriver, hint, dont_look_inside, promote - -class Stack(object): - _virtualizable_ = ['stackpos', 'stack[*]'] - - def __init__(self, size): - self = hint(self, access_directly=True, fresh_virtualizable=True) - self.stack = [0] * size - self.stackpos = 0 # always store a known-nonneg integer here - - def append(self, elem): - self.stack[self.stackpos] = elem - self.stackpos += 1 - - def pop(self): - stackpos = self.stackpos - 1 - if stackpos < 0: - raise IndexError - self.stackpos = stackpos # always store a known-nonneg integer here - return self.stack[stackpos] - - def pick(self, i): - n = self.stackpos - i - 1 - assert n >= 0 - self.append(self.stack[n]) - - def put(self, i): - elem = self.pop() - n = self.stackpos - i - 1 - assert n >= 0 - self.stack[n] = elem - - @dont_look_inside - def roll(self, r): 
- if r < -1: - i = self.stackpos + r - if i < 0: - raise IndexError - n = self.stackpos - 1 - assert n >= 0 - elem = self.stack[n] - for j in range(self.stackpos - 2, i - 1, -1): - assert j >= 0 - self.stack[j + 1] = self.stack[j] - self.stack[i] = elem - elif r > 1: - i = self.stackpos - r - if i < 0: - raise IndexError - elem = self.stack[i] - for j in range(i, self.stackpos - 1): - self.stack[j] = self.stack[j + 1] - n = self.stackpos - 1 - assert n >= 0 - self.stack[n] = elem diff --git a/rpython/jit/backend/llsupport/gcstress/test/__init__.py b/rpython/jit/backend/llsupport/gcstress/test/__init__.py deleted file mode 100644 diff --git a/rpython/jit/backend/llsupport/gcstress/test/test_interp.py b/rpython/jit/backend/llsupport/gcstress/test/test_interp.py deleted file mode 100644 --- a/rpython/jit/backend/llsupport/gcstress/test/test_interp.py +++ /dev/null @@ -1,22 +0,0 @@ - -from rpython.jit.backend.llsupport.gcstress import code - -class TestByteCode(object): - def test_load_str(self): - c = code.Context() - code.LoadStr("hello world").encode(c) - assert c.consts[0] == "hello world" - assert c.get_byte(0) == code.LoadStr.BYTE_CODE - assert c.get_short(1) == 0 - - def test_str_add(self): - c = code.Context() - code.LoadStr("hello").encode(c) - code.LoadStr("world").encode(c) - code.AddStr().encode(c) - assert len(c.consts) == 2 - assert c.get_byte(4) == code.AddStr.BYTE_CODE - assert c.get_short(3) == 1 - -class TestInterp(object): - pass diff --git a/rpython/jit/backend/llsupport/gcstress/test/zrpy_gc_hypo_test.py b/rpython/jit/backend/llsupport/gcstress/test/zrpy_gc_hypo_test.py deleted file mode 100644 --- a/rpython/jit/backend/llsupport/gcstress/test/zrpy_gc_hypo_test.py +++ /dev/null @@ -1,33 +0,0 @@ -from rpython.jit.backend.detect_cpu import getcpuclass -from rpython.jit.tool.oparser import parse -from rpython.jit.metainterp.history import JitCellToken, NoStats -from rpython.jit.metainterp.history import BasicFinalDescr, BasicFailDescr -from 
rpython.jit.metainterp.gc import get_description -from rpython.jit.metainterp.optimize import SpeculativeError -from rpython.annotator.listdef import s_list_of_strings -from rpython.rtyper.lltypesystem import lltype, llmemory, rffi -from rpython.rtyper.rclass import getclassrepr, getinstancerepr -from rpython.translator.unsimplify import call_initial_function -from rpython.translator.translator import TranslationContext -from rpython.translator.c import genc -from rpython.jit.backend.llsupport.gcstress import interp - -class GCHypothesis(object): - def setup_class(self): - t = TranslationContext() - t.config.translation.gc = "incminimark" - t.config.translation.gcremovetypeptr = True - ann = t.buildannotator() - ann.build_types(interp.entry_point, [s_list_of_strings], main_entry_point=True) - rtyper = t.buildrtyper() - rtyper.specialize() - - cbuilder = genc.CStandaloneBuilder(t, f, t.config) - cbuilder.generate_source(defines=cbuilder.DEBUG_DEFINES) - cbuilder.compile() - - import pdb; pdb.set_trace() - - - def test_void(self): - pass From pypy.commits at gmail.com Mon Feb 29 08:28:42 2016 From: pypy.commits at gmail.com (plan_rich) Date: Mon, 29 Feb 2016 05:28:42 -0800 (PST) Subject: [pypy-commit] pypy s390x-backend: renamed module, first hypothesis test that ensures that each interp function works as expected Message-ID: <56d4478a.aa17c20a.8b68b.6dba@mx.google.com> Author: Richard Plangger Branch: s390x-backend Changeset: r82610:061da05db893 Date: 2016-02-29 14:27 +0100 http://bitbucket.org/pypy/pypy/changeset/061da05db893/ Log: renamed module, first hypothesis test that ensures that each interp function works as expected diff --git a/.hgignore b/.hgignore --- a/.hgignore +++ b/.hgignore @@ -94,5 +94,6 @@ ^pypy/module/test_lib_pypy/ctypes_tests/_ctypes_test\.o$ ^compiled ^.git/ +^.hypothesis/ ^release/ ^rpython/_cache$ diff --git a/rpython/jit/backend/llsupport/tl/__init__.py b/rpython/jit/backend/llsupport/tl/__init__.py new file mode 100644 diff --git 
a/rpython/jit/backend/llsupport/tl/code.py b/rpython/jit/backend/llsupport/tl/code.py new file mode 100644 --- /dev/null +++ b/rpython/jit/backend/llsupport/tl/code.py @@ -0,0 +1,219 @@ + +import struct + +class ByteCode(object): + def encode(self, ctx): + ctx.append_byte(self.BYTE_CODE) + + @classmethod + def create_from(self, draw, get_strategy_for): + pt = getattr(self.__init__, '_param_types', []) + return self(*[draw(get_strategy_for(t)) for t in pt]) + +_c = 0 + +LIST_TYP = 'l' +INT_TYP = 'i' +SHORT_TYP = 'h' +BYTE_TYP = 'b' +OBJ_TYP = 'o' +STR_TYP = 's' +COND_TYP = 'c' +VAL_TYP = 'v' # either one of the earlier + +all_types = [INT_TYP, LIST_TYP, STR_TYP] + + +def unique_code(): + global _c + v = _c + _c = v + 1 + return v + +class Context(object): + def __init__(self): + self.consts = {} + self.const_idx = 0 + self.bytecode = [] + + def append_byte(self, byte): + self.bytecode.append(('b', byte)) + + def get_byte(self, i): + typ, byte = self.bytecode[i] + assert typ == 'b' + return byte + + def get_short(self, i): + typ, int = self.bytecode[i] + assert typ == 'h' + return int + + def append_short(self, byte): + self.bytecode.append(('h', byte)) + + def append_int(self, byte): + self.bytecode.append(('i', byte)) + + def const_str(self, str): + self.consts[self.const_idx] = str + self.append_short(self.const_idx) + self.const_idx += 1 + + def to_string(self): + code = [] + for typ, nmr in self.bytecode: + code.append(struct.pack(typ, nmr)) + return ''.join(code) + + def transform(self, code_objs): + for code_obj in code_objs: + code_obj.encode(self) + + return self.to_string(), self.consts + + +def requires_stack(*types): + def method(clazz): + clazz._stack_types = tuple(types) + return clazz + return method + +def leaves_on_stack(*types): + def method(clazz): + clazz._return_on_stack_types = tuple(types) + return clazz + return method + + +def requires_param(*types): + def method(m): + m._param_types = tuple(types) + return m + return method + + at 
requires_stack() + at leaves_on_stack(INT_TYP) +class PutInt(ByteCode): + BYTE_CODE = unique_code() + @requires_param(INT_TYP) + def __init__(self, value): + self.integral = value + def encode(self, ctx): + ctx.append_byte(self.BYTE_CODE) + ctx.append_int(self.integral) + + at requires_stack(INT_TYP, INT_TYP) + at leaves_on_stack(INT_TYP) +class CompareInt(ByteCode): + BYTE_CODE = unique_code() + def __init__(self): + pass + + at requires_stack() + at leaves_on_stack(STR_TYP) +class LoadStr(ByteCode): + BYTE_CODE = unique_code() + @requires_param(STR_TYP) + def __init__(self, string): + self.string = string + def encode(self, ctx): + ctx.append_byte(self.BYTE_CODE) + ctx.const_str(self.string) + + at requires_stack(STR_TYP, STR_TYP) + at leaves_on_stack(STR_TYP) +class AddStr(ByteCode): + BYTE_CODE = unique_code() + def __init__(self): + pass + +# remove comment one by one! + +#@requires_stack() +#@leaves_on_stack(INT_TYP) +#class CondJump(ByteCode): +# BYTE_CODE = unique_code() +# +# COND_EQ = 0 +# COND_LT = 1 +# COND_GT = 2 +# COND_LE = 3 +# COND_GE = 4 +# +# @requires_param(COND_TYP) +# def __init__(self, cond): +# self.cond = cond +# +# def encode(self, ctx): +# ctx.append_byte(self.BYTE_CODE) +# ctx.append_byte(self.cond) +# +#@requires_stack() +#@leaves_on_stack() +#class Jump(ByteCode): +# BYTE_CODE = unique_code() +# def __init__(self): +# pass +# + +# +#@requires_stack(LIST_TYP, LIST_TYP) +#@leaves_on_stack(LIST_TYP) +#class AddList(ByteCode): +# BYTE_CODE = unique_code() +# def __init__(self): +# pass +# +#@requires_stack() +#class CreateList(ByteCode): +# BYTE_CODE = unique_code() +# @requires_param(BYTE_TYP) +# def __init__(self, size=8): +# self.size = size +# def encode(self, ctx): +# ctx.append_byte(self.BYTE_CODE) +# ctx.append_short(self.size) +# +#@requires_stack(LIST_TYP, INT_TYP, INT_TYP) # TODO VAL_TYP +#class InsertList(ByteCode): +# BYTE_CODE = unique_code() +# @requires_param(INT_TYP) +# def __init__(self, index): +# self.index = index +# 
def encode(self, ctx): +# ctx.append_byte(self.BYTE_CODE) +# ctx.append_int(self.index) +# +#@requires_stack(LIST_TYP, INT_TYP) +#@leaves_on_stack(LIST_TYP) +#class DelList(ByteCode): +# BYTE_CODE = unique_code() +# @requires_param(INT_TYP) +# def __init__(self, index): +# self.index = index +# def encode(self, ctx): +# ctx.append_byte(self.BYTE_CODE) +# ctx.append_int(self.index) +# +#@requires_stack(LIST_TYP, INT_TYP, INT_TYP) # TODO VAL_TYP) +#class AppendList(ByteCode): +# BYTE_CODE = unique_code() +# def __init__(self): +# pass +# +#@requires_stack(LIST_TYP) +#@leaves_on_stack(LIST_TYP, INT_TYP) +#class LenList(ByteCode): +# BYTE_CODE = unique_code() +# def __init__(self): +# pass +# +# +#@requires_stack(INT_TYP) # TODO VAL_TYP) +#@leaves_on_stack() +#class ReturnFrame(ByteCode): +# BYTE_CODE = unique_code() +# def __init__(self): +# pass +# diff --git a/rpython/jit/backend/llsupport/tl/interp.py b/rpython/jit/backend/llsupport/tl/interp.py new file mode 100644 --- /dev/null +++ b/rpython/jit/backend/llsupport/tl/interp.py @@ -0,0 +1,75 @@ +from rpython.rlib.rstruct.runpack import runpack +from rpython.rlib.objectmodel import specialize, always_inline +from rpython.jit.backend.llsupport.tl import code, stack + +class W_Root(object): + pass + +class W_ListObject(W_Root): + def __init__(self): + self.items = [] + +class W_IntObject(W_Root): + def __init__(self, value): + self.value = value + + def compare(self, w_int): + assert isinstance(w_int, W_IntObject) + return cmp(self.value, w_int.value) + +class W_StrObject(W_Root): + def __init__(self, value): + self.value = value + + def concat(self, w_str): + assert isinstance(w_str, W_StrObject) + return self.value + w_str.value + +class Space(object): + @specialize.argtype(1) + def wrap(self, val): + if isinstance(val, W_Root): + return val + if isinstance(val, int): + return W_IntObject(val) + if isinstance(val, str): + return W_StrObject(val) + if isinstance(val, unicode): + return 
W_StrObject(val.encode('utf-8')) + raise NotImplementedError("cannot handle: " + str(val) + str(type(val))) + +def entry_point(argv): + bytecode = argv[0] + pc = 0 + end = len(bytecode) + stack = Stack(16) + space = space.Space() + consts = [] + while i < end: + i = dispatch_once(space, i, bytecode, consts, stack) + return 0 + + at always_inline +def dispatch_once(space, i, bytecode, consts, stack): + opcode = ord(bytecode[i]) + if opcode == code.PutInt.BYTE_CODE: + integral = runpack('i', bytecode[i+1:i+5]) + stack.append(space.wrap(integral)) + i += 4 + elif opcode == code.CompareInt.BYTE_CODE: + w_int2 = stack.pop() + w_int1 = stack.pop() + w_int3 = space.wrap(w_int1.compare(w_int2)) + stack.append(w_int3) + elif opcode == code.LoadStr.BYTE_CODE: + pos = runpack('h', bytecode[i+1:i+3]) + w_str = space.wrap(consts[pos]) + stack.append(w_str) + i += 2 + elif opcode == code.AddStr.BYTE_CODE: + w_str2 = stack.pop() + w_str1 = stack.pop() + stack.append(space.wrap(w_str1.concat(w_str2))) + else: + raise NotImplementedError + return i + 1 diff --git a/rpython/jit/backend/llsupport/tl/stack.py b/rpython/jit/backend/llsupport/tl/stack.py new file mode 100644 --- /dev/null +++ b/rpython/jit/backend/llsupport/tl/stack.py @@ -0,0 +1,60 @@ +from rpython.rlib.jit import JitDriver, hint, dont_look_inside, promote + +class Stack(object): + _virtualizable_ = ['stackpos', 'stack[*]'] + + def __init__(self, size): + self = hint(self, access_directly=True, fresh_virtualizable=True) + self.stack = [0] * size + self.stackpos = 0 # always store a known-nonneg integer here + + def size(self): + return self.stackpos + + def append(self, elem): + while len(self.stack) <= self.stackpos: + self.stack.append(None) + self.stack[self.stackpos] = elem + self.stackpos += 1 + + def pop(self): + stackpos = self.stackpos - 1 + if stackpos < 0: + raise IndexError + self.stackpos = stackpos # always store a known-nonneg integer here + return self.stack[stackpos] + + def pick(self, i): + n = 
self.stackpos - i - 1 + assert n >= 0 + self.append(self.stack[n]) + + def put(self, i): + elem = self.pop() + n = self.stackpos - i - 1 + assert n >= 0 + self.stack[n] = elem + + @dont_look_inside + def roll(self, r): + if r < -1: + i = self.stackpos + r + if i < 0: + raise IndexError + n = self.stackpos - 1 + assert n >= 0 + elem = self.stack[n] + for j in range(self.stackpos - 2, i - 1, -1): + assert j >= 0 + self.stack[j + 1] = self.stack[j] + self.stack[i] = elem + elif r > 1: + i = self.stackpos - r + if i < 0: + raise IndexError + elem = self.stack[i] + for j in range(i, self.stackpos - 1): + self.stack[j] = self.stack[j + 1] + n = self.stackpos - 1 + assert n >= 0 + self.stack[n] = elem diff --git a/rpython/jit/backend/llsupport/tl/test/__init__.py b/rpython/jit/backend/llsupport/tl/test/__init__.py new file mode 100644 diff --git a/rpython/jit/backend/llsupport/tl/test/code_strategies.py b/rpython/jit/backend/llsupport/tl/test/code_strategies.py new file mode 100644 --- /dev/null +++ b/rpython/jit/backend/llsupport/tl/test/code_strategies.py @@ -0,0 +1,56 @@ +from hypothesis import strategies as st +from hypothesis.strategies import defines_strategy, composite +from rpython.jit.backend.llsupport.tl import code, interp, stack +from rpython.jit.backend.llsupport.tl.code import (all_types, + INT_TYP, STR_TYP, LIST_TYP, SHORT_TYP, BYTE_TYP, + COND_TYP) +from hypothesis.searchstrategy.strategies import OneOfStrategy +from hypothesis.searchstrategy.collections import TupleStrategy + +def get_strategy_for(typ): + if typ == INT_TYP: + return st.integers(min_value=-2**31, max_value=2**31-1) + elif typ == SHORT_TYP: + return st.integers(min_value=-2**15, max_value=2**15-1) + elif typ == BYTE_TYP: + return st.integers(min_value=-2**7, max_value=2**7-1) + elif typ == COND_TYP: + return st.integers(min_value=0, max_value=4) + elif typ == STR_TYP: + return st.text() + elif typ == LIST_TYP: + return st.lists(elements=st.one_of(st.integers())) # TODO must be recursive + 
else: + raise NotImplementedError("type: " + str(typ)) + + at defines_strategy +def wrapped_tl_objects(self, types=all_types): + if len(types) == 1: + return get_strategy_for(types[0]) + return OneOfStrategy([get_strategy_for(t) for t in types]) + +STD_SPACE = interp.Space() + + at composite +def runtime_stack(draw, clazz): + strats = [get_strategy_for(t) for t in clazz._stack_types] + st = stack.Stack(len(strats)) + for strat in strats: + st.append(STD_SPACE.wrap(draw(strat))) + return st + +def byte_code_classes(): + for name, clazz in code.__dict__.items(): + if hasattr(clazz, 'BYTE_CODE'): + yield clazz + + at composite +def single_bytecode(draw, clazzes=st.sampled_from(byte_code_classes()), + integrals=st.integers(), + texts=st.text()): + clazz = draw(clazzes) + inst = clazz.create_from(draw, get_strategy_for) + bytecode, consts = code.Context().transform([inst]) + _stack = draw(runtime_stack(clazz)) + return clazz, bytecode, consts, _stack + diff --git a/rpython/jit/backend/llsupport/tl/test/test_tl_interp.py b/rpython/jit/backend/llsupport/tl/test/test_tl_interp.py new file mode 100644 --- /dev/null +++ b/rpython/jit/backend/llsupport/tl/test/test_tl_interp.py @@ -0,0 +1,30 @@ +import py +from hypothesis import given +from rpython.jit.backend.llsupport.tl import code, stack, interp +from rpython.jit.backend.llsupport.tl.test import code_strategies as st + +class TestByteCode(object): + def test_load_str(self): + c = code.Context() + code.LoadStr("hello world").encode(c) + assert c.consts[0] == "hello world" + assert c.get_byte(0) == code.LoadStr.BYTE_CODE + assert c.get_short(1) == 0 + + def test_str_add(self): + c = code.Context() + code.LoadStr("hello").encode(c) + code.LoadStr("world").encode(c) + code.AddStr().encode(c) + assert len(c.consts) == 2 + assert c.get_byte(4) == code.AddStr.BYTE_CODE + assert c.get_short(3) == 1 + +class TestInterp(object): + @given(st.single_bytecode()) + def test_consume_stack(self, args): + clazz, bytecode, consts, stack = 
args + space = interp.Space() + i = interp.dispatch_once(space, 0, bytecode, consts, stack) + assert i == len(bytecode) + assert stack.size() == len(clazz._return_on_stack_types) diff --git a/rpython/jit/backend/llsupport/tl/test/zrpy_gc_hypo_test.py b/rpython/jit/backend/llsupport/tl/test/zrpy_gc_hypo_test.py new file mode 100644 --- /dev/null +++ b/rpython/jit/backend/llsupport/tl/test/zrpy_gc_hypo_test.py @@ -0,0 +1,33 @@ +from rpython.jit.backend.detect_cpu import getcpuclass +from rpython.jit.tool.oparser import parse +from rpython.jit.metainterp.history import JitCellToken, NoStats +from rpython.jit.metainterp.history import BasicFinalDescr, BasicFailDescr +from rpython.jit.metainterp.gc import get_description +from rpython.jit.metainterp.optimize import SpeculativeError +from rpython.annotator.listdef import s_list_of_strings +from rpython.rtyper.lltypesystem import lltype, llmemory, rffi +from rpython.rtyper.rclass import getclassrepr, getinstancerepr +from rpython.translator.unsimplify import call_initial_function +from rpython.translator.translator import TranslationContext +from rpython.translator.c import genc +from rpython.jit.backend.llsupport.gcstress import interp + +class GCHypothesis(object): + def setup_class(self): + t = TranslationContext() + t.config.translation.gc = "incminimark" + t.config.translation.gcremovetypeptr = True + ann = t.buildannotator() + ann.build_types(interp.entry_point, [s_list_of_strings], main_entry_point=True) + rtyper = t.buildrtyper() + rtyper.specialize() + + cbuilder = genc.CStandaloneBuilder(t, f, t.config) + cbuilder.generate_source(defines=cbuilder.DEBUG_DEFINES) + cbuilder.compile() + + import pdb; pdb.set_trace() + + + def test_void(self): + pass From pypy.commits at gmail.com Mon Feb 29 09:02:10 2016 From: pypy.commits at gmail.com (plan_rich) Date: Mon, 29 Feb 2016 06:02:10 -0800 (PST) Subject: [pypy-commit] pypy s390x-backend: list create, list concat Message-ID: 
<56d44f62.0775c20a.81e6.ffffb781@mx.google.com> Author: Richard Plangger Branch: s390x-backend Changeset: r82612:df3c1340c16f Date: 2016-02-29 14:58 +0100 http://bitbucket.org/pypy/pypy/changeset/df3c1340c16f/ Log: list create, list concat diff --git a/rpython/jit/backend/llsupport/tl/code.py b/rpython/jit/backend/llsupport/tl/code.py --- a/rpython/jit/backend/llsupport/tl/code.py +++ b/rpython/jit/backend/llsupport/tl/code.py @@ -128,6 +128,25 @@ def __init__(self): pass + at requires_stack(LIST_TYP, LIST_TYP) + at leaves_on_stack(LIST_TYP) +class AddList(ByteCode): + BYTE_CODE = unique_code() + def __init__(self): + pass + + at requires_stack() + at leaves_on_stack(LIST_TYP) +class CreateList(ByteCode): + BYTE_CODE = unique_code() + @requires_param(BYTE_TYP) + def __init__(self, size=8): + self.size = size + def encode(self, ctx): + ctx.append_byte(self.BYTE_CODE) + ctx.append_short(self.size) + + # remove comment one by one! #@requires_stack() @@ -157,24 +176,6 @@ # pass # -# -#@requires_stack(LIST_TYP, LIST_TYP) -#@leaves_on_stack(LIST_TYP) -#class AddList(ByteCode): -# BYTE_CODE = unique_code() -# def __init__(self): -# pass -# -#@requires_stack() -#class CreateList(ByteCode): -# BYTE_CODE = unique_code() -# @requires_param(BYTE_TYP) -# def __init__(self, size=8): -# self.size = size -# def encode(self, ctx): -# ctx.append_byte(self.BYTE_CODE) -# ctx.append_short(self.size) -# #@requires_stack(LIST_TYP, INT_TYP, INT_TYP) # TODO VAL_TYP #class InsertList(ByteCode): # BYTE_CODE = unique_code() diff --git a/rpython/jit/backend/llsupport/tl/interp.py b/rpython/jit/backend/llsupport/tl/interp.py --- a/rpython/jit/backend/llsupport/tl/interp.py +++ b/rpython/jit/backend/llsupport/tl/interp.py @@ -6,8 +6,12 @@ pass class W_ListObject(W_Root): - def __init__(self): - self.items = [] + def __init__(self, items): + self.items = items + + def concat(self, w_lst): + assert isinstance(w_lst, W_ListObject) + return self.items + w_lst.items class W_IntObject(W_Root): def 
__init__(self, value): @@ -36,6 +40,8 @@ return W_StrObject(val) if isinstance(val, unicode): return W_StrObject(val.encode('utf-8')) + if isinstance(val, list): + return W_ListObject(val) raise NotImplementedError("cannot handle: " + str(val) + str(type(val))) def entry_point(argv): @@ -70,6 +76,14 @@ w_str2 = stack.pop() w_str1 = stack.pop() stack.append(space.wrap(w_str1.concat(w_str2))) + elif opcode == code.AddList.BYTE_CODE: + w_lst2 = stack.pop() + w_lst1 = stack.pop() + stack.append(space.wrap(w_lst1.concat(w_lst2))) + elif opcode == code.CreateList.BYTE_CODE: + size = runpack('h', bytecode[i+1:i+3]) + stack.append(space.wrap([None] * size)) + i += 2 else: raise NotImplementedError return i + 1 From pypy.commits at gmail.com Mon Feb 29 11:07:35 2016 From: pypy.commits at gmail.com (rlamy) Date: Mon, 29 Feb 2016 08:07:35 -0800 (PST) Subject: [pypy-commit] pypy default: Move execute_step() implementation to the Action subclasses Message-ID: <56d46cc7.84b61c0a.2e19f.ffffb1fe@mx.google.com> Author: Ronan Lamy Branch: Changeset: r82613:a1d15704499c Date: 2016-02-29 15:24 +0000 http://bitbucket.org/pypy/pypy/changeset/a1d15704499c/ Log: Move execute_step() implementation to the Action subclasses diff --git a/rpython/rtyper/test/test_rdict.py b/rpython/rtyper/test/test_rdict.py --- a/rpython/rtyper/test/test_rdict.py +++ b/rpython/rtyper/test/test_rdict.py @@ -1284,6 +1284,13 @@ def __repr__(self): return 'SetItem(%r, %r)' % (self.key, self.value) + def execute(self, state): + ll_key = string_repr.convert_const(self.key) + ll_value = string_repr.convert_const(self.value) + rdict.ll_dict_setitem(state.l_dict, ll_key, ll_value) + state.reference[self.key] = self.value + assert rdict.ll_contains(state.l_dict, ll_key) + class DelItem(Action): def __init__(self, key): self.key = key @@ -1291,8 +1298,17 @@ def __repr__(self): return 'DelItem(%r)' % (self.key) + def execute(self, state): + ll_key = string_repr.convert_const(self.key) + rdict.ll_dict_delitem(state.l_dict, 
ll_key) + del state.reference[self.key] + assert not rdict.ll_contains(state.l_dict, ll_key) + class CompleteCheck(Action): - pass + def execute(self, state): + assert state.l_dict.num_items == len(state.reference) + for key, value in state.reference.iteritems(): + assert rdict.ll_dict_getitem(state.l_dict, _ll(key)) == _ll(value) st_keys = binary() st_values = binary() @@ -1320,22 +1336,6 @@ return (st_setitem | st_delitem(self.reference) | just(CompleteCheck())) if self.reference else (st_setitem | just(CompleteCheck())) def execute_step(self, action): - if isinstance(action, SetItem): - ll_key = string_repr.convert_const(action.key) - ll_value = string_repr.convert_const(action.value) - rdict.ll_dict_setitem(self.l_dict, ll_key, ll_value) - self.reference[action.key] = action.value - assert rdict.ll_contains(self.l_dict, ll_key) - elif isinstance(action, DelItem): - ll_key = string_repr.convert_const(action.key) - rdict.ll_dict_delitem(self.l_dict, ll_key) - del self.reference[action.key] - assert not rdict.ll_contains(self.l_dict, ll_key) - elif isinstance(action, CompleteCheck): - assert self.l_dict.num_items == len(self.reference) - for key, value in self.reference.iteritems(): - assert rdict.ll_dict_getitem(self.l_dict, _ll(key)) == _ll(value) - + action.execute(self) TestHyp = StressTest.TestCase - From pypy.commits at gmail.com Mon Feb 29 11:07:37 2016 From: pypy.commits at gmail.com (rlamy) Date: Mon, 29 Feb 2016 08:07:37 -0800 (PST) Subject: [pypy-commit] pypy default: Add clear() and copy() actions Message-ID: <56d46cc9.d4e41c0a.41084.ffffb4fe@mx.google.com> Author: Ronan Lamy Branch: Changeset: r82614:b212924f1c8d Date: 2016-02-29 16:06 +0000 http://bitbucket.org/pypy/pypy/changeset/b212924f1c8d/ Log: Add clear() and copy() actions diff --git a/rpython/rtyper/test/test_rdict.py b/rpython/rtyper/test/test_rdict.py --- a/rpython/rtyper/test/test_rdict.py +++ b/rpython/rtyper/test/test_rdict.py @@ -1304,6 +1304,15 @@ del state.reference[self.key] assert 
not rdict.ll_contains(state.l_dict, ll_key) +class CopyDict(Action): + def execute(self, state): + state.l_dict = rdict.ll_copy(state.l_dict) + +class ClearDict(Action): + def execute(self, state): + rdict.ll_clear(state.l_dict) + state.reference.clear() + class CompleteCheck(Action): def execute(self, state): assert state.l_dict.num_items == len(state.reference) @@ -1333,7 +1342,13 @@ self.reference = {} def steps(self): - return (st_setitem | st_delitem(self.reference) | just(CompleteCheck())) if self.reference else (st_setitem | just(CompleteCheck())) + global_actions = [CopyDict(), ClearDict(), CompleteCheck()] + if self.reference: + return ( + st_setitem | st_delitem(self.reference) | + sampled_from(global_actions)) + else: + return (st_setitem | sampled_from(global_actions)) def execute_step(self, action): action.execute(self) From pypy.commits at gmail.com Mon Feb 29 13:37:12 2016 From: pypy.commits at gmail.com (rlamy) Date: Mon, 29 Feb 2016 10:37:12 -0800 (PST) Subject: [pypy-commit] pypy default: Improve hypothesis test: catch infinite loops (posix only) and do the complete check once and only once, at the end of the run. Message-ID: <56d48fd8.01adc20a.f6add.3891@mx.google.com> Author: Ronan Lamy Branch: Changeset: r82615:721ef287647d Date: 2016-02-29 18:36 +0000 http://bitbucket.org/pypy/pypy/changeset/721ef287647d/ Log: Improve hypothesis test: catch infinite loops (posix only) and do the complete check once and only once, at the end of the run. 
diff --git a/rpython/rtyper/test/test_rdict.py b/rpython/rtyper/test/test_rdict.py --- a/rpython/rtyper/test/test_rdict.py +++ b/rpython/rtyper/test/test_rdict.py @@ -1,3 +1,6 @@ +from contextlib import contextmanager +import signal + from rpython.translator.translator import TranslationContext from rpython.annotator import model as annmodel from rpython.annotator.dictdef import DictKey, DictValue @@ -12,6 +15,27 @@ import py py.log.setconsumer("rtyper", py.log.STDOUT) +if hasattr(signal, 'alarm'): + @contextmanager + def signal_timeout(n): + """A flaky context manager that throws an exception if the body of the + `with` block runs for longer than `n` seconds. + """ + def handler(signum, frame): + raise RuntimeError('timeout') + signal.signal(signal.SIGALRM, handler) + signal.alarm(n) + try: + yield + finally: + signal.alarm(0) +else: + @contextmanager + def signal_timeout(n): + yield + + + def not_really_random(): """A random-ish generator, which also generates nice patterns from time to time. 
Could be useful to detect problems associated with specific usage patterns.""" @@ -1313,12 +1337,6 @@ rdict.ll_clear(state.l_dict) state.reference.clear() -class CompleteCheck(Action): - def execute(self, state): - assert state.l_dict.num_items == len(state.reference) - for key, value in state.reference.iteritems(): - assert rdict.ll_dict_getitem(state.l_dict, _ll(key)) == _ll(value) - st_keys = binary() st_values = binary() st_setitem = builds(SetItem, st_keys, st_values) @@ -1342,7 +1360,7 @@ self.reference = {} def steps(self): - global_actions = [CopyDict(), ClearDict(), CompleteCheck()] + global_actions = [CopyDict(), ClearDict()] if self.reference: return ( st_setitem | st_delitem(self.reference) | @@ -1351,6 +1369,13 @@ return (st_setitem | sampled_from(global_actions)) def execute_step(self, action): - action.execute(self) + with signal_timeout(1): # catches infinite loops + action.execute(self) + + def teardown(self): + assert rdict.ll_dict_len(self.l_dict) == len(self.reference) + for key, value in self.reference.iteritems(): + assert rdict.ll_dict_getitem(self.l_dict, _ll(key)) == _ll(value) + TestHyp = StressTest.TestCase From pypy.commits at gmail.com Mon Feb 29 15:19:54 2016 From: pypy.commits at gmail.com (arigo) Date: Mon, 29 Feb 2016 12:19:54 -0800 (PST) Subject: [pypy-commit] pypy.org extradoc: update the values Message-ID: <56d4a7ea.c3e01c0a.40ffb.1696@mx.google.com> Author: Armin Rigo Branch: extradoc Changeset: r712:545a2ef714d5 Date: 2016-02-29 21:19 +0100 http://bitbucket.org/pypy/pypy.org/changeset/545a2ef714d5/ Log: update the values diff --git a/don4.html b/don4.html --- a/don4.html +++ b/don4.html @@ -17,7 +17,7 @@ 2nd call: - $30403 of $80000 (38.0%) + $30423 of $80000 (38.0%)
    @@ -25,7 +25,7 @@
  • From pypy.commits at gmail.com Mon Feb 29 16:59:27 2016 From: pypy.commits at gmail.com (mattip) Date: Mon, 29 Feb 2016 13:59:27 -0800 (PST) Subject: [pypy-commit] pypy cpyext-ext: fix test from 22fa8dfd0a2d by creating yet more alloc() functions Message-ID: <56d4bf3f.46fac20a.5f8c7.775a@mx.google.com> Author: mattip Branch: cpyext-ext Changeset: r82616:d3f9504cf268 Date: 2016-02-29 16:57 -0500 http://bitbucket.org/pypy/pypy/changeset/d3f9504cf268/ Log: fix test from 22fa8dfd0a2d by creating yet more alloc() functions diff --git a/pypy/module/cpyext/api.py b/pypy/module/cpyext/api.py --- a/pypy/module/cpyext/api.py +++ b/pypy/module/cpyext/api.py @@ -952,6 +952,8 @@ cpyext_type_init = self.cpyext_type_init self.cpyext_type_init = None for pto, w_type in cpyext_type_init: + if space.is_w(w_type, space.w_str): + pto.c_tp_itemsize = 1 finish_type_1(space, pto) finish_type_2(space, pto, w_type) diff --git a/pypy/module/cpyext/pyobject.py b/pypy/module/cpyext/pyobject.py --- a/pypy/module/cpyext/pyobject.py +++ b/pypy/module/cpyext/pyobject.py @@ -92,7 +92,7 @@ if tp_alloc: def allocate(self, space, w_type, itemcount=0): - return tp_alloc(space, w_type) + return tp_alloc(space, w_type, itemcount) if tp_dealloc: def get_dealloc(self, space): diff --git a/pypy/module/cpyext/stringobject.py b/pypy/module/cpyext/stringobject.py --- a/pypy/module/cpyext/stringobject.py +++ b/pypy/module/cpyext/stringobject.py @@ -6,7 +6,7 @@ from pypy.module.cpyext.pyerrors import PyErr_BadArgument from pypy.module.cpyext.pyobject import ( PyObject, PyObjectP, Py_DecRef, make_ref, from_ref, track_reference, - make_typedescr, get_typedescr) + make_typedescr, get_typedescr, as_pyobj) ## ## Implementation of PyStringObject @@ -62,12 +62,43 @@ "Type description of PyStringObject" make_typedescr(space.w_str.layout.typedef, basestruct=PyStringObject.TO, + alloc = string_alloc, attach=string_attach, dealloc=string_dealloc, realize=string_realize) PyString_Check, PyString_CheckExact = 
build_type_checkers("String", "w_str") +def string_alloc(space, w_type, length): + ''' + Yet another way to allocate a PyObject, this time a + PyStringObject. The first bit is copied from + BaseCpyTypedescr.allocate, the bit after length>0 + from string_attach. This is used as the tp_alloc function + for PyStringObject + ''' + from pypy.module.cpyext.typeobjectdefs import PyTypeObjectPtr + pytype = as_pyobj(space, w_type) + pytype = rffi.cast(PyTypeObjectPtr, pytype) + assert pytype + size = pytype.c_tp_basicsize + buf = lltype.malloc(rffi.VOIDP.TO, size, + flavor='raw', zero=True) + py_str = rffi.cast(PyStringObject, buf) + py_str.c_ob_refcnt = 1 + py_str.c_ob_type = pytype + if length > 0: + py_str.c_buffer = lltype.malloc(rffi.CCHARP.TO, length+1, + flavor='raw', zero=True) + py_str.c_size = length + py_str.c_ob_sstate = rffi.cast(rffi.INT, 0) # SSTATE_NOT_INTERNED + s = rffi.charpsize2str(py_str.c_buffer, length+1) + w_obj = space.wrap(s) + py_str.c_ob_shash = space.hash_w(w_obj) + py_str.c_ob_sstate = rffi.cast(rffi.INT, 1) # SSTATE_INTERNED_MORTAL + track_reference(space, rffi.cast(PyObject, py_str), w_obj) + return rffi.cast(PyObject, py_str) + def new_empty_str(space, length): """ Allocate a PyStringObject and its buffer, but without a corresponding diff --git a/pypy/module/cpyext/test/test_stringobject.py b/pypy/module/cpyext/test/test_stringobject.py --- a/pypy/module/cpyext/test/test_stringobject.py +++ b/pypy/module/cpyext/test/test_stringobject.py @@ -99,6 +99,8 @@ char * p_str; base = PyString_FromString("test"); type = base->ob_type; + if (type->tp_itemsize != 1) + return PyLong_FromLong(type->tp_itemsize); obj = (PyStringObject*)type->tp_alloc(type, 10); if (PyString_GET_SIZE(obj) == 0) return PyLong_FromLong(-1); diff --git a/pypy/module/cpyext/typeobject.py b/pypy/module/cpyext/typeobject.py --- a/pypy/module/cpyext/typeobject.py +++ b/pypy/module/cpyext/typeobject.py @@ -494,7 +494,7 @@ PyObject_dealloc(space, obj) -def type_alloc(space, 
w_metatype): +def type_alloc(space, w_metatype, itemsize=0): metatype = rffi.cast(PyTypeObjectPtr, make_ref(space, w_metatype)) # Don't increase refcount for non-heaptypes if metatype: diff --git a/pypy/module/cpyext/unicodeobject.py b/pypy/module/cpyext/unicodeobject.py --- a/pypy/module/cpyext/unicodeobject.py +++ b/pypy/module/cpyext/unicodeobject.py @@ -30,6 +30,7 @@ def init_unicodeobject(space): make_typedescr(space.w_unicode.layout.typedef, basestruct=PyUnicodeObject.TO, + alloc = unicode_alloc, attach=unicode_attach, dealloc=unicode_dealloc, realize=unicode_realize) @@ -43,6 +44,30 @@ Py_UNICODE = lltype.UniChar +def unicode_alloc(space, w_type, length): + ''' + see comments with string_alloc in stringobject.py + ''' + from pypy.module.cpyext.typeobjectdefs import PyTypeObjectPtr + pytype = as_pyobj(space, w_type) + pytype = rffi.cast(PyTypeObjectPtr, pytype) + assert pytype + size = pytype.c_tp_basicsize + buf = lltype.malloc(rffi.VOIDP.TO, size, + flavor='raw', zero=True) + py_uni = rffi.cast(PyUnicodeObject, buf) + py_uni.c_ob_refcnt = 1 + py_uni.c_ob_type = pytype + if length > 0: + py_uni.c_str = lltype.malloc(rffi.CCHARP.TO, length+1, + flavor='raw', zero=True) + py_str.c_length = length + s = rffi.wcharpsize2unicode(py_uni.c_str, py_uni.c_length) + w_obj = space.wrap(s) + py_str.c_ob_shash = space.hash_w(w_obj) + track_reference(space, rffi.cast(PyObject, py_str), w_obj) + return rffi.cast(PyObject, py_str) + def new_empty_unicode(space, length): """ Allocate a PyUnicodeObject and its buffer, but without a corresponding From pypy.commits at gmail.com Mon Feb 29 17:03:58 2016 From: pypy.commits at gmail.com (rlamy) Date: Mon, 29 Feb 2016 14:03:58 -0800 (PST) Subject: [pypy-commit] pypy default: Make hypothesis test parametric in the key and value types Message-ID: <56d4c04e.a118c20a.b1427.75a0@mx.google.com> Author: Ronan Lamy Branch: Changeset: r82617:959fe6807849 Date: 2016-02-29 21:57 +0000 http://bitbucket.org/pypy/pypy/changeset/959fe6807849/ 
Log: Make hypothesis test parametric in the key and value types diff --git a/rpython/rtyper/test/test_rdict.py b/rpython/rtyper/test/test_rdict.py --- a/rpython/rtyper/test/test_rdict.py +++ b/rpython/rtyper/test/test_rdict.py @@ -1,8 +1,9 @@ +import sys from contextlib import contextmanager import signal from rpython.translator.translator import TranslationContext -from rpython.annotator import model as annmodel +from rpython.annotator.model import SomeInteger, SomeString from rpython.annotator.dictdef import DictKey, DictValue from rpython.rtyper.lltypesystem import lltype, rffi from rpython.rtyper.lltypesystem.rstr import string_repr @@ -1213,8 +1214,8 @@ def test_stress(): dictrepr = rdict.DictRepr(None, rint.signed_repr, rint.signed_repr, - DictKey(None, annmodel.SomeInteger()), - DictValue(None, annmodel.SomeInteger())) + DictKey(None, SomeInteger()), + DictValue(None, SomeInteger())) dictrepr.setup() l_dict = rdict.ll_newdict(dictrepr.DICT) reference = {} @@ -1257,8 +1258,8 @@ class PseudoRTyper: cache_dummy_values = {} dictrepr = rdict.DictRepr(PseudoRTyper(), string_repr, string_repr, - DictKey(None, annmodel.SomeString(key_can_be_none)), - DictValue(None, annmodel.SomeString(value_can_be_none))) + DictKey(None, SomeString(key_can_be_none)), + DictValue(None, SomeString(value_can_be_none))) dictrepr.setup() l_dict = rdict.ll_newdict(dictrepr.DICT) reference = {} @@ -1295,87 +1296,114 @@ assert l_dict.num_items == len(reference) complete_check() -from hypothesis.strategies import builds, sampled_from, binary, just +from hypothesis.strategies import builds, sampled_from, binary, just, integers +from hypothesis.stateful import GenericStateMachine, run_state_machine_as_test + +def ann2strategy(s_value): + if isinstance(s_value, SomeString): + if s_value.can_be_None: + return binary() | just(None) + else: + return binary() + elif isinstance(s_value, SomeInteger): + return integers(min_value=~sys.maxint, max_value=sys.maxint) + else: + raise TypeError("Cannot 
convert annotation %s to a strategy" % s_value) + class Action(object): - pass + def __repr__(self): + return "%s()" % self.__class__.__name__ -class SetItem(Action): - def __init__(self, key, value): - self.key = key - self.value = value +class PseudoRTyper: + cache_dummy_values = {} - def __repr__(self): - return 'SetItem(%r, %r)' % (self.key, self.value) +# XXX: None keys crash the test, but translation sort-of allows it + at py.test.mark.parametrize('s_key', + [SomeString(), SomeInteger()]) + at py.test.mark.parametrize('s_value', + [SomeString(can_be_None=True), SomeString(), SomeInteger()]) +def test_hypothesis(s_key, s_value): + rtyper = PseudoRTyper() + r_key = s_key.rtyper_makerepr(rtyper) + r_value = s_value.rtyper_makerepr(rtyper) + dictrepr = rdict.DictRepr(rtyper, r_key, r_value, + DictKey(None, s_key), + DictValue(None, s_value)) + dictrepr.setup() - def execute(self, state): - ll_key = string_repr.convert_const(self.key) - ll_value = string_repr.convert_const(self.value) - rdict.ll_dict_setitem(state.l_dict, ll_key, ll_value) - state.reference[self.key] = self.value - assert rdict.ll_contains(state.l_dict, ll_key) + _ll_key = r_key.convert_const + _ll_value = r_value.convert_const -class DelItem(Action): - def __init__(self, key): - self.key = key + class SetItem(Action): + def __init__(self, key, value): + self.key = key + self.value = value - def __repr__(self): - return 'DelItem(%r)' % (self.key) + def __repr__(self): + return 'SetItem(%r, %r)' % (self.key, self.value) - def execute(self, state): - ll_key = string_repr.convert_const(self.key) - rdict.ll_dict_delitem(state.l_dict, ll_key) - del state.reference[self.key] - assert not rdict.ll_contains(state.l_dict, ll_key) + def execute(self, state): + ll_key = _ll_key(self.key) + ll_value = _ll_value(self.value) + rdict.ll_dict_setitem(state.l_dict, ll_key, ll_value) + state.reference[self.key] = self.value + assert rdict.ll_contains(state.l_dict, ll_key) -class CopyDict(Action): - def 
execute(self, state): - state.l_dict = rdict.ll_copy(state.l_dict) + class DelItem(Action): + def __init__(self, key): + self.key = key -class ClearDict(Action): - def execute(self, state): - rdict.ll_clear(state.l_dict) - state.reference.clear() + def __repr__(self): + return 'DelItem(%r)' % (self.key) -st_keys = binary() -st_values = binary() -st_setitem = builds(SetItem, st_keys, st_values) + def execute(self, state): + ll_key = _ll_key(self.key) + rdict.ll_dict_delitem(state.l_dict, ll_key) + del state.reference[self.key] + assert not rdict.ll_contains(state.l_dict, ll_key) -def st_delitem(keys): - return builds(DelItem, sampled_from(keys)) + class CopyDict(Action): + def execute(self, state): + state.l_dict = rdict.ll_copy(state.l_dict) -from hypothesis.stateful import GenericStateMachine + class ClearDict(Action): + def execute(self, state): + rdict.ll_clear(state.l_dict) + state.reference.clear() -_ll = string_repr.convert_const + st_keys = ann2strategy(s_key) + st_values = ann2strategy(s_value) + st_setitem = builds(SetItem, st_keys, st_values) -class StressTest(GenericStateMachine): - def __init__(self): - class PseudoRTyper: - cache_dummy_values = {} - dictrepr = rdict.DictRepr(PseudoRTyper(), string_repr, string_repr, - DictKey(None, annmodel.SomeString(False)), - DictValue(None, annmodel.SomeString(False))) - dictrepr.setup() - self.l_dict = rdict.ll_newdict(dictrepr.DICT) - self.reference = {} + def st_delitem(keys): + return builds(DelItem, sampled_from(keys)) - def steps(self): - global_actions = [CopyDict(), ClearDict()] - if self.reference: - return ( - st_setitem | st_delitem(self.reference) | - sampled_from(global_actions)) - else: - return (st_setitem | sampled_from(global_actions)) + def st_updateitem(keys): + return builds(SetItem, sampled_from(keys), st_values) - def execute_step(self, action): - with signal_timeout(1): # catches infinite loops - action.execute(self) + class StressTest(GenericStateMachine): + def __init__(self): + self.l_dict 
= rdict.ll_newdict(dictrepr.DICT) + self.reference = {} - def teardown(self): - assert rdict.ll_dict_len(self.l_dict) == len(self.reference) - for key, value in self.reference.iteritems(): - assert rdict.ll_dict_getitem(self.l_dict, _ll(key)) == _ll(value) + def steps(self): + global_actions = [CopyDict(), ClearDict()] + if self.reference: + return ( + st_setitem | sampled_from(global_actions) | + st_updateitem(self.reference) | st_delitem(self.reference)) + else: + return (st_setitem | sampled_from(global_actions)) + def execute_step(self, action): + with signal_timeout(1): # catches infinite loops + action.execute(self) -TestHyp = StressTest.TestCase + def teardown(self): + assert rdict.ll_dict_len(self.l_dict) == len(self.reference) + for key, value in self.reference.iteritems(): + assert (rdict.ll_dict_getitem(self.l_dict, _ll_key(key)) == + _ll_value(value)) + + run_state_machine_as_test(StressTest) From pypy.commits at gmail.com Mon Feb 29 17:09:08 2016 From: pypy.commits at gmail.com (rlamy) Date: Mon, 29 Feb 2016 14:09:08 -0800 (PST) Subject: [pypy-commit] pypy default: kill dead and/or superseded tests Message-ID: <56d4c184.6614c20a.1d592.ffff8355@mx.google.com> Author: Ronan Lamy Branch: Changeset: r82618:51611a2b96f6 Date: 2016-02-29 22:08 +0000 http://bitbucket.org/pypy/pypy/changeset/51611a2b96f6/ Log: kill dead and/or superseded tests diff --git a/rpython/rtyper/test/test_rdict.py b/rpython/rtyper/test/test_rdict.py --- a/rpython/rtyper/test/test_rdict.py +++ b/rpython/rtyper/test/test_rdict.py @@ -14,7 +14,20 @@ from rpython.rlib.rarithmetic import r_int, r_uint, r_longlong, r_ulonglong import py -py.log.setconsumer("rtyper", py.log.STDOUT) +from hypothesis.strategies import builds, sampled_from, binary, just, integers +from hypothesis.stateful import GenericStateMachine, run_state_machine_as_test + +def ann2strategy(s_value): + if isinstance(s_value, SomeString): + if s_value.can_be_None: + return binary() | just(None) + else: + return binary() + 
elif isinstance(s_value, SomeInteger): + return integers(min_value=~sys.maxint, max_value=sys.maxint) + else: + raise TypeError("Cannot convert annotation %s to a strategy" % s_value) + if hasattr(signal, 'alarm'): @contextmanager @@ -36,7 +49,6 @@ yield - def not_really_random(): """A random-ish generator, which also generates nice patterns from time to time. Could be useful to detect problems associated with specific usage patterns.""" @@ -1032,28 +1044,6 @@ assert r_AB_dic.lowleveltype == r_BA_dic.lowleveltype - def test_dict_resize(self): - py.test.skip("test written for non-ordered dicts, update or kill") - # XXX we no longer automatically resize on 'del'. We need to - # hack a bit in this test to trigger a resize by continuing to - # fill the dict's table while keeping the actual size very low - # in order to force a resize to shrink the table back - def func(want_empty): - d = self.newdict() - for i in range(rdict.DICT_INITSIZE << 1): - d[chr(ord('a') + i)] = i - if want_empty: - for i in range(rdict.DICT_INITSIZE << 1): - del d[chr(ord('a') + i)] - for i in range(rdict.DICT_INITSIZE << 3): - d[chr(ord('A') - i)] = i - del d[chr(ord('A') - i)] - return d - res = self.interpret(func, [0]) - assert len(res.entries) > rdict.DICT_INITSIZE - res = self.interpret(func, [1]) - assert len(res.entries) == rdict.DICT_INITSIZE - def test_opt_dummykeymarker(self): def f(): d = {"hello": None} @@ -1145,170 +1135,6 @@ DICT = lltype.typeOf(llres.item1) assert sorted(DICT.TO.entries.TO.OF._flds) == ['f_hash', 'key', 'value'] - def test_deleted_entry_reusage_with_colliding_hashes(self): - py.test.skip("test written for non-ordered dicts, update or kill") - def lowlevelhash(value): - p = rstr.mallocstr(len(value)) - for i in range(len(value)): - p.chars[i] = value[i] - return rstr.LLHelpers.ll_strhash(p) - - def func(c1, c2): - c1 = chr(c1) - c2 = chr(c2) - d = self.newdict() - d[c1] = 1 - d[c2] = 2 - del d[c1] - return d[c2] - - char_by_hash = {} - base = rdict.DICT_INITSIZE 
- for y in range(0, 256): - y = chr(y) - y_hash = lowlevelhash(y) % base - char_by_hash.setdefault(y_hash, []).append(y) - - x, y = char_by_hash[0][:2] # find a collision - - res = self.interpret(func, [ord(x), ord(y)]) - assert res == 2 - - def func2(c1, c2): - c1 = chr(c1) - c2 = chr(c2) - d = self.newdict() - d[c1] = 1 - d[c2] = 2 - del d[c1] - d[c1] = 3 - return d - - res = self.interpret(func2, [ord(x), ord(y)]) - for i in range(len(res.entries)): - assert not (res.entries.everused(i) and not res.entries.valid(i)) - - def func3(c0, c1, c2, c3, c4, c5, c6, c7): - d = self.newdict() - c0 = chr(c0) ; d[c0] = 1; del d[c0] - c1 = chr(c1) ; d[c1] = 1; del d[c1] - c2 = chr(c2) ; d[c2] = 1; del d[c2] - c3 = chr(c3) ; d[c3] = 1; del d[c3] - c4 = chr(c4) ; d[c4] = 1; del d[c4] - c5 = chr(c5) ; d[c5] = 1; del d[c5] - c6 = chr(c6) ; d[c6] = 1; del d[c6] - c7 = chr(c7) ; d[c7] = 1; del d[c7] - return d - - if rdict.DICT_INITSIZE != 8: - py.test.skip("make dict tests more indepdent from initsize") - res = self.interpret(func3, [ord(char_by_hash[i][0]) - for i in range(rdict.DICT_INITSIZE)]) - count_frees = 0 - for i in range(len(res.entries)): - if not res.entries.everused(i): - count_frees += 1 - assert count_frees >= 3 - -N_KEYS = 400 - -def test_stress(): - dictrepr = rdict.DictRepr(None, rint.signed_repr, rint.signed_repr, - DictKey(None, SomeInteger()), - DictValue(None, SomeInteger())) - dictrepr.setup() - l_dict = rdict.ll_newdict(dictrepr.DICT) - reference = {} - value = 0 - - def check_value(n): - try: - gotvalue = rdict.ll_dict_getitem(l_dict, n) - except KeyError: - n not in reference - else: - assert gotvalue == reference[n] - - def complete_check(): - for n in range(N_KEYS): - check_value(n) - - for x in not_really_random(): - n = int(x*100.0) # 0 <= x < 400 - op = repr(x)[-1] - if op <= '2' and n in reference: - rdict.ll_dict_delitem(l_dict, n) - del reference[n] - elif op <= '6': - rdict.ll_dict_setitem(l_dict, n, value) - reference[n] = value - value += 1 - 
else: - check_value(n) - if 1.38 <= x <= 1.39: - complete_check() - print 'current dict length:', len(reference) - assert l_dict.num_items == len(reference) - complete_check() - - - at py.test.mark.parametrize('key_can_be_none', [True, False]) - at py.test.mark.parametrize('value_can_be_none', [True, False]) -def test_stress_2(key_can_be_none, value_can_be_none): - class PseudoRTyper: - cache_dummy_values = {} - dictrepr = rdict.DictRepr(PseudoRTyper(), string_repr, string_repr, - DictKey(None, SomeString(key_can_be_none)), - DictValue(None, SomeString(value_can_be_none))) - dictrepr.setup() - l_dict = rdict.ll_newdict(dictrepr.DICT) - reference = {} - values = not_really_random() - keytable = [string_repr.convert_const("foo%d" % n) for n in range(N_KEYS)] - - def check_value(n): - try: - gotvalue = rdict.ll_dict_getitem(l_dict, keytable[n]) - except KeyError: - assert n not in reference - else: - assert gotvalue == reference[n] - - def complete_check(): - for n in range(N_KEYS): - check_value(n) - - for x in not_really_random(): - n = int(x*100.0) # 0 <= x < 400 - op = repr(x)[-1] - if op <= '2' and n in reference: - rdict.ll_dict_delitem(l_dict, keytable[n]) - del reference[n] - elif op <= '6': - ll_value = string_repr.convert_const(str(values.next())) - rdict.ll_dict_setitem(l_dict, keytable[n], ll_value) - reference[n] = ll_value - else: - check_value(n) - if 1.38 <= x <= 1.39: - complete_check() - print 'current dict length:', len(reference) - assert l_dict.num_items == len(reference) - complete_check() - -from hypothesis.strategies import builds, sampled_from, binary, just, integers -from hypothesis.stateful import GenericStateMachine, run_state_machine_as_test - -def ann2strategy(s_value): - if isinstance(s_value, SomeString): - if s_value.can_be_None: - return binary() | just(None) - else: - return binary() - elif isinstance(s_value, SomeInteger): - return integers(min_value=~sys.maxint, max_value=sys.maxint) - else: - raise TypeError("Cannot convert 
annotation %s to a strategy" % s_value) class Action(object): From pypy.commits at gmail.com Mon Feb 29 17:22:19 2016 From: pypy.commits at gmail.com (fijal) Date: Mon, 29 Feb 2016 14:22:19 -0800 (PST) Subject: [pypy-commit] pypy default: fix embedding tests on OS X (they might or might not work on linux, too lazy to check), sem_init is deprecated Message-ID: <56d4c49b.463f1c0a.a9a90.3ab5@mx.google.com> Author: fijal Branch: Changeset: r82619:38deea741bed Date: 2016-02-29 23:20 +0100 http://bitbucket.org/pypy/pypy/changeset/38deea741bed/ Log: fix embedding tests on OS X (they might or might not work on linux, too lazy to check), sem_init is deprecated diff --git a/pypy/module/test_lib_pypy/cffi_tests/embedding/perf-test.c b/pypy/module/test_lib_pypy/cffi_tests/embedding/perf-test.c --- a/pypy/module/test_lib_pypy/cffi_tests/embedding/perf-test.c +++ b/pypy/module/test_lib_pypy/cffi_tests/embedding/perf-test.c @@ -1,10 +1,12 @@ #include #include +#include #include +#include #ifdef PTEST_USE_THREAD # include # include -static sem_t done; +sem_t *done; #endif @@ -54,7 +56,7 @@ printf("time per call: %.3g\n", t); #ifdef PTEST_USE_THREAD - int status = sem_post(&done); + int status = sem_post(done); assert(status == 0); #endif @@ -68,8 +70,8 @@ start_routine(0); #else pthread_t th; - int i, status = sem_init(&done, 0, 0); - assert(status == 0); + done = sem_open("perf-test", O_CREAT, 0777, 0); + int i, status; add1(0, 0); /* this is the main thread */ @@ -78,7 +80,9 @@ assert(status == 0); } for (i = 0; i < PTEST_USE_THREAD; i++) { - status = sem_wait(&done); + status = sem_wait(done); + if (status) + fprintf(stderr, "%s\n", strerror(errno)); assert(status == 0); } #endif diff --git a/pypy/module/test_lib_pypy/cffi_tests/embedding/thread1-test.c b/pypy/module/test_lib_pypy/cffi_tests/embedding/thread1-test.c --- a/pypy/module/test_lib_pypy/cffi_tests/embedding/thread1-test.c +++ b/pypy/module/test_lib_pypy/cffi_tests/embedding/thread1-test.c @@ -7,7 +7,7 @@ extern int 
add1(int, int); -static sem_t done; +static sem_t* done; static void *start_routine(void *arg) @@ -16,7 +16,7 @@ x = add1(40, 2); assert(x == 42); - status = sem_post(&done); + status = sem_post(done); assert(status == 0); return arg; @@ -25,8 +25,8 @@ int main(void) { pthread_t th; - int i, status = sem_init(&done, 0, 0); - assert(status == 0); + int i, status; + done = sem_open("thread1-test", O_CREAT, 0777, 0); printf("starting\n"); fflush(stdout); @@ -35,7 +35,7 @@ assert(status == 0); } for (i = 0; i < NTHREADS; i++) { - status = sem_wait(&done); + status = sem_wait(done); assert(status == 0); } printf("done\n"); diff --git a/pypy/module/test_lib_pypy/cffi_tests/embedding/thread2-test.c b/pypy/module/test_lib_pypy/cffi_tests/embedding/thread2-test.c --- a/pypy/module/test_lib_pypy/cffi_tests/embedding/thread2-test.c +++ b/pypy/module/test_lib_pypy/cffi_tests/embedding/thread2-test.c @@ -5,7 +5,7 @@ extern int add1(int, int); extern int add2(int, int, int); -static sem_t done; +static sem_t* done; static void *start_routine_1(void *arg) @@ -14,7 +14,7 @@ x = add1(40, 2); assert(x == 42); - status = sem_post(&done); + status = sem_post(done); assert(status == 0); return arg; @@ -29,7 +29,7 @@ x = add2(1000, 200, 30); assert(x == 1230); - status = sem_post(&done); + status = sem_post(done); assert(status == 0); return arg; @@ -38,8 +38,8 @@ int main(void) { pthread_t th; - int i, status = sem_init(&done, 0, 0); - assert(status == 0); + int i, status; + done = sem_open("thread2-test", O_CREAT, 0777, 0); printf("starting\n"); fflush(stdout); @@ -49,7 +49,7 @@ assert(status == 0); for (i = 0; i < 2; i++) { - status = sem_wait(&done); + status = sem_wait(done); assert(status == 0); } printf("done\n"); diff --git a/pypy/module/test_lib_pypy/cffi_tests/embedding/thread3-test.c b/pypy/module/test_lib_pypy/cffi_tests/embedding/thread3-test.c --- a/pypy/module/test_lib_pypy/cffi_tests/embedding/thread3-test.c +++ 
b/pypy/module/test_lib_pypy/cffi_tests/embedding/thread3-test.c @@ -5,7 +5,7 @@ extern int add2(int, int, int); extern int add3(int, int, int, int); -static sem_t done; +static sem_t* done; static void *start_routine_2(void *arg) @@ -14,7 +14,7 @@ x = add2(40, 2, 100); assert(x == 142); - status = sem_post(&done); + status = sem_post(done); assert(status == 0); return arg; @@ -26,7 +26,7 @@ x = add3(1000, 200, 30, 4); assert(x == 1234); - status = sem_post(&done); + status = sem_post(done); assert(status == 0); return arg; @@ -35,8 +35,8 @@ int main(void) { pthread_t th; - int i, status = sem_init(&done, 0, 0); - assert(status == 0); + int i, status; + done = sem_open("thread-test3", O_CREAT, 0777, 0); printf("starting\n"); fflush(stdout); @@ -47,7 +47,7 @@ assert(status == 0); } for (i = 0; i < 20; i++) { - status = sem_wait(&done); + status = sem_wait(done); assert(status == 0); } printf("done\n"); diff --git a/pypy/module/test_lib_pypy/cffi_tests/embedding/tlocal-test.c b/pypy/module/test_lib_pypy/cffi_tests/embedding/tlocal-test.c --- a/pypy/module/test_lib_pypy/cffi_tests/embedding/tlocal-test.c +++ b/pypy/module/test_lib_pypy/cffi_tests/embedding/tlocal-test.c @@ -7,7 +7,7 @@ extern int add1(int, int); -static sem_t done; +static sem_t *done; static void *start_routine(void *arg) @@ -22,7 +22,7 @@ assert(x == expected + 8 + i); } - status = sem_post(&done); + status = sem_post(done); assert(status == 0); return arg; @@ -31,7 +31,8 @@ int main(void) { pthread_t th; - int i, status = sem_init(&done, 0, 0); + int i, status; + done = sem_open("tlocal-test", O_CREAT, 0777, 0); assert(status == 0); for (i = 0; i < NTHREADS; i++) { @@ -39,7 +40,7 @@ assert(status == 0); } for (i = 0; i < NTHREADS; i++) { - status = sem_wait(&done); + status = sem_wait(done); assert(status == 0); } printf("done\n"); From pypy.commits at gmail.com Mon Feb 29 17:22:21 2016 From: pypy.commits at gmail.com (fijal) Date: Mon, 29 Feb 2016 14:22:21 -0800 (PST) Subject: [pypy-commit] pypy 
default: merge Message-ID: <56d4c49d.2179c20a.cf896.77e8@mx.google.com> Author: fijal Branch: Changeset: r82620:b72f032521e3 Date: 2016-02-29 23:21 +0100 http://bitbucket.org/pypy/pypy/changeset/b72f032521e3/ Log: merge diff --git a/rpython/rtyper/test/test_rdict.py b/rpython/rtyper/test/test_rdict.py --- a/rpython/rtyper/test/test_rdict.py +++ b/rpython/rtyper/test/test_rdict.py @@ -1,8 +1,9 @@ +import sys from contextlib import contextmanager import signal from rpython.translator.translator import TranslationContext -from rpython.annotator import model as annmodel +from rpython.annotator.model import SomeInteger, SomeString from rpython.annotator.dictdef import DictKey, DictValue from rpython.rtyper.lltypesystem import lltype, rffi from rpython.rtyper.lltypesystem.rstr import string_repr @@ -13,7 +14,20 @@ from rpython.rlib.rarithmetic import r_int, r_uint, r_longlong, r_ulonglong import py -py.log.setconsumer("rtyper", py.log.STDOUT) +from hypothesis.strategies import builds, sampled_from, binary, just, integers +from hypothesis.stateful import GenericStateMachine, run_state_machine_as_test + +def ann2strategy(s_value): + if isinstance(s_value, SomeString): + if s_value.can_be_None: + return binary() | just(None) + else: + return binary() + elif isinstance(s_value, SomeInteger): + return integers(min_value=~sys.maxint, max_value=sys.maxint) + else: + raise TypeError("Cannot convert annotation %s to a strategy" % s_value) + if hasattr(signal, 'alarm'): @contextmanager @@ -35,7 +49,6 @@ yield - def not_really_random(): """A random-ish generator, which also generates nice patterns from time to time. Could be useful to detect problems associated with specific usage patterns.""" @@ -1031,28 +1044,6 @@ assert r_AB_dic.lowleveltype == r_BA_dic.lowleveltype - def test_dict_resize(self): - py.test.skip("test written for non-ordered dicts, update or kill") - # XXX we no longer automatically resize on 'del'. 
We need to - # hack a bit in this test to trigger a resize by continuing to - # fill the dict's table while keeping the actual size very low - # in order to force a resize to shrink the table back - def func(want_empty): - d = self.newdict() - for i in range(rdict.DICT_INITSIZE << 1): - d[chr(ord('a') + i)] = i - if want_empty: - for i in range(rdict.DICT_INITSIZE << 1): - del d[chr(ord('a') + i)] - for i in range(rdict.DICT_INITSIZE << 3): - d[chr(ord('A') - i)] = i - del d[chr(ord('A') - i)] - return d - res = self.interpret(func, [0]) - assert len(res.entries) > rdict.DICT_INITSIZE - res = self.interpret(func, [1]) - assert len(res.entries) == rdict.DICT_INITSIZE - def test_opt_dummykeymarker(self): def f(): d = {"hello": None} @@ -1144,238 +1135,101 @@ DICT = lltype.typeOf(llres.item1) assert sorted(DICT.TO.entries.TO.OF._flds) == ['f_hash', 'key', 'value'] - def test_deleted_entry_reusage_with_colliding_hashes(self): - py.test.skip("test written for non-ordered dicts, update or kill") - def lowlevelhash(value): - p = rstr.mallocstr(len(value)) - for i in range(len(value)): - p.chars[i] = value[i] - return rstr.LLHelpers.ll_strhash(p) - def func(c1, c2): - c1 = chr(c1) - c2 = chr(c2) - d = self.newdict() - d[c1] = 1 - d[c2] = 2 - del d[c1] - return d[c2] - - char_by_hash = {} - base = rdict.DICT_INITSIZE - for y in range(0, 256): - y = chr(y) - y_hash = lowlevelhash(y) % base - char_by_hash.setdefault(y_hash, []).append(y) - - x, y = char_by_hash[0][:2] # find a collision - - res = self.interpret(func, [ord(x), ord(y)]) - assert res == 2 - - def func2(c1, c2): - c1 = chr(c1) - c2 = chr(c2) - d = self.newdict() - d[c1] = 1 - d[c2] = 2 - del d[c1] - d[c1] = 3 - return d - - res = self.interpret(func2, [ord(x), ord(y)]) - for i in range(len(res.entries)): - assert not (res.entries.everused(i) and not res.entries.valid(i)) - - def func3(c0, c1, c2, c3, c4, c5, c6, c7): - d = self.newdict() - c0 = chr(c0) ; d[c0] = 1; del d[c0] - c1 = chr(c1) ; d[c1] = 1; del d[c1] 
- c2 = chr(c2) ; d[c2] = 1; del d[c2] - c3 = chr(c3) ; d[c3] = 1; del d[c3] - c4 = chr(c4) ; d[c4] = 1; del d[c4] - c5 = chr(c5) ; d[c5] = 1; del d[c5] - c6 = chr(c6) ; d[c6] = 1; del d[c6] - c7 = chr(c7) ; d[c7] = 1; del d[c7] - return d - - if rdict.DICT_INITSIZE != 8: - py.test.skip("make dict tests more indepdent from initsize") - res = self.interpret(func3, [ord(char_by_hash[i][0]) - for i in range(rdict.DICT_INITSIZE)]) - count_frees = 0 - for i in range(len(res.entries)): - if not res.entries.everused(i): - count_frees += 1 - assert count_frees >= 3 - -N_KEYS = 400 - -def test_stress(): - dictrepr = rdict.DictRepr(None, rint.signed_repr, rint.signed_repr, - DictKey(None, annmodel.SomeInteger()), - DictValue(None, annmodel.SomeInteger())) - dictrepr.setup() - l_dict = rdict.ll_newdict(dictrepr.DICT) - reference = {} - value = 0 - - def check_value(n): - try: - gotvalue = rdict.ll_dict_getitem(l_dict, n) - except KeyError: - n not in reference - else: - assert gotvalue == reference[n] - - def complete_check(): - for n in range(N_KEYS): - check_value(n) - - for x in not_really_random(): - n = int(x*100.0) # 0 <= x < 400 - op = repr(x)[-1] - if op <= '2' and n in reference: - rdict.ll_dict_delitem(l_dict, n) - del reference[n] - elif op <= '6': - rdict.ll_dict_setitem(l_dict, n, value) - reference[n] = value - value += 1 - else: - check_value(n) - if 1.38 <= x <= 1.39: - complete_check() - print 'current dict length:', len(reference) - assert l_dict.num_items == len(reference) - complete_check() - - - at py.test.mark.parametrize('key_can_be_none', [True, False]) - at py.test.mark.parametrize('value_can_be_none', [True, False]) -def test_stress_2(key_can_be_none, value_can_be_none): - class PseudoRTyper: - cache_dummy_values = {} - dictrepr = rdict.DictRepr(PseudoRTyper(), string_repr, string_repr, - DictKey(None, annmodel.SomeString(key_can_be_none)), - DictValue(None, annmodel.SomeString(value_can_be_none))) - dictrepr.setup() - l_dict = 
rdict.ll_newdict(dictrepr.DICT) - reference = {} - values = not_really_random() - keytable = [string_repr.convert_const("foo%d" % n) for n in range(N_KEYS)] - - def check_value(n): - try: - gotvalue = rdict.ll_dict_getitem(l_dict, keytable[n]) - except KeyError: - assert n not in reference - else: - assert gotvalue == reference[n] - - def complete_check(): - for n in range(N_KEYS): - check_value(n) - - for x in not_really_random(): - n = int(x*100.0) # 0 <= x < 400 - op = repr(x)[-1] - if op <= '2' and n in reference: - rdict.ll_dict_delitem(l_dict, keytable[n]) - del reference[n] - elif op <= '6': - ll_value = string_repr.convert_const(str(values.next())) - rdict.ll_dict_setitem(l_dict, keytable[n], ll_value) - reference[n] = ll_value - else: - check_value(n) - if 1.38 <= x <= 1.39: - complete_check() - print 'current dict length:', len(reference) - assert l_dict.num_items == len(reference) - complete_check() - -from hypothesis.strategies import builds, sampled_from, binary, just class Action(object): - pass + def __repr__(self): + return "%s()" % self.__class__.__name__ -class SetItem(Action): - def __init__(self, key, value): - self.key = key - self.value = value +class PseudoRTyper: + cache_dummy_values = {} - def __repr__(self): - return 'SetItem(%r, %r)' % (self.key, self.value) +# XXX: None keys crash the test, but translation sort-of allows it + at py.test.mark.parametrize('s_key', + [SomeString(), SomeInteger()]) + at py.test.mark.parametrize('s_value', + [SomeString(can_be_None=True), SomeString(), SomeInteger()]) +def test_hypothesis(s_key, s_value): + rtyper = PseudoRTyper() + r_key = s_key.rtyper_makerepr(rtyper) + r_value = s_value.rtyper_makerepr(rtyper) + dictrepr = rdict.DictRepr(rtyper, r_key, r_value, + DictKey(None, s_key), + DictValue(None, s_value)) + dictrepr.setup() - def execute(self, state): - ll_key = string_repr.convert_const(self.key) - ll_value = string_repr.convert_const(self.value) - rdict.ll_dict_setitem(state.l_dict, ll_key, 
ll_value) - state.reference[self.key] = self.value - assert rdict.ll_contains(state.l_dict, ll_key) + _ll_key = r_key.convert_const + _ll_value = r_value.convert_const -class DelItem(Action): - def __init__(self, key): - self.key = key + class SetItem(Action): + def __init__(self, key, value): + self.key = key + self.value = value - def __repr__(self): - return 'DelItem(%r)' % (self.key) + def __repr__(self): + return 'SetItem(%r, %r)' % (self.key, self.value) - def execute(self, state): - ll_key = string_repr.convert_const(self.key) - rdict.ll_dict_delitem(state.l_dict, ll_key) - del state.reference[self.key] - assert not rdict.ll_contains(state.l_dict, ll_key) + def execute(self, state): + ll_key = _ll_key(self.key) + ll_value = _ll_value(self.value) + rdict.ll_dict_setitem(state.l_dict, ll_key, ll_value) + state.reference[self.key] = self.value + assert rdict.ll_contains(state.l_dict, ll_key) -class CopyDict(Action): - def execute(self, state): - state.l_dict = rdict.ll_copy(state.l_dict) + class DelItem(Action): + def __init__(self, key): + self.key = key -class ClearDict(Action): - def execute(self, state): - rdict.ll_clear(state.l_dict) - state.reference.clear() + def __repr__(self): + return 'DelItem(%r)' % (self.key) -st_keys = binary() -st_values = binary() -st_setitem = builds(SetItem, st_keys, st_values) + def execute(self, state): + ll_key = _ll_key(self.key) + rdict.ll_dict_delitem(state.l_dict, ll_key) + del state.reference[self.key] + assert not rdict.ll_contains(state.l_dict, ll_key) -def st_delitem(keys): - return builds(DelItem, sampled_from(keys)) + class CopyDict(Action): + def execute(self, state): + state.l_dict = rdict.ll_copy(state.l_dict) -from hypothesis.stateful import GenericStateMachine + class ClearDict(Action): + def execute(self, state): + rdict.ll_clear(state.l_dict) + state.reference.clear() -_ll = string_repr.convert_const + st_keys = ann2strategy(s_key) + st_values = ann2strategy(s_value) + st_setitem = builds(SetItem, st_keys, 
st_values) -class StressTest(GenericStateMachine): - def __init__(self): - class PseudoRTyper: - cache_dummy_values = {} - dictrepr = rdict.DictRepr(PseudoRTyper(), string_repr, string_repr, - DictKey(None, annmodel.SomeString(False)), - DictValue(None, annmodel.SomeString(False))) - dictrepr.setup() - self.l_dict = rdict.ll_newdict(dictrepr.DICT) - self.reference = {} + def st_delitem(keys): + return builds(DelItem, sampled_from(keys)) - def steps(self): - global_actions = [CopyDict(), ClearDict()] - if self.reference: - return ( - st_setitem | st_delitem(self.reference) | - sampled_from(global_actions)) - else: - return (st_setitem | sampled_from(global_actions)) + def st_updateitem(keys): + return builds(SetItem, sampled_from(keys), st_values) - def execute_step(self, action): - with signal_timeout(1): # catches infinite loops - action.execute(self) + class StressTest(GenericStateMachine): + def __init__(self): + self.l_dict = rdict.ll_newdict(dictrepr.DICT) + self.reference = {} - def teardown(self): - assert rdict.ll_dict_len(self.l_dict) == len(self.reference) - for key, value in self.reference.iteritems(): - assert rdict.ll_dict_getitem(self.l_dict, _ll(key)) == _ll(value) + def steps(self): + global_actions = [CopyDict(), ClearDict()] + if self.reference: + return ( + st_setitem | sampled_from(global_actions) | + st_updateitem(self.reference) | st_delitem(self.reference)) + else: + return (st_setitem | sampled_from(global_actions)) + def execute_step(self, action): + with signal_timeout(1): # catches infinite loops + action.execute(self) -TestHyp = StressTest.TestCase + def teardown(self): + assert rdict.ll_dict_len(self.l_dict) == len(self.reference) + for key, value in self.reference.iteritems(): + assert (rdict.ll_dict_getitem(self.l_dict, _ll_key(key)) == + _ll_value(value)) + + run_state_machine_as_test(StressTest)