[pypy-commit] pypy install-rpython: hg merge default

rlamy pypy.commits at gmail.com
Sat Aug 5 09:39:02 EDT 2017


Author: Ronan Lamy <ronan.lamy at gmail.com>
Branch: install-rpython
Changeset: r92089:a1f3fe63fa46
Date: 2017-08-05 14:02 +0100
http://bitbucket.org/pypy/pypy/changeset/a1f3fe63fa46/

Log:	hg merge default

diff too long, truncating to 2000 out of 3400 lines

diff --git a/Makefile b/Makefile
--- a/Makefile
+++ b/Makefile
@@ -10,7 +10,7 @@
 RUNINTERP = $(PYPY_EXECUTABLE)
 endif
 
-.PHONY: cffi_imports
+.PHONY: pypy-c cffi_imports
 
 pypy-c:
 	@echo
@@ -32,7 +32,7 @@
 	@echo "===================================================================="
 	@echo
 	@sleep 5
-	$(RUNINTERP) rpython/bin/rpython -Ojit pypy/goal/targetpypystandalone.py
+	cd pypy/goal && $(RUNINTERP) ../../rpython/bin/rpython -Ojit targetpypystandalone.py
 
 # Note: the -jN option, or MAKEFLAGS=-jN, are not usable.  They are
 # replaced with an opaque --jobserver option by the time this Makefile
@@ -40,4 +40,4 @@
 # http://lists.gnu.org/archive/html/help-make/2010-08/msg00106.html
 
 cffi_imports: pypy-c
-	PYTHONPATH=. ./pypy-c pypy/tool/build_cffi_imports.py || /bin/true
+	PYTHONPATH=. pypy/goal/pypy-c pypy/tool/build_cffi_imports.py || /bin/true
diff --git a/lib-python/2.7/distutils/unixccompiler.py b/lib-python/2.7/distutils/unixccompiler.py
--- a/lib-python/2.7/distutils/unixccompiler.py
+++ b/lib-python/2.7/distutils/unixccompiler.py
@@ -226,7 +226,19 @@
         return "-L" + dir
 
     def _is_gcc(self, compiler_name):
-        return "gcc" in compiler_name or "g++" in compiler_name
+        # XXX PyPy workaround, look at the big comment below for more
+        # context. On CPython, the hack below works fine because
+        # `compiler_name` contains the name of the actual compiler which was
+        # used at compile time (e.g. 'x86_64-linux-gnu-gcc' on my machine).
+        # PyPy hardcodes it to 'cc', so the hack doesn't work, and the end
+        # result is that we pass the wrong option to the compiler.
+        #
+        # The workaround is to *always* pretend to be GCC if we are on Linux:
+        # this should cover the vast majority of real systems, including the
+        # ones which use clang (which understands the '-Wl,-rpath' syntax as
+        # well)
+        return (sys.platform == "linux2" or
+                "gcc" in compiler_name or "g++" in compiler_name)
 
     def runtime_library_dir_option(self, dir):
         # XXX Hackish, at the very least.  See Python bug #445902:
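
For context on the hunk above, a hedged sketch of the behaviour being changed: on
PyPy, distutils reports the compiler name as plain ``cc``, so the GCC-specific
rpath handling was never selected; with this patch ``_is_gcc()`` also returns True
whenever ``sys.platform == "linux2"``.  Roughly (a simplified illustration, not
the exact distutils code)::

    import sys

    def _is_gcc(compiler_name):
        # With the workaround: always pretend to be GCC on Linux (Python 2
        # reports sys.platform == "linux2"); clang accepts the same
        # -Wl,-rpath style options anyway.
        return (sys.platform == "linux2" or
                "gcc" in compiler_name or "g++" in compiler_name)

    def runtime_library_dir_option(compiler_name, dir):
        # Simplified: GCC-style compilers get a linker-prefixed rpath option,
        # everything else falls back to the plain -R form.
        if _is_gcc(compiler_name):
            return "-Wl,-R" + dir
        return "-R" + dir

    print(runtime_library_dir_option("cc", "/opt/mylib"))
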
diff --git a/lib_pypy/cffi/api.py b/lib_pypy/cffi/api.py
--- a/lib_pypy/cffi/api.py
+++ b/lib_pypy/cffi/api.py
@@ -394,12 +394,17 @@
             replace_with = ' ' + replace_with
         return self._backend.getcname(cdecl, replace_with)
 
-    def gc(self, cdata, destructor):
+    def gc(self, cdata, destructor, size=0):
         """Return a new cdata object that points to the same
         data.  Later, when this new cdata object is garbage-collected,
         'destructor(old_cdata_object)' will be called.
+
+        The optional 'size' gives an estimate of the size, used to
+        trigger the garbage collection more eagerly.  So far only used
+        on PyPy.  It tells the GC that the returned object keeps alive
+        roughly 'size' bytes of external memory.
         """
-        return self._backend.gcp(cdata, destructor)
+        return self._backend.gcp(cdata, destructor, size)
 
     def _get_cached_btype(self, type):
         assert self._lock.acquire(False) is False
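
To illustrate the new ``size`` argument of ``ffi.gc()`` shown above, a minimal
usage sketch (the byte count is only a caller-supplied estimate, and the keyword
is only accepted by cffi/PyPy versions that include this change)::

    import cffi

    ffi = cffi.FFI()
    ffi.cdef("void *malloc(size_t n); void free(void *p);")
    lib = ffi.dlopen(None)          # the standard C library, POSIX only

    n = 1024 * 1024
    raw = lib.malloc(n)
    # Hint to the GC (used on PyPy) that this cdata keeps roughly n bytes of
    # external memory alive, so collections are triggered more eagerly.
    buf = ffi.gc(raw, lib.free, size=n)
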
diff --git a/lib_pypy/cffi/backend_ctypes.py b/lib_pypy/cffi/backend_ctypes.py
--- a/lib_pypy/cffi/backend_ctypes.py
+++ b/lib_pypy/cffi/backend_ctypes.py
@@ -1002,7 +1002,7 @@
 
     _weakref_cache_ref = None
 
-    def gcp(self, cdata, destructor):
+    def gcp(self, cdata, destructor, size=0):
         if self._weakref_cache_ref is None:
             import weakref
             class MyRef(weakref.ref):
diff --git a/pypy/config/pypyoption.py b/pypy/config/pypyoption.py
--- a/pypy/config/pypyoption.py
+++ b/pypy/config/pypyoption.py
@@ -224,11 +224,6 @@
                    "use specialised tuples",
                    default=False),
 
-        BoolOption("withcelldict",
-                   "use dictionaries that are optimized for being used as module dicts",
-                   default=False,
-                   requires=[("objspace.honor__builtins__", False)]),
-
         BoolOption("withliststrategies",
                    "enable optimized ways to store lists of primitives ",
                    default=True),
@@ -288,7 +283,7 @@
 
     # extra optimizations with the JIT
     if level == 'jit':
-        config.objspace.std.suggest(withcelldict=True)
+        pass # none at the moment
 
 
 def enable_allworkingmodules(config):
diff --git a/pypy/doc/build.rst b/pypy/doc/build.rst
--- a/pypy/doc/build.rst
+++ b/pypy/doc/build.rst
@@ -10,6 +10,18 @@
 minutes on a fast machine -- and RAM-hungry.  You will need **at least** 2 GB
 of memory on a 32-bit machine and 4GB on a 64-bit machine.
 
+Before you start
+----------------
+
+Our normal development workflow avoids a full translation by using test-driven
+development. You can read more about how to develop PyPy here_. The latest
+translated (hopefully functional) binary packages are available on our
+buildbot's `nightly builds`_.
+
+.. _here: getting-started-dev.html
+.. _`nightly builds`: http://buildbot.pypy.org/nightly
+
+You will need the build dependencies below to run the tests.
 
 Clone the repository
 --------------------
@@ -140,22 +152,61 @@
 Run the translation
 -------------------
 
+We usually translate in the ``pypy/goal`` directory, so all the following
+commands assume your ``$pwd`` is there.
+
 Translate with JIT::
 
-    cd pypy/goal
     pypy ../../rpython/bin/rpython --opt=jit
 
 Translate without JIT::
 
-    cd pypy/goal
     pypy ../../rpython/bin/rpython --opt=2
 
+Note that this translates PyPy via the ``targetpypystandalone.py`` file, so these
+are shorthand for::
+
+    pypy ../../rpython/bin/rpython <rpython options> targetpypystandalone.py <pypy options>
+
+More help is available via ``--help`` at either option position, and more info
+can be found in the :doc:`config/index` section.
+
 (You can use ``python`` instead of ``pypy`` here, which will take longer
 but works too.)
 
-If everything works correctly this will create an executable ``pypy-c`` in the
-current directory. The executable behaves mostly like a normal Python
-interpreter (see :doc:`cpython_differences`).
+If everything works correctly this will:
+
+1. Run the rpython `translation chain`_, producing a database of the
+   entire pypy interpreter. This step is currently single-threaded and
+   RAM-hungry. As part of this step, the chain creates a large number of C code
+   files and a Makefile to compile them in a directory controlled by the
+   ``PYPY_USESSION_DIR`` environment variable.
+2. Create an executable ``pypy-c`` by running the Makefile. This step can
+   utilize all available cores on the machine.
+3. Copy the needed binaries to the current directory.
+4. Generate c-extension modules for any cffi-based stdlib modules.
+
+
+The resulting executable behaves mostly like a normal Python
+interpreter (see :doc:`cpython_differences`), and is ready for testing, for
+use as a base interpreter for a new virtualenv, or for packaging into a binary
+suitable for installation on another machine running the same OS as the build
+machine. 
+
+Note that step 4 is merely done as a convenience; any of the steps may be rerun
+without rerunning the previous steps.
+
+.. _`translation chain`: https://rpython.readthedocs.io/en/latest/translation.html
+
+
+Making a debug build of PyPy
+----------------------------
+
+If the Makefile is rerun with the ``lldebug`` or ``lldebug0`` target, appropriate
+compilation flags are added to include debug info and to reduce compiler
+optimizations to ``-O0``, respectively. If you stop in a debugger, you will see
+the very wordy machine-generated C code from the rpython translation step, which
+takes a little bit of reading to relate back to the rpython code.
 
 Build cffi import libraries for the stdlib
 ------------------------------------------
@@ -169,14 +220,6 @@
 
 .. _`out-of-line API mode`: http://cffi.readthedocs.org/en/latest/overview.html#real-example-api-level-out-of-line
 
-Translating with non-standard options
--------------------------------------
-
-It is possible to have non-standard features enabled for translation,
-but they are not really tested any more.  Look, for example, at the
-:doc:`objspace proxies <objspace-proxies>` document.
-
-
 Packaging (preparing for installation)
 --------------------------------------
 
@@ -205,14 +248,16 @@
 
 * PyPy 2.5.1 or earlier: normal users would see permission errors.
   Installers need to run ``pypy -c "import gdbm"`` and other similar
-  commands at install time; the exact list is in `package.py`_.  Users
+  commands at install time; the exact list is in 
+  :source:`pypy/tool/release/package.py <package.py>`.  Users
   seeing a broken installation of PyPy can fix it after-the-fact if they
   have sudo rights, by running once e.g. ``sudo pypy -c "import gdbm``.
 
 * PyPy 2.6 and later: anyone would get ``ImportError: no module named
   _gdbm_cffi``.  Installers need to run ``pypy _gdbm_build.py`` in the
   ``lib_pypy`` directory during the installation process (plus others;
-  see the exact list in `package.py`_).  Users seeing a broken
+  see the exact list in :source:`pypy/tool/release/package.py <package.py>`).
+  Users seeing a broken
   installation of PyPy can fix it after-the-fact, by running ``pypy
   /path/to/lib_pypy/_gdbm_build.py``.  This command produces a file
   called ``_gdbm_cffi.pypy-41.so`` locally, which is a C extension
diff --git a/pypy/doc/config/objspace.std.withcelldict.txt b/pypy/doc/config/objspace.std.withcelldict.txt
deleted file mode 100644
--- a/pypy/doc/config/objspace.std.withcelldict.txt
+++ /dev/null
@@ -1,2 +0,0 @@
-Enable cell-dicts. This optimization is not helpful without the JIT. In the
-presence of the JIT, it greatly helps looking up globals.
diff --git a/pypy/doc/configuration.rst b/pypy/doc/configuration.rst
--- a/pypy/doc/configuration.rst
+++ b/pypy/doc/configuration.rst
@@ -188,4 +188,6 @@
 can be found on the ``config`` attribute of all ``TranslationContext``
 instances and are described in :source:`rpython/config/translationoption.py`. The interpreter options
 are attached to the object space, also under the name ``config`` and are
-described in :source:`pypy/config/pypyoption.py`.
+described in :source:`pypy/config/pypyoption.py`. Both sets of options are
+documented in the :doc:`config/index` section.
+
diff --git a/pypy/doc/cppyy_example.rst b/pypy/doc/cppyy_example.rst
deleted file mode 100644
--- a/pypy/doc/cppyy_example.rst
+++ /dev/null
@@ -1,59 +0,0 @@
-File example.h
-==============
-
-::
-
-    #include <iostream>
-    #include <vector>
-
-    class AbstractClass {
-    public:
-        virtual ~AbstractClass() {}
-        virtual void abstract_method() = 0;
-    };
-
-    class ConcreteClass : AbstractClass {
-    public:
-        ConcreteClass(int n=42) : m_int(n) {}
-        ~ConcreteClass() {}
-
-        virtual void abstract_method() {
-            std::cout << "called concrete method" << std::endl;
-        }
-
-        void array_method(int* ad, int size) {
-            for (int i=0; i < size; ++i)
-                std::cout << ad[i] << ' ';
-            std::cout << std::endl;
-        }
-
-        void array_method(double* ad, int size) {
-            for (int i=0; i < size; ++i)
-                std::cout << ad[i] << ' ';
-            std::cout << std::endl;
-        }
-
-        AbstractClass* show_autocast() {
-            return this;
-        }
-
-        operator const char*() {
-            return "Hello operator const char*!";
-        }
-
-    public:
-        int m_int;
-    };
-
-    namespace Namespace {
-
-       class ConcreteClass {
-       public:
-          class NestedClass {
-          public:
-             std::vector<int> m_v;
-          };
-
-       };
-
-    } // namespace Namespace
diff --git a/pypy/doc/cpython_differences.rst b/pypy/doc/cpython_differences.rst
--- a/pypy/doc/cpython_differences.rst
+++ b/pypy/doc/cpython_differences.rst
@@ -330,6 +330,8 @@
 
  - ``frozenset`` (empty frozenset only)
 
+ - unbound method objects (for Python 2 only)
+
 This change requires some changes to ``id`` as well. ``id`` fulfills the
 following condition: ``x is y <=> id(x) == id(y)``. Therefore ``id`` of the
 above types will return a value that is computed from the argument, and can
diff --git a/pypy/doc/extending.rst b/pypy/doc/extending.rst
--- a/pypy/doc/extending.rst
+++ b/pypy/doc/extending.rst
@@ -12,7 +12,7 @@
 
 * Write them in pure Python and use ctypes_.
 
-* Write them in C++ and bind them through  :doc:`cppyy <cppyy>` using Cling.
+* Write them in C++ and bind them through cppyy_ using Cling.
 
 * Write them as `RPython mixed modules`_.
 
@@ -64,9 +64,9 @@
 cppyy
 -----
 
-For C++, `cppyy`_ is an automated bindings generator available for both
+For C++, ``cppyy`` is an automated bindings generator available for both
 PyPy and CPython.
-``cppyy`` relies on declarations from C++ header files to dynamically
+``cppyy`` relies on declarations from C++ header files to dynamically
 construct Python equivalent classes, functions, variables, etc.
 It is designed for use by large scale programs and supports modern C++.
 With PyPy, it leverages the built-in ``_cppyy`` module, allowing the JIT to
@@ -75,8 +75,7 @@
 To install, run ``pip install cppyy``.
 Further details are available in the `full documentation`_.
 
-.. _cppyy: http://cppyy.readthedocs.org/
-.. _`full documentation`: http://cppyy.readthedocs.org/
+.. _`full documentation`: https://cppyy.readthedocs.org/
 
 
 RPython Mixed Modules
diff --git a/pypy/doc/getting-started-dev.rst b/pypy/doc/getting-started-dev.rst
--- a/pypy/doc/getting-started-dev.rst
+++ b/pypy/doc/getting-started-dev.rst
@@ -35,8 +35,8 @@
 
 * Edit things.  Use ``hg diff`` to see what you changed.  Use ``hg add``
   to make Mercurial aware of new files you added, e.g. new test files.
-  Use ``hg status`` to see if there are such files.  Run tests!  (See
-  the rest of this page.)
+  Use ``hg status`` to see if there are such files.  Write and run tests!
+  (See the rest of this page.)
 
 * Commit regularly with ``hg commit``.  A one-line commit message is
   fine.  We love to have tons of commits; make one as soon as you have
@@ -113,6 +113,10 @@
 make sure you have the correct version installed which
 you can find out with the ``--version`` switch.
 
+You will need the `build requirements`_ to run tests successfully, since many of
+them compile little pieces of PyPy and then run the tests inside that minimal
+interpreter.
+
 Now on to running some tests.  PyPy has many different test directories
 and you can use shell completion to point at directories or files::
 
@@ -141,7 +145,7 @@
 
 .. _py.test testing tool: http://pytest.org
 .. _py.test usage and invocations: http://pytest.org/latest/usage.html#usage
-
+.. _`build requirements`: build.html#install-build-time-dependencies
 
 Special Introspection Features of the Untranslated Python Interpreter
 ---------------------------------------------------------------------
diff --git a/pypy/doc/whatsnew-head.rst b/pypy/doc/whatsnew-head.rst
--- a/pypy/doc/whatsnew-head.rst
+++ b/pypy/doc/whatsnew-head.rst
@@ -5,6 +5,14 @@
 .. this is a revision shortly after release-pypy2.7-v5.8.0
 .. startrev: 558bd00b3dd8
 
+In previous versions of PyPy, ``instance.method`` would always return
+the same bound method object when obtained from the same instance (as
+far as ``is`` and ``id()`` can tell).  CPython doesn't do that.  Now
+PyPy, like CPython, returns a different bound method object every time.
+For ``type.method``, PyPy2 still always returns the same *unbound*
+method object; CPython does this for built-in types but not for
+user-defined types.
+
 .. branch: cffi-complex
 .. branch: cffi-char16-char32
 
@@ -30,3 +38,25 @@
 
 Renaming of ``cppyy`` to ``_cppyy``.
 The former is now an external package installable with ``pip install cppyy``.
+
+.. branch: Enable_PGO_for_clang
+
+.. branch: nopax
+
+At the end of translation, run ``attr -q -s pax.flags -V m`` on
+PAX-enabled systems on the produced binary.  This seems necessary
+because PyPy uses a JIT.
+
+.. branch: pypy_bytearray
+
+Improve ``bytearray`` performance (backported from py3.5)
+
+.. branch: gc-del-limit-growth
+
+Fix the bounds in the GC when allocating a lot of objects with finalizers;
+fixes issue #2590.
+
+.. branch: arrays-force-less
+
+Small improvement to optimize list accesses with constant indexes better by
+throwing away information about them less eagerly.
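
A small sketch of the bound/unbound method identity change described in the
whatsnew entry above (the behaviour follows that entry; the class is made up for
illustration, and the last assertion only holds on PyPy2)::

    class A(object):
        def m(self):
            pass

    a = A()
    # Bound methods: every attribute access now creates a fresh object,
    # matching CPython, so identity differs while equality still holds.
    assert a.m is not a.m
    assert a.m == a.m
    # Unbound methods (Python 2 only): PyPy2 still returns the same object
    # every time; CPython only does that for built-in types.
    assert A.m is A.m
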
diff --git a/pypy/interpreter/argument.py b/pypy/interpreter/argument.py
--- a/pypy/interpreter/argument.py
+++ b/pypy/interpreter/argument.py
@@ -2,6 +2,7 @@
 Arguments objects.
 """
 from rpython.rlib.debug import make_sure_not_resized
+from rpython.rlib.objectmodel import not_rpython
 from rpython.rlib import jit
 
 from pypy.interpreter.error import OperationError, oefmt
@@ -46,8 +47,8 @@
         # behaviour but produces better error messages
         self.methodcall = methodcall
 
+    @not_rpython
     def __repr__(self):
-        """ NOT_RPYTHON """
         name = self.__class__.__name__
         if not self.keywords:
             return '%s(%s)' % (name, self.arguments_w,)
diff --git a/pypy/interpreter/error.py b/pypy/interpreter/error.py
--- a/pypy/interpreter/error.py
+++ b/pypy/interpreter/error.py
@@ -7,6 +7,7 @@
 
 from rpython.rlib import jit
 from rpython.rlib.objectmodel import we_are_translated, specialize
+from rpython.rlib.objectmodel import not_rpython
 from rpython.rlib import rstack, rstackovf
 
 from pypy.interpreter import debug
@@ -57,8 +58,9 @@
                 self.match(space, space.w_KeyboardInterrupt))
         # note: an extra case is added in OpErrFmtNoArgs
 
+    @not_rpython
     def __str__(self):
-        "NOT_RPYTHON: Convenience for tracebacks."
+        "Convenience for tracebacks."
         s = self._w_value
         space = getattr(self.w_type, 'space', None)
         if space is not None:
@@ -107,15 +109,16 @@
             if RECORD_INTERPLEVEL_TRACEBACK:
                 self.debug_excs.append(sys.exc_info())
 
+    @not_rpython
     def print_application_traceback(self, space, file=None):
-        "NOT_RPYTHON: Dump a standard application-level traceback."
+        "Dump a standard application-level traceback."
         if file is None:
             file = sys.stderr
         self.print_app_tb_only(file)
         print >> file, self.errorstr(space)
 
+    @not_rpython
     def print_app_tb_only(self, file):
-        "NOT_RPYTHON"
         tb = self._application_traceback
         if tb:
             import linecache
@@ -142,8 +145,9 @@
                     print >> file, l
                 tb = tb.next
 
+    @not_rpython
     def print_detailed_traceback(self, space=None, file=None):
-        """NOT_RPYTHON: Dump a nice detailed interpreter- and
+        """Dump a nice detailed interpreter- and
         application-level traceback, useful to debug the interpreter."""
         if file is None:
             file = sys.stderr
diff --git a/pypy/interpreter/executioncontext.py b/pypy/interpreter/executioncontext.py
--- a/pypy/interpreter/executioncontext.py
+++ b/pypy/interpreter/executioncontext.py
@@ -1,7 +1,7 @@
 import sys
 from pypy.interpreter.error import OperationError, get_cleared_operation_error
 from rpython.rlib.unroll import unrolling_iterable
-from rpython.rlib.objectmodel import specialize
+from rpython.rlib.objectmodel import specialize, not_rpython
 from rpython.rlib import jit, rgc, objectmodel
 
 TICK_COUNTER_STEP = 100
@@ -423,8 +423,9 @@
             # to run at the next possible bytecode
             self.reset_ticker(-1)
 
+    @not_rpython
     def register_periodic_action(self, action, use_bytecode_counter):
-        """NOT_RPYTHON:
+        """
         Register the PeriodicAsyncAction action to be called whenever the
         tick counter becomes smaller than 0.  If 'use_bytecode_counter' is
         True, make sure that we decrease the tick counter at every bytecode.
diff --git a/pypy/interpreter/function.py b/pypy/interpreter/function.py
--- a/pypy/interpreter/function.py
+++ b/pypy/interpreter/function.py
@@ -559,21 +559,29 @@
         return space.newbool(space.eq_w(self.w_function, w_other.w_function))
 
     def is_w(self, space, other):
+        if self.w_instance is not None:
+            return W_Root.is_w(self, space, other)
+        # The following special-case is only for *unbound* method objects.
+        # Motivation: in CPython, it seems that no strange internal type
+        # exists where the equivalent of ``x.method is x.method`` would
+        # return True.  This is unlike unbound methods, where e.g.
+        # ``list.append is list.append`` returns True.  The following code
+        # is here to emulate that behaviour.  Unlike CPython, we return
+        # True for all equal unbound methods, not just for built-in types.
         if not isinstance(other, Method):
             return False
-        return (self.w_instance is other.w_instance and
+        return (other.w_instance is None and
                 self.w_function is other.w_function and
                 self.w_class is other.w_class)
 
     def immutable_unique_id(self, space):
-        from pypy.objspace.std.util import IDTAG_METHOD as tag
+        if self.w_instance is not None:
+            return W_Root.immutable_unique_id(self, space)
+        # the special-case is only for *unbound* method objects
+        #
+        from pypy.objspace.std.util import IDTAG_UNBOUND_METHOD as tag
         from pypy.objspace.std.util import IDTAG_SHIFT
-        if self.w_instance is not None:
-            id = space.bigint_w(space.id(self.w_instance))
-            id = id.lshift(LONG_BIT)
-        else:
-            id = rbigint.fromint(0)
-        id = id.or_(space.bigint_w(space.id(self.w_function)))
+        id = space.bigint_w(space.id(self.w_function))
         id = id.lshift(LONG_BIT).or_(space.bigint_w(space.id(self.w_class)))
         id = id.lshift(IDTAG_SHIFT).int_or_(tag)
         return space.newlong_from_rbigint(id)
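
A hedged sketch of how ``immutable_unique_id()`` above composes the id of an
unbound method: the ids of the function and of the class are packed into one big
integer and tagged.  The constants below are illustrative assumptions, not the
real values from ``pypy.objspace.std.util``::

    LONG_BIT = 64                 # assumption: a 64-bit build
    IDTAG_SHIFT = 4               # hypothetical width of the tag field
    IDTAG_UNBOUND_METHOD = 5      # hypothetical tag value

    def unbound_method_id(id_function, id_class):
        # Mirrors the rbigint arithmetic: (id(func) << LONG_BIT | id(class)),
        # shifted left by IDTAG_SHIFT and or'ed with the tag.
        uid = (id_function << LONG_BIT) | id_class
        uid = (uid << IDTAG_SHIFT) | IDTAG_UNBOUND_METHOD
        return uid
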
diff --git a/pypy/interpreter/gateway.py b/pypy/interpreter/gateway.py
--- a/pypy/interpreter/gateway.py
+++ b/pypy/interpreter/gateway.py
@@ -23,7 +23,7 @@
     DescrMismatch)
 from pypy.interpreter.error import OperationError, oefmt
 from pypy.interpreter.function import ClassMethod, FunctionWithFixedCode
-from rpython.rlib.objectmodel import we_are_translated
+from rpython.rlib.objectmodel import we_are_translated, not_rpython
 from rpython.rlib.rarithmetic import r_longlong, r_int, r_ulonglong, r_uint
 from rpython.tool.sourcetools import func_with_new_name, compile2
 
@@ -64,8 +64,8 @@
     def _freeze_(self):
         return True
 
+    @not_rpython
     def unwrap(self, space, w_value):
-        """NOT_RPYTHON"""
         raise NotImplementedError
 
 
@@ -380,8 +380,8 @@
 class BuiltinActivation(object):
     _immutable_ = True
 
+    @not_rpython
     def __init__(self, behavior):
-        """NOT_RPYTHON"""
         self.behavior = behavior
 
     def _run(self, space, scope_w):
@@ -621,9 +621,9 @@
     # When a BuiltinCode is stored in a Function object,
     # you get the functionality of CPython's built-in function type.
 
+    @not_rpython
     def __init__(self, func, unwrap_spec=None, self_type=None,
                  descrmismatch=None, doc=None):
-        "NOT_RPYTHON"
         # 'implfunc' is the interpreter-level function.
         # Note that this uses a lot of (construction-time) introspection.
         Code.__init__(self, func.__name__)
@@ -969,10 +969,10 @@
 
     instancecache = {}
 
+    @not_rpython
     def __new__(cls, f, app_name=None, unwrap_spec=None, descrmismatch=None,
                 as_classmethod=False, doc=None):
 
-        "NOT_RPYTHON"
         # f must be a function whose name does NOT start with 'app_'
         self_type = None
         if hasattr(f, 'im_func'):
@@ -1013,8 +1013,8 @@
             self._staticdefs = zip(argnames[-len(defaults):], defaults)
         return self
 
+    @not_rpython
     def _getdefaults(self, space):
-        "NOT_RPYTHON"
         defs_w = []
         for name, defaultval in self._staticdefs:
             if name.startswith('w_'):
@@ -1070,8 +1070,8 @@
 
 
 class GatewayCache(SpaceCache):
+    @not_rpython
     def build(cache, gateway):
-        "NOT_RPYTHON"
         space = cache.space
         defs = gateway._getdefaults(space) # needs to be implemented by subclass
         code = gateway._code
@@ -1141,8 +1141,8 @@
         w_globals = self.getwdict(space)
         return space.getitem(w_globals, space.newtext(name))
 
+    @not_rpython
     def interphook(self, name):
-        "NOT_RPYTHON"
         def appcaller(space, *args_w):
             if not isinstance(space, ObjSpace):
                 raise TypeError("first argument must be a space instance.")
@@ -1179,15 +1179,16 @@
     """NOT_RPYTHON
     The cache mapping each applevel instance to its lazily built w_dict"""
 
+    @not_rpython
     def build(self, app):
-        "NOT_RPYTHON.  Called indirectly by Applevel.getwdict()."
+        "Called indirectly by Applevel.getwdict()."
         return build_applevel_dict(app, self.space)
 
 
 # __________ pure applevel version __________
 
+ at not_rpython
 def build_applevel_dict(self, space):
-    "NOT_RPYTHON"
     w_glob = space.newdict(module=True)
     space.setitem(w_glob, space.newtext('__name__'), space.newtext(self.modname))
     space.exec_(self.source, w_glob, w_glob,
@@ -1198,8 +1199,9 @@
 # ____________________________________________________________
 
 
+ at not_rpython
 def appdef(source, applevel=ApplevelClass, filename=None):
-    """ NOT_RPYTHON: build an app-level helper function, like for example:
+    """ build an app-level helper function, like for example:
     myfunc = appdef('''myfunc(x, y):
                            return x+y
                     ''')
@@ -1245,6 +1247,6 @@
 
 
 # app2interp_temp is used for testing mainly
+ at not_rpython
 def app2interp_temp(func, applevel_temp=applevel_temp, filename=None):
-    """ NOT_RPYTHON """
     return appdef(func, applevel_temp, filename=filename)
diff --git a/pypy/interpreter/miscutils.py b/pypy/interpreter/miscutils.py
--- a/pypy/interpreter/miscutils.py
+++ b/pypy/interpreter/miscutils.py
@@ -3,6 +3,7 @@
 """
 
 from rpython.rlib.listsort import make_timsort_class
+from rpython.rlib.objectmodel import not_rpython
 
 
 class ThreadLocals:
@@ -41,9 +42,8 @@
         # but in some corner cases it is not...  unsure why
         self._value = None
 
-
+ at not_rpython
 def make_weak_value_dictionary(space, keytype, valuetype):
-    "NOT_RPYTHON"
     if space.config.translation.rweakref:
         from rpython.rlib.rweakref import RWeakValueDictionary
         return RWeakValueDictionary(keytype, valuetype)
diff --git a/pypy/interpreter/mixedmodule.py b/pypy/interpreter/mixedmodule.py
--- a/pypy/interpreter/mixedmodule.py
+++ b/pypy/interpreter/mixedmodule.py
@@ -3,6 +3,9 @@
 from pypy.interpreter import gateway
 from pypy.interpreter.error import OperationError
 from pypy.interpreter.baseobjspace import W_Root
+
+from rpython.rlib.objectmodel import not_rpython
+
 import sys
 
 class MixedModule(Module):
@@ -15,16 +18,17 @@
     lazy = False
     submodule_name = None
 
+    @not_rpython
     def __init__(self, space, w_name):
-        """ NOT_RPYTHON """
         Module.__init__(self, space, w_name)
         self.lazy = True
         self.__class__.buildloaders()
         self.loaders = self.loaders.copy()    # copy from the class to the inst
         self.submodules_w = []
 
+    @not_rpython
     def install(self):
-        """NOT_RPYTHON: install this module, and it's submodules into
+        """install this module, and its submodules into
         space.builtin_modules"""
         Module.install(self)
         if hasattr(self, "submodules"):
@@ -61,8 +65,8 @@
         self.w_initialdict = self.space.call_method(self.w_dict, 'items')
 
     @classmethod
+    @not_rpython
     def get_applevel_name(cls):
-        """ NOT_RPYTHON """
         if cls.applevel_name is not None:
             return cls.applevel_name
         else:
@@ -130,8 +134,8 @@
         self._frozen = True
 
     @classmethod
+    @not_rpython
     def buildloaders(cls):
-        """ NOT_RPYTHON """
         if not hasattr(cls, 'loaders'):
             # build a constant dictionary out of
             # applevel/interplevel definitions
@@ -161,8 +165,8 @@
         return space.newtext_or_none(cls.__doc__)
 
 
+ at not_rpython
 def getinterpevalloader(pkgroot, spec):
-    """ NOT_RPYTHON """
     def ifileloader(space):
         d = {'space':space}
         # EVIL HACK (but it works, and this is not RPython :-)
@@ -202,8 +206,8 @@
     return ifileloader
 
 applevelcache = {}
+ at not_rpython
 def getappfileloader(pkgroot, appname, spec):
-    """ NOT_RPYTHON """
     # hum, it's a bit more involved, because we usually
     # want the import at applevel
     modname, attrname = spec.split('.')
diff --git a/pypy/interpreter/module.py b/pypy/interpreter/module.py
--- a/pypy/interpreter/module.py
+++ b/pypy/interpreter/module.py
@@ -4,7 +4,7 @@
 
 from pypy.interpreter.baseobjspace import W_Root
 from pypy.interpreter.error import OperationError
-from rpython.rlib.objectmodel import we_are_translated
+from rpython.rlib.objectmodel import we_are_translated, not_rpython
 
 
 class Module(W_Root):
@@ -40,13 +40,15 @@
         except OperationError:
             pass
 
+    @not_rpython
     def install(self):
-        """NOT_RPYTHON: installs this module into space.builtin_modules"""
+        """installs this module into space.builtin_modules"""
         modulename = self.space.text0_w(self.w_name)
         self.space.builtin_modules[modulename] = self
 
+    @not_rpython
     def setup_after_space_initialization(self):
-        """NOT_RPYTHON: to allow built-in modules to do some more setup
+        """to allow built-in modules to do some more setup
         after the space is fully initialized."""
 
     def init(self, space):
diff --git a/pypy/interpreter/pyframe.py b/pypy/interpreter/pyframe.py
--- a/pypy/interpreter/pyframe.py
+++ b/pypy/interpreter/pyframe.py
@@ -7,6 +7,7 @@
 from rpython.rlib.debug import ll_assert_not_none
 from rpython.rlib.jit import hint
 from rpython.rlib.objectmodel import instantiate, specialize, we_are_translated
+from rpython.rlib.objectmodel import not_rpython
 from rpython.rlib.rarithmetic import intmask, r_uint
 from rpython.tool.pairtype import extendabletype
 
@@ -144,8 +145,9 @@
             return None
         return d.w_locals
 
+    @not_rpython
     def __repr__(self):
-        # NOT_RPYTHON: useful in tracebacks
+        # useful in tracebacks
         return "<%s.%s executing %s at line %s" % (
             self.__class__.__module__, self.__class__.__name__,
             self.pycode, self.get_last_lineno())
diff --git a/pypy/interpreter/pyopcode.py b/pypy/interpreter/pyopcode.py
--- a/pypy/interpreter/pyopcode.py
+++ b/pypy/interpreter/pyopcode.py
@@ -7,7 +7,7 @@
 from rpython.rlib import jit, rstackovf
 from rpython.rlib.debug import check_nonneg
 from rpython.rlib.objectmodel import (we_are_translated, always_inline,
-        dont_inline)
+        dont_inline, not_rpython)
 from rpython.rlib.rarithmetic import r_uint, intmask
 from rpython.tool.sourcetools import func_with_new_name
 
@@ -20,8 +20,8 @@
 from pypy.interpreter.pycode import PyCode, BytecodeCorruption
 from pypy.tool.stdlib_opcode import bytecode_spec
 
+ at not_rpython
 def unaryoperation(operationname):
-    """NOT_RPYTHON"""
     def opimpl(self, *ignored):
         operation = getattr(self.space, operationname)
         w_1 = self.popvalue()
@@ -31,8 +31,8 @@
 
     return func_with_new_name(opimpl, "opcode_impl_for_%s" % operationname)
 
+ at not_rpython
 def binaryoperation(operationname):
-    """NOT_RPYTHON"""
     def opimpl(self, *ignored):
         operation = getattr(self.space, operationname)
         w_2 = self.popvalue()
diff --git a/pypy/interpreter/test/test_function.py b/pypy/interpreter/test/test_function.py
--- a/pypy/interpreter/test/test_function.py
+++ b/pypy/interpreter/test/test_function.py
@@ -1,4 +1,4 @@
-import pytest
+import pytest, sys
 from pypy.interpreter import eval
 from pypy.interpreter.function import Function, Method, descr_function_get
 from pypy.interpreter.pycode import PyCode
@@ -342,6 +342,11 @@
         raises(ValueError, type(f).__setstate__, f, (1, 2, 3))
 
 class AppTestMethod:
+    def setup_class(cls):
+        cls.w_runappdirect_on_cpython = cls.space.wrap(
+            cls.runappdirect and
+            '__pypy__' not in sys.builtin_module_names)
+
     def test_simple_call(self):
         class A(object):
             def func(self, arg2):
@@ -572,7 +577,6 @@
         assert meth == meth
         assert meth == MethodType(func, object)
 
-    @pytest.mark.skipif("config.option.runappdirect")
     def test_method_identity(self):
         class A(object):
             def m(self):
@@ -589,19 +593,24 @@
 
         a = A()
         a2 = A()
-        assert a.m is a.m
-        assert id(a.m) == id(a.m)
-        assert a.m is not a.n
-        assert id(a.m) != id(a.n)
-        assert a.m is not a2.m
-        assert id(a.m) != id(a2.m)
+        x = a.m; y = a.m
+        assert x is not y
+        assert id(x) != id(y)
+        assert x == y
+        assert x is not a.n
+        assert id(x) != id(a.n)
+        assert x is not a2.m
+        assert id(x) != id(a2.m)
 
-        assert A.m is A.m
-        assert id(A.m) == id(A.m)
-        assert A.m is not A.n
-        assert id(A.m) != id(A.n)
-        assert A.m is not B.m
-        assert id(A.m) != id(B.m)
+        if not self.runappdirect_on_cpython:
+            assert A.m is A.m
+            assert id(A.m) == id(A.m)
+        assert A.m == A.m
+        x = A.m
+        assert x is not A.n
+        assert id(x) != id(A.n)
+        assert x is not B.m
+        assert id(x) != id(B.m)
 
 
 class TestMethod:
diff --git a/pypy/interpreter/typedef.py b/pypy/interpreter/typedef.py
--- a/pypy/interpreter/typedef.py
+++ b/pypy/interpreter/typedef.py
@@ -8,14 +8,15 @@
 
 from rpython.rlib.jit import promote
 from rpython.rlib.objectmodel import compute_identity_hash, specialize
-from rpython.rlib.objectmodel import instantiate
+from rpython.rlib.objectmodel import instantiate, not_rpython
 from rpython.tool.sourcetools import compile2, func_with_new_name
 
 
 class TypeDef(object):
+    @not_rpython
     def __init__(self, __name, __base=None, __total_ordering__=None,
                  __buffer=None, **rawdict):
-        "NOT_RPYTHON: initialization-time only"
+        "initialization-time only"
         self.name = __name
         if __base is None:
             bases = []
@@ -113,8 +114,9 @@
 # register_finalizer() or not.
 
 @specialize.memo()
+ at not_rpython
 def get_unique_interplevel_subclass(space, cls):
-    "NOT_RPYTHON: initialization-time only"
+    "initialization-time only"
     assert cls.typedef.acceptable_as_base_class
     try:
         return _unique_subclass_cache[cls]
@@ -349,15 +351,17 @@
         return self
 
 
+ at not_rpython
 def interp_attrproperty(name, cls, doc=None, wrapfn=None):
-    "NOT_RPYTHON: initialization-time only"
+    "initialization-time only"
     assert wrapfn is not None
     def fget(space, obj):
         return getattr(space, wrapfn)(getattr(obj, name))
     return GetSetProperty(fget, cls=cls, doc=doc)
 
+ at not_rpython
 def interp_attrproperty_w(name, cls, doc=None):
-    "NOT_RPYTHON: initialization-time only"
+    "initialization-time only"
     def fget(space, obj):
         w_value = getattr(obj, name)
         if w_value is None:
diff --git a/pypy/module/__builtin__/test/test_classobj.py b/pypy/module/__builtin__/test/test_classobj.py
--- a/pypy/module/__builtin__/test/test_classobj.py
+++ b/pypy/module/__builtin__/test/test_classobj.py
@@ -1090,18 +1090,18 @@
     def setup_class(cls):
         if cls.runappdirect:
             py.test.skip("can only be run on py.py")
-        def is_strdict(space, w_class):
-            from pypy.objspace.std.dictmultiobject import BytesDictStrategy
+        def is_moduledict(space, w_class):
+            from pypy.objspace.std.celldict import ModuleDictStrategy
             w_d = w_class.getdict(space)
-            return space.wrap(isinstance(w_d.get_strategy(), BytesDictStrategy))
+            return space.wrap(isinstance(w_d.get_strategy(), ModuleDictStrategy))
 
-        cls.w_is_strdict = cls.space.wrap(gateway.interp2app(is_strdict))
+        cls.w_is_moduledict = cls.space.wrap(gateway.interp2app(is_moduledict))
 
-    def test_strdict(self):
+    def test_moduledict(self):
         class A:
             a = 1
             b = 2
-        assert self.is_strdict(A)
+        assert self.is_moduledict(A)
 
     def test_attr_slots(self):
         class C:
diff --git a/pypy/module/_cffi_backend/cdataobj.py b/pypy/module/_cffi_backend/cdataobj.py
--- a/pypy/module/_cffi_backend/cdataobj.py
+++ b/pypy/module/_cffi_backend/cdataobj.py
@@ -433,17 +433,22 @@
     def _sizeof(self):
         return self.ctype.size
 
-    def with_gc(self, w_destructor):
+    def with_gc(self, w_destructor, size=0):
         space = self.space
         if space.is_none(w_destructor):
             if isinstance(self, W_CDataGCP):
                 self.detach_destructor()
-                return space.w_None
-            raise oefmt(space.w_TypeError,
-                        "Can remove destructor only on a object "
-                        "previously returned by ffi.gc()")
-        with self as ptr:
-            return W_CDataGCP(space, ptr, self.ctype, self, w_destructor)
+                w_res = space.w_None
+            else:
+                raise oefmt(space.w_TypeError,
+                            "Can remove destructor only on a object "
+                            "previously returned by ffi.gc()")
+        else:
+            with self as ptr:
+                w_res = W_CDataGCP(space, ptr, self.ctype, self, w_destructor)
+        if size != 0:
+            rgc.add_memory_pressure(size)
+        return w_res
 
     def unpack(self, length):
         from pypy.module._cffi_backend.ctypeptr import W_CTypePtrOrArray
diff --git a/pypy/module/_cffi_backend/ffi_obj.py b/pypy/module/_cffi_backend/ffi_obj.py
--- a/pypy/module/_cffi_backend/ffi_obj.py
+++ b/pypy/module/_cffi_backend/ffi_obj.py
@@ -351,14 +351,14 @@
         return handle.from_handle(self.space, w_arg)
 
 
-    @unwrap_spec(w_cdata=W_CData)
-    def descr_gc(self, w_cdata, w_destructor):
+    @unwrap_spec(w_cdata=W_CData, size=int)
+    def descr_gc(self, w_cdata, w_destructor, size=0):
         """\
 Return a new cdata object that points to the same data.
 Later, when this new cdata object is garbage-collected,
 'destructor(old_cdata_object)' will be called."""
         #
-        return w_cdata.with_gc(w_destructor)
+        return w_cdata.with_gc(w_destructor, size)
 
 
     @unwrap_spec(replace_with='text')
diff --git a/pypy/module/_cffi_backend/func.py b/pypy/module/_cffi_backend/func.py
--- a/pypy/module/_cffi_backend/func.py
+++ b/pypy/module/_cffi_backend/func.py
@@ -257,6 +257,6 @@
 
 # ____________________________________________________________
 
- at unwrap_spec(w_cdata=cdataobj.W_CData)
-def gcp(space, w_cdata, w_destructor):
-    return w_cdata.with_gc(w_destructor)
+ at unwrap_spec(w_cdata=cdataobj.W_CData, size=int)
+def gcp(space, w_cdata, w_destructor, size=0):
+    return w_cdata.with_gc(w_destructor, size)
diff --git a/pypy/module/_cffi_backend/test/test_ffi_obj.py b/pypy/module/_cffi_backend/test/test_ffi_obj.py
--- a/pypy/module/_cffi_backend/test/test_ffi_obj.py
+++ b/pypy/module/_cffi_backend/test/test_ffi_obj.py
@@ -377,7 +377,7 @@
         raises(TypeError, ffi.gc, p, None)
         seen = []
         q1 = ffi.gc(p, lambda p: seen.append(1))
-        q2 = ffi.gc(q1, lambda p: seen.append(2))
+        q2 = ffi.gc(q1, lambda p: seen.append(2), size=123)
         import gc; gc.collect()
         assert seen == []
         assert ffi.gc(q1, None) is None
diff --git a/pypy/module/_codecs/__init__.py b/pypy/module/_codecs/__init__.py
--- a/pypy/module/_codecs/__init__.py
+++ b/pypy/module/_codecs/__init__.py
@@ -1,5 +1,6 @@
 from pypy.interpreter.mixedmodule import MixedModule
 from rpython.rlib import runicode
+from rpython.rlib.objectmodel import not_rpython
 from pypy.module._codecs import interp_codecs
 
 class Module(MixedModule):
@@ -86,9 +87,8 @@
          'unicode_internal_encode'   :  'interp_codecs.unicode_internal_encode',
     }
 
+    @not_rpython
     def __init__(self, space, *args):
-        "NOT_RPYTHON"
-
         # mbcs codec is Windows specific, and based on rffi.
         if (hasattr(runicode, 'str_decode_mbcs')):
             self.interpleveldefs['mbcs_encode'] = 'interp_codecs.mbcs_encode'
diff --git a/pypy/module/_codecs/interp_codecs.py b/pypy/module/_codecs/interp_codecs.py
--- a/pypy/module/_codecs/interp_codecs.py
+++ b/pypy/module/_codecs/interp_codecs.py
@@ -1,5 +1,5 @@
 from rpython.rlib import jit
-from rpython.rlib.objectmodel import we_are_translated
+from rpython.rlib.objectmodel import we_are_translated, not_rpython
 from rpython.rlib.rstring import UnicodeBuilder
 from rpython.rlib.runicode import code_to_unichr, MAXUNICODE
 
@@ -268,8 +268,8 @@
         raise oefmt(space.w_TypeError,
                     "don't know how to handle %T in error callback", w_exc)
 
+ at not_rpython
 def register_builtin_error_handlers(space):
-    "NOT_RPYTHON"
     state = space.fromcache(CodecState)
     for error in ("strict", "ignore", "replace", "xmlcharrefreplace",
                   "backslashreplace"):
diff --git a/pypy/module/_vmprof/test/test__vmprof.py b/pypy/module/_vmprof/test/test__vmprof.py
--- a/pypy/module/_vmprof/test/test__vmprof.py
+++ b/pypy/module/_vmprof/test/test__vmprof.py
@@ -1,3 +1,4 @@
+import sys
 from rpython.tool.udir import udir
 from pypy.tool.pytest.objspace import gettestobjspace
 
@@ -7,6 +8,8 @@
     def setup_class(cls):
         cls.w_tmpfilename = cls.space.wrap(str(udir.join('test__vmprof.1')))
         cls.w_tmpfilename2 = cls.space.wrap(str(udir.join('test__vmprof.2')))
+        cls.w_plain = cls.space.wrap(not cls.runappdirect and
+            '__pypy__' not in sys.builtin_module_names)
 
     def test_import_vmprof(self):
         tmpfile = open(self.tmpfilename, 'wb')
@@ -117,6 +120,8 @@
         assert _vmprof.get_profile_path() is None
 
     def test_stop_sampling(self):
+        if not self.plain:
+            skip("unreliable test except on CPython without -A")
         import os
         import _vmprof
         tmpfile = open(self.tmpfilename, 'wb')
diff --git a/pypy/module/array/interp_array.py b/pypy/module/array/interp_array.py
--- a/pypy/module/array/interp_array.py
+++ b/pypy/module/array/interp_array.py
@@ -118,6 +118,29 @@
         return space.w_True
     return space.w_False
 
+index_count_jd = jit.JitDriver(
+    greens = ['count', 'arrclass', 'tp_item'],
+    reds = 'auto', name = 'array.index_or_count')
+
+def index_count_array(arr, w_val, count=False):
+    space = arr.space
+    tp_item = space.type(w_val)
+    arrclass = arr.__class__
+    cnt = 0
+    for i in range(arr.len):
+        index_count_jd.jit_merge_point(
+            tp_item=tp_item, count=count,
+            arrclass=arrclass)
+        w_item = arr.w_getitem(space, i)
+        if space.eq_w(w_item, w_val):
+            if count:
+                cnt += 1
+            else:
+                return i
+    if count:
+        return cnt
+    return -1
+
 UNICODE_ARRAY = lltype.Ptr(lltype.Array(lltype.UniChar,
                                         hints={'nolength': True}))
 
@@ -257,17 +280,12 @@
         """
         self.extend(w_x)
 
-    def descr_count(self, space, w_val):
+    def descr_count(self, space, w_x):
         """ count(x)
 
         Return number of occurrences of x in the array.
         """
-        cnt = 0
-        for i in range(self.len):
-            # XXX jitdriver
-            w_item = self.w_getitem(space, i)
-            if space.eq_w(w_item, w_val):
-                cnt += 1
+        cnt = index_count_array(self, w_x, count=True)
         return space.newint(cnt)
 
     def descr_index(self, space, w_x):
@@ -275,10 +293,9 @@
 
         Return index of first occurrence of x in the array.
         """
-        for i in range(self.len):
-            w_item = self.w_getitem(space, i)
-            if space.eq_w(w_item, w_x):
-                return space.newint(i)
+        res = index_count_array(self, w_x, count=False)
+        if res >= 0:
+            return space.newint(res)
         raise oefmt(space.w_ValueError, "array.index(x): x not in list")
 
     def descr_reverse(self, space):
@@ -752,7 +769,9 @@
 
 class TypeCode(object):
     def __init__(self, itemtype, unwrap, canoverflow=False, signed=False,
-                 method='__int__'):
+                 method='__int__', errorname=None):
+        if errorname is None:
+            errorname = unwrap[:-2]
         self.itemtype = itemtype
         self.bytes = rffi.sizeof(itemtype)
         self.arraytype = lltype.Array(itemtype, hints={'nolength': True})
@@ -762,6 +781,7 @@
         self.canoverflow = canoverflow
         self.w_class = None
         self.method = method
+        self.errorname = errorname
 
     def _freeze_(self):
         # hint for the annotator: track individual constant instances
@@ -785,8 +805,8 @@
     'i': TypeCode(rffi.INT,           'int_w', True, True),
     'I': _UINTTypeCode,
     'l': TypeCode(rffi.LONG,          'int_w', True, True),
-    'L': TypeCode(rffi.ULONG,         'bigint_w'),  # Overflow handled by
-                                                    # rbigint.touint() which
+    'L': TypeCode(rffi.ULONG,         'bigint_w',   # Overflow handled by
+                  errorname="integer"),             # rbigint.touint() which
                                                     # corresponds to the
                                                     # C-type unsigned long
     'f': TypeCode(lltype.SingleFloat, 'float_w', method='__float__'),
@@ -864,7 +884,7 @@
                         item = unwrap(space.call_method(w_item, mytype.method))
                     except OperationError:
                         raise oefmt(space.w_TypeError,
-                                    "array item must be " + mytype.unwrap[:-2])
+                                    "array item must be " + mytype.errorname)
                 else:
                     raise
             if mytype.unwrap == 'bigint_w':
diff --git a/pypy/module/array/test/test_array.py b/pypy/module/array/test/test_array.py
--- a/pypy/module/array/test/test_array.py
+++ b/pypy/module/array/test/test_array.py
@@ -162,6 +162,11 @@
             raises(OverflowError, a.append, -1)
             raises(OverflowError, a.append, 2 ** (8 * b))
 
+    def test_errormessage(self):
+        a = self.array("L", [1, 2, 3])
+        excinfo = raises(TypeError, "a[0] = 'abc'")
+        assert str(excinfo.value) == "array item must be integer"
+
     def test_fromstring(self):
         import sys
 
diff --git a/pypy/module/cppyy/test/test_cint.py b/pypy/module/cppyy/test/test_cint.py
deleted file mode 100644
--- a/pypy/module/cppyy/test/test_cint.py
+++ /dev/null
@@ -1,710 +0,0 @@
-import py, os, sys
-
-# These tests are for the CINT backend only (they exercise ROOT features
-# and classes that are not loaded/available with the Reflex backend). At
-# some point, these tests are likely covered by the CLang/LLVM backend.
-from pypy.module.cppyy import capi
-if capi.identify() != 'CINT':
-    py.test.skip("backend-specific: CINT-only tests")
-
-# load _cffi_backend early, or its global vars are counted as leaks in the
-# test (note that the module is not otherwise used in the test itself)
-from pypy.module._cffi_backend import newtype
-
-currpath = py.path.local(__file__).dirpath()
-iotypes_dct = str(currpath.join("iotypesDict.so"))
-
-def setup_module(mod):
-    if sys.platform == 'win32':
-        py.test.skip("win32 not supported so far")
-    err = os.system("cd '%s' && make CINT=t iotypesDict.so" % currpath)
-    if err:
-        raise OSError("'make' failed (see stderr)")
-
-class AppTestCINT:
-    spaceconfig = dict(usemodules=['cppyy', '_rawffi', 'itertools'])
-
-    def test01_globals(self):
-        """Test the availability of ROOT globals"""
-
-        import cppyy
-
-        assert cppyy.gbl.gROOT
-        assert cppyy.gbl.gApplication
-        assert cppyy.gbl.gSystem
-        assert cppyy.gbl.TInterpreter.Instance()           # compiled
-        assert cppyy.gbl.TInterpreter                      # interpreted
-        assert cppyy.gbl.TDirectory.CurrentDirectory()     # compiled
-        assert cppyy.gbl.TDirectory                        # interpreted
-
-    def test02_write_access_to_globals(self):
-        """Test overwritability of ROOT globals"""
-
-        import cppyy
-
-        oldval = cppyy.gbl.gDebug
-        assert oldval != 3
-
-        proxy = cppyy.gbl.__class__.__dict__['gDebug']
-        cppyy.gbl.gDebug = 3
-        assert proxy.__get__(proxy, None) == 3
-
-        # this is where this test differs from test03_write_access_to_globals
-        # in test_pythonify.py
-        cppyy.gbl.gROOT.ProcessLine('int gDebugCopy = gDebug;')
-        assert cppyy.gbl.gDebugCopy == 3
-
-        cppyy.gbl.gDebug = oldval
-
-    def test03_create_access_to_globals(self):
-        """Test creation and access of new ROOT globals"""
-
-        import cppyy
-
-        cppyy.gbl.gROOT.ProcessLine('double gMyOwnGlobal = 3.1415')
-        assert cppyy.gbl.gMyOwnGlobal == 3.1415
-
-        proxy = cppyy.gbl.__class__.__dict__['gMyOwnGlobal']
-        assert proxy.__get__(proxy, None) == 3.1415
-
-    def test04_auto_loading(self):
-        """Test auto-loading by retrieving a non-preloaded class"""
-
-        import cppyy
-
-        l = cppyy.gbl.TLorentzVector()
-        assert isinstance(l, cppyy.gbl.TLorentzVector)
-
-    def test05_macro_loading(self):
-        """Test accessibility to macro classes"""
-
-        import cppyy
-
-        loadres = cppyy.gbl.gROOT.LoadMacro('simple_class.C')
-        assert loadres == 0
-
-        base = cppyy.gbl.MySimpleBase
-        simple = cppyy.gbl.MySimpleDerived
-        simple_t = cppyy.gbl.MySimpleDerived_t
-
-        assert issubclass(simple, base)
-        assert simple is simple_t
-
-        c = simple()
-        assert isinstance(c, simple)
-        assert c.m_data == c.get_data()
-
-        c.set_data(13)
-        assert c.m_data == 13
-        assert c.get_data() == 13
-
-
-class AppTestCINTPYTHONIZATIONS:
-    spaceconfig = dict(usemodules=['cppyy', '_rawffi', 'itertools'])
-
-    def test01_strings(self):
-        """Test TString/TObjString compatibility"""
-
-        import cppyy
-
-        pyteststr = "aap noot mies"
-        def test_string(s1, s2):
-            assert len(s1) == len(s2)
-            assert s1 == s1
-            assert s1 == s2
-            assert s1 == str(s1)
-            assert s1 == pyteststr
-            assert s1 != "aap"
-            assert s1 != ""
-            assert s1 < "noot"
-            assert repr(s1) == repr(s2)
-
-        s1 = cppyy.gbl.TString(pyteststr)
-        test_string(s1, pyteststr)
-
-        s3 = cppyy.gbl.TObjString(pyteststr)
-        test_string(s3, pyteststr)
-
-    def test03_TVector(self):
-        """Test TVector2/3/T behavior"""
-
-        import cppyy, math
-
-        N = 51
-
-        # TVectorF is a typedef of floats
-        v = cppyy.gbl.TVectorF(N)
-        for i in range(N):
-            v[i] = i*i
-
-        assert len(v) == N
-        for j in v:
-            assert round(v[int(math.sqrt(j)+0.5)]-j, 5) == 0.
-
-    def test04_TStringTObjString(self):
-        """Test string/TString interchangebility"""
-
-        import cppyy
-
-        test = "aap noot mies"
-
-        s1 = cppyy.gbl.TString(test )
-        s2 = str(s1)
-
-        assert s1 == test
-        assert test == s2
-        assert s1 == s2
-
-        s3 = cppyy.gbl.TObjString(s2)
-        assert s3 == test
-        assert s2 == s3
-
-        # force use of: TNamed(const TString &name, const TString &title)
-        n = cppyy.gbl.TNamed(test, cppyy.gbl.TString("title"))
-        assert n.GetTitle() == "title"
-        assert n.GetName() == test
-
-
-class AppTestCINTTTREE:
-    spaceconfig = dict(usemodules=['cppyy', '_rawffi', 'itertools'])
-
-    def setup_class(cls):
-        cls.w_N = cls.space.newint(5)
-        cls.w_M = cls.space.newint(10)
-        cls.w_fname = cls.space.newtext("test.root")
-        cls.w_tname = cls.space.newtext("test")
-        cls.w_title = cls.space.newtext("test tree")
-        cls.w_iotypes = cls.space.appexec([], """():
-            import cppyy
-            return cppyy.load_reflection_info(%r)""" % (iotypes_dct,))
-
-    def test01_write_stdvector(self):
-        """Test writing of a single branched TTree with an std::vector<double>"""
-
-        from cppyy import gbl               # bootstraps, only needed for tests
-        from cppyy.gbl import TFile, TTree
-        from cppyy.gbl.std import vector
-
-        f = TFile(self.fname, "RECREATE")
-        mytree = TTree(self.tname, self.title)
-        mytree._python_owns = False
-
-        v = vector("double")()
-        raises(TypeError, TTree.Branch, None, "mydata", v.__class__.__name__, v)
-        raises(TypeError, TTree.Branch, v, "mydata", v.__class__.__name__, v)
-
-        mytree.Branch("mydata", v.__class__.__name__, v)
-
-        for i in range(self.N):
-            for j in range(self.M):
-                v.push_back(i*self.M+j)
-            mytree.Fill()
-            v.clear()
-        f.Write()
-        f.Close()
-
-    def test02_file_open(self):
-
-        from cppyy import gbl
-
-        f = gbl.TFile.Open(self.fname)
-        s = str(f)            # should not raise
-        r = repr(f)
-
-        f.Close()
-
-    def test03_read_stdvector(self):
-        """Test reading of a single branched TTree with an std::vector<double>"""
-
-        from cppyy import gbl
-        from cppyy.gbl import TFile
-
-        f = TFile(self.fname)
-        mytree = f.Get(self.tname)
-
-        i = 0
-        for event in mytree:
-            assert len(event.mydata) == self.M
-            for entry in event.mydata:
-                assert i == int(entry)
-                i += 1
-        assert i == self.N * self.M
-
-        f.Close()
-
-    def test04_write_some_data_object(self):
-        """Test writing of a complex data object"""
-
-        from cppyy import gbl
-        from cppyy.gbl import TFile, TTree, IO
-        from cppyy.gbl.IO import SomeDataObject
-
-        f = TFile(self.fname, "RECREATE")
-        mytree = TTree(self.tname, self.title)
-
-        d = SomeDataObject()
-        b = mytree.Branch("data", d)
-        mytree._python_owns = False
-        assert b
-
-        for i in range(self.N):
-            for j in range(self.M):
-                d.add_float(i*self.M+j)
-            d.add_tuple(d.get_floats())
-
-            mytree.Fill()
-
-        f.Write()
-        f.Close()
-
-    def test05_read_some_data_object(self):
-        """Test reading of a complex data object"""
-
-        from cppyy import gbl
-        from cppyy.gbl import TFile
-
-        f = TFile(self.fname)
-        mytree = f.Get(self.tname)
-
-        j = 1
-        for event in mytree:
-            i = 0
-            assert len(event.data.get_floats()) == j*self.M
-            for entry in event.data.get_floats():
-                assert i == int(entry)
-                i += 1
-
-            k = 1
-            assert len(event.data.get_tuples()) == j
-            for mytuple in event.data.get_tuples():
-                i = 0
-                assert len(mytuple) == k*self.M
-                for entry in mytuple:
-                    assert i == int(entry)
-                    i += 1
-                k += 1
-            j += 1
-        assert j-1 == self.N
-        #
-        f.Close()
-
-    def test06_branch_activation(self):
-        """Test of automatic branch activation"""
-
-        from cppyy import gbl
-        from cppyy.gbl import TFile, TTree
-        from cppyy.gbl.std import vector
-
-        L = 5
-
-        # writing
-        f = TFile(self.fname, "RECREATE")
-        mytree = TTree(self.tname, self.title)
-        mytree._python_owns = False
-
-        for i in range(L):
-            v = vector("double")()
-            mytree.Branch("mydata_%d"%i, v.__class__.__name__, v)
-            mytree.__dict__["v_%d"%i] = v
-
-        for i in range(self.N):
-            for k in range(L):
-                v = mytree.__dict__["v_%d"%k]
-                for j in range(self.M):
-                    mytree.__dict__["v_%d"%k].push_back(i*self.M+j*L+k)
-            mytree.Fill()
-            for k in range(L):
-                v = mytree.__dict__["v_%d"%k]
-                v.clear()
-        f.Write()
-        f.Close()
-
-        del mytree, f
-        import gc
-        gc.collect()
-
-        # reading
-        f = TFile(self.fname)
-        mytree = f.Get(self.tname)
-
-        # force (initial) disabling of all branches
-        mytree.SetBranchStatus("*",0);
-
-        i = 0
-        for event in mytree:
-            for k in range(L):
-                j = 0
-                data = getattr(mytree, "mydata_%d"%k)
-                assert len(data) == self.M
-                for entry in data:
-                    assert entry == i*self.M+j*L+k
-                    j += 1
-                assert j == self.M
-            i += 1
-        assert i == self.N
-
-        f.Close()
-
-    def test07_write_builtin(self):
-        """Test writing of builtins"""
-
-        from cppyy import gbl               # bootstraps, only needed for tests
-        from cppyy.gbl import TFile, TTree
-        from cppyy.gbl.std import vector
-
-        f = TFile(self.fname, "RECREATE")
-        mytree = TTree(self.tname, self.title)
-        mytree._python_owns = False
-
-        import array
-        mytree.ba = array.array('c', [chr(0)])
-        mytree.ia = array.array('i', [0])
-        mytree.da = array.array('d', [0.])
-
-        mytree.Branch("my_bool",   mytree.ba, "my_bool/O")
-        mytree.Branch("my_int",    mytree.ia, "my_int/I")
-        mytree.Branch("my_int2",   mytree.ia, "my_int2/I")
-        mytree.Branch("my_double", mytree.da, "my_double/D")
-
-        for i in range(self.N):
-            # make sure value is different from default (0)
-            mytree.ba[0] = i%2 and chr(0) or chr(1)
-            mytree.ia[0] = i+1
-            mytree.da[0] = (i+1)/2.
-            mytree.Fill()
-        f.Write()
-        f.Close()
-
-    def test08_read_builtin(self):
-        """Test reading of builtins"""
-
-        from cppyy import gbl
-        from cppyy.gbl import TFile
-
-        f = TFile(self.fname)
-        mytree = f.Get(self.tname)
-
-        raises(AttributeError, getattr, mytree, "does_not_exist")
-
-        i = 1
-        for event in mytree:
-            assert event.my_bool   == ((i-1)%2 and 0 or 1)
-            assert event.my_int    == i
-            assert event.my_double == i/2.
-            i += 1
-        assert (i-1) == self.N
-
-        f.Close()
-
-    def test09_user_read_builtin(self):
-        """Test user-directed reading of builtins"""
-
-        from cppyy import gbl
-        from cppyy.gbl import TFile
-
-        f = TFile(self.fname)
-        mytree = f.Get(self.tname)
-
-        # note: this is an old, annotated tree from test08
-        for i in range(3, mytree.GetEntriesFast()):
-            mytree.GetEntry(i)
-            assert mytree.my_int  == i+1
-            assert mytree.my_int2 == i+1
-
-        f.Close()
-
-class AppTestCINTREGRESSION:
-    spaceconfig = dict(usemodules=['cppyy', '_rawffi', 'itertools'])
-
-    # these are tests that at some point in the past resulted in failures on
-    # PyROOT; kept here to confirm there is no regression relative to PyROOT
-
-    def test01_regression(self):
-        """TPaveText::AddText() used to result in KeyError"""
-
-        # This is where the original problem was discovered, and the test is
-        # left in. However, the detailed underlying problem, as well as the
-        # solution to it, is tested in test_fragile.py
-
-        from cppyy import gbl
-        from cppyy.gbl import TPaveText
-
-        hello = TPaveText( .1, .8, .9, .97 )
-        hello.AddText( 'Hello, World!' )
-
-
-class AppTestCINTFUNCTION:
-    spaceconfig = dict(usemodules=['cppyy', '_rawffi', 'itertools'])
-    _pypytest_leaks = None   # TODO: figure out the false positives
-
-    # test the function callbacks; this does not work with Reflex, as it
-    # cannot generate functions on the fly (it might with cffi?)
-
-    @py.test.mark.dont_track_allocations("TODO: understand; initialization left-over?")
-    def test01_global_function_callback(self):
-        """Test callback of a python global function"""
-
-        import cppyy, gc
-        TF1 = cppyy.gbl.TF1
-
-        def identity(x):
-            return x[0]
-
-        f = TF1("pyf1", identity, -1., 1., 0)
-
-        assert f.Eval(0.5)  == 0.5
-        assert f.Eval(-10.) == -10.
-        assert f.Eval(1.0)  == 1.0
-
-        # check proper propagation of default value
-        f = TF1("pyf1d", identity, -1., 1.)
-
-        assert f.Eval(0.5) == 0.5
-
-        del f      # force here, to prevent leak-check complaints
-        gc.collect()
-
-    def test02_callable_object_callback(self):
-        """Test callback of a python callable object"""
-
-        import cppyy, gc
-        TF1 = cppyy.gbl.TF1
-
-        class Linear:
-            def __call__(self, x, par):
-                return par[0] + x[0]*par[1]
-
-        f = TF1("pyf2", Linear(), -1., 1., 2)
-        f.SetParameters(5., 2.)
-
-        assert f.Eval(-0.1) == 4.8
-        assert f.Eval(1.3)  == 7.6
-
-        del f      # force here, to prevent leak-check complaints
-        gc.collect()
-
-    def test03_fit_with_python_gaussian(self):
-        """Test fitting with a python global function"""
-
-        # note: this function is dreadfully slow when the tests are run untranslated
-
-        import cppyy, gc, math
-        TF1, TH1F = cppyy.gbl.TF1, cppyy.gbl.TH1F
-
-        def pygaus(x, par):
-            arg1 = 0
-            scale1 = 0
-            ddx = 0.01
-
-            if (par[2] != 0.0):
-                arg1 = (x[0]-par[1])/par[2]
-                scale1 = (ddx*0.39894228)/par[2]
-                h1 = par[0]/(1+par[3])
-
-                gauss = h1*scale1*math.exp(-0.5*arg1*arg1)
-            else:
-                gauss = 0.
-            return gauss
-
-        f = TF1("pygaus", pygaus, -4, 4, 4)
-        f.SetParameters(600, 0.43, 0.35, 600)
-
-        h = TH1F("h", "test", 100, -4, 4)
-        h.FillRandom("gaus", 200000)
-        h.Fit(f, "0Q")
-
-        assert f.GetNDF() == 96
-        result = f.GetParameters()
-        assert round(result[1] - 0., 1) == 0  # mean
-        assert round(result[2] - 1., 1) == 0  # s.d.
-
-        del f      # force here, to prevent leak-check complaints
-        gc.collect()
-
-
-class AppTestSURPLUS:
-    spaceconfig = dict(usemodules=['cppyy', '_rawffi', 'itertools'])
-
-    # these are tests that were historically exercised on ROOT classes and
-    # have twins on custom classes; kept here just in case differences crop
-    # up between the ROOT classes and the custom ones
-
-    def test01_class_enum(self):
-        """Test class enum access and values"""
-
-        import cppyy
-        TObject = cppyy.gbl.TObject
-        gROOT = cppyy.gbl.gROOT
-
-        assert TObject.kBitMask    == gROOT.ProcessLine("return TObject::kBitMask;")
-        assert TObject.kIsOnHeap   == gROOT.ProcessLine("return TObject::kIsOnHeap;")
-        assert TObject.kNotDeleted == gROOT.ProcessLine("return TObject::kNotDeleted;")
-        assert TObject.kZombie     == gROOT.ProcessLine("return TObject::kZombie;")
-
-        t = TObject()
-
-        assert TObject.kBitMask    == t.kBitMask
-        assert TObject.kIsOnHeap   == t.kIsOnHeap
-        assert TObject.kNotDeleted == t.kNotDeleted
-        assert TObject.kZombie     == t.kZombie
-
-    def test02_global_enum(self):
-        """Test global enums access and values"""
-
-        import cppyy
-        from cppyy import gbl
-
-        assert gbl.kRed   == gbl.gROOT.ProcessLine("return kRed;")
-        assert gbl.kGreen == gbl.gROOT.ProcessLine("return kGreen;")
-        assert gbl.kBlue  == gbl.gROOT.ProcessLine("return kBlue;")
-
-    def test03_copy_constructor(self):
-        """Test copy constructor"""
-
-        import cppyy
-        TLorentzVector = cppyy.gbl.TLorentzVector
-
-        t1 = TLorentzVector(1., 2., 3., -4.)
-        t2 = TLorentzVector(0., 0., 0.,  0.)
-        t3 = TLorentzVector(t1)
-
-        assert t1 == t3
-        assert t1 != t2
-
-        for i in range(4):
-            assert t1[i] == t3[i]
-
-    def test04_object_validity(self):
-        """Test object validity checking"""
-
-        import cppyy
-
-        t1 = cppyy.gbl.TObject()
-
-        assert t1
-        assert not not t1
-
-        t2 = cppyy.gbl.gROOT.FindObject("Nah, I don't exist")
-
-        assert not t2
-
-    def test05_element_access(self):
-        """Test access to elements in matrix and array objects."""
-
-        from cppyy import gbl
-
-        N = 3
-        v = gbl.TVectorF(N)
-        m = gbl.TMatrixD(N, N)
-
-        for i in range(N):
-            assert v[i] == 0.0
-
-            for j in range(N):
-                assert m[i][j] == 0.0
-
-    def test06_static_function_call(self):
-        """Test call to static function."""
-
-        import cppyy
-        TROOT, gROOT = cppyy.gbl.TROOT, cppyy.gbl.gROOT
-
-        c1 = TROOT.Class()
-        assert not not c1
-
-        c2 = gROOT.Class()
-
-        assert c1 == c2
-
-        old = gROOT.GetDirLevel()
-        TROOT.SetDirLevel(2)
-        assert 2 == gROOT.GetDirLevel()
-        gROOT.SetDirLevel(old)
-
-        old = TROOT.GetDirLevel()
-        gROOT.SetDirLevel(3)
-        assert 3 == TROOT.GetDirLevel()
-        TROOT.SetDirLevel(old)
-
-    def test07_macro(self):
-        """Test access to cpp macro's"""
-
-        from cppyy import gbl
-
-        assert gbl.NULL == 0
-
-        gbl.gROOT.ProcessLine('#define aap "aap"')
-        gbl.gROOT.ProcessLine('#define noot 1')
-        gbl.gROOT.ProcessLine('#define mies 2.0')
-
-        # TODO: macros are assumed to always be of long type ...
-        #assert gbl.aap  == "aap"
-        assert gbl.noot == 1
-        #assert gbl.mies == 2.0
-
-    def test08_opaque_pointer_passing(self):
-        """Test passing around of opaque pointers"""
-
-        import cppyy
-
-        # TODO: figure out CObject (see also test_advanced.py)
-
-        s = cppyy.gbl.TString("Hello World!")
-        #cobj = cppyy.as_cobject(s)
-        addr = cppyy.addressof(s)
-
-        #assert s == cppyy.bind_object(cobj, s.__class__)
-        #assert s == cppyy.bind_object(cobj, "TString")
-        assert s == cppyy.bind_object(addr, s.__class__)
-        assert s == cppyy.bind_object(addr, "TString")
-
-    def test09_object_and_pointer_comparisons(self):
-        """Verify object and pointer comparisons"""
-
-        import cppyy
-        gbl = cppyy.gbl
-
-        c1 = cppyy.bind_object(0, gbl.TCanvas)
-        assert c1 == None
-        assert None == c1
-
-        c2 = cppyy.bind_object(0, gbl.TCanvas)
-        assert c1 == c2
-        assert c2 == c1
-
-        # TLorentzVector overrides operator==
-        l1 = cppyy.bind_object(0, gbl.TLorentzVector)
-        assert l1 == None
-        assert None == l1
-
-        assert c1 != l1
-        assert l1 != c1
-
-        l2 = cppyy.bind_object(0, gbl.TLorentzVector)
-        assert l1 == l2
-        assert l2 == l1
-
-        l3 = gbl.TLorentzVector(1, 2, 3, 4)
-        l4 = gbl.TLorentzVector(1, 2, 3, 4)
-        l5 = gbl.TLorentzVector(4, 3, 2, 1)
-        assert l3 == l4
-        assert l4 == l3
-
-        assert l3 != None                 # like this to ensure __ne__ is called
-        assert None != l3                 # id.
-        assert l3 != l5
-        assert l5 != l3
-
-    def test10_recursive_remove(self):
-        """Verify that objects are recursively removed when destroyed"""
-
-        import cppyy
-
-        c = cppyy.gbl.TClass.GetClass("TObject")
-

