[pypy-commit] pypy winapi: merge py3.5 into branch

mattip pypy.commits at gmail.com
Thu Feb 15 07:54:56 EST 2018


Author: Matti Picus <matti.picus at gmail.com>
Branch: winapi
Changeset: r93822:bc815f3e455d
Date: 2018-02-15 14:53 +0200
http://bitbucket.org/pypy/pypy/changeset/bc815f3e455d/

Log:	merge py3.5 into branch

diff too long, truncating to 2000 out of 5761 lines

diff --git a/get_externals.py b/get_externals.py
new file mode 100644
--- /dev/null
+++ b/get_externals.py
@@ -0,0 +1,69 @@
+'''Get external dependencies for building PyPy.
+They will end up in platform.host().basepath, something like repo-root/external.
+'''
+
+from __future__ import print_function
+
+import argparse
+import os
+import zipfile
+from subprocess import Popen, PIPE
+from rpython.translator.platform import host
+
+def runcmd(cmd, verbose):
+    stdout = stderr = ''
+    report = False
+    try:
+        p = Popen(cmd, stdout=PIPE, stderr=PIPE)
+        stdout, stderr = p.communicate()
+        if p.wait() != 0 or verbose:
+            report = True
+    except Exception as e:
+        stderr = str(e) + '\n' + stderr
+        report = True
+    if report:
+        print('running "%s" returned\n%s\n%s' % (' '.join(cmd), stdout, stderr))
+    if stderr:
+        raise RuntimeError(stderr)
+
+def checkout_repo(dest='externals', org='pypy', branch='default', verbose=False):
+    url = 'https://bitbucket.org/{}/externals'.format(org)
+    if not os.path.exists(dest):
+        cmd = ['hg','clone',url,dest]
+        runcmd(cmd, verbose)
+    cmd = ['hg','-R', dest, 'update',branch]
+    runcmd(cmd, verbose)
+
+def extract_zip(externals_dir, zip_path):
+    with zipfile.ZipFile(os.fspath(zip_path)) as zf:
+        zf.extractall(os.fspath(externals_dir))
+        return externals_dir / zf.namelist()[0].split('/')[0]
+
+def parse_args():
+    p = argparse.ArgumentParser()
+    p.add_argument('-v', '--verbose', action='store_true')
+    p.add_argument('-O', '--organization',
+                   help='Organization owning the deps repos', default='pypy')
+    p.add_argument('-e', '--externals', default=host.externals,
+                   help='directory in which to store dependencies',
+                   )
+    p.add_argument('-b', '--branch', default=host.externals_branch,
+                   help='branch to check out',
+                   )
+    p.add_argument('-p', '--platform', default=None,
+                   help='someday support cross-compilation, ignore for now',
+                   )
+    return p.parse_args()
+
+
+def main():
+    args = parse_args()
+    checkout_repo(
+        dest=args.externals,
+        org=args.organization,
+        branch=args.branch,
+        verbose=args.verbose,
+    )
+
+if __name__ == '__main__':
+    main()
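
For reference, the helpers in this new script can also be driven directly from
Python rather than via the command line (which would be something along the
lines of "python get_externals.py -b <branch> -O pypy", using the options
defined in parse_args above). A minimal sketch using the functions defined
above; the destination directory and branch shown are simply the script's own
defaults:

    # Minimal sketch: clone or update the externals repository using the
    # helpers from get_externals.py above.  'externals' and 'default' are the
    # script's default dest/branch values; adjust as needed.
    from get_externals import checkout_repo

    checkout_repo(dest='externals', org='pypy', branch='default', verbose=True)
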
diff --git a/lib-python/3/distutils/unixccompiler.py b/lib-python/3/distutils/unixccompiler.py
--- a/lib-python/3/distutils/unixccompiler.py
+++ b/lib-python/3/distutils/unixccompiler.py
@@ -222,6 +222,10 @@
         return "-L" + dir
 
     def _is_gcc(self, compiler_name):
+        if "__pypy__" in sys.builtin_module_names:   # issue #2747
+            if (compiler_name.startswith('cc') or
+                compiler_name.startswith('c++')):
+                return True
         return "gcc" in compiler_name or "g++" in compiler_name
 
     def runtime_library_dir_option(self, dir):
diff --git a/lib_pypy/_sqlite3.py b/lib_pypy/_sqlite3.py
--- a/lib_pypy/_sqlite3.py
+++ b/lib_pypy/_sqlite3.py
@@ -155,9 +155,10 @@
     factory = Connection if not factory else factory
     # an sqlite3 db seems to be around 100 KiB at least (doesn't matter if
     # backed by :memory: or a file)
+    res = factory(database, timeout, detect_types, isolation_level,
+                    check_same_thread, factory, cached_statements, uri)
     add_memory_pressure(100 * 1024)
-    return factory(database, timeout, detect_types, isolation_level,
-                    check_same_thread, factory, cached_statements, uri)
+    return res
 
 
 def _unicode_text_factory(x):
diff --git a/lib_pypy/cffi.egg-info/PKG-INFO b/lib_pypy/cffi.egg-info/PKG-INFO
--- a/lib_pypy/cffi.egg-info/PKG-INFO
+++ b/lib_pypy/cffi.egg-info/PKG-INFO
@@ -1,11 +1,12 @@
 Metadata-Version: 1.1
 Name: cffi
-Version: 1.11.3
+Version: 1.11.4
 Summary: Foreign Function Interface for Python calling C code.
 Home-page: http://cffi.readthedocs.org
 Author: Armin Rigo, Maciej Fijalkowski
 Author-email: python-cffi at googlegroups.com
 License: MIT
+Description-Content-Type: UNKNOWN
 Description: 
         CFFI
         ====
@@ -27,5 +28,7 @@
 Classifier: Programming Language :: Python :: 3.2
 Classifier: Programming Language :: Python :: 3.3
 Classifier: Programming Language :: Python :: 3.4
+Classifier: Programming Language :: Python :: 3.5
+Classifier: Programming Language :: Python :: 3.6
 Classifier: Programming Language :: Python :: Implementation :: CPython
 Classifier: Programming Language :: Python :: Implementation :: PyPy
diff --git a/lib_pypy/cffi/__init__.py b/lib_pypy/cffi/__init__.py
--- a/lib_pypy/cffi/__init__.py
+++ b/lib_pypy/cffi/__init__.py
@@ -4,8 +4,8 @@
 from .api import FFI
 from .error import CDefError, FFIError, VerificationError, VerificationMissing
 
-__version__ = "1.11.3"
-__version_info__ = (1, 11, 3)
+__version__ = "1.11.4"
+__version_info__ = (1, 11, 4)
 
 # The verifier module file names are based on the CRC32 of a string that
 # contains the following version number.  It may be older than __version__
diff --git a/lib_pypy/cffi/_cffi_include.h b/lib_pypy/cffi/_cffi_include.h
--- a/lib_pypy/cffi/_cffi_include.h
+++ b/lib_pypy/cffi/_cffi_include.h
@@ -8,37 +8,20 @@
    the same works for the other two macros.  Py_DEBUG implies them,
    but not the other way around.
 
-   Issue #350: more mess: on Windows, with _MSC_VER, we have to define
-   Py_LIMITED_API even before including pyconfig.h.  In that case, we
-   guess what pyconfig.h will do to the macros above, and check our
-   guess after the #include.
+   Issue #350 is still open: on Windows, the code here causes it to link
+   with PYTHON36.DLL (for example) instead of PYTHON3.DLL.  A fix was
+   attempted in 164e526a5515 and 14ce6985e1c3, but reverted: virtualenv
+   does not make PYTHON3.DLL available, and so the "correctly" compiled
+   version would not run inside a virtualenv.  We will re-apply the fix
+   after virtualenv has been fixed for some time.  For explanation, see
+   issue #355.  For a workaround if you want PYTHON3.DLL and don't worry
+   about virtualenv, see issue #350.  See also 'py_limited_api' in
+   setuptools_ext.py.
 */
 #if !defined(_CFFI_USE_EMBEDDING) && !defined(Py_LIMITED_API)
-#  ifdef _MSC_VER
-#    if !defined(_DEBUG) && !defined(Py_DEBUG) && !defined(Py_TRACE_REFS) && !defined(Py_REF_DEBUG)
-#      define Py_LIMITED_API
-#    endif
-#    include <pyconfig.h>
-     /* sanity-check: Py_LIMITED_API will cause crashes if any of these
-        are also defined.  Normally, the Python file PC/pyconfig.h does not
-        cause any of these to be defined, with the exception that _DEBUG
-        causes Py_DEBUG.  Double-check that. */
-#    ifdef Py_LIMITED_API
-#      if defined(Py_DEBUG)
-#        error "pyconfig.h unexpectedly defines Py_DEBUG but _DEBUG is not set"
-#      endif
-#      if defined(Py_TRACE_REFS)
-#        error "pyconfig.h unexpectedly defines Py_TRACE_REFS"
-#      endif
-#      if defined(Py_REF_DEBUG)
-#        error "pyconfig.h unexpectedly defines Py_REF_DEBUG"
-#      endif
-#    endif
-#  else
-#    include <pyconfig.h>
-#    if !defined(Py_DEBUG) && !defined(Py_TRACE_REFS) && !defined(Py_REF_DEBUG)
-#      define Py_LIMITED_API
-#    endif
+#  include <pyconfig.h>
+#  if !defined(Py_DEBUG) && !defined(Py_TRACE_REFS) && !defined(Py_REF_DEBUG)
+#    define Py_LIMITED_API
 #  endif
 #endif
 
diff --git a/lib_pypy/cffi/_embedding.h b/lib_pypy/cffi/_embedding.h
--- a/lib_pypy/cffi/_embedding.h
+++ b/lib_pypy/cffi/_embedding.h
@@ -247,7 +247,7 @@
 
         if (f != NULL && f != Py_None) {
             PyFile_WriteString("\nFrom: " _CFFI_MODULE_NAME
-                               "\ncompiled with cffi version: 1.11.3"
+                               "\ncompiled with cffi version: 1.11.4"
                                "\n_cffi_backend module: ", f);
             modules = PyImport_GetModuleDict();
             mod = PyDict_GetItemString(modules, "_cffi_backend");
diff --git a/lib_pypy/greenlet.egg-info b/lib_pypy/greenlet.egg-info
--- a/lib_pypy/greenlet.egg-info
+++ b/lib_pypy/greenlet.egg-info
@@ -1,6 +1,6 @@
 Metadata-Version: 1.0
 Name: greenlet
-Version: 0.4.12
+Version: 0.4.13
 Summary: Lightweight in-process concurrent programming
 Home-page: https://github.com/python-greenlet/greenlet
 Author: Ralf Schmitt (for CPython), PyPy team
diff --git a/lib_pypy/greenlet.py b/lib_pypy/greenlet.py
--- a/lib_pypy/greenlet.py
+++ b/lib_pypy/greenlet.py
@@ -2,7 +2,7 @@
 import __pypy__
 import _continuation
 
-__version__ = "0.4.12"
+__version__ = "0.4.13"
 
 # ____________________________________________________________
 # Exceptions
diff --git a/pypy/doc/project-ideas.rst b/pypy/doc/project-ideas.rst
--- a/pypy/doc/project-ideas.rst
+++ b/pypy/doc/project-ideas.rst
@@ -1,26 +1,41 @@
 Potential Project List
 ======================
 
-Google Summer of Code 2017
---------------------------
+Getting involved
+----------------
 
-PyPy is generally open to new ideas for Google Summer of Code. We are happy to accept good ideas around the PyPy ecosystem. If you need more information about the ideas we propose for this year please join us on irc, channel #pypy (freenode). If you are unsure, but still think that you can make a valuable contribution to PyPy, dont hesitate to contact us on #pypy or on our mailing list.
-
+We are happy to discuss ideas around the PyPy ecosystem.
+If you are interested in playing with RPython or PyPy, or have a new idea not
+mentioned here, please join us on irc, channel #pypy (freenode). If you are
+unsure, but still think that you can make a valuable contribution to PyPy, don't
+hesitate to contact us on #pypy or on our mailing list. Here are some ideas
+to get you thinking:
 
 * **Optimize PyPy Memory Usage**:  Sometimes PyPy consumes more memory than CPython.
-  Two examples: 1) PyPy seems to allocate and keep alive more strings when importing a big Python modules.
-  2) The base interpreter size (cold VM started from a console) of PyPy is bigger than the one of CPython.
-  The general procedure of this project is: Run both CPython and PyPy of the same Python version and
-  compare the memory usage (using Massif or other tools).
+  Two examples: 1) PyPy seems to allocate and keep alive more strings when
+  importing big Python modules.  2) The base interpreter size (cold VM started
+  from a console) of PyPy is bigger than the one of CPython. The general
+  procedure of this project is: Run both CPython and PyPy of the same Python
+  version and compare the memory usage (using Massif or other tools).
   If PyPy consumes a lot more memory then find and resolve the issue.
 
-* **VMProf + memory profiler**: vmprof by now has a memory profiler that can be used already. We want extend it with more features and resolve some current limitations.
+* **VMProf + memory profiler**: vmprof is a statistical memory profiler. We
+  want to extend it with new features and resolve some current limitations.
 
-* **VMProf visualisations**: vmprof just shows a flame graph of the statistical profile and some more information about specific call sites. It would be very interesting to experiment with different information (such as memory, or even information generated by our jit compiler).
+* **VMProf visualisations**: vmprof shows a flame graph of the statistical
+  profile and some more information about specific call sites. It would be
+  very interesting to experiment with different information (such as memory,
+  or even information generated by our jit compiler).
 
-* **Explicit typing in RPython**: PyPy wants to have better ways to specify the signature and class attribute types in RPython. See more information about this topic below on this page.
+* **Explicit typing in RPython**: PyPy wants to have better ways to specify
+  the signature and class attribute types in RPython. See more information
+  about this topic below on this page.
 
-* **Virtual Reality (VR) visualisations for vmprof**: This is a very open topic with lots of freedom to explore data visualisation for profiles. No VR hardware would be needed for this project. Either universities provide such hardware or in any other case we potentially can lend the VR hardware setup.
+* **Virtual Reality (VR) visualisations for vmprof**: This is a very open
+  topic with lots of freedom to explore data visualisation for profiles. No
+  VR hardware would be needed for this project. Either universities provide
+  such hardware, or otherwise we can potentially lend out the VR hardware
+  setup.
 
 Simple tasks for newcomers
 --------------------------
@@ -34,6 +49,11 @@
 * Implement AF_XXX packet types of sockets:
   https://bitbucket.org/pypy/pypy/issue/1942/support-for-af_xxx-sockets
 
+* Help with documentation. One task would be to document rpython configuration
+  options currently listed only on :doc:`this site <configuration>` also on the
+  RPython_ documentation site.
+
+.. _RPython: http://rpython.readthedocs.io
 
 Mid-to-large tasks
 ------------------
@@ -201,7 +221,9 @@
 Introduce new benchmarks
 ------------------------
 
-We're usually happy to introduce new benchmarks. Please consult us
+Our benchmark runner_ is showing its age. We should merge it with the `CPython site`_.
+
+Additionally, we're usually happy to introduce new benchmarks. Please consult us
 before, but in general something that's real-world python code
 and is not already represented is welcome. We need at least a standalone
 script that can run without parameters. Example ideas (benchmarks need
@@ -209,6 +231,8 @@
 
 * `hg`
 
+.. _runner: http://speed.pypy.org
+.. _`CPython site`: https://speed.python.org/
 
 ======================================
 Make more python modules pypy-friendly
@@ -238,15 +262,6 @@
 using more pypy-friendly technologies, e.g. cffi. Here is a partial list of
 good work that needs to be finished:
 
-**matplotlib** https://github.com/matplotlib/matplotlib
-
-    Status: using the matplotlib branch of PyPy and the tkagg-cffi branch of
-    matplotlib from https://github.com/mattip/matplotlib/tree/tkagg-cffi, the
-    tkagg backend can function.
-
-    TODO: the matplotlib branch passes numpy arrays by value (copying all the
-    data), this proof-of-concept needs help to become completely compliant
-
 **wxPython** https://bitbucket.org/amauryfa/wxpython-cffi
 
     Status: A project by a PyPy developer to adapt the Phoenix sip build system to cffi
diff --git a/pypy/doc/whatsnew-head.rst b/pypy/doc/whatsnew-head.rst
--- a/pypy/doc/whatsnew-head.rst
+++ b/pypy/doc/whatsnew-head.rst
@@ -14,3 +14,33 @@
 .. branch: cpyext-datetime2
 
 Support ``tzinfo`` field on C-API datetime objects, fixes latest pandas HEAD
+
+
+.. branch: mapdict-size-limit
+
+Fix a corner case of mapdict: When an instance is used like a dict (using
+``setattr`` and ``getattr``, or ``.__dict__``) and a lot of attributes are
+added, then the performance using mapdict is linear in the number of
+attributes. This is now fixed (by switching to a regular dict after 80
+attributes).
+
+
+.. branch: cpyext-faster-arg-passing
+
+When using cpyext, improve the speed of passing certain objects from PyPy to C
+code, most notably None, True, False, types, and all instances of C-defined types.
+Before, a dict lookup was needed every time such an object crossed over; now it
+is just a field read.
+
+
+.. branch: 2634_datetime_timedelta_performance
+
+Improve datetime + timedelta performance.
+
+.. branch: memory-accounting
+
+Improve the way to describe memory.
+
+.. branch: msvc14
+
+Allow compilation with the Visual Studio 2017 compiler suite on Windows.
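
As a rough illustration of the mapdict-size-limit entry above, here is a small
Python sketch of the access pattern it targets (the attribute names and the
count of 200 are arbitrary; the 80-attribute threshold is the one quoted in
the entry):

    class Record(object):
        pass

    r = Record()
    for i in range(200):
        setattr(r, 'attr%d' % i, i)   # well past the ~80-attribute threshold;
                                      # PyPy now switches such an instance to a
                                      # regular dict, keeping lookups cheap
    assert r.attr150 == 150
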
diff --git a/pypy/doc/windows.rst b/pypy/doc/windows.rst
--- a/pypy/doc/windows.rst
+++ b/pypy/doc/windows.rst
@@ -39,10 +39,24 @@
 
 .. _Microsoft Visual C++ Compiler for Python 2.7: https://www.microsoft.com/en-us/download/details.aspx?id=44266
 
+Installing "Build Tools for Visual Studio 2017" (for Python 3)
+--------------------------------------------------------------
+
+As documented in the CPython Wiki_, CPython now recommends Visual C++ version
+14.0. A compact version of the compiler suite can be obtained from Microsoft_
+downloads; search the page for "Build Tools for Visual Studio 2017".
+
+You will also need to install the `Windows SDK`_ in order to use the
+`mt.exe` manifest compiler.
+
+.. _Wiki: https://wiki.python.org/moin/WindowsCompilers
+.. _Microsoft: https://www.visualstudio.com/downloads
+.. _`Windows SDK`: https://developer.microsoft.com/en-us/windows/downloads/windows-10-sdk
+
 Translating PyPy with Visual Studio
 -----------------------------------
 
-We routinely test translation using v9, also known as Visual Studio 2008.
+We routinely test translation of PyPy 2.7 using v9 and PyPy 3 with vc14.
 Other configurations may work as well.
 
 The translation scripts will set up the appropriate environment variables
@@ -82,8 +96,8 @@
 
 .. _build instructions: http://pypy.org/download.html#building-from-source
 
-Setting Up Visual Studio for building SSL in Python3
-----------------------------------------------------
+Setting Up Visual Studio 9.0 for building SSL in Python3
+--------------------------------------------------------
 
 On Python3, the ``ssl`` module is based on ``cffi``, and requires a build step after
 translation. However ``distutils`` does not support the Micorosft-provided Visual C
@@ -132,243 +146,14 @@
 Installing external packages
 ----------------------------
 
-On Windows, there is no standard place where to download, build and
-install third-party libraries.  We recommend installing them in the parent
-directory of the pypy checkout.  For example, if you installed pypy in
-``d:\pypy\trunk\`` (This directory contains a README file), the base
-directory is ``d:\pypy``. You must then set the
-INCLUDE, LIB and PATH (for DLLs) environment variables appropriately.
+We use a `repository`_ parallel to pypy to hold compiled binary versions of the
+build dependencies for Windows. As part of the `rpython` setup stage, environment
+variables will be set to use these dependencies. The repository has a README
+file on how to replicate the builds, and a branch for each supported platform. You
+may run the `get_externals.py` utility to check out the proper branch for your
+platform and PyPy version.
 
-
-Abridged method (using Visual Studio 2008)
-------------------------------------------
-
-Download the versions of all the external packages from
-https://bitbucket.org/pypy/pypy/downloads/local_59.zip
-(for post-5.8 builds) with sha256 checksum
-``6344230e90ab7a9cb84efbae1ba22051cdeeb40a31823e0808545b705aba8911``
-https://bitbucket.org/pypy/pypy/downloads/local_5.8.zip
-(to reproduce 5.8 builds) with sha256 checksum 
-``fbe769bf3a4ab6f5a8b0a05b61930fc7f37da2a9a85a8f609cf5a9bad06e2554`` or
-https://bitbucket.org/pypy/pypy/downloads/local_2.4.zip
-(for 2.4 release and later) or
-https://bitbucket.org/pypy/pypy/downloads/local.zip
-(for pre-2.4 versions)
-Then expand it into the base directory (base_dir) and modify your environment
-to reflect this::
-
-    set PATH=<base_dir>\bin;%PATH%
-    set INCLUDE=<base_dir>\include;%INCLUDE%
-    set LIB=<base_dir>\lib;%LIB%
-
-Now you should be good to go. If you choose this method, you do not need
-to download/build anything else. 
-
-Nonabridged method (building from scratch)
-------------------------------------------
-
-If you want to, you can rebuild everything from scratch by continuing.
-
-
-The Boehm garbage collector
-~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-This library is needed if you plan to use the ``--gc=boehm`` translation
-option (this is the default at some optimization levels like ``-O1``,
-but unneeded for high-performance translations like ``-O2``).
-You may get it at
-http://hboehm.info/gc/gc_source/gc-7.1.tar.gz
-
-Versions 7.0 and 7.1 are known to work; the 6.x series won't work with
-RPython. Unpack this folder in the base directory.
-The default GC_abort(...) function in misc.c will try to open a MessageBox.
-You may want to disable this with the following patch::
-
-    --- a/misc.c    Sun Apr 20 14:08:27 2014 +0300
-    +++ b/misc.c    Sun Apr 20 14:08:37 2014 +0300
-    @@ -1058,7 +1058,7 @@
-     #ifndef PCR
-      void GC_abort(const char *msg)
-       {
-       -#   if defined(MSWIN32)
-       +#   if 0 && defined(MSWIN32)
-              (void) MessageBoxA(NULL, msg, "Fatal error in gc", MB_ICONERROR|MB_OK);
-               #   else
-                      GC_err_printf("%s\n", msg);
-
-Then open a command prompt::
-
-    cd gc-7.1
-    nmake -f NT_THREADS_MAKEFILE
-    copy Release\gc.dll <somewhere in the PATH>
-
-
-The zlib compression library
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-Download http://www.gzip.org/zlib/zlib-1.2.11.tar.gz and extract it in
-the base directory.  Then compile::
-
-    cd zlib-1.2.11
-    nmake -f win32\Makefile.msc
-    copy zlib.lib <somewhere in LIB>
-    copy zlib.h zconf.h <somewhere in INCLUDE>
-    copy zlib1.dll <in PATH> # (needed for tests via ll2ctypes)
-
-
-The bz2 compression library
-~~~~~~~~~~~~~~~~~~~~~~~~~~~
-Get the same version of bz2 used by python and compile as a static library::
-
-    svn export http://svn.python.org/projects/external/bzip2-1.0.6
-    cd bzip2-1.0.6
-    nmake -f makefile.msc
-    copy libbz2.lib <somewhere in LIB>
-    copy bzlib.h <somewhere in INCLUDE>
-
-
-The sqlite3 database library
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-PyPy uses cffi to interact with sqlite3.dll. Only the dll is needed, the cffi
-wrapper is compiled when the module is imported for the first time.
-The sqlite3.dll should be version 3.8.11 for CPython2.7 compatablility.
-
-
-The expat XML parser
-~~~~~~~~~~~~~~~~~~~~
-
-CPython compiles expat from source as part of the build. PyPy uses the same
-code base, but expects to link to a static lib of expat. Here are instructions
-to reproduce the static lib in version 2.2.4.
-
-Download the source code of expat: https://github.com/libexpat/libexpat. 
-``git checkout`` the proper tag, in this case ``R_2_2_4``. Run
-``vcvars.bat`` to set up the visual compiler tools, and CD into the source
-directory. Create a file ``stdbool.h`` with the content
-
-.. code-block:: c
-
-    #pragma once
-
-    #define false   0
-    #define true    1
-
-    #define bool int
-
-and put it in a place on the ``INCLUDE`` path, or create it in the local
-directory and add ``.`` to the ``INCLUDE`` path::
-
-    SET INCLUDE=%INCLUDE%;.
-
-Then compile all the ``*.c`` file into ``*.obj``::
-
-    cl.exe /nologo /MD  /O2 *c /c
-    rem for debug
-    cl.exe /nologo /MD  /O0 /Ob0 /Zi *c /c
-
-You may need to move some variable declarations to the beginning of the
-function, to be compliant with C89 standard. Here is the diff for version 2.2.4
-
-.. code-block:: diff
-
-    diff --git a/expat/lib/xmltok.c b/expat/lib/xmltok.c
-    index 007aed0..a2dcaad 100644
-    --- a/expat/lib/xmltok.c
-    +++ b/expat/lib/xmltok.c
-    @@ -399,19 +399,21 @@ utf8_toUtf8(const ENCODING *UNUSED_P(enc),
-       /* Avoid copying partial characters (due to limited space). */
-       const ptrdiff_t bytesAvailable = fromLim - *fromP;
-       const ptrdiff_t bytesStorable = toLim - *toP;
-    +  const char * fromLimBefore;
-    +  ptrdiff_t bytesToCopy;
-       if (bytesAvailable > bytesStorable) {
-         fromLim = *fromP + bytesStorable;
-         output_exhausted = true;
-       }
-
-       /* Avoid copying partial characters (from incomplete input). */
-    -  const char * const fromLimBefore = fromLim;
-    +  fromLimBefore = fromLim;
-       align_limit_to_full_utf8_characters(*fromP, &fromLim);
-       if (fromLim < fromLimBefore) {
-         input_incomplete = true;
-       }
-
-    -  const ptrdiff_t bytesToCopy = fromLim - *fromP;
-    +  bytesToCopy = fromLim - *fromP;
-       memcpy((void *)*toP, (const void *)*fromP, (size_t)bytesToCopy);
-       *fromP += bytesToCopy;
-       *toP += bytesToCopy;
-
-
-Create ``libexpat.lib`` (for translation) and ``libexpat.dll`` (for tests)::
-
-    cl /LD *.obj libexpat.def /Felibexpat.dll 
-    rem for debug
-    rem cl /LDd /Zi *.obj libexpat.def /Felibexpat.dll
-
-    rem this will override the export library created in the step above
-    rem but tests do not need the export library, they load the dll dynamically
-    lib *.obj /out:libexpat.lib
-
-Then, copy 
-
-- ``libexpat.lib`` into LIB
-- both ``lib\expat.h`` and ``lib\expat_external.h`` in INCLUDE
-- ``libexpat.dll`` into PATH
-
-
-The OpenSSL library
-~~~~~~~~~~~~~~~~~~~
-
-OpenSSL needs a Perl interpreter to configure its makefile.  You may
-use the one distributed by ActiveState, or the one from cygwin.::
-
-    svn export http://svn.python.org/projects/external/openssl-1.0.2k
-    cd openssl-1.0.2k
-    perl Configure VC-WIN32 no-idea no-mdc2
-    ms\do_ms.bat
-    nmake -f ms\nt.mak install
-    copy out32\*.lib <somewhere in LIB>
-    xcopy /S include\openssl <somewhere in INCLUDE>
-
-For tests you will also need the dlls::
-    nmake -f ms\ntdll.mak install
-    copy out32dll\*.dll <somewhere in PATH>
-
-TkInter module support
-~~~~~~~~~~~~~~~~~~~~~~
-
-Note that much of this is taken from the cpython build process.
-Tkinter is imported via cffi, so the module is optional. To recreate the tcltk
-directory found for the release script, create the dlls, libs, headers and
-runtime by running::
-
-    svn export http://svn.python.org/projects/external/tcl-8.5.2.1 tcl85
-    svn export http://svn.python.org/projects/external/tk-8.5.2.0 tk85
-    cd tcl85\win
-    nmake -f makefile.vc COMPILERFLAGS=-DWINVER=0x0500 DEBUG=0 INSTALLDIR=..\..\tcltk clean all
-    nmake -f makefile.vc DEBUG=0 INSTALLDIR=..\..\tcltk install
-    cd ..\..\tk85\win
-    nmake -f makefile.vc COMPILERFLAGS=-DWINVER=0x0500 OPTS=noxp DEBUG=1 INSTALLDIR=..\..\tcltk TCLDIR=..\..\tcl85 clean all
-    nmake -f makefile.vc COMPILERFLAGS=-DWINVER=0x0500 OPTS=noxp DEBUG=1 INSTALLDIR=..\..\tcltk TCLDIR=..\..\tcl85 install
-    copy ..\..\tcltk\bin\* <somewhere in PATH>
-    copy ..\..\tcltk\lib\*.lib <somewhere in LIB>
-    xcopy /S ..\..\tcltk\include <somewhere in INCLUDE>
-
-The lzma compression library
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-Python 3.3 ship with CFFI wrappers for the lzma library, which can be
-downloaded from this site http://tukaani.org/xz. Python 3.3-3.5 use version
-5.0.5, a prebuilt version can be downloaded from
-http://tukaani.org/xz/xz-5.0.5-windows.zip, check the signature
-http://tukaani.org/xz/xz-5.0.5-windows.zip.sig
-
-Then copy the headers to the include directory, rename ``liblzma.a`` to 
-``lzma.lib`` and copy it to the lib directory
-
+.. _repository:  https://bitbucket.org/pypy/external
 
 Using the mingw compiler
 ------------------------
diff --git a/pypy/interpreter/astcompiler/test/test_astbuilder.py b/pypy/interpreter/astcompiler/test/test_astbuilder.py
--- a/pypy/interpreter/astcompiler/test/test_astbuilder.py
+++ b/pypy/interpreter/astcompiler/test/test_astbuilder.py
@@ -1408,3 +1408,11 @@
         exc = py.test.raises(SyntaxError, self.get_ast, input).value
         assert exc.msg == ("(unicode error) 'unicodeescape' codec can't decode"
                            " bytes in position 0-2: truncated \\xXX escape")
+
+    def test_decode_error_in_string_literal_correct_line(self):
+        input = "u'a' u'b'\\\n u'c' u'\\x'"
+        exc = py.test.raises(SyntaxError, self.get_ast, input).value
+        assert exc.msg == ("(unicode error) 'unicodeescape' codec can't decode"
+                           " bytes in position 0-1: truncated \\xXX escape")
+        assert exc.lineno == 2
+        assert exc.offset == 6
diff --git a/pypy/interpreter/baseobjspace.py b/pypy/interpreter/baseobjspace.py
--- a/pypy/interpreter/baseobjspace.py
+++ b/pypy/interpreter/baseobjspace.py
@@ -207,6 +207,21 @@
     def _set_mapdict_storage_and_map(self, storage, map):
         raise NotImplementedError
 
+
+    # -------------------------------------------------------------------
+    # cpyext support
+    # these functions will only be seen by the annotator if we translate
+    # with the cpyext module
+
+    def _cpyext_as_pyobj(self, space):
+        from pypy.module.cpyext.pyobject import w_root_as_pyobj
+        return w_root_as_pyobj(self, space)
+
+    def _cpyext_attach_pyobj(self, space, py_obj):
+        from pypy.module.cpyext.pyobject import w_root_attach_pyobj
+        return w_root_attach_pyobj(self, space, py_obj)
+
+
     # -------------------------------------------------------------------
 
     def is_w(self, space, w_other):
diff --git a/pypy/interpreter/test/test_unicodehelper.py b/pypy/interpreter/test/test_unicodehelper.py
--- a/pypy/interpreter/test/test_unicodehelper.py
+++ b/pypy/interpreter/test/test_unicodehelper.py
@@ -1,6 +1,7 @@
 import py
 import pytest
 import struct
+import sys
 from pypy.interpreter.unicodehelper import (
     encode_utf8, decode_utf8, unicode_encode_utf_32_be, str_decode_utf_32_be)
 from pypy.interpreter.unicodehelper import encode_utf8sp, decode_utf8sp
@@ -51,7 +52,10 @@
     py.test.raises(Hit, decode_utf8, space, "\xed\xb0\x80")
     py.test.raises(Hit, decode_utf8, space, "\xed\xa0\x80\xed\xb0\x80")
     got = decode_utf8(space, "\xf0\x90\x80\x80")
-    assert map(ord, got) == [0x10000]
+    if sys.maxunicode > 65535:
+        assert map(ord, got) == [0x10000]
+    else:
+        assert map(ord, got) == [55296, 56320]
 
 def test_decode_utf8_allow_surrogates():
     sp = FakeSpace()
diff --git a/pypy/module/__pypy__/__init__.py b/pypy/module/__pypy__/__init__.py
--- a/pypy/module/__pypy__/__init__.py
+++ b/pypy/module/__pypy__/__init__.py
@@ -87,7 +87,6 @@
         'hidden_applevel'           : 'interp_magic.hidden_applevel',
         'lookup_special'            : 'interp_magic.lookup_special',
         'do_what_I_mean'            : 'interp_magic.do_what_I_mean',
-        'validate_fd'               : 'interp_magic.validate_fd',
         'resizelist_hint'           : 'interp_magic.resizelist_hint',
         'newlist_hint'              : 'interp_magic.newlist_hint',
         'add_memory_pressure'       : 'interp_magic.add_memory_pressure',
diff --git a/pypy/module/__pypy__/interp_magic.py b/pypy/module/__pypy__/interp_magic.py
--- a/pypy/module/__pypy__/interp_magic.py
+++ b/pypy/module/__pypy__/interp_magic.py
@@ -105,14 +105,6 @@
         raise oefmt(space.w_TypeError, "expecting dict or list or set object")
     return space.newtext(name)
 
-
- at unwrap_spec(fd='c_int')
-def validate_fd(space, fd):
-    try:
-        rposix.validate_fd(fd)
-    except OSError as e:
-        raise wrap_oserror(space, e)
-
 @unwrap_spec(sizehint=int)
 def resizelist_hint(space, w_list, sizehint):
     """ Reallocate the underlying storage of the argument list to sizehint """
@@ -134,7 +126,7 @@
                   space.newbool(debug))
 
 @unwrap_spec(estimate=int)
-def add_memory_pressure(estimate):
+def add_memory_pressure(space, estimate):
     """ Add memory pressure of estimate bytes. Useful when calling a C function
     that internally allocates a big chunk of memory. This instructs the GC to
     garbage collect sooner than it would otherwise."""
diff --git a/pypy/module/_cffi_backend/__init__.py b/pypy/module/_cffi_backend/__init__.py
--- a/pypy/module/_cffi_backend/__init__.py
+++ b/pypy/module/_cffi_backend/__init__.py
@@ -3,7 +3,7 @@
 from rpython.rlib import rdynload, clibffi
 from rpython.rtyper.lltypesystem import rffi
 
-VERSION = "1.11.3"
+VERSION = "1.11.4"
 
 FFI_DEFAULT_ABI = clibffi.FFI_DEFAULT_ABI
 try:
diff --git a/pypy/module/_cffi_backend/allocator.py b/pypy/module/_cffi_backend/allocator.py
--- a/pypy/module/_cffi_backend/allocator.py
+++ b/pypy/module/_cffi_backend/allocator.py
@@ -21,13 +21,13 @@
         if self.w_alloc is None:
             if self.should_clear_after_alloc:
                 ptr = lltype.malloc(rffi.CCHARP.TO, datasize,
-                                    flavor='raw', zero=True,
-                                    add_memory_pressure=True)
+                                    flavor='raw', zero=True)
             else:
                 ptr = lltype.malloc(rffi.CCHARP.TO, datasize,
-                                    flavor='raw', zero=False,
-                                    add_memory_pressure=True)
-            return cdataobj.W_CDataNewStd(space, ptr, ctype, length)
+                                    flavor='raw', zero=False)
+            w_res = cdataobj.W_CDataNewStd(space, ptr, ctype, length)
+            rgc.add_memory_pressure(datasize, w_res)
+            return w_res
         else:
             w_raw_cdata = space.call_function(self.w_alloc,
                                               space.newint(datasize))
@@ -53,7 +53,7 @@
             if self.w_free is not None:
                 res.w_free = self.w_free
                 res.register_finalizer(space)
-            rgc.add_memory_pressure(datasize)
+            rgc.add_memory_pressure(datasize, res)
             return res
 
     @unwrap_spec(w_init=WrappedDefault(None))
diff --git a/pypy/module/_cffi_backend/cdataobj.py b/pypy/module/_cffi_backend/cdataobj.py
--- a/pypy/module/_cffi_backend/cdataobj.py
+++ b/pypy/module/_cffi_backend/cdataobj.py
@@ -447,7 +447,10 @@
             with self as ptr:
                 w_res = W_CDataGCP(space, ptr, self.ctype, self, w_destructor)
         if size != 0:
-            rgc.add_memory_pressure(size)
+            if isinstance(w_res, W_CDataGCP):
+                rgc.add_memory_pressure(size, w_res)
+            else:
+                rgc.add_memory_pressure(size, self)
         return w_res
 
     def unpack(self, length):
diff --git a/pypy/module/_cffi_backend/test/_backend_test_c.py b/pypy/module/_cffi_backend/test/_backend_test_c.py
--- a/pypy/module/_cffi_backend/test/_backend_test_c.py
+++ b/pypy/module/_cffi_backend/test/_backend_test_c.py
@@ -1,7 +1,7 @@
 # ____________________________________________________________
 
 import sys
-assert __version__ == "1.11.3", ("This test_c.py file is for testing a version"
+assert __version__ == "1.11.4", ("This test_c.py file is for testing a version"
                                  " of cffi that differs from the one that we"
                                  " get from 'import _cffi_backend'")
 if sys.version_info < (3,):
diff --git a/pypy/module/_codecs/test/test_codecs.py b/pypy/module/_codecs/test/test_codecs.py
--- a/pypy/module/_codecs/test/test_codecs.py
+++ b/pypy/module/_codecs/test/test_codecs.py
@@ -589,12 +589,17 @@
         assert b'\x00'.decode('unicode-internal', 'ignore') == ''
 
     def test_backslashreplace(self):
+        import sys
         import codecs
         sin = u"a\xac\u1234\u20ac\u8000\U0010ffff"
-        expected = b"a\\xac\\u1234\\u20ac\\u8000\\U0010ffff"
-        assert sin.encode('ascii', 'backslashreplace') == expected
-        expected = b"a\xac\\u1234\xa4\\u8000\\U0010ffff"
-        assert sin.encode("iso-8859-15", "backslashreplace") == expected
+        if sys.maxunicode > 65535:
+            expected_ascii = b"a\\xac\\u1234\\u20ac\\u8000\\U0010ffff"
+            expected_8859 = b"a\xac\\u1234\xa4\\u8000\\U0010ffff"
+        else:
+            expected_ascii = b"a\\xac\\u1234\\u20ac\\u8000\\udbff\\udfff"
+            expected_8859 = b"a\xac\\u1234\xa4\\u8000\\udbff\\udfff"
+        assert sin.encode('ascii', 'backslashreplace') == expected_ascii
+        assert sin.encode("iso-8859-15", "backslashreplace") == expected_8859
 
         assert 'a\xac\u1234\u20ac\u8000'.encode('ascii', 'backslashreplace') == b'a\\xac\u1234\u20ac\u8000'
         assert b'\x00\x60\x80'.decode(
diff --git a/pypy/module/_io/test/test_interp_textio.py b/pypy/module/_io/test/test_interp_textio.py
--- a/pypy/module/_io/test/test_interp_textio.py
+++ b/pypy/module/_io/test/test_interp_textio.py
@@ -1,6 +1,6 @@
 import pytest
 try:
-    from hypothesis import given, strategies as st
+    from hypothesis import given, strategies as st, settings
 except ImportError:
     pytest.skip("hypothesis required")
 import os
@@ -29,6 +29,7 @@
 
 @given(data=st_readline(),
        mode=st.sampled_from(['\r', '\n', '\r\n', '']))
+ at settings(deadline=None)
 def test_readline(space, data, mode):
     txt, limits = data
     w_stream = W_BytesIO(space)
diff --git a/pypy/module/_multiprocessing/__init__.py b/pypy/module/_multiprocessing/__init__.py
--- a/pypy/module/_multiprocessing/__init__.py
+++ b/pypy/module/_multiprocessing/__init__.py
@@ -16,4 +16,4 @@
         interpleveldefs['recv'] = 'interp_win32_py3.multiprocessing_recv'
         interpleveldefs['send'] = 'interp_win32_py3.multiprocessing_send'
     else:
-        interpleveldefs['sem_unlink'] = 'interp_semaphore.semaphore_unlink',
+        interpleveldefs['sem_unlink'] = 'interp_semaphore.semaphore_unlink'
diff --git a/pypy/module/array/interp_array.py b/pypy/module/array/interp_array.py
--- a/pypy/module/array/interp_array.py
+++ b/pypy/module/array/interp_array.py
@@ -1202,12 +1202,16 @@
             start, stop, step, size = self.space.decode_index4(w_idx, self.len)
             assert step != 0
             if w_item.len != size or self is w_item:
-                # XXX this is a giant slow hack
-                w_lst = self.descr_tolist(space)
-                w_item = space.call_method(w_item, 'tolist')
-                space.setitem(w_lst, w_idx, w_item)
-                self.setlen(0)
-                self.fromsequence(w_lst)
+                if start == self.len and step > 0:
+                    # we actually want simply extend()
+                    self.extend(w_item)
+                else:
+                    # XXX this is a giant slow hack
+                    w_lst = self.descr_tolist(space)
+                    w_item = space.call_method(w_item, 'tolist')
+                    space.setitem(w_lst, w_idx, w_item)
+                    self.setlen(0)
+                    self.fromsequence(w_lst)
             else:
                 j = 0
                 buf = self.get_buffer()
diff --git a/pypy/module/array/test/test_array.py b/pypy/module/array/test/test_array.py
--- a/pypy/module/array/test/test_array.py
+++ b/pypy/module/array/test/test_array.py
@@ -300,6 +300,12 @@
         b = self.array('u', u'hi')
         assert len(b) == 2 and b[0] == 'h' and b[1] == 'i'
 
+    def test_setslice_to_extend(self):
+        a = self.array('i')
+        a[0:1] = self.array('i', [9])
+        a[1:5] = self.array('i', [99])
+        assert list(a) == [9, 99]
+
     def test_sequence(self):
         a = self.array('i', [1, 2, 3, 4])
         assert len(a) == 4
diff --git a/pypy/module/cpyext/api.py b/pypy/module/cpyext/api.py
--- a/pypy/module/cpyext/api.py
+++ b/pypy/module/cpyext/api.py
@@ -31,7 +31,7 @@
 from pypy.module.__builtin__.descriptor import W_Property
 #from pypy.module.micronumpy.base import W_NDimArray
 from rpython.rlib.entrypoint import entrypoint_lowlevel
-from rpython.rlib.rposix import is_valid_fd, validate_fd
+from rpython.rlib.rposix import FdValidator
 from rpython.rlib.unroll import unrolling_iterable
 from rpython.rlib.objectmodel import specialize
 from pypy.module import exceptions
@@ -97,25 +97,24 @@
     dash = ''
 
 def fclose(fp):
-    if not is_valid_fd(c_fileno(fp)):
+    try:
+        with FdValidator(c_fileno(fp)):
+            return c_fclose(fp)
+    except IOError:
         return -1
-    return c_fclose(fp)
 
 def fwrite(buf, sz, n, fp):
-    validate_fd(c_fileno(fp))
-    return c_fwrite(buf, sz, n, fp)
+    with FdValidator(c_fileno(fp)):
+        return c_fwrite(buf, sz, n, fp)
 
 def fread(buf, sz, n, fp):
-    validate_fd(c_fileno(fp))
-    return c_fread(buf, sz, n, fp)
+    with FdValidator(c_fileno(fp)):
+        return c_fread(buf, sz, n, fp)
 
 _feof = rffi.llexternal('feof', [FILEP], rffi.INT)
 def feof(fp):
-    validate_fd(c_fileno(fp))
-    return _feof(fp)
-
-def is_valid_fp(fp):
-    return is_valid_fd(c_fileno(fp))
+    with FdValidator(c_fileno(fp)):
+        return _feof(fp)
 
 pypy_decl = 'pypy_decl.h'
 udir.join(pypy_decl).write("/* Will be filled later */\n")
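
The fclose/fwrite/fread/feof wrappers above replace the old is_valid_fd /
validate_fd calls with the FdValidator context manager from
rpython.rlib.rposix. A plain-Python sketch of that shape, purely to illustrate
the validate-then-run pattern (this stand-in is not the RPython class):

    import os

    class FdValidatorSketch(object):
        """Illustrative stand-in for rpython.rlib.rposix.FdValidator."""
        def __init__(self, fd):
            self.fd = fd
        def __enter__(self):
            os.fstat(self.fd)      # raises OSError/IOError if fd is not open
            return self
        def __exit__(self, *exc_info):
            return False           # never swallow exceptions from the body

    def fclose_checked(fileobj):
        try:
            with FdValidatorSketch(fileobj.fileno()):
                fileobj.close()
                return 0
        except (IOError, OSError):
            return -1              # mirrors fclose() in the hunk above
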
diff --git a/pypy/module/cpyext/eval.py b/pypy/module/cpyext/eval.py
--- a/pypy/module/cpyext/eval.py
+++ b/pypy/module/cpyext/eval.py
@@ -5,7 +5,7 @@
 from rpython.rlib.rarithmetic import widen
 from pypy.module.cpyext.api import (
     cpython_api, CANNOT_FAIL, CONST_STRING, FILEP, fread, feof, Py_ssize_tP,
-    cpython_struct, is_valid_fp)
+    cpython_struct)
 from pypy.module.cpyext.pyobject import PyObject
 from pypy.module.cpyext.pyerrors import PyErr_SetFromErrno
 from pypy.module.cpyext.funcobject import PyCodeObject
@@ -155,22 +155,19 @@
     BUF_SIZE = 8192
     source = ""
     filename = rffi.charp2str(filename)
-    buf = lltype.malloc(rffi.CCHARP.TO, BUF_SIZE, flavor='raw')
-    if not is_valid_fp(fp):
-        lltype.free(buf, flavor='raw')
-        PyErr_SetFromErrno(space, space.w_IOError)
-        return None
-    try:
+    with rffi.scoped_alloc_buffer(BUF_SIZE) as buf:
         while True:
-            count = fread(buf, 1, BUF_SIZE, fp)
+            try:
+                count = fread(buf.raw, 1, BUF_SIZE, fp)
+            except OSError:
+                PyErr_SetFromErrno(space, space.w_IOError)
+                return
             count = rffi.cast(lltype.Signed, count)
-            source += rffi.charpsize2str(buf, count)
+            source += rffi.charpsize2str(buf.raw, count)
             if count < BUF_SIZE:
                 if feof(fp):
                     break
                 PyErr_SetFromErrno(space, space.w_IOError)
-    finally:
-        lltype.free(buf, flavor='raw')
     return run_string(space, source, filename, start, w_globals, w_locals)
 
 # Undocumented function!
diff --git a/pypy/module/cpyext/include/longobject.h b/pypy/module/cpyext/include/longobject.h
--- a/pypy/module/cpyext/include/longobject.h
+++ b/pypy/module/cpyext/include/longobject.h
@@ -20,6 +20,9 @@
 
 #define PyLong_AS_LONG(op) PyLong_AsLong(op)
 
+#define _PyLong_AsByteArray(v, bytes, n, little_endian, is_signed)   \
+    _PyLong_AsByteArrayO((PyObject *)(v), bytes, n, little_endian, is_signed)
+
 #ifdef __cplusplus
 }
 #endif
diff --git a/pypy/module/cpyext/longobject.py b/pypy/module/cpyext/longobject.py
--- a/pypy/module/cpyext/longobject.py
+++ b/pypy/module/cpyext/longobject.py
@@ -3,8 +3,8 @@
     cpython_api, PyObject, build_type_checkers_flags, Py_ssize_t,
     CONST_STRING, ADDR, CANNOT_FAIL)
 from pypy.objspace.std.longobject import W_LongObject
-from pypy.interpreter.error import OperationError
-from rpython.rlib.rbigint import rbigint
+from pypy.interpreter.error import OperationError, oefmt
+from rpython.rlib.rbigint import rbigint, InvalidSignednessError
 
 PyLong_Check, PyLong_CheckExact = build_type_checkers_flags("Long")
 
@@ -251,3 +251,26 @@
         byteorder = 'big'
     result = rbigint.frombytes(s, byteorder, signed != 0)
     return space.newlong_from_rbigint(result)
+
+ at cpython_api([PyObject, rffi.UCHARP, rffi.SIZE_T,
+              rffi.INT_real, rffi.INT_real], rffi.INT_real, error=-1)
+def _PyLong_AsByteArrayO(space, w_v, bytes, n, little_endian, is_signed):
+    n = rffi.cast(lltype.Signed, n)
+    little_endian = rffi.cast(lltype.Signed, little_endian)
+    signed = rffi.cast(lltype.Signed, is_signed) != 0
+    byteorder = 'little' if little_endian else 'big'
+    bigint = space.bigint_w(w_v)
+    try:
+        digits = bigint.tobytes(n, byteorder, signed)
+    except InvalidSignednessError:     # < 0 but not 'signed'
+        # in this case, CPython raises OverflowError even though the C
+        # comments say it should raise TypeError
+        raise oefmt(space.w_OverflowError,
+                    "can't convert negative long to unsigned")
+    except OverflowError:
+        raise oefmt(space.w_OverflowError,
+                    "long too big to convert")
+    assert len(digits) == n
+    for i in range(n):
+        bytes[i] = rffi.cast(rffi.UCHAR, digits[i])
+    return 0
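
At the Python level, the conversion implemented by _PyLong_AsByteArrayO above
matches int.to_bytes; a short sketch using the same values as the
test_asbytearray test further down (Python 3 semantics assumed):

    value = 0x41BC9A
    # little-endian, 4 bytes, unsigned -- as in as_bytearray(0x41BC9A, 4, True, False)
    assert value.to_bytes(4, 'little', signed=False) == b'\x9a\xbc\x41\x00'
    # big-endian, 4 bytes, unsigned
    assert value.to_bytes(4, 'big', signed=False) == b'\x00\x41\xbc\x9a'
    try:
        (-1).to_bytes(2, 'little', signed=False)
    except OverflowError:
        pass    # negative value but unsigned requested, as in the
                # InvalidSignednessError branch above
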
diff --git a/pypy/module/cpyext/pyobject.py b/pypy/module/cpyext/pyobject.py
--- a/pypy/module/cpyext/pyobject.py
+++ b/pypy/module/cpyext/pyobject.py
@@ -10,6 +10,8 @@
     PyVarObject, Py_ssize_t, init_function, cts)
 from pypy.module.cpyext.state import State
 from pypy.objspace.std.typeobject import W_TypeObject
+from pypy.objspace.std.noneobject import W_NoneObject
+from pypy.objspace.std.boolobject import W_BoolObject
 from pypy.objspace.std.objectobject import W_ObjectObject
 from rpython.rlib.objectmodel import specialize, we_are_translated
 from rpython.rlib.objectmodel import keepalive_until_here
@@ -21,6 +23,52 @@
 #________________________________________________________
 # type description
 
+class W_BaseCPyObject(W_ObjectObject):
+    """ A subclass of W_ObjectObject that has one field for directly storing
+    the link from the w_obj to the cpy ref. This is only used for C-defined
+    types. """
+
+
+def check_true(s_arg, bookeeper):
+    assert s_arg.const is True
+
+def w_root_as_pyobj(w_obj, space):
+    from rpython.rlib.debug import check_annotation
+    # make sure that translation crashes if we see this while not translating
+    # with cpyext
+    check_annotation(space.config.objspace.usemodules.cpyext, check_true)
+    # default implementation of _cpyext_as_pyobj
+    return rawrefcount.from_obj(PyObject, w_obj)
+
+def w_root_attach_pyobj(w_obj, space, py_obj):
+    from rpython.rlib.debug import check_annotation
+    check_annotation(space.config.objspace.usemodules.cpyext, check_true)
+    assert space.config.objspace.usemodules.cpyext
+    # default implementation of _cpyext_attach_pyobj
+    rawrefcount.create_link_pypy(w_obj, py_obj)
+
+
+def add_direct_pyobj_storage(cls):
+    """ Add the necessary methods to a class to store a reference to the py_obj
+    on its instances directly. """
+
+    cls._cpy_ref = lltype.nullptr(PyObject.TO)
+
+    def _cpyext_as_pyobj(self, space):
+        return self._cpy_ref
+    cls._cpyext_as_pyobj = _cpyext_as_pyobj
+
+    def _cpyext_attach_pyobj(self, space, py_obj):
+        self._cpy_ref = py_obj
+        rawrefcount.create_link_pyobj(self, py_obj)
+    cls._cpyext_attach_pyobj = _cpyext_attach_pyobj
+
+add_direct_pyobj_storage(W_BaseCPyObject)
+add_direct_pyobj_storage(W_TypeObject)
+add_direct_pyobj_storage(W_NoneObject)
+add_direct_pyobj_storage(W_BoolObject)
+
+
 class BaseCpyTypedescr(object):
     basestruct = PyObject.TO
     W_BaseObject = W_ObjectObject
@@ -66,8 +114,12 @@
 
     def realize(self, space, obj):
         w_type = from_ref(space, rffi.cast(PyObject, obj.c_ob_type))
+        assert isinstance(w_type, W_TypeObject)
         try:
-            w_obj = space.allocate_instance(self.W_BaseObject, w_type)
+            if w_type.flag_cpytype:
+                w_obj = space.allocate_instance(W_BaseCPyObject, w_type)
+            else:
+                w_obj = space.allocate_instance(self.W_BaseObject, w_type)
         except OperationError as e:
             if e.match(space, space.w_TypeError):
                 raise oefmt(space.w_SystemError,
@@ -76,6 +128,9 @@
                             w_type)
             raise
         track_reference(space, obj, w_obj)
+        if w_type.flag_cpytype:
+            assert isinstance(w_obj, W_BaseCPyObject)
+            w_obj._cpy_ref = obj
         return w_obj
 
 typedescr_cache = {}
@@ -186,12 +241,12 @@
     Ties together a PyObject and an interpreter object.
     The PyObject's refcnt is increased by REFCNT_FROM_PYPY.
     The reference in 'py_obj' is not stolen!  Remember to decref()
-    it is you need to.
+    it if you need to.
     """
     # XXX looks like a PyObject_GC_TRACK
     assert py_obj.c_ob_refcnt < rawrefcount.REFCNT_FROM_PYPY
     py_obj.c_ob_refcnt += rawrefcount.REFCNT_FROM_PYPY
-    rawrefcount.create_link_pypy(w_obj, py_obj)
+    w_obj._cpyext_attach_pyobj(space, py_obj)
 
 
 w_marker_deallocating = W_Root()
@@ -237,7 +292,7 @@
 @jit.dont_look_inside
 def as_pyobj(space, w_obj, w_userdata=None, immortal=False):
     """
-    Returns a 'PyObject *' representing the given intepreter object.
+    Returns a 'PyObject *' representing the given interpreter object.
     This doesn't give a new reference, but the returned 'PyObject *'
     is valid at least as long as 'w_obj' is.  **To be safe, you should
     use keepalive_until_here(w_obj) some time later.**  In case of
@@ -245,7 +300,7 @@
     """
     assert not is_pyobj(w_obj)
     if w_obj is not None:
-        py_obj = rawrefcount.from_obj(PyObject, w_obj)
+        py_obj = w_obj._cpyext_as_pyobj(space)
         if not py_obj:
             py_obj = create_ref(space, w_obj, w_userdata, immortal=immortal)
         #
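
The whatsnew entry for cpyext-faster-arg-passing earlier in this diff describes
the effect of the pyobject.py changes above: crossing from PyPy to C now reads
a field instead of doing a dict lookup. A conceptual plain-Python sketch (not
RPython; all names here are illustrative):

    side_table = {}                       # the old rawrefcount-style mapping

    class WithDirectRef(object):
        _cpy_ref = None                   # class-level default, analogous to
                                          # the lltype.nullptr default above

    def as_pyobj_old(w_obj):
        return side_table.get(id(w_obj))  # dict lookup on every crossing

    def as_pyobj_new(w_obj):
        return w_obj._cpy_ref             # plain field read
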
diff --git a/pypy/module/cpyext/sequence.py b/pypy/module/cpyext/sequence.py
--- a/pypy/module/cpyext/sequence.py
+++ b/pypy/module/cpyext/sequence.py
@@ -267,7 +267,7 @@
     raise oefmt(space.w_ValueError, "sequence.index(x): x not in sequence")
 
 class CPyListStrategy(ListStrategy):
-    erase, unerase = rerased.new_erasing_pair("empty")
+    erase, unerase = rerased.new_erasing_pair("cpylist")
     erase = staticmethod(erase)
     unerase = staticmethod(unerase)
 
diff --git a/pypy/module/cpyext/stubs.py b/pypy/module/cpyext/stubs.py
--- a/pypy/module/cpyext/stubs.py
+++ b/pypy/module/cpyext/stubs.py
@@ -1577,13 +1577,6 @@
     """
     raise NotImplementedError
 
- at cpython_api([PyObject], PyObject)
-def PyUnicode_AsUTF32String(space, unicode):
-    """Return a Python byte string using the UTF-32 encoding in native byte
-    order. The string always starts with a BOM mark.  Error handling is "strict".
-    Return NULL if an exception was raised by the codec."""
-    raise NotImplementedError
-
 @cpython_api([rffi.CCHARP, Py_ssize_t, rffi.CCHARP, rffi.INTP, Py_ssize_t], PyObject)
 def PyUnicode_DecodeUTF16Stateful(space, s, size, errors, byteorder, consumed):
     """If consumed is NULL, behave like PyUnicode_DecodeUTF16(). If
@@ -1612,13 +1605,6 @@
     Return NULL if an exception was raised by the codec."""
     raise NotImplementedError
 
- at cpython_api([PyObject], PyObject)
-def PyUnicode_AsUTF16String(space, unicode):
-    """Return a Python byte string using the UTF-16 encoding in native byte
-    order. The string always starts with a BOM mark.  Error handling is "strict".
-    Return NULL if an exception was raised by the codec."""
-    raise NotImplementedError
-
 @cpython_api([rffi.CCHARP, Py_ssize_t, rffi.CCHARP], PyObject)
 def PyUnicode_DecodeUTF7(space, s, size, errors):
     """Create a Unicode object by decoding size bytes of the UTF-7 encoded string
diff --git a/pypy/module/cpyext/test/test_eval.py b/pypy/module/cpyext/test/test_eval.py
--- a/pypy/module/cpyext/test/test_eval.py
+++ b/pypy/module/cpyext/test/test_eval.py
@@ -13,7 +13,7 @@
     PyEval_GetBuiltins, PyEval_GetLocals, PyEval_GetGlobals,
     _PyEval_SliceIndex)
 from pypy.module.cpyext.api import (
-    c_fopen, c_fclose, c_fileno, Py_ssize_tP, is_valid_fd)
+    c_fopen, c_fclose, c_fileno, Py_ssize_tP)
 from pypy.module.cpyext.pyobject import get_w_obj_and_decref
 from pypy.interpreter.gateway import interp2app
 from pypy.interpreter.error import OperationError
@@ -150,7 +150,6 @@
         os.close(c_fileno(fp))
         with raises_w(space, IOError):
             PyRun_File(space, fp, filename, Py_file_input, w_globals, w_locals)
-        if is_valid_fd(c_fileno(fp)):
             c_fclose(fp)
         rffi.free_charp(filename)
 
diff --git a/pypy/module/cpyext/test/test_longobject.py b/pypy/module/cpyext/test/test_longobject.py
--- a/pypy/module/cpyext/test/test_longobject.py
+++ b/pypy/module/cpyext/test/test_longobject.py
@@ -259,6 +259,48 @@
         assert module.from_bytearray(False, False) == 0x9ABC41
         assert module.from_bytearray(False, True) == -0x6543BF
 
+    def test_asbytearray(self):
+        module = self.import_extension('foo', [
+            ("as_bytearray", "METH_VARARGS",
+             """
+                 PyObject *result;
+                 PyLongObject *o;
+                 int n, little_endian, is_signed;
+                 unsigned char *bytes;
+                 if (!PyArg_ParseTuple(args, "O!iii", &PyLong_Type, &o, &n,
+                         &little_endian, &is_signed))
+                     return NULL;
+                 bytes = malloc(n);
+                 if (_PyLong_AsByteArray(o, bytes, (size_t)n,
+                                         little_endian, is_signed) != 0)
+                 {
+                     free(bytes);
+                     return NULL;
+                 }
+                 result = PyString_FromStringAndSize((const char *)bytes, n);
+                 free(bytes);
+                 return result;
+             """),
+            ])
+        s = module.as_bytearray(0x41BC9A, 4, True, False)
+        assert s == "\x9A\xBC\x41\x00"
+        s = module.as_bytearray(0x41BC9A, 4, False, False)
+        assert s == "\x00\x41\xBC\x9A"
+        s = module.as_bytearray(0x41BC9A, 3, True, False)
+        assert s == "\x9A\xBC\x41"
+        s = module.as_bytearray(0x41BC9A, 3, True, True)
+        assert s == "\x9A\xBC\x41"
+        s = module.as_bytearray(0x9876, 2, True, False)
+        assert s == "\x76\x98"
+        s = module.as_bytearray(0x9876 - 0x10000, 2, True, True)
+        assert s == "\x76\x98"
+        raises(OverflowError, module.as_bytearray,
+                              0x9876, 2, False, True)
+        raises(OverflowError, module.as_bytearray,
+                              -1, 2, True, False)
+        raises(OverflowError, module.as_bytearray,
+                              0x1234567, 3, True, False)
+
     def test_fromunicode(self):
         module = self.import_extension('foo', [
             ("from_unicode", "METH_O",
diff --git a/pypy/module/cpyext/test/test_object.py b/pypy/module/cpyext/test/test_object.py
--- a/pypy/module/cpyext/test/test_object.py
+++ b/pypy/module/cpyext/test/test_object.py
@@ -218,7 +218,7 @@
 
         if not cls.runappdirect:
             cls.total_mem = 0
-            def add_memory_pressure(estimate):
+            def add_memory_pressure(estimate, object=None):
                 assert estimate >= 0
                 cls.total_mem += estimate
             cls.orig_add_memory_pressure = [rgc.add_memory_pressure]
diff --git a/pypy/module/cpyext/test/test_typeobject.py b/pypy/module/cpyext/test/test_typeobject.py
--- a/pypy/module/cpyext/test/test_typeobject.py
+++ b/pypy/module/cpyext/test/test_typeobject.py
@@ -3,13 +3,23 @@
 from pypy.module.cpyext.test.test_cpyext import AppTestCpythonExtensionBase
 from pypy.module.cpyext.test.test_api import BaseApiTest
 from pypy.module.cpyext.api import generic_cpy_call
-from pypy.module.cpyext.pyobject import make_ref, from_ref, decref
+from pypy.module.cpyext.pyobject import make_ref, from_ref, decref, as_pyobj
 from pypy.module.cpyext.typeobject import cts, PyTypeObjectPtr
 
 import sys
 import pytest
 
 class AppTestTypeObject(AppTestCpythonExtensionBase):
+
+    def setup_class(cls):
+        AppTestCpythonExtensionBase.setup_class.im_func(cls)
+        def _check_uses_shortcut(w_inst):
+            res = hasattr(w_inst, "_cpy_ref") and w_inst._cpy_ref
+            res = res and as_pyobj(cls.space, w_inst) == w_inst._cpy_ref
+            return cls.space.newbool(res)
+        cls.w__check_uses_shortcut = cls.space.wrap(
+            gateway.interp2app(_check_uses_shortcut))
+
     def test_typeobject(self):
         import sys
         module = self.import_module(name='foo')
@@ -162,6 +172,25 @@
         assert fuu2(u"abc").baz().escape()
         raises(TypeError, module.fooType.object_member.__get__, 1)
 
+    def test_shortcut(self):
+        # test that instances of classes that are defined in C become an
+        # instance of W_BaseCPyObject and thus can be converted faster back to
+        # their pyobj, because they store a pointer to it directly.
+        if self.runappdirect:
+            skip("can't run with -A")
+        module = self.import_module(name='foo')
+        obj = module.fooType()
+        assert self._check_uses_shortcut(obj)
+        # W_TypeObjects use shortcut
+        assert self._check_uses_shortcut(object)
+        assert self._check_uses_shortcut(type)
+        # None, True, False use shortcut
+        assert self._check_uses_shortcut(None)
+        assert self._check_uses_shortcut(True)
+        assert self._check_uses_shortcut(False)
+        assert not self._check_uses_shortcut(1)
+        assert not self._check_uses_shortcut(object())
+
     def test_multiple_inheritance1(self):
         module = self.import_module(name='foo')
         obj = module.UnicodeSubtype(u'xyz')
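
The idea test_shortcut exercises can be sketched in plain Python: an instance
that carries its C-level reference in a field (as W_BaseCPyObject does with
_cpy_ref) can hand it back without consulting the shared mapping. The cache
dict below is only a stand-in for cpyext's real pyobj bookkeeping:

    _pyobj_cache = {}          # stand-in for cpyext's wrapper -> PyObject map

    class WBaseCPyObjectSketch(object):
        _cpy_ref = None        # filled in when the C-level object is created

    def as_pyobj_sketch(w_obj):
        ref = getattr(w_obj, '_cpy_ref', None)
        if ref is not None:    # fast path: pointer stored on the instance
            return ref
        return _pyobj_cache.get(id(w_obj))   # slow path: shared lookup
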
diff --git a/pypy/module/cpyext/test/test_unicodeobject.py b/pypy/module/cpyext/test/test_unicodeobject.py
--- a/pypy/module/cpyext/test/test_unicodeobject.py
+++ b/pypy/module/cpyext/test/test_unicodeobject.py
@@ -359,6 +359,20 @@
         m = self.import_module('_widechar')
         raises(ValueError, m.test_widechar)
 
+    def test_AsUTFNString(self):
+        module = self.import_extension('foo', [
+            ("asutf8", "METH_O", "return PyUnicode_AsUTF8String(args);"),
+            ("asutf16", "METH_O", "return PyUnicode_AsUTF16String(args);"),
+            ("asutf32", "METH_O", "return PyUnicode_AsUTF32String(args);"),
+            ])
+        u = u'sp\x09m\u1234\U00012345'
+        s = module.asutf8(u)
+        assert s == u.encode('utf-8')
+        s = module.asutf16(u)
+        assert s == u.encode('utf-16')
+        s = module.asutf32(u)
+        assert s == u.encode('utf-32')
+
 
 class TestUnicode(BaseApiTest):
     def test_unicodeobject(self, space):
@@ -448,10 +462,24 @@
         lltype.free(ar, flavor='raw')
 
     def test_AsUTF8String(self, space):
-        w_u = space.wrap(u'sp\x09m')
+        w_u = space.wrap(u'sp\x09m\u1234')
         w_res = PyUnicode_AsUTF8String(space, w_u)
         assert space.type(w_res) is space.w_bytes
-        assert space.unwrap(w_res) == 'sp\tm'
+        assert space.unwrap(w_res) == 'sp\tm\xe1\x88\xb4'
+
+    def test_AsUTF16String(self, space):
+        u = u'sp\x09m\u1234\U00012345'
+        w_u = space.wrap(u)
+        w_res = PyUnicode_AsUTF16String(space, w_u)
+        assert space.type(w_res) is space.w_bytes
+        assert space.unwrap(w_res) == u.encode('utf-16')
+
+    def test_AsUTF32String(self, space):
+        u = u'sp\x09m\u1234\U00012345'
+        w_u = space.wrap(u)
+        w_res = PyUnicode_AsUTF32String(space, w_u)
+        assert space.type(w_res) is space.w_bytes
+        assert space.unwrap(w_res) == u.encode('utf-32')
 
     def test_decode_utf8(self, space):
         u = rffi.str2charp(u'sp\x134m'.encode("utf-8"))
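
An app-level cross-check of what the new PyUnicode_AsUTF16String and
PyUnicode_AsUTF32String wrappers are expected to produce; this is plain
Python 3, independent of cpyext (the default utf-16/utf-32 codecs prepend a
byte order mark):

    u = u'sp\x09m\u1234\U00012345'
    b16 = u.encode('utf-16')    # BOM + 2-byte units, surrogate pair for U+12345
    b32 = u.encode('utf-32')    # BOM + one 4-byte unit per code point
    assert b16[:2] in (b'\xff\xfe', b'\xfe\xff')   # byte order mark
    assert len(b32) == 4 * (len(u) + 1)            # 6 code points + BOM
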
diff --git a/pypy/module/cpyext/typeobject.py b/pypy/module/cpyext/typeobject.py
--- a/pypy/module/cpyext/typeobject.py
+++ b/pypy/module/cpyext/typeobject.py
@@ -341,8 +341,12 @@
         if len(slot_names) == 1:
             func = getattr(pto, slot_names[0])
             if slot_names[0] == 'c_tp_hash':
-                if hash_not_impl == func:
-                    # special case for tp_hash == PyObject_HashNotImplemented
+                # two special cases where __hash__ is explicitly set to None
+                # (which leads to an unhashable type):
+                # 1) tp_hash == PyObject_HashNotImplemented
+                # 2) tp_hash == NULL and tp_richcompare not NULL
+                if hash_not_impl == func or (
+                        not func and pto.c_tp_richcompare):
                     dict_w[method_name] = space.w_None
                     continue
         else:
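
Case 2 above mirrors a rule that also holds at the Python level: a type that
defines comparison but no hash ends up with __hash__ set to None and is
therefore unhashable. A quick illustration in plain Python 3:

    class Cmp(object):
        def __eq__(self, other):
            return isinstance(other, Cmp)

    assert Cmp.__hash__ is None        # comparison defined, hash dropped
    try:
        hash(Cmp())
    except TypeError:
        pass                           # unhashable type: 'Cmp'
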
diff --git a/pypy/module/cpyext/unicodeobject.py b/pypy/module/cpyext/unicodeobject.py
--- a/pypy/module/cpyext/unicodeobject.py
+++ b/pypy/module/cpyext/unicodeobject.py
@@ -716,7 +716,7 @@
     ref[0] = rffi.cast(PyObject, py_newuni)
     return 0
 
-def make_conversion_functions(suffix, encoding):
+def make_conversion_functions(suffix, encoding, only_for_asstring=False):
     @cpython_api([PyObject], PyObject)
     @func_renamer('PyUnicode_As%sString' % suffix)
     def PyUnicode_AsXXXString(space, w_unicode):
@@ -728,6 +728,9 @@
         return unicodeobject.encode_object(space, w_unicode, encoding, "strict")
     globals()['PyUnicode_As%sString' % suffix] = PyUnicode_AsXXXString
 
+    if only_for_asstring:
+        return
+
     @cpython_api([CONST_STRING, Py_ssize_t, CONST_STRING], PyObject)
     @func_renamer('PyUnicode_Decode%s' % suffix)
     def PyUnicode_DecodeXXX(space, s, size, errors):
@@ -758,6 +761,8 @@
     globals()['PyUnicode_Encode%s' % suffix] = PyUnicode_EncodeXXX
 
 make_conversion_functions('UTF8', 'utf-8')
+make_conversion_functions('UTF16', 'utf-16', only_for_asstring=True)
+make_conversion_functions('UTF32', 'utf-32', only_for_asstring=True)
 make_conversion_functions('ASCII', 'ascii')
 make_conversion_functions('Latin1', 'latin-1')
 if sys.platform == 'win32':
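
A rough pure-Python model of the factory pattern used here, to show what
only_for_asstring changes: the UTF16/UTF32 calls generate only the
As...String encoder and skip the Decode/Encode helpers. The codecs module
stands in for unicodeobject.encode_object:

    import codecs

    def make_conversion_functions_model(ns, suffix, encoding,
                                        only_for_asstring=False):
        def as_string(u):
            return codecs.encode(u, encoding, 'strict')
        ns['PyUnicode_As%sString' % suffix] = as_string
        if only_for_asstring:
            return                      # no decoder for utf-16/utf-32
        def decode(s, errors='strict'):
            return codecs.decode(s, encoding, errors)
        ns['PyUnicode_Decode%s' % suffix] = decode

    ns = {}
    make_conversion_functions_model(ns, 'UTF16', 'utf-16',
                                    only_for_asstring=True)
    assert 'PyUnicode_DecodeUTF16' not in ns
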
diff --git a/pypy/module/faulthandler/handler.py b/pypy/module/faulthandler/handler.py
--- a/pypy/module/faulthandler/handler.py
+++ b/pypy/module/faulthandler/handler.py
@@ -1,6 +1,5 @@
 import os
 from rpython.rtyper.lltypesystem import lltype, llmemory, rffi
-from rpython.rlib.rposix import is_valid_fd
 from rpython.rlib.rarithmetic import widen, ovfcheck_float_to_longlong
 from rpython.rlib.objectmodel import keepalive_until_here
 from rpython.rtyper.annlowlevel import llhelper
@@ -35,7 +34,7 @@
                 raise oefmt(space.w_RuntimeError, "sys.stderr is None")
         elif space.isinstance_w(w_file, space.w_int):
             fd = space.c_int_w(w_file)
-            if fd < 0 or not is_valid_fd(fd):
+            if fd < 0:
                 raise oefmt(space.w_ValueError,
                             "file is not a valid file descriptor")
             return fd, None
diff --git a/pypy/module/gc/__init__.py b/pypy/module/gc/__init__.py
--- a/pypy/module/gc/__init__.py
+++ b/pypy/module/gc/__init__.py
@@ -19,6 +19,7 @@
                 space.config.translation.gctransformer == "framework"):
             self.appleveldefs.update({
                 'dump_rpy_heap': 'app_referents.dump_rpy_heap',
+                'get_stats': 'app_referents.get_stats',
                 })
             self.interpleveldefs.update({
                 'get_rpy_roots': 'referents.get_rpy_roots',
@@ -28,6 +29,7 @@
                 'get_objects': 'referents.get_objects',
                 'get_referents': 'referents.get_referents',
                 'get_referrers': 'referents.get_referrers',
+                '_get_stats': 'referents.get_stats',
                 '_dump_rpy_heap': 'referents._dump_rpy_heap',
                 'get_typeids_z': 'referents.get_typeids_z',
                 'get_typeids_list': 'referents.get_typeids_list',
diff --git a/pypy/module/gc/app_referents.py b/pypy/module/gc/app_referents.py
--- a/pypy/module/gc/app_referents.py
+++ b/pypy/module/gc/app_referents.py
@@ -48,3 +48,66 @@
                 file.flush()
             fd = file.fileno()
         gc._dump_rpy_heap(fd)
+
+class GcStats(object):
+    def __init__(self, s):
+        self._s = s
+        for item in ('total_gc_memory', 'jit_backend_used',
+                     'total_memory_pressure',
+                     'total_allocated_memory', 'jit_backend_allocated',
+                     'peak_memory', 'peak_allocated_memory', 'total_arena_memory',
+                     'total_rawmalloced_memory', 'nursery_size',
+                     'peak_arena_memory', 'peak_rawmalloced_memory'):
+            setattr(self, item, self._format(getattr(self._s, item)))
+        self.memory_used_sum = self._format(self._s.total_gc_memory + self._s.total_memory_pressure +
+                                            self._s.jit_backend_used)
+        self.memory_allocated_sum = self._format(self._s.total_allocated_memory + self._s.total_memory_pressure +
+                                            self._s.jit_backend_allocated)
+
+    def _format(self, v):
+        if v < 1000000:
+            # bit unlikely ;-)
+            return "%.1fkB" % (v / 1024.)
+        return "%.1fMB" % (v / 1024. / 1024.)
+
+    def __repr__(self):
+        if self._s.total_memory_pressure != -1:
+            extra = "\nmemory pressure:    %s" % self.total_memory_pressure
+        else:
+            extra = ""
+        return """Total memory consumed:
+    GC used:            %s (peak: %s)
+       in arenas:            %s
+       rawmalloced:          %s
+       nursery:              %s
+    raw assembler used: %s%s
+    -----------------------------
+    Total:              %s
+
+    Total memory allocated:
+    GC allocated:            %s (peak: %s)
+       in arenas:            %s
+       rawmalloced:          %s
+       nursery:              %s
+    raw assembler allocated: %s%s
+    -----------------------------
+    Total:                   %s
+    """ % (self.total_gc_memory, self.peak_memory,
+              self.total_arena_memory,
+              self.total_rawmalloced_memory,
+              self.nursery_size,
+           self.jit_backend_used,
+           extra,
+           self.memory_used_sum,
+
+           self.total_allocated_memory, self.peak_allocated_memory,
+              self.peak_arena_memory,
+              self.peak_rawmalloced_memory,
+              self.nursery_size,
+           self.jit_backend_allocated,
+           extra,
+           self.memory_allocated_sum)
+
+
+def get_stats():
+    return GcStats(gc._get_stats())
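
Intended usage on a PyPy built with this change (names as in the code above;
the exact figures are of course installation-specific):

    import gc
    stats = gc.get_stats()           # returns a GcStats instance
    print(stats)                     # the formatted report from __repr__
    print(stats.memory_used_sum)     # already formatted, e.g. "142.3MB"
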
diff --git a/pypy/module/gc/referents.py b/pypy/module/gc/referents.py
--- a/pypy/module/gc/referents.py
+++ b/pypy/module/gc/referents.py
@@ -1,7 +1,7 @@
-from rpython.rlib import rgc
+from rpython.rlib import rgc, jit_hooks
 from pypy.interpreter.baseobjspace import W_Root
-from pypy.interpreter.typedef import TypeDef
-from pypy.interpreter.gateway import unwrap_spec
+from pypy.interpreter.typedef import TypeDef, interp_attrproperty
+from pypy.interpreter.gateway import unwrap_spec, interp2app
 from pypy.interpreter.error import oefmt, wrap_oserror
 from rpython.rlib.objectmodel import we_are_translated
 
@@ -170,3 +170,53 @@
     l = rgc.get_typeids_list()
     list_w = [space.newint(l[i]) for i in range(len(l))]
     return space.newlist(list_w)
+
+class W_GcStats(W_Root):
+    def __init__(self, memory_pressure):
+        if memory_pressure:
+            self.total_memory_pressure = rgc.get_stats(rgc.TOTAL_MEMORY_PRESSURE)
+        else:
+            self.total_memory_pressure = -1
+        self.total_gc_memory = rgc.get_stats(rgc.TOTAL_MEMORY)
+        self.total_allocated_memory = rgc.get_stats(rgc.TOTAL_ALLOCATED_MEMORY)
+        self.peak_memory = rgc.get_stats(rgc.PEAK_MEMORY)
+        self.peak_allocated_memory = rgc.get_stats(rgc.PEAK_ALLOCATED_MEMORY)
+        self.jit_backend_allocated = jit_hooks.stats_asmmemmgr_allocated(None)
+        self.jit_backend_used = jit_hooks.stats_asmmemmgr_used(None)
+        self.total_arena_memory = rgc.get_stats(rgc.TOTAL_ARENA_MEMORY)
+        self.total_rawmalloced_memory = rgc.get_stats(
+            rgc.TOTAL_RAWMALLOCED_MEMORY)
+        self.peak_arena_memory = rgc.get_stats(rgc.PEAK_ARENA_MEMORY)
+        self.peak_rawmalloced_memory = rgc.get_stats(rgc.PEAK_RAWMALLOCED_MEMORY)
+        self.nursery_size = rgc.get_stats(rgc.NURSERY_SIZE)
+
+W_GcStats.typedef = TypeDef("GcStats",
+    total_memory_pressure=interp_attrproperty("total_memory_pressure",
+        cls=W_GcStats, wrapfn="newint"),
+    total_gc_memory=interp_attrproperty("total_gc_memory",
+        cls=W_GcStats, wrapfn="newint"),
+    peak_allocated_memory=interp_attrproperty("peak_allocated_memory",
+        cls=W_GcStats, wrapfn="newint"),
+    peak_memory=interp_attrproperty("peak_memory",
+        cls=W_GcStats, wrapfn="newint"),
+    total_allocated_memory=interp_attrproperty("total_allocated_memory",
+        cls=W_GcStats, wrapfn="newint"),
+    jit_backend_allocated=interp_attrproperty("jit_backend_allocated",
+        cls=W_GcStats, wrapfn="newint"),
+    jit_backend_used=interp_attrproperty("jit_backend_used",
+        cls=W_GcStats, wrapfn="newint"),
+    total_arena_memory=interp_attrproperty("total_arena_memory",
+        cls=W_GcStats, wrapfn="newint"),
+    total_rawmalloced_memory=interp_attrproperty("total_rawmalloced_memory",
+        cls=W_GcStats, wrapfn="newint"),
+    peak_arena_memory=interp_attrproperty("peak_arena_memory",
+        cls=W_GcStats, wrapfn="newint"),
+    peak_rawmalloced_memory=interp_attrproperty("peak_rawmalloced_memory",
+        cls=W_GcStats, wrapfn="newint"),
+    nursery_size=interp_attrproperty("nursery_size",
+        cls=W_GcStats, wrapfn="newint"),
+)
+
+ at unwrap_spec(memory_pressure=bool)
+def get_stats(space, memory_pressure=False):
+    return W_GcStats(memory_pressure)
diff --git a/pypy/module/posix/app_posix.py b/pypy/module/posix/app_posix.py
--- a/pypy/module/posix/app_posix.py
+++ b/pypy/module/posix/app_posix.py
@@ -1,6 +1,5 @@
 # NOT_RPYTHON
 from _structseq import structseqtype, structseqfield
-from __pypy__ import validate_fd
 
 # XXX we need a way to access the current module's globals more directly...
 import errno
@@ -114,14 +113,6 @@
     columns  = structseqfield(0, "width of the terminal window in characters")
     lines = structseqfield(1, "height of the terminal window in characters")
 
-if osname == 'posix':
-    # POSIX: we want to check the file descriptor when fdopen() is called,
-    # not later when we read or write data.  So we call fstat(), letting
-    # it raise if fd is invalid.
-    _validate_fd = posix.fstat
-else:
-    _validate_fd = validate_fd
-
 
 class times_result(metaclass=structseqtype):
 
diff --git a/pypy/module/posix/interp_posix.py b/pypy/module/posix/interp_posix.py
--- a/pypy/module/posix/interp_posix.py
+++ b/pypy/module/posix/interp_posix.py
@@ -2223,8 +2223,9 @@
     Return a string describing the encoding of the device if the output
     is a terminal; else return None.
     """
-    if not (rposix.is_valid_fd(fd) and os.isatty(fd)):
-        return space.w_None
+    with rposix.FdValidator(fd):
+        if not (os.isatty(fd)):
+            return space.w_None
     if _WIN32:
         if fd == 0:
             ccp = rwin32.GetConsoleCP()
diff --git a/pypy/module/posix/test/test_posix2.py b/pypy/module/posix/test/test_posix2.py
--- a/pypy/module/posix/test/test_posix2.py
+++ b/pypy/module/posix/test/test_posix2.py
@@ -17,6 +17,7 @@
     USEMODULES += ['fcntl', 'select', '_posixsubprocess', '_socket']
 else:
     USEMODULES += ['_rawffi', 'thread']
+    USEMODULES += ['_rawffi', 'thread', 'signal', '_cffi_backend']
 
 def setup_module(mod):
     mod.space = gettestobjspace(usemodules=USEMODULES)
diff --git a/pypy/module/pyexpat/interp_pyexpat.py b/pypy/module/pyexpat/interp_pyexpat.py
--- a/pypy/module/pyexpat/interp_pyexpat.py
+++ b/pypy/module/pyexpat/interp_pyexpat.py
@@ -843,11 +843,11 @@
     # Currently this is just the size of the pointer and some estimated bytes.
     # The struct isn't actually defined in expat.h - it is in xmlparse.c
     # XXX: find a good estimate of the XML_ParserStruct
-    rgc.add_memory_pressure(XML_Parser_SIZE + 300)
     if not xmlparser:
         raise oefmt(space.w_RuntimeError, "XML_ParserCreate failed")
 
     parser = W_XMLParserType(space, xmlparser, w_intern)
+    rgc.add_memory_pressure(XML_Parser_SIZE + 300, parser)
     XML_SetUnknownEncodingHandler(
         parser.itself, UnknownEncodingHandlerData_callback,
         rffi.cast(rffi.VOIDP, parser.id))
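
The pyexpat hunk shows the new add_memory_pressure signature seen earlier in
test_object.py: the estimate is reported only after the owning wrapper exists,
and the wrapper is passed along so the pressure can be attributed to it. A
pure-Python model of the pattern (add_memory_pressure below is only a
stand-in for rgc.add_memory_pressure):

    _pressure = []                       # (estimate, owner) pairs

    def add_memory_pressure(estimate, obj=None):
        assert estimate >= 0
        _pressure.append((estimate, obj))

    class ParserWrapper(object):
        def __init__(self, raw_parser, estimate):
            self.raw_parser = raw_parser
            add_memory_pressure(estimate, self)   # owner is known here

    p = ParserWrapper(object(), 300)
    assert _pressure[-1][1] is p
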
diff --git a/pypy/module/signal/interp_signal.py b/pypy/module/signal/interp_signal.py
--- a/pypy/module/signal/interp_signal.py
+++ b/pypy/module/signal/interp_signal.py
@@ -259,8 +259,6 @@
                     "signal.set_wakeup_fd is not implemented on Windows")
 
     if fd != -1:
-        if not rposix.is_valid_fd(fd):
-            raise oefmt(space.w_ValueError, "invalid fd")
         try:
             os.fstat(fd)
             flags = rposix.get_status_flags(fd)
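
With the upfront is_valid_fd() check gone, validity is established the EAFP
way: os.fstat() raises for a bad descriptor and the error is turned into a
ValueError. Roughly, in plain Python (fd == -1 still means "disable the
wakeup fd"):

    import os

    def check_wakeup_fd(fd):
        if fd == -1:
            return                       # -1 disables the wakeup fd
        try:
            os.fstat(fd)
        except OSError:
            raise ValueError("invalid fd")
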
diff --git a/pypy/module/test_lib_pypy/cffi_tests/cffi1/test_recompiler.py b/pypy/module/test_lib_pypy/cffi_tests/cffi1/test_recompiler.py
--- a/pypy/module/test_lib_pypy/cffi_tests/cffi1/test_recompiler.py
+++ b/pypy/module/test_lib_pypy/cffi_tests/cffi1/test_recompiler.py
@@ -2298,3 +2298,11 @@
     else:
         assert lib.__loader__ is None
         assert lib.__spec__ is None
+
+def test_realize_struct_error():
+    ffi = FFI()
+    ffi.cdef("""typedef ... foo_t; struct foo_s { void (*x)(foo_t); };""")
+    lib = verify(ffi, "test_realize_struct_error", """
+        typedef int foo_t; struct foo_s { void (*x)(foo_t); };
+    """)
+    py.test.raises(TypeError, ffi.new, "struct foo_s *")
diff --git a/pypy/module/time/interp_time.py b/pypy/module/time/interp_time.py
--- a/pypy/module/time/interp_time.py
+++ b/pypy/module/time/interp_time.py
@@ -221,9 +221,10 @@
         if (rffi.TIME_T in args or rffi.TIME_TP in args
             or result in (rffi.TIME_T, rffi.TIME_TP)):
             name = '_' + name + '64'
+    _calling_conv = kwds.pop('calling_conv', calling_conv)
     return rffi.llexternal(name, args, result,
                            compilation_info=eci,
-                           calling_conv=calling_conv,
+                           calling_conv=_calling_conv,
                            releasegil=False,
                            **kwds)
 
@@ -332,20 +333,34 @@
                              "RPY_EXTERN "
                              "int pypy_get_daylight();\n"
                              "RPY_EXTERN "
-                             "char** pypy_get_tzname();\n"
+                             "int pypy_get_tzname(size_t, int, char*);\n"
                              "RPY_EXTERN "
                              "void pypy__tzset();"],
         separate_module_sources = ["""
-        long pypy_get_timezone() { return timezone; }
-        int pypy_get_daylight() { return daylight; }
-        char** pypy_get_tzname() { return tzname; }
-        void pypy__tzset() { return _tzset(); }
+            long pypy_get_timezone() {
+                long timezone; 
+                _get_timezone(&timezone); 
+                return timezone;
+            };
+            int pypy_get_daylight() {
+                int daylight;
+                _get_daylight(&daylight);
+                return daylight;
+            };
+            int pypy_get_tzname(size_t len, int index, char * tzname) {
+                size_t s;
+                errno_t ret = _get_tzname(&s, tzname, len, index);
+                return (int)s;
+            };
+            void pypy__tzset() { _tzset(); }
         """])
     # Ensure that we use _tzset() and timezone from the same C Runtime.
     c_tzset = external('pypy__tzset', [], lltype.Void, win_eci)
     c_get_timezone = external('pypy_get_timezone', [], rffi.LONG, win_eci)
     c_get_daylight = external('pypy_get_daylight', [], rffi.INT, win_eci)
-    c_get_tzname = external('pypy_get_tzname', [], rffi.CCHARPP, win_eci)
+    c_get_tzname = external('pypy_get_tzname',
+                            [rffi.SIZE_T, rffi.INT, rffi.CCHARP], 
+                            rffi.INT, win_eci, calling_conv='c')
 
 c_strftime = external('strftime', [rffi.CCHARP, rffi.SIZE_T, rffi.CCHARP, TM_P],
                       rffi.SIZE_T)
@@ -359,8 +374,11 @@
         timezone = c_get_timezone()
         altzone = timezone - 3600
         daylight = c_get_daylight()
-        tzname_ptr = c_get_tzname()
-        tzname = rffi.charp2str(tzname_ptr[0]), rffi.charp2str(tzname_ptr[1])
+        with rffi.scoped_alloc_buffer(100) as buf:
+            s = c_get_tzname(100, 0, buf.raw)
+            tzname[0] = buf.str(s)
+            s = c_get_tzname(100, 1, buf.raw)
+            tzname[1] = buf.str(s)
 
     if _POSIX:
         if _CYGWIN:
diff --git a/pypy/objspace/std/mapdict.py b/pypy/objspace/std/mapdict.py
--- a/pypy/objspace/std/mapdict.py
+++ b/pypy/objspace/std/mapdict.py
@@ -4,10 +4,11 @@
 from rpython.rlib.rarithmetic import intmask, r_uint
 
 from pypy.interpreter.baseobjspace import W_Root
+from pypy.interpreter.unicodehelper import encode_utf8
 from pypy.objspace.std.dictmultiobject import (
     W_DictMultiObject, DictStrategy, ObjectDictStrategy, BaseKeyIterator,
     BaseValueIterator, BaseItemIterator, _never_equal_to_string,
-    W_DictObject,
+    W_DictObject, BytesDictStrategy, UnicodeDictStrategy
 )
 from pypy.objspace.std.typeobject import MutableCell
 
@@ -25,6 +26,10 @@
 # note: we use "x * NUM_DIGITS_POW2" instead of "x << NUM_DIGITS" because
 # we want to propagate knowledge that the result cannot be negative
 
+# the maximum number of attributes stored in mapdict (afterwards just use a
+# dict)
+LIMIT_MAP_ATTRIBUTES = 80
+
 
 class AbstractAttribute(object):
     _immutable_fields_ = ['terminator']
@@ -253,6 +258,9 @@
     def materialize_r_dict(self, space, obj, dict_w):
         raise NotImplementedError("abstract base class")
 
+    def materialize_str_dict(self, space, obj, str_dict):
+        raise NotImplementedError("abstract base class")
+
     def remove_dict_entries(self, obj):
         raise NotImplementedError("abstract base class")
 
@@ -272,6 +280,13 @@
 
     def _write_terminator(self, obj, name, index, w_value):
         obj._get_mapdict_map().add_attr(obj, name, index, w_value)
+        if index == DICT and obj._get_mapdict_map().length() >= LIMIT_MAP_ATTRIBUTES:
+            space = self.space
+            w_dict = obj.getdict(space)
+            assert isinstance(w_dict, W_DictMultiObject)
+            strategy = w_dict.get_strategy()
+            assert isinstance(strategy, MapDictStrategy)
+            strategy.switch_to_text_strategy(w_dict)
         return True
 
     def copy(self, obj):
@@ -302,6 +317,12 @@
         self.devolved_dict_terminator = DevolvedDictTerminator(space, w_cls)
 
     def materialize_r_dict(self, space, obj, dict_w):
+        return self._make_devolved(space)
+
+    def materialize_str_dict(self, space, obj, dict_w):
+        return self._make_devolved(space)
+
+    def _make_devolved(self, space):
         result = Object()
         result.space = space
         result._mapdict_init_empty(self.devolved_dict_terminator)
@@ -408,6 +429,15 @@
             self._copy_attr(obj, new_obj)
         return new_obj
 
+    def materialize_str_dict(self, space, obj, str_dict):
+        new_obj = self.back.materialize_str_dict(space, obj, str_dict)
+        if self.index == DICT:
+            enc_name = encode_utf8(space, self.name)
+            str_dict[enc_name] = obj._mapdict_read_storage(self.storageindex)
+        else:
+            self._copy_attr(obj, new_obj)
+        return new_obj
+
     def remove_dict_entries(self, obj):
         new_obj = self.back.remove_dict_entries(obj)
         if self.index != DICT:
@@ -737,6 +767,15 @@
         assert w_obj.getdict(self.space) is w_dict or w_obj._get_mapdict_map().terminator.w_cls is None
         materialize_r_dict(self.space, w_obj, dict_w)
 
+    def switch_to_text_strategy(self, w_dict):
+        w_obj = self.unerase(w_dict.dstorage)
+        strategy = self.space.fromcache(UnicodeDictStrategy)
+        str_dict = strategy.unerase(strategy.get_empty_storage())
+        w_dict.set_strategy(strategy)
+        w_dict.dstorage = strategy.erase(str_dict)
+        assert w_obj.getdict(self.space) is w_dict or w_obj._get_mapdict_map().terminator.w_cls is None
+        materialize_str_dict(self.space, w_obj, str_dict)
+
     def getitem(self, w_dict, w_key):
         space = self.space
         w_lookup_type = space.type(w_key)
@@ -832,6 +871,11 @@
     new_obj = map.materialize_r_dict(space, obj, dict_w)
     obj._set_mapdict_storage_and_map(new_obj.storage, new_obj.map)
 
+def materialize_str_dict(space, obj, dict_w):
+    map = obj._get_mapdict_map()
+    new_obj = map.materialize_str_dict(space, obj, dict_w)
+    obj._set_mapdict_storage_and_map(new_obj.storage, new_obj.map)
+
 
 class IteratorMixin(object):
 
diff --git a/pypy/objspace/std/test/test_mapdict.py b/pypy/objspace/std/test/test_mapdict.py
--- a/pypy/objspace/std/test/test_mapdict.py
+++ b/pypy/objspace/std/test/test_mapdict.py
@@ -114,6 +114,34 @@
     assert obj2.getdictvalue(space, "b") == 60
     assert obj2.map is obj.map
 
+def test_add_attribute_limit():
+    for numslots in [0, 10, 100]:
+        cls = Class()
+        obj = cls.instantiate()
+        for i in range(numslots):
+            obj.setslotvalue(i, i) # some extra slots too, sometimes
+        # test that eventually attributes are really just stored in a dictionary
+        for i in range(1000):
+            obj.setdictvalue(space, str(i), i)
+        # moved to dict (which is the remaining non-slot item)
+        assert len(obj.storage) == 1 + numslots
+        assert isinstance(obj.getdict(space).dstrategy, UnicodeDictStrategy)
+
+        for i in range(1000):
+            assert obj.getdictvalue(space, str(i)) == i
+        for i in range(numslots):
+            assert obj.getslotvalue(i) == i # check extra slots
+
+    # this doesn't happen with slots
+    cls = Class()
+    obj = cls.instantiate()
+    for i in range(1000):
+        obj.setslotvalue(i, i)
+    assert len(obj.storage) == 1000
+
+    for i in range(1000):
+        assert obj.getslotvalue(i) == i
+
 def test_insert_different_orders():
     cls = Class()
     obj = cls.instantiate()
@@ -797,7 +825,6 @@
         assert d == {}
 
     def test_change_class_slots(self):
-        skip("not supported by pypy yet")
         class A(object):
             __slots__ = ["x", "y"]
 
@@ -815,7 +842,6 @@
         assert isinstance(a, B)
 
     def test_change_class_slots_dict(self):
-        skip("not supported by pypy yet")
         class A(object):
             __slots__ = ["x", "__dict__"]
         class B(object):
@@ -843,7 +869,7 @@
         assert a.y == 2
         d = a.__dict__
         d[1] = 3
-        assert d == {"x": 1, "y": 2, 1:3}
+        assert d == {"y": 2, 1: 3}
         a.__class__ = B
         assert a.x == 1
         assert a.y == 2
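
A toy model of the behaviour test_add_attribute_limit pins down: attributes
stay in the map only up to LIMIT_MAP_ATTRIBUTES (80 in mapdict.py above),
after which the instance falls back to an ordinary string-keyed dict. The
class below only illustrates the threshold logic, not the real strategy
switch:

    LIMIT = 80                            # mirrors LIMIT_MAP_ATTRIBUTES

    class TinyMapObject(object):
        def __init__(self):
            self._attrs = {}              # stands in for map + storage
            self.devolved = False

        def set_attr(self, name, value):
            self._attrs[name] = value
            if not self.devolved and len(self._attrs) >= LIMIT:
                self.devolved = True      # real code switches dict strategy

    obj = TinyMapObject()
    for i in range(1000):
        obj.set_attr(str(i), i)
    assert obj.devolved
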
diff --git a/rpython/annotator/bookkeeper.py b/rpython/annotator/bookkeeper.py
--- a/rpython/annotator/bookkeeper.py
+++ b/rpython/annotator/bookkeeper.py
@@ -71,6 +71,7 @@
 
         self.needs_generic_instantiate = {}
         self.thread_local_fields = set()
+        self.memory_pressure_types = set()
 
         self.register_builtins()
 
diff --git a/rpython/jit/codewriter/support.py b/rpython/jit/codewriter/support.py

