[pypy-commit] pypy NonConstant: hg merge default

rlamy noreply at buildbot.pypy.org
Sat Feb 15 15:49:12 CET 2014


Author: Ronan Lamy <ronan.lamy at gmail.com>
Branch: NonConstant
Changeset: r69146:f61dac5d3b9e
Date: 2014-02-15 14:48 +0000
http://bitbucket.org/pypy/pypy/changeset/f61dac5d3b9e/

Log:	hg merge default

diff --git a/include/PyPy.h b/include/PyPy.h
new file mode 100644
--- /dev/null
+++ b/include/PyPy.h
@@ -0,0 +1,54 @@
+#ifndef _PYPY_H_
+#define _PYPY_H_
+
+/* This header is meant to be included in programs that use PyPy as an
+   embedded library. */
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+
+/* You should call this first once. */
+void rpython_startup_code(void);
+
+
+/* Initialize the home directory of PyPy.  It is necessary to call this.
+
+   Call it with "home" set to the file name of libpypy.so, for
+   example; it will be used as a starting point when searching for the
+   lib-python and lib_pypy directories.  They are looked for in
+   "home/..", "home/../..", etc.  Returns 0 if everything was fine.  If
+   an error occurs, returns 1 and (if verbose != 0) prints some
+   information to stderr.
+ */
+int pypy_setup_home(char *home, int verbose);
+
+
+/* If your program has multiple threads, then you need to call
+   pypy_init_threads() once at init time, and then pypy_thread_attach()
+   once in each other thread that just started and in which you want to
+   run Python code (including via callbacks, see below).
+ */
+void pypy_init_threads(void);
+void pypy_thread_attach(void);
+
+
+/* The main entry point: executes "source" as plain Python code.
+   Returns 0 if everything was fine.  If a Python exception is
+   uncaught, it is printed to stderr and 1 is returned.
+
+   Usually, the Python code from "source" should use cffi to fill in
+   global variables of "function pointer" type in your program.  Use
+   cffi callbacks to do so.  Once it is done, there is no need to call
+   pypy_execute_source() any more: from C, you call the functions
+   directly (they are "callbacks" from the point of view of Python).
+ */
+int pypy_execute_source(char *source);
+
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif
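
As a rough sketch of the call sequence described in the comments above, a
host could load libpypy and call the entry points in order.  The snippet
below drives them through cffi's ABI mode purely for illustration (a real
embedder would be a C program); the library path is an assumption.

    import cffi

    ffi = cffi.FFI()
    ffi.cdef("""
        void rpython_startup_code(void);
        int pypy_setup_home(char *home, int verbose);
        int pypy_execute_source(char *source);
    """)
    lib = ffi.dlopen("./libpypy-c.so")      # assumed path to the built library

    lib.rpython_startup_code()              # call this first, once
    if lib.pypy_setup_home("./libpypy-c.so", 1) != 0:
        raise RuntimeError("could not locate lib-python / lib_pypy")
    if lib.pypy_execute_source("print 'hello from embedded PyPy'") != 0:
        raise RuntimeError("uncaught exception (details went to stderr)")
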
diff --git a/pypy/doc/faq.rst b/pypy/doc/faq.rst
--- a/pypy/doc/faq.rst
+++ b/pypy/doc/faq.rst
@@ -103,8 +103,7 @@
 
 .. _`extension modules`: cpython_differences.html#extension-modules
 .. _`cpython differences`: cpython_differences.html
-.. _`compatibility wiki`:
-.. https://bitbucket.org/pypy/compatibility/wiki/Home
+.. _`compatibility wiki`: https://bitbucket.org/pypy/compatibility/wiki/Home
 .. _cffi: http://cffi.readthedocs.org/
 
 ---------------------------------
diff --git a/pypy/doc/project-ideas.rst b/pypy/doc/project-ideas.rst
--- a/pypy/doc/project-ideas.rst
+++ b/pypy/doc/project-ideas.rst
@@ -142,32 +142,17 @@
 
 * `hg`
 
-Experiment (again) with LLVM backend for RPython compilation
-------------------------------------------------------------
-
-We already tried working with LLVM and at the time, LLVM was not mature enough
-for our needs. It's possible that this has changed, reviving the LLVM backend
-(or writing new from scratch) for static compilation would be a good project.
-
-(On the other hand, just generating C code and using clang might be enough.
-The issue with that is the so-called "asmgcc GC root finder", which has tons
-of issues of this own.  In my opinion (arigo), it would be definitely a
-better project to try to optimize the alternative, the "shadowstack" GC root
-finder, which is nicely portable.  So far it gives a pypy that is around
-7% slower.)
-
 Embedding PyPy
 ----------------------------------------
 
 Note: there is a basic proof-of-concept for that as a `uwsgi pypy plugin`_
 
 Being able to embed PyPy, say with its own limited C API, would be
-useful.  But here is the most interesting variant, straight from
-EuroPython live discussion :-)  We can have a generic "libpypy.so" that
-can be used as a placeholder dynamic library, and when it gets loaded,
-it runs a .py module that installs (via ctypes) the interface it wants
-exported.  This would give us a one-size-fits-all generic .so file to be
-imported by any application that wants to load .so files :-)
+useful.  But there is a possibly better variant: use CFFI.  With some
+minimal tools atop CFFI, it would be possible to write a pure Python
+library and automatically compile it into a dynamic-link library
+(.so/.dll) exposing whatever C API we want.  This gives us a
+one-size-fits-all way to make .so/.dll files from Python.
 
 .. _`uwsgi pypy plugin`: http://uwsgi-docs.readthedocs.org/en/latest/PyPy.html
 
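
One way such a pure Python library could expose a C-callable API is the
pattern already hinted at in PyPy.h: install cffi callbacks into function
pointers owned by the host.  A minimal sketch follows; the hook name and
the assumption that the host declares and exports it are hypothetical.

    import cffi

    ffi = cffi.FFI()
    # The host is assumed to define and export a global hook:
    #     int (*compute_hook)(int);
    ffi.cdef("int (*compute_hook)(int);")
    lib = ffi.dlopen(None)            # symbols of the main program (POSIX)

    @ffi.callback("int(int)")
    def compute(x):
        # the callback object must stay alive as long as C may call it
        return x * 2

    lib.compute_hook = compute        # C code can now call compute() directly
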
diff --git a/pypy/doc/whatsnew-head.rst b/pypy/doc/whatsnew-head.rst
--- a/pypy/doc/whatsnew-head.rst
+++ b/pypy/doc/whatsnew-head.rst
@@ -57,3 +57,13 @@
 mapdicts keep track of whether or not an attribute is ever assigned to
 multiple times. If it's only assigned once then an elidable lookup is used when
 possible.
+
+.. branch: precompiled-headers
+Create a Makefile using precompiled headers for MSVC platforms.
+The downside is a messy nmake-compatible Makefile. Since gcc shows minimal
+speedup, this was not implemented for gcc.
+
+.. branch: camelot
+With a properly configured 256-color terminal (TERM=...-256color), the
+Mandelbrot set shown during translation now uses a range of 50 colours.
+Essential!
diff --git a/pypy/goal/targetpypystandalone.py b/pypy/goal/targetpypystandalone.py
--- a/pypy/goal/targetpypystandalone.py
+++ b/pypy/goal/targetpypystandalone.py
@@ -90,9 +90,10 @@
     return f
     """)
 
-    @entrypoint('main', [rffi.CCHARP, lltype.Signed], c_name='pypy_setup_home')
+    @entrypoint('main', [rffi.CCHARP, rffi.INT], c_name='pypy_setup_home')
     def pypy_setup_home(ll_home, verbose):
         from pypy.module.sys.initpath import pypy_find_stdlib
+        verbose = rffi.cast(lltype.Signed, verbose)
         if ll_home:
             home = rffi.charp2str(ll_home)
         else:
@@ -120,7 +121,8 @@
     @entrypoint('main', [rffi.CCHARP], c_name='pypy_execute_source')
     def pypy_execute_source(ll_source):
         source = rffi.charp2str(ll_source)
-        return _pypy_execute_source(source)
+        res = _pypy_execute_source(source)
+        return rffi.cast(rffi.INT, res)
 
     @entrypoint('main', [], c_name='pypy_init_threads')
     def pypy_init_threads():
diff --git a/pypy/interpreter/test/test_targetpypy.py b/pypy/interpreter/test/test_targetpypy.py
--- a/pypy/interpreter/test/test_targetpypy.py
+++ b/pypy/interpreter/test/test_targetpypy.py
@@ -12,8 +12,10 @@
     _, d = create_entry_point(space, None)
     execute_source = d['pypy_execute_source']
     lls = rffi.str2charp("import sys; sys.modules['xyz'] = 3")
-    execute_source(lls)
+    res = execute_source(lls)
     lltype.free(lls, flavor='raw')
+    assert lltype.typeOf(res) == rffi.INT
+    assert rffi.cast(lltype.Signed, res) == 0
     x = space.int_w(space.getitem(space.getattr(space.builtin_modules['sys'],
                                                 space.wrap('modules')),
                                                 space.wrap('xyz')))
@@ -24,5 +26,5 @@
     # did not crash - the same globals
     pypy_setup_home = d['pypy_setup_home']
     lls = rffi.str2charp(__file__)
-    pypy_setup_home(lls, 1)
+    pypy_setup_home(lls, rffi.cast(rffi.INT, 1))
     lltype.free(lls, flavor='raw')
diff --git a/pypy/module/__pypy__/__init__.py b/pypy/module/__pypy__/__init__.py
--- a/pypy/module/__pypy__/__init__.py
+++ b/pypy/module/__pypy__/__init__.py
@@ -81,6 +81,7 @@
         'newdict'                   : 'interp_dict.newdict',
         'dictstrategy'              : 'interp_dict.dictstrategy',
         'set_debug'                 : 'interp_magic.set_debug',
+        'locals_to_fast'            : 'interp_magic.locals_to_fast',
     }
     if sys.platform == 'win32':
         interpleveldefs['get_console_cp'] = 'interp_magic.get_console_cp'
diff --git a/pypy/module/__pypy__/interp_magic.py b/pypy/module/__pypy__/interp_magic.py
--- a/pypy/module/__pypy__/interp_magic.py
+++ b/pypy/module/__pypy__/interp_magic.py
@@ -1,5 +1,6 @@
 from pypy.interpreter.error import OperationError, wrap_oserror
 from pypy.interpreter.gateway import unwrap_spec
+from pypy.interpreter.pyframe import PyFrame
 from rpython.rlib.objectmodel import we_are_translated
 from pypy.objspace.std.listobject import W_ListObject
 from pypy.objspace.std.typeobject import MethodCache
@@ -111,3 +112,8 @@
 @unwrap_spec(estimate=int)
 def add_memory_pressure(estimate):
     rgc.add_memory_pressure(estimate)
+
+@unwrap_spec(w_frame=PyFrame)
+def locals_to_fast(space, w_frame):
+    assert isinstance(w_frame, PyFrame)
+    w_frame.locals2fast()
diff --git a/pypy/module/__pypy__/test/test_locals2fast.py b/pypy/module/__pypy__/test/test_locals2fast.py
new file mode 100644
--- /dev/null
+++ b/pypy/module/__pypy__/test/test_locals2fast.py
@@ -0,0 +1,81 @@
+# Tests from Fabio Zadrozny
+
+
+class AppTestLocals2Fast:
+    """
+    Test setting locals in one function from another function
+    using several approaches.
+    """
+
+    def setup_class(cls):
+        cls.w_save_locals = cls.space.appexec([], """():
+            import sys
+            if '__pypy__' in sys.builtin_module_names:
+                import __pypy__
+                save_locals = __pypy__.locals_to_fast
+            else:
+                # CPython version
+                import ctypes
+                @staticmethod
+                def save_locals(frame):
+                    ctypes.pythonapi.PyFrame_LocalsToFast(
+                        ctypes.py_object(frame), ctypes.c_int(0))
+            return save_locals
+        """)
+
+    def test_set_locals_using_save_locals(self):
+        import sys
+        def use_save_locals(name, value):
+            frame = sys._getframe().f_back
+            locals_dict = frame.f_locals
+            locals_dict[name] = value
+            self.save_locals(frame)
+        def test_method(fn):
+            x = 1
+            # The method 'fn' should attempt to set x = 2 in the current frame.
+            fn('x', 2)
+            return x
+        x = test_method(use_save_locals)
+        assert x == 2
+
+    def test_frame_simple_change(self):
+        import sys
+        frame = sys._getframe()
+        a = 20
+        frame.f_locals['a'] = 50
+        self.save_locals(frame)
+        assert a == 50
+
+    def test_frame_co_freevars(self):
+        import sys
+        outer_var = 20
+        def func():
+            frame = sys._getframe()
+            frame.f_locals['outer_var'] = 50
+            self.save_locals(frame)
+            assert outer_var == 50
+        func()
+
+    def test_frame_co_cellvars(self):
+        import sys
+        def check_co_vars(a):
+            frame = sys._getframe()
+            def function2():
+                print a
+            assert 'a' in frame.f_code.co_cellvars
+            frame = sys._getframe()
+            frame.f_locals['a'] = 50
+            self.save_locals(frame)
+            assert a == 50
+        check_co_vars(1)
+
+    def test_frame_change_in_inner_frame(self):
+        import sys
+        def change(f):
+            assert f is not sys._getframe()
+            f.f_locals['a'] = 50
+            self.save_locals(f)
+        frame = sys._getframe()
+        a = 20
+        change(frame)
+        assert a == 50
diff --git a/pypy/module/bz2/interp_bz2.py b/pypy/module/bz2/interp_bz2.py
--- a/pypy/module/bz2/interp_bz2.py
+++ b/pypy/module/bz2/interp_bz2.py
@@ -31,7 +31,7 @@
     _compilation_info_ = eci
     calling_conv = 'c'
 
-    CHECK_LIBRARY = platform.Has('dump("x", (int)&BZ2_bzCompress)')
+    CHECK_LIBRARY = platform.Has('dump("x", (long)&BZ2_bzCompress)')
 
     off_t = platform.SimpleType("off_t", rffi.LONGLONG)
     size_t = platform.SimpleType("size_t", rffi.ULONG)
diff --git a/pypy/module/cpyext/api.py b/pypy/module/cpyext/api.py
--- a/pypy/module/cpyext/api.py
+++ b/pypy/module/cpyext/api.py
@@ -910,6 +910,8 @@
     # implement function callbacks and generate function decls
     functions = []
     pypy_decls = []
+    pypy_decls.append("#ifndef _PYPY_PYPY_DECL_H\n")
+    pypy_decls.append("#define _PYPY_PYPY_DECL_H\n")
     pypy_decls.append("#ifndef PYPY_STANDALONE\n")
     pypy_decls.append("#ifdef __cplusplus")
     pypy_decls.append("extern \"C\" {")
@@ -953,6 +955,7 @@
     pypy_decls.append("}")
     pypy_decls.append("#endif")
     pypy_decls.append("#endif /*PYPY_STANDALONE*/\n")
+    pypy_decls.append("#endif /*_PYPY_PYPY_DECL_H*/\n")
 
     pypy_decl_h = udir.join('pypy_decl.h')
     pypy_decl_h.write('\n'.join(pypy_decls))
diff --git a/pypy/module/cpyext/include/bufferobject.h b/pypy/module/cpyext/include/bufferobject.h
--- a/pypy/module/cpyext/include/bufferobject.h
+++ b/pypy/module/cpyext/include/bufferobject.h
@@ -37,7 +37,7 @@
 
 PyObject* PyBuffer_New(Py_ssize_t size);
 
-void _Py_init_bufferobject(void);
+PyTypeObject *_Py_get_buffer_type(void);
 
 #ifdef __cplusplus
 }
diff --git a/pypy/module/cpyext/include/pycapsule.h b/pypy/module/cpyext/include/pycapsule.h
--- a/pypy/module/cpyext/include/pycapsule.h
+++ b/pypy/module/cpyext/include/pycapsule.h
@@ -50,7 +50,7 @@
 
 PyAPI_FUNC(void *) PyCapsule_Import(const char *name, int no_block);
 
-void _Py_init_capsule(void);
+PyTypeObject *_Py_get_capsule_type(void);
 
 #ifdef __cplusplus
 }
diff --git a/pypy/module/cpyext/include/pycobject.h b/pypy/module/cpyext/include/pycobject.h
--- a/pypy/module/cpyext/include/pycobject.h
+++ b/pypy/module/cpyext/include/pycobject.h
@@ -48,7 +48,7 @@
 } PyCObject;
 #endif
 
-void _Py_init_pycobject(void);
+PyTypeObject *_Py_get_cobject_type(void);
  
 #ifdef __cplusplus
 }
diff --git a/pypy/module/pypyjit/test_pypy_c/test_misc.py b/pypy/module/pypyjit/test_pypy_c/test_misc.py
--- a/pypy/module/pypyjit/test_pypy_c/test_misc.py
+++ b/pypy/module/pypyjit/test_pypy_c/test_misc.py
@@ -333,8 +333,8 @@
         loop, = log.loops_by_id("struct")
         if sys.maxint == 2 ** 63 - 1:
             extra = """
-            i8 = int_lt(i4, -2147483648)
-            guard_false(i8, descr=...)
+            i8 = int_ge(i4, -2147483648)
+            guard_true(i8, descr=...)
             """
         else:
             extra = ""
diff --git a/pypy/objspace/std/dictmultiobject.py b/pypy/objspace/std/dictmultiobject.py
--- a/pypy/objspace/std/dictmultiobject.py
+++ b/pypy/objspace/std/dictmultiobject.py
@@ -1,3 +1,10 @@
+"""The builtin dict implementation"""
+
+from rpython.rlib import jit, rerased
+from rpython.rlib.debug import mark_dict_non_null
+from rpython.rlib.objectmodel import newlist_hint, r_dict, specialize
+from rpython.tool.sourcetools import func_renamer, func_with_new_name
+
 from pypy.interpreter.baseobjspace import W_Root
 from pypy.interpreter.error import OperationError, oefmt
 from pypy.interpreter.gateway import (
@@ -7,18 +14,10 @@
 from pypy.objspace.std.stdtypedef import StdTypeDef
 from pypy.objspace.std.util import negate
 
-from rpython.rlib import jit, rerased
-from rpython.rlib.debug import mark_dict_non_null
-from rpython.rlib.objectmodel import newlist_hint, r_dict, specialize
-from rpython.tool.sourcetools import func_renamer, func_with_new_name
-
 
 UNROLL_CUTOFF = 5
 
 
-def _is_str(space, w_key):
-    return space.is_w(space.type(w_key), space.w_str)
-
 def _never_equal_to_string(space, w_lookup_type):
     """Handles the case of a non string key lookup.
     Types that have a sane hash/eq function should allow us to return True
@@ -29,8 +28,8 @@
     return (space.is_w(w_lookup_type, space.w_NoneType) or
             space.is_w(w_lookup_type, space.w_int) or
             space.is_w(w_lookup_type, space.w_bool) or
-            space.is_w(w_lookup_type, space.w_float)
-            )
+            space.is_w(w_lookup_type, space.w_float))
+
 
 @specialize.call_location()
 def w_dict_unrolling_heuristic(w_dct):
@@ -69,19 +68,18 @@
             w_type = space.w_dict
 
         storage = strategy.get_empty_storage()
-        w_self = space.allocate_instance(W_DictMultiObject, w_type)
-        W_DictMultiObject.__init__(w_self, space, strategy, storage)
-        return w_self
+        w_obj = space.allocate_instance(W_DictMultiObject, w_type)
+        W_DictMultiObject.__init__(w_obj, space, strategy, storage)
+        return w_obj
 
     def __init__(self, space, strategy, storage):
         self.space = space
         self.strategy = strategy
         self.dstorage = storage
 
-    def __repr__(w_self):
+    def __repr__(self):
         """representation for debugging purposes"""
-        #print('XXXXXXX', w_self.dstorage)
-        return "%s(%s)" % (w_self.__class__.__name__, w_self.strategy)
+        return "%s(%s)" % (self.__class__.__name__, self.strategy)
 
     def unwrap(w_dict, space):
         result = {}
@@ -98,9 +96,9 @@
                 return space.get_and_call_function(w_missing, w_dict, w_key)
         return None
 
-    def initialize_content(w_self, list_pairs_w):
+    def initialize_content(self, list_pairs_w):
         for w_k, w_v in list_pairs_w:
-            w_self.setitem(w_k, w_v)
+            self.setitem(w_k, w_v)
 
     def setitem_str(self, key, w_value):
         self.strategy.setitem_str(self, key, w_value)
@@ -115,7 +113,8 @@
         if w_fill is None:
             w_fill = space.w_None
         if space.is_w(w_type, space.w_dict):
-            w_dict = W_DictMultiObject.allocate_and_init_instance(space, w_type)
+            w_dict = W_DictMultiObject.allocate_and_init_instance(space,
+                                                                  w_type)
 
             byteslist = space.listview_bytes(w_keys)
             if byteslist is not None:
@@ -312,8 +311,7 @@
         try:
             w_key, w_value = self.popitem()
         except KeyError:
-            raise OperationError(space.w_KeyError,
-                                 space.wrap("popitem(): dictionary is empty"))
+            raise oefmt(space.w_KeyError, "popitem(): dictionary is empty")
         return space.newtuple([w_key, w_value])
 
     @unwrap_spec(w_default=WrappedDefault(None))
@@ -597,6 +595,7 @@
     def getiterkeys(self, w_dict):
         return iter([None])
     getitervalues = getiterkeys
+
     def getiteritems(self, w_dict):
         return iter([(None, None)])
 
@@ -615,8 +614,8 @@
         space = self.space
         if self.len != self.dictimplementation.length():
             self.len = -1   # Make this error state sticky
-            msg = "dictionary changed size during iteration"
-            raise OperationError(space.w_RuntimeError, space.wrap(msg))
+            raise oefmt(space.w_RuntimeError,
+                        "dictionary changed size during iteration")
 
         # look for the next entry
         if self.pos < self.len:
@@ -635,14 +634,15 @@
                 w_value = self.dictimplementation.getitem(w_key)
                 if w_value is None:
                     self.len = -1   # Make this error state sticky
-                    msg = "dictionary changed during iteration"
-                    raise OperationError(space.w_RuntimeError, space.wrap(msg))
+                    raise oefmt(space.w_RuntimeError,
+                                "dictionary changed during iteration")
                 return (w_key, w_value)
         # no more entries
         self.dictimplementation = None
         return EMPTY
     return func_with_new_name(next, 'next_' + TP)
 
+
 class BaseIteratorImplementation(object):
     def __init__(self, space, strategy, implementation):
         self.space = space
@@ -665,13 +665,14 @@
 class BaseItemIterator(BaseIteratorImplementation):
     next_item = _new_next('item')
 
+
 def create_iterator_classes(dictimpl, override_next_item=None):
     if not hasattr(dictimpl, 'wrapkey'):
-        wrapkey = lambda space, key : key
+        wrapkey = lambda space, key: key
     else:
         wrapkey = dictimpl.wrapkey.im_func
     if not hasattr(dictimpl, 'wrapvalue'):
-        wrapvalue = lambda space, key : key
+        wrapvalue = lambda space, key: key
     else:
         wrapvalue = dictimpl.wrapvalue.im_func
 
@@ -800,7 +801,8 @@
             return w_dict.getitem(w_key)
 
     def w_keys(self, w_dict):
-        l = [self.wrap(key) for key in self.unerase(w_dict.dstorage).iterkeys()]
+        l = [self.wrap(key)
+             for key in self.unerase(w_dict.dstorage).iterkeys()]
         return self.space.newlist(l)
 
     def values(self, w_dict):
@@ -1036,7 +1038,8 @@
     def wrapkey(space, key):
         return space.wrap(key)
 
-    # XXX there is no space.newlist_int yet to implement w_keys more efficiently
+    # XXX there is no space.newlist_int yet to implement w_keys more
+    # efficiently
 
 create_iterator_classes(IntDictStrategy)
 
@@ -1071,8 +1074,7 @@
     for w_pair in data_w:
         pair = space.fixedview(w_pair)
         if len(pair) != 2:
-            raise OperationError(space.w_ValueError,
-                         space.wrap("sequence of pairs expected"))
+            raise oefmt(space.w_ValueError, "sequence of pairs expected")
         w_key, w_value = pair
         w_dict.setitem(w_key, w_value)
 
@@ -1128,9 +1130,9 @@
 
     ignore_for_isinstance_cache = True
 
-    def __init__(w_self, space, iteratorimplementation):
-        w_self.space = space
-        w_self.iteratorimplementation = iteratorimplementation
+    def __init__(self, space, iteratorimplementation):
+        self.space = space
+        self.iteratorimplementation = iteratorimplementation
 
     def descr_iter(self, space):
         return self
@@ -1158,9 +1160,8 @@
         new_inst = mod.get('dictiter_surrogate_new')
         w_typeobj = space.type(self)
 
-        raise OperationError(
-            space.w_TypeError,
-            space.wrap("can't pickle dictionary-keyiterator objects"))
+        raise oefmt(space.w_TypeError,
+                    "can't pickle dictionary-keyiterator objects")
         # XXXXXX get that working again
 
         # we cannot call __init__ since we don't have the original dict
@@ -1174,8 +1175,8 @@
             w_clone = space.allocate_instance(W_DictMultiIterItemsObject,
                                               w_typeobj)
         else:
-            msg = "unsupported dictiter type '%s' during pickling" % (self,)
-            raise OperationError(space.w_TypeError, space.wrap(msg))
+            raise oefmt(space.w_TypeError,
+                        "unsupported dictiter type '%R' during pickling", self)
         w_clone.space = space
         w_clone.content = self.content
         w_clone.len = self.len
@@ -1244,8 +1245,8 @@
 # Views
 
 class W_DictViewObject(W_Root):
-    def __init__(w_self, space, w_dict):
-        w_self.w_dict = w_dict
+    def __init__(self, space, w_dict):
+        self.w_dict = w_dict
 
     def descr_repr(self, space):
         w_seq = space.call_function(space.w_list, self)
diff --git a/rpython/annotator/annrpython.py b/rpython/annotator/annrpython.py
--- a/rpython/annotator/annrpython.py
+++ b/rpython/annotator/annrpython.py
@@ -582,18 +582,18 @@
 
     def consider_op(self, block, opindex):
         op = block.operations[opindex]
-        argcells = [self.binding(a) for a in op.args]
+        try:
+            argcells = [self.binding(a) for a in op.args]
 
-        # let's be careful about avoiding propagated SomeImpossibleValues
-        # to enter an op; the latter can result in violations of the
-        # more general results invariant: e.g. if SomeImpossibleValue enters is_
-        #  is_(SomeImpossibleValue, None) -> SomeBool
-        #  is_(SomeInstance(not None), None) -> SomeBool(const=False) ...
-        # boom -- in the assert of setbinding()
-        for arg in argcells:
-            if isinstance(arg, annmodel.SomeImpossibleValue):
-                raise BlockedInference(self, op, opindex)
-        try:
+            # let's be careful about avoiding propagated SomeImpossibleValues
+            # to enter an op; the latter can result in violations of the
+            # more general results invariant: e.g. if SomeImpossibleValue enters is_
+            #  is_(SomeImpossibleValue, None) -> SomeBool
+            #  is_(SomeInstance(not None), None) -> SomeBool(const=False) ...
+            # boom -- in the assert of setbinding()
+            for arg in argcells:
+                if isinstance(arg, annmodel.SomeImpossibleValue):
+                    raise BlockedInference(self, op, opindex)
             resultcell = op.consider(self, *argcells)
         except annmodel.AnnotatorError as e: # note that UnionError is a subclass
             graph = self.bookkeeper.position_key[0]
diff --git a/rpython/annotator/test/test_annrpython.py b/rpython/annotator/test/test_annrpython.py
--- a/rpython/annotator/test/test_annrpython.py
+++ b/rpython/annotator/test/test_annrpython.py
@@ -4139,6 +4139,16 @@
             a.build_types(f, [str])
         assert ("Cannot prove that the object is callable" in exc.value.msg)
 
+    def test_UnionError_on_PBC(self):
+        l = ['a', 1]
+        def f(x):
+            l.append(x)
+        a = self.RPythonAnnotator()
+        with py.test.raises(annmodel.UnionError) as excinfo:
+            a.build_types(f, [int])
+        assert 'Happened at file' in excinfo.value.source
+        assert 'Known variable annotations:' in excinfo.value.source
+
     def test_str_format_error(self):
         def f(s, x):
             return s.format(x)
diff --git a/rpython/jit/codewriter/longlong.py b/rpython/jit/codewriter/longlong.py
--- a/rpython/jit/codewriter/longlong.py
+++ b/rpython/jit/codewriter/longlong.py
@@ -26,6 +26,7 @@
     getrealfloat    = lambda x: x
     gethash         = compute_hash
     gethash_fast    = longlong2float.float2longlong
+    extract_bits    = longlong2float.float2longlong
     is_longlong     = lambda TYPE: False
 
     # -------------------------------------
@@ -42,6 +43,7 @@
     getrealfloat    = longlong2float.longlong2float
     gethash         = lambda xll: rarithmetic.intmask(xll - (xll >> 32))
     gethash_fast    = gethash
+    extract_bits    = lambda x: x
     is_longlong     = lambda TYPE: (TYPE is lltype.SignedLongLong or
                                     TYPE is lltype.UnsignedLongLong)
 
diff --git a/rpython/jit/metainterp/history.py b/rpython/jit/metainterp/history.py
--- a/rpython/jit/metainterp/history.py
+++ b/rpython/jit/metainterp/history.py
@@ -275,7 +275,12 @@
 
     def same_constant(self, other):
         if isinstance(other, ConstFloat):
-            return self.value == other.value
+            # careful in this comparison: if self.value and other.value
+            # are both NaN, stored as regular floats (i.e. on 64-bit),
+            # then just using "==" would say False: two NaNs are always
+            # different from each other.
+            return (longlong.extract_bits(self.value) ==
+                    longlong.extract_bits(other.value))
         return False
 
     def nonnull(self):
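
The NaN subtlety is easy to reproduce with plain floats: "==" never matches
a NaN, even against itself, whereas comparing the raw 64-bit patterns (which
is what longlong.extract_bits boils down to on 64-bit) does.  A small
illustration, assuming IEEE doubles:

    import struct

    def bits(x):
        # raw 64-bit pattern of a double, as a signed integer
        return struct.unpack('<q', struct.pack('<d', x))[0]

    nan = float('nan')
    assert nan != nan                   # "==" treats NaN as unequal to itself
    assert bits(nan) == bits(nan)       # the bit-pattern comparison matches
    assert bits(12.34) == bits(12.34)   # ordinary values agree either way
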
diff --git a/rpython/jit/metainterp/pyjitpl.py b/rpython/jit/metainterp/pyjitpl.py
--- a/rpython/jit/metainterp/pyjitpl.py
+++ b/rpython/jit/metainterp/pyjitpl.py
@@ -594,11 +594,9 @@
         if tobox is not None:
             # sanity check: see whether the current struct value
             # corresponds to what the cache thinks the value is
-            # XXX pypy with the following check fails on micronumpy,
-            # XXX investigate
-            #resbox = executor.execute(self.metainterp.cpu, self.metainterp,
-            #                          rop.GETFIELD_GC, fielddescr, box)
-            #assert resbox.constbox().same_constant(tobox.constbox())
+            resbox = executor.execute(self.metainterp.cpu, self.metainterp,
+                                      rop.GETFIELD_GC, fielddescr, box)
+            assert resbox.constbox().same_constant(tobox.constbox())
             return tobox
         resbox = self.execute_with_descr(opnum, fielddescr, box)
         self.metainterp.heapcache.getfield_now_known(box, fielddescr, resbox)
diff --git a/rpython/jit/metainterp/test/test_history.py b/rpython/jit/metainterp/test/test_history.py
--- a/rpython/jit/metainterp/test/test_history.py
+++ b/rpython/jit/metainterp/test/test_history.py
@@ -1,5 +1,8 @@
 from rpython.jit.metainterp.history import *
 from rpython.rtyper.lltypesystem import lltype, llmemory, rffi
+from rpython.rlib.rfloat import NAN, INFINITY
+from rpython.jit.codewriter import longlong
+from rpython.translator.c.test.test_standalone import StandaloneTests
 
 
 def test_repr():
@@ -38,3 +41,36 @@
     assert not c3a.same_constant(c1b)
     assert not c3a.same_constant(c2b)
     assert     c3a.same_constant(c3b)
+
+def test_same_constant_float():
+    c1 = Const._new(12.34)
+    c2 = Const._new(12.34)
+    c3 = Const._new(NAN)
+    c4 = Const._new(NAN)
+    c5 = Const._new(INFINITY)
+    c6 = Const._new(INFINITY)
+    assert c1.same_constant(c2)
+    assert c3.same_constant(c4)
+    assert c5.same_constant(c6)
+    assert not c1.same_constant(c4)
+    assert not c1.same_constant(c6)
+    assert not c3.same_constant(c2)
+    assert not c3.same_constant(c6)
+    assert not c5.same_constant(c2)
+    assert not c5.same_constant(c4)
+
+
+class TestZTranslated(StandaloneTests):
+    def test_ztranslated_same_constant_float(self):
+        def fn(args):
+            n = INFINITY
+            c1 = ConstFloat(longlong.getfloatstorage(n - INFINITY))
+            c2 = ConstFloat(longlong.getfloatstorage(n - INFINITY))
+            c3 = ConstFloat(longlong.getfloatstorage(12.34))
+            if c1.same_constant(c2):
+                print "ok!"
+            return 0
+
+        t, cbuilder = self.compile(fn)
+        data = cbuilder.cmdexec('')
+        assert "ok!\n" in data
diff --git a/rpython/memory/gc/incminimark.py b/rpython/memory/gc/incminimark.py
--- a/rpython/memory/gc/incminimark.py
+++ b/rpython/memory/gc/incminimark.py
@@ -1837,6 +1837,11 @@
                 #
                 if self.objects_with_finalizers.non_empty():
                     self.deal_with_objects_with_finalizers()
+                elif self.old_objects_with_weakrefs.non_empty():
+                    # Weakref support: clear the weak pointers to dying objects
+                    # (if we call deal_with_objects_with_finalizers(), it will
+                    # invoke invalidate_old_weakrefs() itself directly)
+                    self.invalidate_old_weakrefs()
 
                 ll_assert(not self.objects_to_trace.non_empty(),
                           "objects_to_trace should be empty")
@@ -1846,9 +1851,7 @@
                 self.more_objects_to_trace.delete()
 
                 #
-                # Weakref support: clear the weak pointers to dying objects
-                if self.old_objects_with_weakrefs.non_empty():
-                    self.invalidate_old_weakrefs()
+                # Light finalizers
                 if self.old_objects_with_light_finalizers.non_empty():
                     self.deal_with_old_objects_with_finalizers()
                 #objects_to_trace processed fully, can move on to sweeping
@@ -2206,6 +2209,12 @@
                     self._recursively_bump_finalization_state_from_2_to_3(y)
             self._recursively_bump_finalization_state_from_1_to_2(x)
 
+        # Clear the weak pointers to dying objects.  Also clears them if
+        # they point to objects which have the GCFLAG_FINALIZATION_ORDERING
+        # bit set here.  These are objects which will be added to
+        # run_finalizers().
+        self.invalidate_old_weakrefs()
+
         while marked.non_empty():
             x = marked.popleft()
             state = self._finalization_state(x)
@@ -2333,7 +2342,9 @@
             ll_assert((self.header(pointing_to).tid & GCFLAG_NO_HEAP_PTRS)
                       == 0, "registered old weakref should not "
                             "point to a NO_HEAP_PTRS obj")
-            if self.header(pointing_to).tid & GCFLAG_VISITED:
+            tid = self.header(pointing_to).tid
+            if ((tid & (GCFLAG_VISITED | GCFLAG_FINALIZATION_ORDERING)) ==
+                        GCFLAG_VISITED):
                 new_with_weakref.append(obj)
             else:
                 (obj + offset).address[0] = llmemory.NULL
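
The new condition keeps a weakref alive only when its target carries
GCFLAG_VISITED and does not carry GCFLAG_FINALIZATION_ORDERING.  A toy model
of that bit test, with made-up flag values:

    # hypothetical flag values, for illustration only
    GCFLAG_VISITED = 0x01
    GCFLAG_FINALIZATION_ORDERING = 0x02

    def weakref_survives(tid):
        # alive only if visited and not queued for finalization ordering
        return (tid & (GCFLAG_VISITED | GCFLAG_FINALIZATION_ORDERING)) == GCFLAG_VISITED

    assert weakref_survives(GCFLAG_VISITED)
    assert not weakref_survives(GCFLAG_VISITED | GCFLAG_FINALIZATION_ORDERING)
    assert not weakref_survives(0)    # dying target: the weak pointer is cleared
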
diff --git a/rpython/memory/test/gc_test_base.py b/rpython/memory/test/gc_test_base.py
--- a/rpython/memory/test/gc_test_base.py
+++ b/rpython/memory/test/gc_test_base.py
@@ -29,6 +29,7 @@
     GC_CAN_SHRINK_ARRAY = False
     GC_CAN_SHRINK_BIG_ARRAY = False
     BUT_HOW_BIG_IS_A_BIG_STRING = 3*WORD
+    WREF_IS_INVALID_BEFORE_DEL_IS_CALLED = False
 
     def setup_class(cls):
         cls._saved_logstate = py.log._getstate()
@@ -370,15 +371,23 @@
         class A(object):
             count = 0
         a = A()
+        expected_invalid = self.WREF_IS_INVALID_BEFORE_DEL_IS_CALLED
         class B(object):
             def __del__(self):
                 # when __del__ is called, the weakref to myself is still valid
-                # in RPython (at least with most GCs; this test might be
-                # skipped for specific GCs)
-                if self.ref() is self:
-                    a.count += 10  # ok
+                # in RPython with most GCs.  However, this can lead to strange
+                # bugs with incminimark.  https://bugs.pypy.org/issue1687
+                # So with incminimark, we expect the opposite.
+                if expected_invalid:
+                    if self.ref() is None:
+                        a.count += 10  # ok
+                    else:
+                        a.count = 666  # not ok
                 else:
-                    a.count = 666  # not ok
+                    if self.ref() is self:
+                        a.count += 10  # ok
+                    else:
+                        a.count = 666  # not ok
         def g():
             b = B()
             ref = weakref.ref(b)
diff --git a/rpython/memory/test/test_incminimark_gc.py b/rpython/memory/test/test_incminimark_gc.py
--- a/rpython/memory/test/test_incminimark_gc.py
+++ b/rpython/memory/test/test_incminimark_gc.py
@@ -1,6 +1,38 @@
-from rpython.rlib.rarithmetic import LONG_BIT
+from rpython.rtyper.lltypesystem import lltype
+from rpython.rtyper.lltypesystem.lloperation import llop
 
 from rpython.memory.test import test_minimark_gc
 
 class TestIncrementalMiniMarkGC(test_minimark_gc.TestMiniMarkGC):
     from rpython.memory.gc.incminimark import IncrementalMiniMarkGC as GCClass
+    WREF_IS_INVALID_BEFORE_DEL_IS_CALLED = True
+
+    def test_weakref_not_in_stack(self):
+        import weakref
+        class A(object):
+            pass
+        class B(object):
+            def __init__(self, next):
+                self.next = next
+        def g():
+            a = A()
+            a.x = 5
+            wr = weakref.ref(a)
+            llop.gc__collect(lltype.Void)   # make everything old
+            assert wr() is not None
+            assert a.x == 5
+            return wr
+        def f():
+            ref = g()
+            llop.gc__collect(lltype.Void, 1)    # start a major cycle
+            # at this point the stack is scanned, and the weakref points
+            # to an object not found, but still reachable:
+            b = ref()
+            llop.debug_print(lltype.Void, b)
+            assert b is not None
+            llop.gc__collect(lltype.Void)   # finish the major cycle
+            # assert does not crash, because 'b' is still kept alive
+            b.x = 42
+            return ref() is b
+        res = self.interpret(f, [])
+        assert res == True
diff --git a/rpython/rlib/objectmodel.py b/rpython/rlib/objectmodel.py
--- a/rpython/rlib/objectmodel.py
+++ b/rpython/rlib/objectmodel.py
@@ -366,6 +366,9 @@
 
     def compute_result_annotation(self, s_l, s_sizehint):
         from rpython.annotator import model as annmodel
+        if annmodel.s_None.contains(s_l):
+            return   # first argument is only None so far, but we
+                     # expect a generalization later
         if not isinstance(s_l, annmodel.SomeList):
             raise annmodel.AnnotatorError("First argument must be a list")
         if not isinstance(s_sizehint, annmodel.SomeInteger):
diff --git a/rpython/rlib/rfile.py b/rpython/rlib/rfile.py
--- a/rpython/rlib/rfile.py
+++ b/rpython/rlib/rfile.py
@@ -29,9 +29,9 @@
 OFF_T = CC['off_t']
 c_open = llexternal('fopen', [rffi.CCHARP, rffi.CCHARP], lltype.Ptr(FILE))
 c_close = llexternal('fclose', [lltype.Ptr(FILE)], rffi.INT)
-c_write = llexternal('fwrite', [rffi.CCHARP, rffi.SIZE_T, rffi.SIZE_T,
+c_fwrite = llexternal('fwrite', [rffi.CCHARP, rffi.SIZE_T, rffi.SIZE_T,
                                      lltype.Ptr(FILE)], rffi.SIZE_T)
-c_read = llexternal('fread', [rffi.CCHARP, rffi.SIZE_T, rffi.SIZE_T,
+c_fread = llexternal('fread', [rffi.CCHARP, rffi.SIZE_T, rffi.SIZE_T,
                                    lltype.Ptr(FILE)], rffi.SIZE_T)
 c_feof = llexternal('feof', [lltype.Ptr(FILE)], rffi.INT)
 c_ferror = llexternal('ferror', [lltype.Ptr(FILE)], rffi.INT)
@@ -40,13 +40,16 @@
                           rffi.INT)
 c_tmpfile = llexternal('tmpfile', [], lltype.Ptr(FILE))
 c_fileno = llexternal('fileno', [lltype.Ptr(FILE)], rffi.INT)
-c_ftell = llexternal('ftell', [lltype.Ptr(FILE)], lltype.Signed)
+c_ftell = llexternal('ftell', [lltype.Ptr(FILE)], rffi.LONG)
 c_fflush = llexternal('fflush', [lltype.Ptr(FILE)], rffi.INT)
 c_ftruncate = llexternal('ftruncate', [rffi.INT, OFF_T], rffi.INT, macro=True)
 
 c_fgets = llexternal('fgets', [rffi.CCHARP, rffi.INT, lltype.Ptr(FILE)],
                      rffi.CCHARP)
 
+c_popen = llexternal('popen', [rffi.CCHARP, rffi.CCHARP], lltype.Ptr(FILE))
+c_pclose = llexternal('pclose', [lltype.Ptr(FILE)], rffi.INT)
+
 BASE_BUF_SIZE = 4096
 BASE_LINE_SIZE = 100
 
@@ -75,6 +78,21 @@
         raise OSError(errno, os.strerror(errno))
     return RFile(res)
 
+def create_popen_file(command, type):
+    ll_command = rffi.str2charp(command)
+    try:
+        ll_type = rffi.str2charp(type)
+        try:
+            ll_f = c_popen(ll_command, ll_type)
+            if not ll_f:
+                errno = rposix.get_errno()
+                raise OSError(errno, os.strerror(errno))
+        finally:
+            lltype.free(ll_type, flavor='raw')
+    finally:
+        lltype.free(ll_command, flavor='raw')
+    return RPopenFile(ll_f)
+
 class RFile(object):
     def __init__(self, ll_file):
         self.ll_file = ll_file
@@ -89,30 +107,26 @@
         try:
             # note that since we got a nonmoving buffer, it is either raw
             # or already cannot move, so the arithmetics below are fine
-            total_bytes = 0
-            ll_current = ll_value
-            while total_bytes < len(value):
-                bytes = c_write(ll_current, 1, len(value) - r_uint(total_bytes),
-                                ll_file)
-                if bytes == 0:
-                    errno = rposix.get_errno()
-                    raise OSError(errno, os.strerror(errno))
-                total_bytes += bytes
-                ll_current = rffi.cast(rffi.CCHARP,
-                                       rffi.cast(lltype.Unsigned, ll_value) +
-                                       total_bytes)
+            length = len(value)
+            bytes = c_fwrite(ll_value, 1, length, ll_file)
+            if bytes != length:
+                errno = rposix.get_errno()
+                raise OSError(errno, os.strerror(errno))
         finally:
             rffi.free_nonmovingbuffer(value, ll_value)
 
     def close(self):
-        if self.ll_file:
+        ll_f = self.ll_file
+        if ll_f:
             # double close is allowed
-            res = c_close(self.ll_file)
             self.ll_file = lltype.nullptr(FILE)
+            res = self._do_close(ll_f)
             if res == -1:
                 errno = rposix.get_errno()
                 raise OSError(errno, os.strerror(errno))
 
+    _do_close = staticmethod(c_close)    # overridden in RPopenFile
+
     def read(self, size=-1):
         # XXX CPython uses a more delicate logic here
         ll_file = self.ll_file
@@ -124,27 +138,25 @@
             try:
                 s = StringBuilder()
                 while True:
-                    returned_size = c_read(buf, 1, BASE_BUF_SIZE, ll_file)
+                    returned_size = c_fread(buf, 1, BASE_BUF_SIZE, ll_file)
+                    returned_size = intmask(returned_size)  # is between 0 and BASE_BUF_SIZE
                     if returned_size == 0:
                         if c_feof(ll_file):
                             # ok, finished
                             return s.build()
-                        errno = c_ferror(ll_file)
-                        c_clearerror(ll_file)
-                        raise OSError(errno, os.strerror(errno))
+                        raise _error(ll_file)
                     s.append_charpsize(buf, returned_size)
             finally:
                 lltype.free(buf, flavor='raw')
         else:
             raw_buf, gc_buf = rffi.alloc_buffer(size)
             try:
-                returned_size = c_read(raw_buf, 1, size, ll_file)
+                returned_size = c_fread(raw_buf, 1, size, ll_file)
+                returned_size = intmask(returned_size)  # is between 0 and size
                 if returned_size == 0:
                     if not c_feof(ll_file):
-                        errno = c_ferror(ll_file)
-                        raise OSError(errno, os.strerror(errno))
-                s = rffi.str_from_buffer(raw_buf, gc_buf, size,
-                                         rffi.cast(lltype.Signed, returned_size))
+                        raise _error(ll_file)
+                s = rffi.str_from_buffer(raw_buf, gc_buf, size, returned_size)
             finally:
                 rffi.keep_buffer_alive_until_here(raw_buf, gc_buf)
             return s
@@ -200,8 +212,7 @@
         if not result:
             if c_feof(self.ll_file):   # ok
                 return 0
-            errno = c_ferror(self.ll_file)
-            raise OSError(errno, os.strerror(errno))
+            raise _error(self.ll_file)
         #
         # Assume that fgets() works as documented, and additionally
         # never writes beyond the final \0, which the CPython
@@ -240,3 +251,13 @@
             finally:
                 rffi.keep_buffer_alive_until_here(raw_buf, gc_buf)
         raise ValueError("I/O operation on closed file")
+
+
+class RPopenFile(RFile):
+    _do_close = staticmethod(c_pclose)
+
+
+def _error(ll_file):
+    errno = c_ferror(ll_file)
+    c_clearerror(ll_file)
+    raise OSError(errno, os.strerror(errno))
diff --git a/rpython/rlib/rgc.py b/rpython/rlib/rgc.py
--- a/rpython/rlib/rgc.py
+++ b/rpython/rlib/rgc.py
@@ -15,7 +15,6 @@
 
 def set_max_heap_size(nbytes):
     """Limit the heap size to n bytes.
-    So far only implemented by the Boehm GC and the semispace/generation GCs.
     """
     pass
 
diff --git a/rpython/rlib/test/test_rfile.py b/rpython/rlib/test/test_rfile.py
--- a/rpython/rlib/test/test_rfile.py
+++ b/rpython/rlib/test/test_rfile.py
@@ -1,5 +1,5 @@
 
-import os
+import os, sys, py
 from rpython.rtyper.test.tool import BaseRtypingTest
 from rpython.tool.udir import udir
 from rpython.rlib import rfile
@@ -142,6 +142,15 @@
         cls.tmpdir = udir.join('test_rfile_direct')
         cls.tmpdir.ensure(dir=True)
 
+    def test_read_a_lot(self):
+        fname = str(self.tmpdir.join('file_read_a_lot'))
+        with open(fname, 'w') as f:
+            f.write('dupa' * 999)
+        f = rfile.create_file(fname, 'r')
+        s = f.read()
+        assert s == 'dupa' * 999
+        f.close()
+
     def test_readline(self):
         fname = str(self.tmpdir.join('file_readline'))
         j = 0
@@ -175,3 +184,15 @@
             got = f.readline()
             assert got == ''
             f.close()
+
+
+class TestPopen:
+    def setup_class(cls):
+        if sys.platform == 'win32':
+            py.test.skip("not for win32")
+
+    def test_popen(self):
+        f = rfile.create_popen_file("python -c 'print 42'", "r")
+        s = f.read()
+        f.close()
+        assert s == '42\n'
diff --git a/rpython/rlib/test/test_rpoll.py b/rpython/rlib/test/test_rpoll.py
--- a/rpython/rlib/test/test_rpoll.py
+++ b/rpython/rlib/test/test_rpoll.py
@@ -1,5 +1,6 @@
 import os
 import errno
+import py
 
 from rpython.rlib.rsocket import *
 from rpython.rlib.rpoll import *
@@ -55,6 +56,8 @@
     serv.close()
 
 def test_select():
+    if os.name == 'nt':
+        py.test.skip('cannot select on file handles on windows')
     def f():
         readend, writeend = os.pipe()
         try:
@@ -72,6 +75,8 @@
     interpret(f, [])
 
 def test_select_timeout():
+    if os.name == 'nt':
+        py.test.skip('cannot select on file handles on windows')
     from time import time
     def f():
         # once there was a bug where the sleeping time was doubled
diff --git a/rpython/tool/ansi_mandelbrot.py b/rpython/tool/ansi_mandelbrot.py
--- a/rpython/tool/ansi_mandelbrot.py
+++ b/rpython/tool/ansi_mandelbrot.py
@@ -14,8 +14,12 @@
 """
 
 
-palette = [39, 34, 35, 36, 31, 33, 32, 37]
-
+import os
+if os.environ.get('TERM', 'dumb').find('256') > 0:
+    from ansiramp import ansi_ramp80
+    palette = map(lambda x: "38;5;%d" % x, ansi_ramp80)
+else:
+    palette = [39, 34, 35, 36, 31, 33, 32, 37]
 
 colour_range = None # used for debugging
 
diff --git a/rpython/tool/ansiramp.py b/rpython/tool/ansiramp.py
new file mode 100755
--- /dev/null
+++ b/rpython/tool/ansiramp.py
@@ -0,0 +1,29 @@
+#! /usr/bin/env python
+import colorsys
+
+def hsv2ansi(h, s, v):
+    # h: 0..1, s/v: 0..1
+    if s < 0.1:
+        return int(v * 23) + 232
+    r, g, b = map(lambda x: int(x * 5), colorsys.hsv_to_rgb(h, s, v))
+    return 16 + (r * 36) + (g * 6) + b
+
+def ramp_idx(i, num):
+    assert num > 0
+    i0 = float(i) / num
+    h = 0.57 + i0
+    s = 1 - pow(i0,3)
+    v = 1
+    return hsv2ansi(h, s, v)
+
+def ansi_ramp(num):
+    return [ramp_idx(i, num) for i in range(num)]
+
+ansi_ramp80 = ansi_ramp(80)
+
+if __name__ == '__main__':
+    import sys
+    from py.io import ansi_print
+    colors = int(sys.argv[1]) if len(sys.argv) > 1 else 80
+    for col in range(colors):
+        ansi_print('#', "38;5;%d" % ramp_idx(col, colors), newline=False, flush=True)
diff --git a/rpython/tool/sourcetools.py b/rpython/tool/sourcetools.py
--- a/rpython/tool/sourcetools.py
+++ b/rpython/tool/sourcetools.py
@@ -6,7 +6,6 @@
 # XXX We should try to generalize and single out one approach to dynamic
 # XXX code compilation.
 
-import types
 import sys, os, inspect, new
 import py
 
@@ -296,40 +295,3 @@
     result.func_defaults = f.func_defaults
     result.func_dict.update(f.func_dict)
     return result
-
-
-def _convert_const_maybe(x, encoding):
-    if isinstance(x, str):
-        return x.decode(encoding)
-    elif isinstance(x, tuple):
-        items = [_convert_const_maybe(item, encoding) for item in x]
-        return tuple(items)
-    return x
-
-def with_unicode_literals(fn=None, **kwds):
-    """Decorator that replace all string literals with unicode literals.
-    Similar to 'from __future__ import string literals' at function level.
-    Useful to limit changes in the py3k branch.
-    """
-    encoding = kwds.pop('encoding', 'ascii')
-    if kwds:
-        raise TypeError("Unexpected keyword argument(s): %s" % ', '.join(kwds.keys()))
-    def decorator(fn):
-        co = fn.func_code
-        new_consts = []
-        for const in co.co_consts:
-            new_consts.append(_convert_const_maybe(const, encoding))
-        new_consts = tuple(new_consts)
-        new_code = types.CodeType(co.co_argcount, co.co_nlocals, co.co_stacksize,
-                                  co.co_flags, co.co_code, new_consts, co.co_names,
-                                  co.co_varnames, co.co_filename, co.co_name,
-                                  co.co_firstlineno, co.co_lnotab)
-        fn.func_code = new_code
-        return fn
-    #
-    # support the usage of @with_unicode_literals instead of @with_unicode_literals()
-    if fn is not None:
-        assert type(fn) is types.FunctionType
-        return decorator(fn)
-    else:
-        return decorator
diff --git a/rpython/tool/test/test_sourcetools.py b/rpython/tool/test/test_sourcetools.py
--- a/rpython/tool/test/test_sourcetools.py
+++ b/rpython/tool/test/test_sourcetools.py
@@ -1,7 +1,5 @@
-# -*- encoding: utf-8 -*-
-import py
 from rpython.tool.sourcetools import (
-    func_with_new_name, func_renamer, rpython_wrapper, with_unicode_literals)
+    func_renamer, func_with_new_name, rpython_wrapper)
 
 def test_rename():
     def f(x, y=5):
@@ -57,30 +55,3 @@
         ('decorated', 40, 2),
         ('bar', 40, 2),
         ]
-
-        
-def test_with_unicode_literals():
-    @with_unicode_literals()
-    def foo():
-        return 'hello'
-    assert type(foo()) is unicode
-    #
-    @with_unicode_literals
-    def foo():
-        return 'hello'
-    assert type(foo()) is unicode
-    #
-    def foo():
-        return 'hello àèì'
-    py.test.raises(UnicodeDecodeError, "with_unicode_literals(foo)")
-    #
-    @with_unicode_literals(encoding='utf-8')
-    def foo():
-        return 'hello àèì'
-    assert foo() == u'hello àèì'
-    #
-    @with_unicode_literals
-    def foo():
-        return ('a', 'b')
-    assert type(foo()[0]) is unicode
-
diff --git a/rpython/translator/c/dlltool.py b/rpython/translator/c/dlltool.py
--- a/rpython/translator/c/dlltool.py
+++ b/rpython/translator/c/dlltool.py
@@ -21,7 +21,8 @@
             entrypoints.append(getfunctionptr(graph))
         return entrypoints
 
-    def gen_makefile(self, targetdir, exe_name=None):
+    def gen_makefile(self, targetdir, exe_name=None,
+                    headers_to_precompile=[]):
         pass # XXX finish
 
     def compile(self):
@@ -30,6 +31,8 @@
         extsymeci = ExternalCompilationInfo(export_symbols=export_symbols)
         self.eci = self.eci.merge(extsymeci)
         files = [self.c_source_filename] + self.extrafiles
+        files += self.eventually_copy(self.eci.separate_module_files)
+        self.eci.separate_module_files = ()
         oname = self.name
         self.so_name = self.translator.platform.compile(files, self.eci,
                                                         standalone=False,
diff --git a/rpython/translator/c/genc.py b/rpython/translator/c/genc.py
--- a/rpython/translator/c/genc.py
+++ b/rpython/translator/c/genc.py
@@ -260,12 +260,13 @@
                 defines['PYPY_MAIN_FUNCTION'] = "pypy_main_startup"
                 self.eci = self.eci.merge(ExternalCompilationInfo(
                     export_symbols=["pypy_main_startup", "pypy_debug_file"]))
-        self.eci, cfile, extra = gen_source(db, modulename, targetdir,
-                                            self.eci, defines=defines,
-                                            split=self.split)
+        self.eci, cfile, extra, headers_to_precompile = \
+                gen_source(db, modulename, targetdir,
+                           self.eci, defines=defines, split=self.split)
         self.c_source_filename = py.path.local(cfile)
         self.extrafiles = self.eventually_copy(extra)
-        self.gen_makefile(targetdir, exe_name=exe_name)
+        self.gen_makefile(targetdir, exe_name=exe_name,
+                          headers_to_precompile=headers_to_precompile)
         return cfile
 
     def eventually_copy(self, cfiles):
@@ -375,18 +376,22 @@
         self._compiled = True
         return self.executable_name
 
-    def gen_makefile(self, targetdir, exe_name=None):
-        cfiles = [self.c_source_filename] + self.extrafiles
+    def gen_makefile(self, targetdir, exe_name=None, headers_to_precompile=[]):
+        module_files = self.eventually_copy(self.eci.separate_module_files)
+        self.eci.separate_module_files = []
+        cfiles = [self.c_source_filename] + self.extrafiles + list(module_files)
         if exe_name is not None:
             exe_name = targetdir.join(exe_name)
         mk = self.translator.platform.gen_makefile(
             cfiles, self.eci,
             path=targetdir, exe_name=exe_name,
+            headers_to_precompile=headers_to_precompile,
+            no_precompile_cfiles = module_files,
             shared=self.config.translation.shared)
 
         if self.has_profopt():
             profopt = self.config.translation.profopt
-            mk.definition('ABS_TARGET', '$(shell python -c "import sys,os; print os.path.abspath(sys.argv[1])" $(TARGET))')
+            mk.definition('ABS_TARGET', str(targetdir.join('$(TARGET)')))
             mk.definition('DEFAULT_TARGET', 'profopt')
             mk.definition('PROFOPT', profopt)
 
@@ -397,7 +402,7 @@
             ('debug_exc', '', '$(MAKE) CFLAGS="$(DEBUGFLAGS) -DRPY_ASSERT -DDO_LOG_EXC" debug_target'),
             ('debug_mem', '', '$(MAKE) CFLAGS="$(DEBUGFLAGS) -DRPY_ASSERT -DPYPY_USE_TRIVIAL_MALLOC" debug_target'),
             ('no_obmalloc', '', '$(MAKE) CFLAGS="-g -O2 -DRPY_ASSERT -DPYPY_NO_OBMALLOC" $(TARGET)'),
-            ('linuxmemchk', '', '$(MAKE) CFLAGS="$(DEBUGFLAGS) -DRPY_ASSERT -DPPY_USE_LINUXMEMCHK" debug_target'),
+            ('linuxmemchk', '', '$(MAKE) CFLAGS="$(DEBUGFLAGS) -DRPY_ASSERT -DPYPY_USE_LINUXMEMCHK" debug_target'),
             ('llsafer', '', '$(MAKE) CFLAGS="-O2 -DRPY_LL_ASSERT" $(TARGET)'),
             ('lldebug', '', '$(MAKE) CFLAGS="$(DEBUGFLAGS) -DRPY_ASSERT -DRPY_LL_ASSERT" debug_target'),
             ('lldebug0','', '$(MAKE) CFLAGS="-O0 $(DEBUGFLAGS) -DRPY_ASSERT -DRPY_LL_ASSERT" debug_target'),
@@ -427,8 +432,8 @@
             mk.definition('ASMFILES', sfiles)
             mk.definition('ASMLBLFILES', lblsfiles)
             mk.definition('GCMAPFILES', gcmapfiles)
-            if sys.platform == 'win32':
-                mk.definition('DEBUGFLAGS', '/MD /Zi')
+            if self.translator.platform.name == 'msvc':
+                mk.definition('DEBUGFLAGS', '-MD -Zi')
             else:
                 if self.config.translation.shared:
                     mk.definition('DEBUGFLAGS', '-O2 -fomit-frame-pointer -g -fPIC')
@@ -484,11 +489,11 @@
                 mk.rule('.PRECIOUS', '%.s', "# don't remove .s files if Ctrl-C'ed")
 
         else:
-            if sys.platform == 'win32':
-                mk.definition('DEBUGFLAGS', '/MD /Zi')
+            if self.translator.platform.name == 'msvc':
+                mk.definition('DEBUGFLAGS', '-MD -Zi')
             else:
                 mk.definition('DEBUGFLAGS', '-O1 -g')
-        if sys.platform == 'win32':
+        if self.translator.platform.name == 'msvc':
             mk.rule('debug_target', 'debugmode_$(DEFAULT_TARGET)', 'rem')
         else:
             mk.rule('debug_target', '$(TARGET)', '#')
@@ -511,6 +516,7 @@
     def __init__(self, database):
         self.database = database
         self.extrafiles = []
+        self.headers_to_precompile = []
         self.path = None
         self.namespace = NameManager()
 
@@ -539,6 +545,8 @@
         filepath = self.path.join(name)
         if name.endswith('.c'):
             self.extrafiles.append(filepath)
+        if name.endswith('.h'):
+            self.headers_to_precompile.append(filepath)
         return filepath.open('w')
 
     def getextrafiles(self):
@@ -686,11 +694,11 @@
                     print >> fc, '/***********************************************************/'
                     print >> fc, '/***  Implementations                                    ***/'
                     print >> fc
-                    print >> fc, '#define PYPY_FILE_NAME "%s"' % name
                     print >> fc, '#include "common_header.h"'
                     print >> fc, '#include "structdef.h"'
                     print >> fc, '#include "forwarddecl.h"'
                     print >> fc, '#include "preimpl.h"'
+                    print >> fc, '#define PYPY_FILE_NAME "%s"' % name
                     print >> fc, '#include "src/g_include.h"'
                     print >> fc
                 print >> fc, MARKER
@@ -732,12 +740,14 @@
     print >> f, "#endif"
 
 def gen_preimpl(f, database):
+    f.write('#ifndef _PY_PREIMPL_H\n#define _PY_PREIMPL_H\n')
     if database.translator is None or database.translator.rtyper is None:
         return
     preimplementationlines = pre_include_code_lines(
         database, database.translator.rtyper)
     for line in preimplementationlines:
         print >> f, line
+    f.write('#endif /* _PY_PREIMPL_H */\n')
 
 def gen_startupcode(f, database):
     # generate the start-up code and put it into a function
@@ -799,6 +809,7 @@
     f = filename.open('w')
     incfilename = targetdir.join('common_header.h')
     fi = incfilename.open('w')
+    fi.write('#ifndef _PY_COMMON_HEADER_H\n#define _PY_COMMON_HEADER_H\n')
 
     #
     # Header
@@ -811,6 +822,7 @@
 
     eci.write_c_header(fi)
     print >> fi, '#include "src/g_prerequisite.h"'
+    fi.write('#endif /* _PY_COMMON_HEADER_H */\n')
 
     fi.close()
 
@@ -822,6 +834,8 @@
     sg.set_strategy(targetdir, split)
     database.prepare_inline_helpers()
     sg.gen_readable_parts_of_source(f)
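+    # Collect the generated headers as candidates for precompilation,
+    # putting common_header.h first so it is included ahead of the others.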
+    headers_to_precompile = sg.headers_to_precompile[:]
+    headers_to_precompile.insert(0, incfilename)
 
     gen_startupcode(f, database)
     f.close()
@@ -834,5 +848,4 @@
 
     eci = add_extra_files(eci)
     eci = eci.convert_sources_to_files()
-    files, eci = eci.get_module_files()
-    return eci, filename, sg.getextrafiles() + list(files)
+    return eci, filename, sg.getextrafiles(), headers_to_precompile
diff --git a/rpython/translator/c/test/test_extfunc.py b/rpython/translator/c/test/test_extfunc.py
--- a/rpython/translator/c/test/test_extfunc.py
+++ b/rpython/translator/c/test/test_extfunc.py
@@ -65,7 +65,8 @@
 
     f1 = compile(does_stuff, [])
     f1()
-    assert open(filename, 'r').read() == "hello world\n"
+    with open(filename, 'r') as fid:
+        assert fid.read() == "hello world\n"
     os.unlink(filename)
 
 def test_big_read():
@@ -296,8 +297,10 @@
         os.chdir(path)
         return os.getcwd()
     f1 = compile(does_stuff, [str])
-    # different on windows please
-    assert f1('/tmp') == os.path.realpath('/tmp')
+    if os.name == 'nt':
+        assert f1(os.environ['TEMP']) == os.path.realpath(os.environ['TEMP'])
+    else:
+        assert f1('/tmp') == os.path.realpath('/tmp')
 
 def test_mkdir_rmdir():
     def does_stuff(path, delete):
diff --git a/rpython/translator/c/test/test_newgc.py b/rpython/translator/c/test/test_newgc.py
--- a/rpython/translator/c/test/test_newgc.py
+++ b/rpython/translator/c/test/test_newgc.py
@@ -658,7 +658,8 @@
 
     def test_open_read_write_seek_close(self):
         self.run('open_read_write_seek_close')
-        assert open(self.filename, 'r').read() == "hello world\n"
+        with open(self.filename, 'r') as fid:
+            assert fid.read() == "hello world\n"
         os.unlink(self.filename)
 
     def define_callback_with_collect(cls):
diff --git a/rpython/translator/platform/__init__.py b/rpython/translator/platform/__init__.py
--- a/rpython/translator/platform/__init__.py
+++ b/rpython/translator/platform/__init__.py
@@ -100,7 +100,8 @@
         return ExecutionResult(returncode, stdout, stderr)
 
     def gen_makefile(self, cfiles, eci, exe_name=None, path=None,
-                     shared=False):
+                     shared=False, headers_to_precompile=[],
+                     no_precompile_cfiles=[]):
         raise NotImplementedError("Pure abstract baseclass")
 
     def __repr__(self):
diff --git a/rpython/translator/platform/darwin.py b/rpython/translator/platform/darwin.py
--- a/rpython/translator/platform/darwin.py
+++ b/rpython/translator/platform/darwin.py
@@ -50,14 +50,17 @@
         return ["-Wl,-exported_symbols_list,%s" % (response_file,)]
 
     def gen_makefile(self, cfiles, eci, exe_name=None, path=None,
-                     shared=False):
+                     shared=False, headers_to_precompile=[],
+                     no_precompile_cfiles=[]):
         # ensure frameworks are passed in the Makefile
         fs = self._frameworks(eci.frameworks)
         if len(fs) > 0:
             # concat (-framework, FrameworkName) pairs
             self.extra_libs += tuple(map(" ".join, zip(fs[::2], fs[1::2])))
         mk = super(Darwin, self).gen_makefile(cfiles, eci, exe_name, path,
-                                              shared)
+                                shared=shared,
+                                headers_to_precompile=headers_to_precompile,
+                                no_precompile_cfiles=no_precompile_cfiles)
         return mk
 
 
diff --git a/rpython/translator/platform/posix.py b/rpython/translator/platform/posix.py
--- a/rpython/translator/platform/posix.py
+++ b/rpython/translator/platform/posix.py
@@ -83,7 +83,8 @@
         return [entry[2:] for entry in out.split()]
 
     def gen_makefile(self, cfiles, eci, exe_name=None, path=None,
-                     shared=False):
+                     shared=False, headers_to_precompile=[],
+                     no_precompile_cfiles=[]):
         cfiles = self._all_cfiles(cfiles, eci)
 
         if path is None:
diff --git a/rpython/translator/platform/test/test_distutils.py b/rpython/translator/platform/test/test_distutils.py
--- a/rpython/translator/platform/test/test_distutils.py
+++ b/rpython/translator/platform/test/test_distutils.py
@@ -11,3 +11,7 @@
 
     def test_900_files(self):
         py.test.skip('Makefiles not supported')
+
+    def test_precompiled_headers(self):
+        py.test.skip('Makefiles not supported')
diff --git a/rpython/translator/platform/test/test_makefile.py b/rpython/translator/platform/test/test_makefile.py
--- a/rpython/translator/platform/test/test_makefile.py
+++ b/rpython/translator/platform/test/test_makefile.py
@@ -1,7 +1,10 @@
 
 from rpython.translator.platform.posix import GnuMakefile as Makefile
+from rpython.translator.platform import host
+from rpython.tool.udir import udir
+from rpython.translator.tool.cbuild import ExternalCompilationInfo
 from StringIO import StringIO
-import re
+import re, sys, py
 
 def test_simple_makefile():
     m = Makefile()
@@ -29,3 +32,112 @@
     val = s.getvalue()
     assert not re.search('CC += +xxx', val, re.M)
     assert re.search('CC += +yyy', val, re.M)    
+
+class TestMakefile(object):
+    platform = host
+    strict_on_stderr = True
+
+    def check_res(self, res, expected='42\n'):
+        assert res.out == expected
+        if self.strict_on_stderr:
+            assert res.err == ''
+        assert res.returncode == 0
+
+    def test_900_files(self):
+        txt = '#include <stdio.h>\n'
+        for i in range(900):
+            txt += 'int func%03d();\n' % i
+        txt += 'int main() {\n    int j=0;'    
+        for i in range(900):
+            txt += '    j += func%03d();\n' % i
+        txt += '    printf("%d\\n", j);\n'
+        txt += '    return 0;};\n'
+        cfile = udir.join('test_900_files.c')
+        cfile.write(txt)
+        cfiles = [cfile]
+        for i in range(900):
+            cfile2 = udir.join('implement%03d.c' %i)
+            cfile2.write('''
+                int func%03d()
+            {
+                return %d;
+            }
+            ''' % (i, i))
+            cfiles.append(cfile2)
+        mk = self.platform.gen_makefile(cfiles, ExternalCompilationInfo(), path=udir)
+        mk.write()
+        self.platform.execute_makefile(mk)
+        res = self.platform.execute(udir.join('test_900_files'))
+        self.check_res(res, '%d\n' %sum(range(900)))
+
+    def test_precompiled_headers(self):
+        if self.platform.cc != 'cl.exe':
+            py.test.skip("Only MSVC profits from precompiled headers")
+        import time
+        tmpdir = udir.join('precompiled_headers').ensure(dir=1)
+        # Create an eci that should not use precompiled headers
+        eci = ExternalCompilationInfo(include_dirs=[tmpdir])
+        main_c = tmpdir.join('main_no_pch.c')
+        eci.separate_module_files = [main_c]
+        ncfiles = 10
+        nprecompiled_headers = 20
+        txt = '#include <stdio.h>\n'
+        for i in range(ncfiles):
+            txt += "int func%03d();\n" % i
+        txt += "\nint main(int argc, char * argv[])\n"
+        txt += "{\n   int i=0;\n"
+        for i in range(ncfiles):
+            txt += "   i += func%03d();\n" % i
+        txt += '   printf("%d\\n", i);\n'
+        txt += "   return 0;\n}\n"
+        main_c.write(txt)
+        # Create some large headers with dummy functions to be precompiled
+        cfiles_precompiled_headers = []
+        for i in range(nprecompiled_headers):
+            pch_name = tmpdir.join('pcheader%03d.h' % i)
+            txt = '#ifndef PCHEADER%03d_H\n#define PCHEADER%03d_H\n' % (i, i)
+            for j in range(3000):
+                txt += "int pcfunc%03d_%03d();\n" % (i, j)
+            txt += '#endif\n'
+            pch_name.write(txt)
+            cfiles_precompiled_headers.append(pch_name)
+        # Create some cfiles with headers we want precompiled
+        cfiles = []
+        for i in range(ncfiles):
+            c_name = tmpdir.join('implement%03d.c' % i)
+            txt = ''
+            for pch_name in cfiles_precompiled_headers:
+                txt += '#include "%s"\n' % pch_name
+            txt += "int func%03d() { return %d; }\n" % (i, i)
+            c_name.write(txt)
+            cfiles.append(c_name)
+        if sys.platform == 'win32':
+            clean = ('clean', '', 'for %f in ( $(OBJECTS) $(TARGET) ) do @if exist %f del /f %f')
+            get_time = time.clock
+        else:
+            clean = ('clean', '', 'rm -f $(OBJECTS) $(TARGET) ')
+            get_time = time.time
+        # Write a makefile that does not use precompiled headers
+        mk = self.platform.gen_makefile(cfiles, eci, path=tmpdir)
+        mk.rule(*clean)
+        mk.write()
+        t0 = get_time()
+        self.platform.execute_makefile(mk)
+        t1 = get_time()
+        t_normal = t1 - t0
+        self.platform.execute_makefile(mk, extra_opts=['clean'])
+        # Write a super-duper makefile with precompiled headers
+        mk = self.platform.gen_makefile(cfiles, eci, path=tmpdir,
+                           headers_to_precompile=cfiles_precompiled_headers,)
+        mk.rule(*clean)
+        mk.write()
+        t0 = get_time()
+        self.platform.execute_makefile(mk)
+        t1 = get_time()
+        t_precompiled = t1 - t0
+        res = self.platform.execute(mk.exe_name)
+        self.check_res(res, '%d\n' % sum(range(ncfiles)))
+        print "precompiled header 'make' time %.2f, non-precompiled header time %.2f" % (t_precompiled, t_normal)
+        assert t_precompiled < t_normal * 0.5
diff --git a/rpython/translator/platform/test/test_platform.py b/rpython/translator/platform/test/test_platform.py
--- a/rpython/translator/platform/test/test_platform.py
+++ b/rpython/translator/platform/test/test_platform.py
@@ -59,34 +59,6 @@
         res = self.platform.execute(executable)
         self.check_res(res)
 
-    def test_900_files(self):
-        txt = '#include <stdio.h>\n'
-        for i in range(900):
-            txt += 'int func%03d();\n' % i
-        txt += 'int main() {\n    int j=0;'    
-        for i in range(900):
-            txt += '    j += func%03d();\n' % i
-        txt += '    printf("%d\\n", j);\n'
-        txt += '    return 0;};\n'
-        cfile = udir.join('test_900_files.c')
-        cfile.write(txt)
-        cfiles = [cfile]
-        for i in range(900):
-            cfile2 = udir.join('implement%03d.c' %i)
-            cfile2.write('''
-                int func%03d()
-            {
-                return %d;
-            }
-            ''' % (i, i))
-            cfiles.append(cfile2)
-        mk = self.platform.gen_makefile(cfiles, ExternalCompilationInfo(), path=udir)
-        mk.write()
-        self.platform.execute_makefile(mk)
-        res = self.platform.execute(udir.join('test_900_files'))
-        self.check_res(res, '%d\n' %sum(range(900)))
-
-
     def test_nice_errors(self):
         cfile = udir.join('test_nice_errors.c')
         cfile.write('')
diff --git a/rpython/translator/platform/windows.py b/rpython/translator/platform/windows.py
--- a/rpython/translator/platform/windows.py
+++ b/rpython/translator/platform/windows.py
@@ -249,7 +249,8 @@
 
 
     def gen_makefile(self, cfiles, eci, exe_name=None, path=None,
-                     shared=False):
+                     shared=False, headers_to_precompile=[],
+                     no_precompile_cfiles=[]):
         cfiles = self._all_cfiles(cfiles, eci)
 
         if path is None:
@@ -313,20 +314,60 @@
             ('CC_LINK', self.link),
             ('LINKFILES', eci.link_files),
             ('MASM', self.masm),
+            ('MAKE', 'nmake.exe'),
             ('_WIN32', '1'),
             ]
         if self.x64:
             definitions.append(('_WIN64', '1'))
 
+        rules = [
+            ('all', '$(DEFAULT_TARGET)', []),
+            ('.asm.obj', '', '$(MASM) /nologo /Fo$@ /c $< $(INCLUDEDIRS)'),
+            ]
+
+        if len(headers_to_precompile) > 0:
+            stdafx_h = path.join('stdafx.h')
+            txt  = '#ifndef PYPY_STDAFX_H\n'
+            txt += '#define PYPY_STDAFX_H\n'
+            txt += '\n'.join(['#include "' + m.pathrel(c) + '"' for c in headers_to_precompile])
+            txt += '\n#endif\n'
+            stdafx_h.write(txt)
+            stdafx_c = path.join('stdafx.c')
+            stdafx_c.write('#include "stdafx.h"\n')
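+            # MSVC precompiled-header flags: /Yc compiles stdafx.h into a
+            # .pch file, /Yu reuses it, /Fp names the .pch file, and /FI
+            # force-includes stdafx.h into every compiled source file.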
+            definitions.append(('CREATE_PCH', '/Ycstdafx.h /Fpstdafx.pch /FIstdafx.h'))
+            definitions.append(('USE_PCH', '/Yustdafx.h /Fpstdafx.pch /FIstdafx.h'))
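+            # Make every object file depend on the .pch so it is built first.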
+            rules.append(('$(OBJECTS)', 'stdafx.pch', []))
+            rules.append(('stdafx.pch', 'stdafx.h', 
+               '$(CC) stdafx.c /c /nologo $(CFLAGS) $(CFLAGSEXTRA) '
+               '$(CREATE_PCH) $(INCLUDEDIRS)'))
+            rules.append(('.c.obj', '', 
+                    '$(CC) /nologo $(CFLAGS) $(CFLAGSEXTRA) $(USE_PCH) '
+                    '/Fo$@ /c $< $(INCLUDEDIRS)'))
+            # Do not use precompiled headers for some files
+            #rules.append((r'{..\module_cache}.c{..\module_cache}.obj', '',
+            #        '$(CC) /nologo $(CFLAGS) $(CFLAGSEXTRA) /Fo$@ /c $< $(INCLUDEDIRS)'))
+            # nmake cannot handle wildcard target specifications, so we must
+            # create a rule for compiling each file from eci since they cannot use
+            # precompiled headers :(
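+            # e.g. for a hypothetical foo.c coming from eci this emits:
+            #   foo.obj: foo.c
+            #       $(CC) /nologo $(CFLAGS) $(CFLAGSEXTRA) /Fofoo.obj /c foo.c $(INCLUDEDIRS)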
+            no_precompile = []
+            for f in list(no_precompile_cfiles):
+                f = m.pathrel(py.path.local(f))
+                if f not in no_precompile and f.endswith('.c'):
+                    no_precompile.append(f)
+                    target = f[:-1] + 'obj'
+                    rules.append((target, f,
+                        '$(CC) /nologo $(CFLAGS) $(CFLAGSEXTRA) '
+                        '/Fo%s /c %s $(INCLUDEDIRS)' % (target, f)))
+
+        else:
+            rules.append(('.c.obj', '', 
+                          '$(CC) /nologo $(CFLAGS) $(CFLAGSEXTRA) '
+                          '/Fo$@ /c $< $(INCLUDEDIRS)'))
+
+
         for args in definitions:
             m.definition(*args)
 
-        rules = [
-            ('all', '$(DEFAULT_TARGET)', []),
-            ('.c.obj', '', '$(CC) /nologo $(CFLAGS) $(CFLAGSEXTRA) /Fo$@ /c $< $(INCLUDEDIRS)'),
-            ('.asm.obj', '', '$(MASM) /nologo /Fo$@ /c $< $(INCLUDEDIRS)'),
-            ]
-
         for rule in rules:
             m.rule(*rule)
         
@@ -371,7 +412,7 @@
                     'mt.exe -nologo -manifest $*.manifest -outputresource:$@;1',
                     ])
             m.rule('debugmode_$(DEFAULT_TARGET)', ['debugmode_$(TARGET)', 'main.obj'],
-                   ['$(CC_LINK) /nologo /DEBUG main.obj $(SHARED_IMPORT_LIB) /out:$@'
+                   ['$(CC_LINK) /nologo /DEBUG main.obj debugmode_$(SHARED_IMPORT_LIB) /out:$@'
                     ])
 
         return m
@@ -392,6 +433,25 @@
 
         self._handle_error(returncode, stdout, stderr, path.join('make'))
 
+class WinDefinition(posix.Definition):
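+    # nmake expects multi-line macro values to be continued with a trailing
+    # backslash; write() emits definitions in that form.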
+    def write(self, f):
+        def write_list(prefix, lst):
+            lst = lst or ['']
+            for i, fn in enumerate(lst):
+                print >> f, prefix, fn,
+                if i < len(lst)-1:
+                    print >> f, '\\'
+                else:
+                    print >> f
+                prefix = ' ' * len(prefix)
+        name, value = self.name, self.value
+        if isinstance(value, str):
+            f.write('%s = %s\n' % (name, value))
+        else:
+            write_list('%s =' % (name,), value)
+        f.write('\n')
+
+
 class NMakefile(posix.GnuMakefile):
     def write(self, out=None):
         # nmake expands macros when it parses rules.
@@ -410,6 +470,14 @@
         if out is None:
             f.close()
 
+    def definition(self, name, value):
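+        # Like the base definition(), but store a WinDefinition so that
+        # multi-line values are written with nmake-style continuations.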
+        defs = self.defs
+        defn = WinDefinition(name, value)
+        if name in defs:
+            self.lines[defs[name]] = defn
+        else:
+            defs[name] = len(self.lines)
+            self.lines.append(defn)
 
 class MingwPlatform(posix.BasePosix):
     name = 'mingw32'


More information about the pypy-commit mailing list