[pypy-svn] pypy 32ptr-on-64bit: merge default. Untested.

arigo commits-noreply at bitbucket.org
Thu Mar 24 09:07:25 CET 2011


Author: Armin Rigo <arigo at tunes.org>
Branch: 32ptr-on-64bit
Changeset: r42887:f70be93895ba
Date: 2011-03-22 17:29 +0100
http://bitbucket.org/pypy/pypy/changeset/f70be93895ba/

Log:	merge default. Untested.

diff --git a/py/_test/parseopt.py b/py/_test/parseopt.py
deleted file mode 100644
--- a/py/_test/parseopt.py
+++ /dev/null
@@ -1,112 +0,0 @@
-"""
-thin wrapper around Python's optparse.py  
-adding some extra checks and ways to systematically
-have Environment variables provide default values
-for options.  basic usage:
-
-   >>> parser = Parser()
-   >>> parser.addoption("--hello", action="store_true", dest="hello")
-   >>> option, args = parser.parse(['--hello'])
-   >>> option.hello 
-   True
-   >>> args
-   []
-    
-"""
-import py
-import optparse 
-
-class Parser:
-    """ Parser for command line arguments. """ 
-
-    def __init__(self, usage=None, processopt=None):
-        self._anonymous = OptionGroup("custom options", parser=self)
-        self._groups = []
-        self._processopt = processopt
-        self._usage = usage 
-        self.hints = []
-
-    def processoption(self, option):
-        if self._processopt:
-            if option.dest:
-                self._processopt(option)
-
-    def addnote(self, note):
-        self._notes.append(note)
-
-    def getgroup(self, name, description="", after=None):
-        for group in self._groups:
-            if group.name == name:
-                return group
-        group = OptionGroup(name, description, parser=self)
-        i = 0
-        for i, grp in enumerate(self._groups):
-            if grp.name == after:
-                break
-        self._groups.insert(i+1, group)
-        return group 
-
-    addgroup = getgroup 
-    def addgroup(self, name, description=""):
-        py.log._apiwarn("1.1", "use getgroup() which gets-or-creates")
-        return self.getgroup(name, description)
-
-    def addoption(self, *opts, **attrs):
-        """ add an optparse-style option. """
-        self._anonymous.addoption(*opts, **attrs)
-
-    def parse(self, args):
-        optparser = MyOptionParser(self)
-        groups = self._groups + [self._anonymous]
-        for group in groups:
-            if group.options:
-                desc = group.description or group.name 
-                optgroup = optparse.OptionGroup(optparser, desc)
-                optgroup.add_options(group.options)
-                optparser.add_option_group(optgroup)
-        return optparser.parse_args([str(x) for x in args])
-
-    def parse_setoption(self, args, option):
-        parsedoption, args = self.parse(args)
-        for name, value in parsedoption.__dict__.items():
-            setattr(option, name, value)
-        return args
-
-
-class OptionGroup:
-    def __init__(self, name, description="", parser=None):
-        self.name = name
-        self.description = description
-        self.options = []
-        self.parser = parser 
-
-    def addoption(self, *optnames, **attrs):
-        """ add an option to this group. """
-        option = optparse.Option(*optnames, **attrs)
-        self._addoption_instance(option, shortupper=False)
-
-    def _addoption(self, *optnames, **attrs):
-        option = optparse.Option(*optnames, **attrs)
-        self._addoption_instance(option, shortupper=True)
-
-    def _addoption_instance(self, option, shortupper=False):
-        if not shortupper:
-            for opt in option._short_opts:
-                if opt[0] == '-' and opt[1].islower(): 
-                    raise ValueError("lowercase shortoptions reserved")
-        if self.parser:
-            self.parser.processoption(option)
-        self.options.append(option)
-
-
-class MyOptionParser(optparse.OptionParser):
-    def __init__(self, parser):
-        self._parser = parser 
-        optparse.OptionParser.__init__(self, usage=parser._usage)
-    def format_epilog(self, formatter):
-        hints = self._parser.hints 
-        if hints:
-            s = "\n".join(["hint: " + x for x in hints]) + "\n"
-            s = "\n" + s + "\n"
-            return s
-        return ""

diff --git a/pypy/doc/config/objspace.std.optimized_int_add.txt b/pypy/doc/config/objspace.std.optimized_int_add.txt
deleted file mode 100644
--- a/pypy/doc/config/objspace.std.optimized_int_add.txt
+++ /dev/null
@@ -1,2 +0,0 @@
-Optimize the addition of two integers a bit. Enabling this option gives small
-speedups.

diff --git a/py/_test/pycollect.py b/py/_test/pycollect.py
deleted file mode 100644
--- a/py/_test/pycollect.py
+++ /dev/null
@@ -1,399 +0,0 @@
-"""
-Python related collection nodes.  
-""" 
-import py
-import inspect
-from py._test.collect import configproperty, warnoldcollect
-from py._test import funcargs
-from py._code.code import TerminalRepr
-
-class PyobjMixin(object):
-    def obj(): 
-        def fget(self):
-            try: 
-                return self._obj   
-            except AttributeError: 
-                self._obj = obj = self._getobj() 
-                return obj 
-        def fset(self, value): 
-            self._obj = value 
-        return property(fget, fset, None, "underlying python object") 
-    obj = obj()
-
-    def _getobj(self):
-        return getattr(self.parent.obj, self.name)
-
-    def getmodpath(self, stopatmodule=True, includemodule=False):
-        """ return python path relative to the containing module. """
-        chain = self.listchain()
-        chain.reverse()
-        parts = []
-        for node in chain:
-            if isinstance(node, Instance):
-                continue
-            name = node.name 
-            if isinstance(node, Module):
-                assert name.endswith(".py")
-                name = name[:-3]
-                if stopatmodule:
-                    if includemodule:
-                        parts.append(name)
-                    break
-            parts.append(name)
-        parts.reverse()
-        s = ".".join(parts)
-        return s.replace(".[", "[")
-
-    def _getfslineno(self):
-        try:
-            return self._fslineno
-        except AttributeError:
-            pass
-        obj = self.obj
-        # xxx let decorators etc specify a sane ordering
-        if hasattr(obj, 'place_as'):
-            obj = obj.place_as
-
-        self._fslineno = py.code.getfslineno(obj)
-        return self._fslineno
-
-    def reportinfo(self):
-        fspath, lineno = self._getfslineno()
-        modpath = self.getmodpath()
-        return fspath, lineno, modpath 
-
-class PyCollectorMixin(PyobjMixin, py.test.collect.Collector): 
-    Class = configproperty('Class')
-    Instance = configproperty('Instance')
-    Function = configproperty('Function')
-    Generator = configproperty('Generator')
-    
-    def funcnamefilter(self, name): 
-        return name.startswith('test') 
-    def classnamefilter(self, name): 
-        return name.startswith('Test')
-
-    def collect(self):
-        l = self._deprecated_collect()
-        if l is not None:
-            return l
-        # NB. we avoid random getattrs and peek in the __dict__ instead
-        dicts = [getattr(self.obj, '__dict__', {})]
-        for basecls in inspect.getmro(self.obj.__class__):
-            dicts.append(basecls.__dict__)
-        seen = {}
-        l = []
-        for dic in dicts:
-            for name, obj in dic.items():
-                if name in seen:
-                    continue
-                seen[name] = True
-                if name[0] != "_":
-                    res = self.makeitem(name, obj)
-                    if res is None:
-                        continue
-                    if not isinstance(res, list):
-                        res = [res]
-                    l.extend(res)
-        l.sort(key=lambda item: item.reportinfo()[:2])
-        return l
-
-    def _deprecated_join(self, name):
-        if self.__class__.join != py.test.collect.Collector.join:
-            warnoldcollect()
-            return self.join(name)
-
-    def makeitem(self, name, obj):
-        return self.ihook.pytest_pycollect_makeitem(
-            collector=self, name=name, obj=obj)
-
-    def _istestclasscandidate(self, name, obj):
-        if self.classnamefilter(name) and \
-           inspect.isclass(obj):
-            if hasinit(obj):
-                # XXX WARN 
-                return False
-            return True
-
-    def _genfunctions(self, name, funcobj):
-        module = self.getparent(Module).obj
-        clscol = self.getparent(Class)
-        cls = clscol and clscol.obj or None
-        metafunc = funcargs.Metafunc(funcobj, config=self.config, 
-            cls=cls, module=module)
-        gentesthook = self.config.hook.pytest_generate_tests
-        plugins = funcargs.getplugins(self, withpy=True)
-        gentesthook.pcall(plugins, metafunc=metafunc)
-        if not metafunc._calls:
-            return self.Function(name, parent=self)
-        l = []
-        for callspec in metafunc._calls:
-            subname = "%s[%s]" %(name, callspec.id)
-            function = self.Function(name=subname, parent=self, 
-                callspec=callspec, callobj=funcobj)
-            l.append(function)
-        return l
-        
-class Module(py.test.collect.File, PyCollectorMixin):
-    def _getobj(self):
-        return self._memoizedcall('_obj', self._importtestmodule)
-
-    def _importtestmodule(self):
-        # we assume we are only called once per module 
-        mod = self.fspath.pyimport()
-        #print "imported test module", mod
-        self.config.pluginmanager.consider_module(mod)
-        return mod
-
-    def setup(self): 
-        if getattr(self.obj, 'disabled', 0):
-            py.log._apiwarn(">1.1.1", "%r uses 'disabled' which is deprecated, "
-                "use pytestmark=..., see pytest_skipping plugin" % (self.obj,))
-            py.test.skip("%r is disabled" %(self.obj,))
-        if hasattr(self.obj, 'setup_module'): 
-            #XXX: nose compat hack, move to nose plugin
-            # if it takes a positional arg, its probably a py.test style one
-            # so we pass the current module object
-            if inspect.getargspec(self.obj.setup_module)[0]:
-                self.obj.setup_module(self.obj)
-            else:
-                self.obj.setup_module()
-
-    def teardown(self): 
-        if hasattr(self.obj, 'teardown_module'): 
-            #XXX: nose compat hack, move to nose plugin
-            # if it takes a positional arg, its probably a py.test style one
-            # so we pass the current module object
-            if inspect.getargspec(self.obj.teardown_module)[0]:
-                self.obj.teardown_module(self.obj)
-            else:
-                self.obj.teardown_module()
-
-class Class(PyCollectorMixin, py.test.collect.Collector): 
-
-    def collect(self):
-        l = self._deprecated_collect()
-        if l is not None:
-            return l
-        return [self.Instance(name="()", parent=self)]
-
-    def setup(self): 
-        if getattr(self.obj, 'disabled', 0):
-            py.log._apiwarn(">1.1.1", "%r uses 'disabled' which is deprecated, "
-                "use pytestmark=..., see pytest_skipping plugin" % (self.obj,))
-            py.test.skip("%r is disabled" %(self.obj,))
-        setup_class = getattr(self.obj, 'setup_class', None)
-        if setup_class is not None: 
-            setup_class = getattr(setup_class, 'im_func', setup_class) 
-            setup_class(self.obj) 
-
-    def teardown(self): 
-        teardown_class = getattr(self.obj, 'teardown_class', None) 
-        if teardown_class is not None: 
-            teardown_class = getattr(teardown_class, 'im_func', teardown_class) 
-            teardown_class(self.obj) 
-
-class Instance(PyCollectorMixin, py.test.collect.Collector): 
-    def _getobj(self): 
-        return self.parent.obj()  
-    def Function(self): 
-        return getattr(self.obj, 'Function', 
-                       PyCollectorMixin.Function.__get__(self)) # XXX for python 2.2
-    def _keywords(self):
-        return []
-    Function = property(Function)
-
-    #def __repr__(self):
-    #    return "<%s of '%s'>" %(self.__class__.__name__, 
-    #                         self.parent.obj.__name__)
-
-    def newinstance(self):  
-        self.obj = self._getobj()
-        return self.obj
-
-class FunctionMixin(PyobjMixin):
-    """ mixin for the code common to Function and Generator.
-    """
-
-    def setup(self): 
-        """ perform setup for this test function. """
-        if inspect.ismethod(self.obj):
-            name = 'setup_method' 
-        else: 
-            name = 'setup_function' 
-        if isinstance(self.parent, Instance):
-            obj = self.parent.newinstance()
-            self.obj = self._getobj()
-        else:
-            obj = self.parent.obj 
-        setup_func_or_method = getattr(obj, name, None)
-        if setup_func_or_method is not None: 
-            setup_func_or_method(self.obj) 
-
-    def teardown(self): 
-        """ perform teardown for this test function. """
-        if inspect.ismethod(self.obj):
-            name = 'teardown_method' 
-        else: 
-            name = 'teardown_function' 
-        obj = self.parent.obj 
-        teardown_func_or_meth = getattr(obj, name, None)
-        if teardown_func_or_meth is not None: 
-            teardown_func_or_meth(self.obj) 
-
-    def _prunetraceback(self, traceback):
-        if hasattr(self, '_obj') and not self.config.option.fulltrace: 
-            code = py.code.Code(self.obj) 
-            path, firstlineno = code.path, code.firstlineno 
-            ntraceback = traceback.cut(path=path, firstlineno=firstlineno)
-            if ntraceback == traceback:
-                ntraceback = ntraceback.cut(path=path)
-                if ntraceback == traceback:
-                    ntraceback = ntraceback.cut(excludepath=py._pydir)
-            traceback = ntraceback.filter()
-        return traceback 
-
-    def _repr_failure_py(self, excinfo, style="long"):
-        if excinfo.errisinstance(funcargs.FuncargRequest.LookupError):
-            fspath, lineno, msg = self.reportinfo()
-            lines, _ = inspect.getsourcelines(self.obj)
-            for i, line in enumerate(lines):
-                if line.strip().startswith('def'):
-                    return FuncargLookupErrorRepr(fspath, lineno,
-            lines[:i+1], str(excinfo.value))
-        return super(FunctionMixin, self)._repr_failure_py(excinfo, 
-            style=style)
-
-    def repr_failure(self, excinfo, outerr=None):
-        assert outerr is None, "XXX outerr usage is deprecated"
-        return self._repr_failure_py(excinfo, 
-            style=self.config.getvalue("tbstyle"))
-
-    shortfailurerepr = "F"
-
-class FuncargLookupErrorRepr(TerminalRepr):
-    def __init__(self, filename, firstlineno, deflines, errorstring):
-        self.deflines = deflines
-        self.errorstring = errorstring
-        self.filename = filename
-        self.firstlineno = firstlineno
-
-    def toterminal(self, tw):
-        tw.line()
-        for line in self.deflines:
-            tw.line("    " + line.strip())
-        for line in self.errorstring.split("\n"):
-            tw.line("        " + line.strip(), red=True)
-        tw.line()
-        tw.line("%s:%d" % (self.filename, self.firstlineno+1))
-
-class Generator(FunctionMixin, PyCollectorMixin, py.test.collect.Collector): 
-    def collect(self):
-        # test generators are seen as collectors but they also 
-        # invoke setup/teardown on popular request 
-        # (induced by the common "test_*" naming shared with normal tests)
-        self.config._setupstate.prepare(self) 
-        l = []
-        seen = {}
-        for i, x in enumerate(self.obj()): 
-            name, call, args = self.getcallargs(x)
-            if not py.builtin.callable(call): 
-                raise TypeError("%r yielded non callable test %r" %(self.obj, call,))
-            if name is None:
-                name = "[%d]" % i
-            else:
-                name = "['%s']" % name
-            if name in seen:
-                raise ValueError("%r generated tests with non-unique name %r" %(self, name))
-            seen[name] = True
-            l.append(self.Function(name, self, args=args, callobj=call))
-        return l
-        
-    def getcallargs(self, obj):
-        if not isinstance(obj, (tuple, list)):
-            obj = (obj,)
-        # explict naming
-        if isinstance(obj[0], py.builtin._basestring):
-            name = obj[0]
-            obj = obj[1:]
-        else:
-            name = None
-        call, args = obj[0], obj[1:]
-        return name, call, args 
-    
-
-#
-#  Test Items 
-#
-_dummy = object()
-class Function(FunctionMixin, py.test.collect.Item): 
-    """ a Function Item is responsible for setting up  
-        and executing a Python callable test object.
-    """
-    _genid = None
-    def __init__(self, name, parent=None, args=None, config=None,
-                 callspec=None, callobj=_dummy):
-        super(Function, self).__init__(name, parent, config=config)
-        self._args = args 
-        if self._isyieldedfunction():
-            assert not callspec, "yielded functions (deprecated) cannot have funcargs" 
-        else:
-            if callspec is not None:
-                self.funcargs = callspec.funcargs or {}
-                self._genid = callspec.id 
-                if hasattr(callspec, "param"):
-                    self._requestparam = callspec.param
-            else:
-                self.funcargs = {}
-        if callobj is not _dummy: 
-            self._obj = callobj 
-        self.function = getattr(self.obj, 'im_func', self.obj)
-
-    def _getobj(self):
-        name = self.name
-        i = name.find("[") # parametrization
-        if i != -1:
-            name = name[:i]
-        return getattr(self.parent.obj, name)
-
-    def _isyieldedfunction(self):
-        return self._args is not None
-
-    def readkeywords(self):
-        d = super(Function, self).readkeywords()
-        d.update(py.builtin._getfuncdict(self.obj))
-        return d
-
-    def runtest(self):
-        """ execute the underlying test function. """
-        self.ihook.pytest_pyfunc_call(pyfuncitem=self)
-
-    def setup(self):
-        super(Function, self).setup()
-        if hasattr(self, 'funcargs'): 
-            funcargs.fillfuncargs(self)
-
-    def __eq__(self, other):
-        try:
-            return (self.name == other.name and 
-                    self._args == other._args and
-                    self.parent == other.parent and
-                    self.obj == other.obj and 
-                    getattr(self, '_genid', None) == 
-                    getattr(other, '_genid', None) 
-            )
-        except AttributeError:
-            pass
-        return False
-
-    def __ne__(self, other):
-        return not self == other
-    
-    def __hash__(self):
-        return hash((self.parent, self.name))
-
-def hasinit(obj):
-    init = getattr(obj, '__init__', None)
-    if init:
-        if init != object.__init__:
-            return True

diff --git a/pypy/doc/config/objspace.nofaking.txt b/pypy/doc/config/objspace.nofaking.txt
deleted file mode 100644
--- a/pypy/doc/config/objspace.nofaking.txt
+++ /dev/null
@@ -1,7 +0,0 @@
-This options prevents the automagic borrowing of implementations of
-modules and types not present in PyPy from CPython.
-
-As such, it is required when translating, as then there is no CPython
-to borrow from.  For running py.py it is useful for testing the
-implementation of modules like "posix", but it makes everything even
-slower than it is already.

diff --git a/pypy/rpython/llinterp.py b/pypy/rpython/llinterp.py
--- a/pypy/rpython/llinterp.py
+++ b/pypy/rpython/llinterp.py
@@ -5,6 +5,7 @@
 from pypy.rpython.lltypesystem import rclass
 from pypy.rpython.ootypesystem import ootype
 from pypy.rlib.objectmodel import ComputedIntSymbolic, CDefinedIntSymbolic
+from pypy.rlib.objectmodel import Symbolic
 from pypy.rlib import rstackovf
 
 import sys, os
@@ -1152,7 +1153,9 @@
         # special case
         if type(x) is CDefinedIntSymbolic:
             x = x.default
-        assert isinstance(x, int)
+        # if type(x) is a subclass of Symbolic, bool(x) will usually raise
+        # a TypeError -- unless __nonzero__ has been explicitly overridden.
+        assert isinstance(x, (int, Symbolic))
         return bool(x)
 
     # read frame var support

diff --git a/py/_cmdline/pycountloc.py b/py/_cmdline/pycountloc.py
deleted file mode 100755
--- a/py/_cmdline/pycountloc.py
+++ /dev/null
@@ -1,94 +0,0 @@
-#!/usr/bin/env python
-
-# hands on script to compute the non-empty Lines of Code 
-# for tests and non-test code 
-
-"""\
-py.countloc [PATHS]
-
-Count (non-empty) lines of python code and number of python files recursively
-starting from a list of paths given on the command line (starting from the
-current working directory). Distinguish between test files and normal ones and
-report them separately.
-"""
-import py
-
-def main():
-    parser = py.std.optparse.OptionParser(usage=__doc__)
-    (options, args) = parser.parse_args()
-    countloc(args)
-   
-def nodot(p):
-    return p.check(dotfile=0)
-
-class FileCounter(object):  
-    def __init__(self):
-        self.file2numlines = {}
-        self.numlines = 0
-        self.numfiles = 0
-
-    def addrecursive(self, directory, fil="*.py", rec=nodot):
-        for x in directory.visit(fil, rec): 
-            self.addfile(x)
-
-    def addfile(self, fn, emptylines=False):
-        if emptylines:
-            s = len(p.readlines())
-        else:
-            s = 0
-            for i in fn.readlines():
-                if i.strip():
-                    s += 1
-        self.file2numlines[fn] = s 
-        self.numfiles += 1
-        self.numlines += s
-
-    def getnumlines(self, fil): 
-        numlines = 0
-        for path, value in self.file2numlines.items():
-            if fil(path): 
-                numlines += value
-        return numlines 
-
-    def getnumfiles(self, fil): 
-        numfiles = 0
-        for path in self.file2numlines:
-            if fil(path): 
-                numfiles += 1
-        return numfiles
-
-def get_loccount(locations=None):
-    if locations is None:
-        localtions = [py.path.local()]
-    counter = FileCounter()
-    for loc in locations: 
-        counter.addrecursive(loc, '*.py', rec=nodot)
-
-    def istestfile(p):
-        return p.check(fnmatch='test_*.py')
-    isnottestfile = lambda x: not istestfile(x)
-
-    numfiles = counter.getnumfiles(isnottestfile) 
-    numlines = counter.getnumlines(isnottestfile) 
-    numtestfiles = counter.getnumfiles(istestfile)
-    numtestlines = counter.getnumlines(istestfile)
-   
-    return counter, numfiles, numlines, numtestfiles, numtestlines
-
-def countloc(paths=None):
-    if not paths:
-        paths = ['.']
-    locations = [py.path.local(x) for x in paths]
-    (counter, numfiles, numlines, numtestfiles,
-     numtestlines) = get_loccount(locations)
-
-    items = counter.file2numlines.items()
-    items.sort(lambda x,y: cmp(x[1], y[1]))
-    for x, y in items:
-        print("%3d %30s" % (y,x))
-    
-    print("%30s %3d" %("number of testfiles", numtestfiles))
-    print("%30s %3d" %("number of non-empty testlines", numtestlines))
-    print("%30s %3d" %("number of files", numfiles))
-    print("%30s %3d" %("number of non-empty lines", numlines))
-

diff --git a/py/_cmdline/pyconvert_unittest.py b/py/_cmdline/pyconvert_unittest.py
deleted file mode 100644
--- a/py/_cmdline/pyconvert_unittest.py
+++ /dev/null
@@ -1,253 +0,0 @@
-import re
-import sys
-
-try:
-    import parser
-except ImportError:
-    parser = None
-
-d={}
-#  d is the dictionary of unittest changes, keyed to the old name
-#  used by unittest.
-#  d[old][0] is the new replacement function.
-#  d[old][1] is the operator you will substitute, or '' if there is none.
-#  d[old][2] is the possible number of arguments to the unittest
-#  function.
-
-# Old Unittest Name             new name         operator  # of args
-d['assertRaises']           = ('raises',               '', ['Any'])
-d['fail']                   = ('raise AssertionError', '', [0,1])
-d['assert_']                = ('assert',               '', [1,2])
-d['failIf']                 = ('assert not',           '', [1,2])
-d['assertEqual']            = ('assert',            ' ==', [2,3])
-d['failIfEqual']            = ('assert not',        ' ==', [2,3])
-d['assertIn']               = ('assert',            ' in', [2,3])
-d['assertNotIn']            = ('assert',            ' not in', [2,3])
-d['assertNotEqual']         = ('assert',            ' !=', [2,3])
-d['failUnlessEqual']        = ('assert',            ' ==', [2,3])
-d['assertAlmostEqual']      = ('assert round',      ' ==', [2,3,4])
-d['failIfAlmostEqual']      = ('assert not round',  ' ==', [2,3,4])
-d['assertNotAlmostEqual']   = ('assert round',      ' !=', [2,3,4])
-d['failUnlessAlmostEquals'] = ('assert round',      ' ==', [2,3,4])
-
-#  the list of synonyms
-d['failUnlessRaises']      = d['assertRaises']
-d['failUnless']            = d['assert_']
-d['assertEquals']          = d['assertEqual']
-d['assertNotEquals']       = d['assertNotEqual']
-d['assertAlmostEquals']    = d['assertAlmostEqual']
-d['assertNotAlmostEquals'] = d['assertNotAlmostEqual']
-
-# set up the regular expressions we will need
-leading_spaces = re.compile(r'^(\s*)') # this never fails
-
-pat = ''
-for k in d.keys():  # this complicated pattern to match all unittests
-    pat += '|' + r'^(\s*)' + 'self.' + k + r'\(' # \tself.whatever(
-
-old_names = re.compile(pat[1:])
-linesep='\n'        # nobody will really try to convert files not read
-                    # in text mode, will they?
-
-
-def blocksplitter(fp):
-    '''split a file into blocks that are headed by functions to rename'''
-
-    blocklist = []
-    blockstring = ''
-
-    for line in fp:
-        interesting = old_names.match(line)
-        if interesting :
-            if blockstring:
-                blocklist.append(blockstring)
-                blockstring = line # reset the block
-        else:
-            blockstring += line
-            
-    blocklist.append(blockstring)
-    return blocklist
-
-def rewrite_utest(block):
-    '''rewrite every block to use the new utest functions'''
-
-    '''returns the rewritten unittest, unless it ran into problems,
-       in which case it just returns the block unchanged.
-    '''
-    utest = old_names.match(block)
-
-    if not utest:
-        return block
-
-    old = utest.group(0).lstrip()[5:-1] # the name we want to replace
-    new = d[old][0] # the name of the replacement function
-    op  = d[old][1] # the operator you will use , or '' if there is none.
-    possible_args = d[old][2]  # a list of the number of arguments the
-                               # unittest function could possibly take.
-                
-    if possible_args == ['Any']: # just rename assertRaises & friends
-        return re.sub('self.'+old, new, block)
-
-    message_pos = possible_args[-1]
-    # the remaining unittests can have an optional message to print
-    # when they fail.  It is always the last argument to the function.
-
-    try:
-        indent, argl, trailer = decompose_unittest(old, block)
-
-    except SyntaxError: # but we couldn't parse it!
-        return block
-    
-    argnum = len(argl)
-    if argnum not in possible_args:
-        # sanity check - this one isn't real either
-        return block
-
-    elif argnum == message_pos:
-        message = argl[-1]
-        argl = argl[:-1]
-    else:
-        message = None
-
-    if argnum is 0 or (argnum is 1 and argnum is message_pos): #unittest fail()
-        string = ''
-        if message:
-            message = ' ' + message
-
-    elif message_pos is 4:  # assertAlmostEqual & friends
-        try:
-            pos = argl[2].lstrip()
-        except IndexError:
-            pos = '7' # default if none is specified
-        string = '(%s -%s, %s)%s 0' % (argl[0], argl[1], pos, op )
-
-    else: # assert_, assertEquals and all the rest
-        string = ' ' + op.join(argl)
-
-    if message:
-        string = string + ',' + message
-
-    return indent + new + string + trailer
-
-def decompose_unittest(old, block):
-    '''decompose the block into its component parts'''
-
-    ''' returns indent, arglist, trailer 
-        indent -- the indentation
-        arglist -- the arguments to the unittest function
-        trailer -- any extra junk after the closing paren, such as #commment
-    '''
- 
-    indent = re.match(r'(\s*)', block).group()
-    pat = re.search('self.' + old + r'\(', block)
-
-    args, trailer = get_expr(block[pat.end():], ')')
-    arglist = break_args(args, [])
-
-    if arglist == ['']: # there weren't any
-        return indent, [], trailer
-
-    for i in range(len(arglist)):
-        try:
-            parser.expr(arglist[i].lstrip('\t '))
-        except SyntaxError:
-            if i == 0:
-                arglist[i] = '(' + arglist[i] + ')'
-            else:
-                arglist[i] = ' (' + arglist[i] + ')'
-
-    return indent, arglist, trailer
-
-def break_args(args, arglist):
-    '''recursively break a string into a list of arguments'''
-    try:
-        first, rest = get_expr(args, ',')
-        if not rest:
-            return arglist + [first]
-        else:
-            return [first] + break_args(rest, arglist)
-    except SyntaxError:
-        return arglist + [args]
-
-def get_expr(s, char):
-    '''split a string into an expression, and the rest of the string'''
-
-    pos=[]
-    for i in range(len(s)):
-        if s[i] == char:
-            pos.append(i)
-    if pos == []:
-        raise SyntaxError # we didn't find the expected char.  Ick.
-     
-    for p in pos:
-        # make the python parser do the hard work of deciding which comma
-        # splits the string into two expressions
-        try:
-            parser.expr('(' + s[:p] + ')')
-            return s[:p], s[p+1:]
-        except SyntaxError: # It's not an expression yet
-            pass
-    raise SyntaxError       # We never found anything that worked.
-
-
-def main():
-    import sys
-    import py
-
-    usage = "usage: %prog [-s [filename ...] | [-i | -c filename ...]]"
-    optparser = py.std.optparse.OptionParser(usage)
-
-    def select_output (option, opt, value, optparser, **kw):
-        if hasattr(optparser, 'output'):
-            optparser.error(
-                'Cannot combine -s -i and -c options. Use one only.')
-        else:
-            optparser.output = kw['output']
-
-    optparser.add_option("-s", "--stdout", action="callback",
-                         callback=select_output,
-                         callback_kwargs={'output':'stdout'},
-                         help="send your output to stdout")
-
-    optparser.add_option("-i", "--inplace", action="callback",
-                         callback=select_output,
-                         callback_kwargs={'output':'inplace'},
-                         help="overwrite files in place")
-
-    optparser.add_option("-c", "--copy", action="callback",
-                         callback=select_output,
-                         callback_kwargs={'output':'copy'},
-                         help="copy files ... fn.py --> fn_cp.py")
-
-    options, args = optparser.parse_args()
-
-    output = getattr(optparser, 'output', 'stdout')
-
-    if output in ['inplace', 'copy'] and not args:
-        optparser.error(
-                '-i and -c option  require at least one filename')
-
-    if not args:
-        s = ''
-        for block in blocksplitter(sys.stdin):
-            s += rewrite_utest(block)
-        sys.stdout.write(s)
-
-    else:
-        for infilename in args: # no error checking to see if we can open, etc.
-            infile = file(infilename)
-            s = ''
-            for block in blocksplitter(infile):
-                s += rewrite_utest(block)
-            if output == 'inplace':
-                outfile = file(infilename, 'w+')
-            elif output == 'copy': # yes, just go clobber any existing .cp
-                outfile = file (infilename[:-3]+ '_cp.py', 'w+')
-            else:
-                outfile = sys.stdout
-
-            outfile.write(s)
-
-    
-if __name__ == '__main__':
-    main()

diff --git a/pypy/module/cpyext/test/conftest.py b/pypy/module/cpyext/test/conftest.py
--- a/pypy/module/cpyext/test/conftest.py
+++ b/pypy/module/cpyext/test/conftest.py
@@ -1,5 +1,6 @@
 import py
-from pypy.conftest import option, gettestobjspace
+import pytest
+from pypy.conftest import gettestobjspace
 
 def pytest_ignore_collect(path, config):
     if config.option.runappdirect:

diff --git a/py/_cmdline/__init__.py b/py/_cmdline/__init__.py
deleted file mode 100644
--- a/py/_cmdline/__init__.py
+++ /dev/null
@@ -1,1 +0,0 @@
-#

diff --git a/pypy/doc/config/translation.gcrootfinder.txt b/pypy/doc/config/translation.gcrootfinder.txt
deleted file mode 100644
--- a/pypy/doc/config/translation.gcrootfinder.txt
+++ /dev/null
@@ -1,15 +0,0 @@
-Choose method how to find roots in the GC. Boehm and refcounting have their own
-methods, this is mostly only interesting for framework GCs. For those you have
-a choice of various alternatives:
-
- - use a shadow stack (XXX link to paper), e.g. explicitly maintaining a stack
-   of roots
-
- - use stackless to find roots by unwinding the stack.  Requires
-   :config:`translation.stackless`.  Note that this turned out to
-   be slower than just using a shadow stack.
-
- - use GCC and i386 specific assembler hackery to find the roots on the stack.
-   This is fastest but platform specific.
-
- - Use LLVM's GC facilities to find the roots.

diff --git a/py/_path/gateway/channeltest.py b/py/_path/gateway/channeltest.py
deleted file mode 100644
--- a/py/_path/gateway/channeltest.py
+++ /dev/null
@@ -1,65 +0,0 @@
-import threading
-
-
-class PathServer:
-
-    def __init__(self, channel):
-        self.channel = channel
-        self.C2P = {}
-        self.next_id = 0
-        threading.Thread(target=self.serve).start()
-
-    def p2c(self, path):
-        id = self.next_id
-        self.next_id += 1
-        self.C2P[id] = path
-        return id
-
-    def command_LIST(self, id, *args):
-        path = self.C2P[id]
-        answer = [(self.p2c(p), p.basename) for p in path.listdir(*args)]
-        self.channel.send(answer)
-
-    def command_DEL(self, id):
-        del self.C2P[id]
-
-    def command_GET(self, id, spec):
-        path = self.C2P[id]
-        self.channel.send(path._getbyspec(spec))
-
-    def command_READ(self, id):
-        path = self.C2P[id]
-        self.channel.send(path.read())
-
-    def command_JOIN(self, id, resultid, *args):
-        path = self.C2P[id]
-        assert resultid not in self.C2P
-        self.C2P[resultid] = path.join(*args)
-
-    def command_DIRPATH(self, id, resultid):
-        path = self.C2P[id]
-        assert resultid not in self.C2P
-        self.C2P[resultid] = path.dirpath()
-
-    def serve(self):
-        try:
-            while 1:
-                msg = self.channel.receive()
-                meth = getattr(self, 'command_' + msg[0])
-                meth(*msg[1:])
-        except EOFError:
-            pass
-
-if __name__ == '__main__':
-    import py
-    gw = execnet.PopenGateway()
-    channel = gw._channelfactory.new()
-    srv = PathServer(channel)
-    c = gw.remote_exec("""
-        import remotepath
-        p = remotepath.RemotePath(channel.receive(), channel.receive())
-        channel.send(len(p.listdir()))
-    """)
-    c.send(channel)
-    c.send(srv.p2c(py.path.local('/tmp')))
-    print(c.receive())

diff --git a/py/_plugin/standalonetemplate.py b/py/_plugin/standalonetemplate.py
deleted file mode 100755
--- a/py/_plugin/standalonetemplate.py
+++ /dev/null
@@ -1,63 +0,0 @@
-#! /usr/bin/env python
-
-sources = """
- at SOURCES@"""
-
-import sys
-import base64
-import zlib
-import imp
-
-class DictImporter(object):
-    def __init__(self, sources):
-        self.sources = sources
-
-    def find_module(self, fullname, path=None):
-        if fullname in self.sources:
-            return self
-        if fullname+'.__init__' in self.sources:
-            return self
-        return None
-
-    def load_module(self, fullname):
-        # print "load_module:",  fullname
-        from types import ModuleType
-        try:
-            s = self.sources[fullname]
-            is_pkg = False
-        except KeyError:
-            s = self.sources[fullname+'.__init__']
-            is_pkg = True
-        
-        co = compile(s, fullname, 'exec')
-        module = sys.modules.setdefault(fullname, ModuleType(fullname))
-        module.__file__ = "%s/%s" % (__file__, fullname)
-        module.__loader__ = self
-        if is_pkg:
-            module.__path__ = [fullname]
-            
-        do_exec(co, module.__dict__)
-        return sys.modules[fullname]
-
-    def get_source(self, name):
-        res = self.sources.get(name)
-        if res is None:
-            res = self.sources.get(name+'.__init__')
-        return res
-
-if __name__ == "__main__":
-    if sys.version_info >= (3,0):
-        exec("def do_exec(co, loc): exec(co, loc)\n")
-        import pickle
-        sources = sources.encode("ascii") # ensure bytes 
-        sources = pickle.loads(zlib.decompress(base64.decodebytes(sources)))
-    else:
-        import cPickle as pickle
-        exec("def do_exec(co, loc): exec co in loc\n")
-        sources = pickle.loads(zlib.decompress(base64.decodestring(sources)))
-
-    importer = DictImporter(sources)
-    sys.meta_path.append(importer)
-
-    import py
-    py.cmdline.pytest()

diff --git a/pypy/doc/config/objspace.usemodules.crypt.txt b/pypy/doc/config/objspace.usemodules.crypt.txt
deleted file mode 100644
--- a/pypy/doc/config/objspace.usemodules.crypt.txt
+++ /dev/null
@@ -1,2 +0,0 @@
-Use the 'crypt' module. 
-This module is expected to be fully working.

diff --git a/py/_test/collect.py b/py/_test/collect.py
deleted file mode 100644
--- a/py/_test/collect.py
+++ /dev/null
@@ -1,418 +0,0 @@
-"""
-test collection nodes, forming a tree, Items are leafs.
-""" 
-import py
-
-def configproperty(name):
-    def fget(self):
-        #print "retrieving %r property from %s" %(name, self.fspath)
-        return self.config._getcollectclass(name, self.fspath)
-    return property(fget)
-
-class HookProxy:
-    def __init__(self, node):
-        self.node = node
-    def __getattr__(self, name):
-        if name[0] == "_":
-            raise AttributeError(name)
-        hookmethod = getattr(self.node.config.hook, name)
-        def call_matching_hooks(**kwargs):
-            plugins = self.node.config._getmatchingplugins(self.node.fspath)
-            return hookmethod.pcall(plugins, **kwargs)
-        return call_matching_hooks
-
-class Node(object): 
-    """ base class for all Nodes in the collection tree.  
-        Collector subclasses have children, Items are terminal nodes. 
-    """
-    def __init__(self, name, parent=None, config=None):
-        self.name = name 
-        self.parent = parent
-        self.config = config or parent.config
-        self.fspath = getattr(parent, 'fspath', None) 
-        self.ihook = HookProxy(self)
-
-    def _reraiseunpicklingproblem(self):
-        if hasattr(self, '_unpickle_exc'):
-            py.builtin._reraise(*self._unpickle_exc)
-            
-    # 
-    # note to myself: Pickling is uh.
-    # 
-    def __getstate__(self):
-        return (self.name, self.parent)
-    def __setstate__(self, nameparent):
-        name, parent = nameparent
-        try:
-            colitems = parent._memocollect()
-            for colitem in colitems:
-                if colitem.name == name:
-                    # we are a copy that will not be returned
-                    # by our parent 
-                    self.__dict__ = colitem.__dict__
-                    break
-            else:
-                raise ValueError("item %r not found in parent collection %r" %(
-                    name, [x.name for x in colitems]))
-        except KeyboardInterrupt:
-            raise
-        except Exception:
-            # our parent can't collect us but we want unpickling to
-            # otherwise continue - self._reraiseunpicklingproblem() will
-            # reraise the problem 
-            self._unpickle_exc = py.std.sys.exc_info()
-            self.name = name 
-            self.parent = parent 
-            self.config = parent.config
-
-    def __repr__(self): 
-        if getattr(self.config.option, 'debug', False):
-            return "<%s %r %0x>" %(self.__class__.__name__, 
-                getattr(self, 'name', None), id(self))
-        else:
-            return "<%s %r>" %(self.__class__.__name__, 
-                getattr(self, 'name', None))
-
-    # methods for ordering nodes
-
-    def __eq__(self, other): 
-        if not isinstance(other, Node):
-            return False 
-        return self.name == other.name and self.parent == other.parent 
-
-    def __ne__(self, other):
-        return not self == other
-    
-    def __hash__(self):
-        return hash((self.name, self.parent))
- 
-    def setup(self): 
-        pass
-
-    def teardown(self): 
-        pass
-
-    def _memoizedcall(self, attrname, function):
-        exattrname = "_ex_" + attrname 
-        failure = getattr(self, exattrname, None)
-        if failure is not None:
-            py.builtin._reraise(failure[0], failure[1], failure[2])
-        if hasattr(self, attrname):
-            return getattr(self, attrname)
-        try:
-            res = function()
-        except (KeyboardInterrupt, SystemExit):
-            raise
-        except:
-            failure = py.std.sys.exc_info()
-            setattr(self, exattrname, failure)
-            raise
-        setattr(self, attrname, res)
-        return res 
-
-    def listchain(self):
-        """ return list of all parent collectors up to self, 
-            starting from root of collection tree. """ 
-        l = [self]
-        while 1: 
-            x = l[0]
-            if x.parent is not None and x.parent.parent is not None:
-                l.insert(0, x.parent)
-            else: 
-                return l 
-
-    def listnames(self): 
-        return [x.name for x in self.listchain()]
-
-    def getparent(self, cls):
-        current = self
-        while current and not isinstance(current, cls):
-            current = current.parent
-        return current 
-    
-    def readkeywords(self):
-        return dict([(x, True) for x in self._keywords()])
-
-    def _keywords(self):
-        return [self.name]
-
-    def _skipbykeyword(self, keywordexpr): 
-        """ return True if they given keyword expression means to 
-            skip this collector/item. 
-        """
-        if not keywordexpr:
-            return
-        chain = self.listchain()
-        for key in filter(None, keywordexpr.split()):
-            eor = key[:1] == '-'
-            if eor:
-                key = key[1:]
-            if not (eor ^ self._matchonekeyword(key, chain)):
-                return True
-
-    def _matchonekeyword(self, key, chain):
-        elems = key.split(".")
-        # XXX O(n^2), anyone cares?
-        chain = [item.readkeywords() for item in chain if item._keywords()]
-        for start, _ in enumerate(chain):
-            if start + len(elems) > len(chain):
-                return False
-            for num, elem in enumerate(elems):
-                for keyword in chain[num + start]:
-                    ok = False
-                    if elem in keyword:
-                        ok = True
-                        break
-                if not ok:
-                    break
-            if num == len(elems) - 1 and ok:
-                return True
-        return False
-
-    def _prunetraceback(self, traceback):
-        return traceback 
-
-    def _repr_failure_py(self, excinfo, style=None):
-        excinfo.traceback = self._prunetraceback(excinfo.traceback)
-        # XXX should excinfo.getrepr record all data and toterminal()
-        # process it? 
-        if style is None:
-            if self.config.option.tbstyle == "short":
-                style = "short"
-            else:
-                style = "long"
-        return excinfo.getrepr(funcargs=True, 
-                               showlocals=self.config.option.showlocals,
-                               style=style)
-
-    repr_failure = _repr_failure_py
-    shortfailurerepr = "F"
-
-class Collector(Node):
-    """ 
-        Collector instances create children through collect()
-        and thus iteratively build a tree.  attributes::
-
-        parent: attribute pointing to the parent collector
-                (or None if this is the root collector)
-        name:   basename of this collector object
-    """
-    Directory = configproperty('Directory')
-    Module = configproperty('Module')
-
-    def collect(self):
-        """ returns a list of children (items and collectors) 
-            for this collection node. 
-        """
-        raise NotImplementedError("abstract")
-
-    def collect_by_name(self, name):
-        """ return a child matching the given name, else None. """
-        for colitem in self._memocollect():
-            if colitem.name == name:
-                return colitem
-
-    def repr_failure(self, excinfo, outerr=None):
-        """ represent a failure. """
-        assert outerr is None, "XXX deprecated"
-        return self._repr_failure_py(excinfo)
-
-    def _memocollect(self):
-        """ internal helper method to cache results of calling collect(). """
-        return self._memoizedcall('_collected', self.collect)
-
-    # **********************************************************************
-    # DEPRECATED METHODS 
-    # **********************************************************************
-    
-    def _deprecated_collect(self):
-        # avoid recursion:
-        # collect -> _deprecated_collect -> custom run() ->
-        # super().run() -> collect
-        attrname = '_depcollectentered'
-        if hasattr(self, attrname):
-            return
-        setattr(self, attrname, True)
-        method = getattr(self.__class__, 'run', None)
-        if method is not None and method != Collector.run:
-            warnoldcollect(function=method)
-            names = self.run()
-            return [x for x in [self.join(name) for name in names] if x]
-
-    def run(self):
-        """ DEPRECATED: returns a list of names available from this collector.
-            You can return an empty list.  Callers of this method
-            must take care to catch exceptions properly.  
-        """
-        return [colitem.name for colitem in self._memocollect()]
-
-    def join(self, name): 
-        """  DEPRECATED: return a child collector or item for the given name.  
-             If the return value is None there is no such child. 
-        """
-        return self.collect_by_name(name)
-
-    def _prunetraceback(self, traceback):
-        if hasattr(self, 'fspath'):
-            path = self.fspath 
-            ntraceback = traceback.cut(path=self.fspath)
-            if ntraceback == traceback:
-                ntraceback = ntraceback.cut(excludepath=py._pydir)
-            traceback = ntraceback.filter()
-        return traceback 
-
-class FSCollector(Collector): 
-    def __init__(self, fspath, parent=None, config=None):
-        fspath = py.path.local(fspath) 
-        super(FSCollector, self).__init__(fspath.basename, parent, config=config)
-        self.fspath = fspath 
-
-    def __getstate__(self):
-        # RootCollector.getbynames() inserts a directory which we need
-        # to throw out here for proper re-instantiation
-        if isinstance(self.parent.parent, RootCollector):
-            assert self.parent.fspath == self.parent.parent.fspath, self.parent
-            return (self.name, self.parent.parent) # shortcut
-        return super(Collector, self).__getstate__()
-
-class File(FSCollector):
-    """ base class for collecting tests from a file. """
-
-class Directory(FSCollector): 
-    def recfilter(self, path): 
-        if path.check(dir=1, dotfile=0):
-            return path.basename not in ('CVS', '_darcs', '{arch}')
-
-    def collect(self):
-        l = self._deprecated_collect() 
-        if l is not None:
-            return l 
-        l = []
-        for path in self.fspath.listdir(sort=True): 
-            res = self.consider(path)
-            if res is not None:
-                if isinstance(res, (list, tuple)):
-                    l.extend(res)
-                else:
-                    l.append(res)
-        return l
-
-    def consider(self, path):
-        if self.ihook.pytest_ignore_collect(path=path, config=self.config):
-           return
-        if path.check(file=1):
-            res = self.consider_file(path)
-        elif path.check(dir=1):
-            res = self.consider_dir(path)
-        else:
-            res = None
-        if isinstance(res, list):
-            # throw out identical results
-            l = []
-            for x in res:
-                if x not in l:
-                    assert x.parent == self, (x.parent, self)
-                    assert x.fspath == path, (x.fspath, path)
-                    l.append(x)
-            res = l 
-        return res
-
-    def consider_file(self, path):
-        return self.ihook.pytest_collect_file(path=path, parent=self)
-
-    def consider_dir(self, path, usefilters=None):
-        if usefilters is not None:
-            py.log._apiwarn("0.99", "usefilters argument not needed")
-        return self.ihook.pytest_collect_directory(path=path, parent=self)
-
-class Item(Node): 
-    """ a basic test item. """
-    def _deprecated_testexecution(self):
-        if self.__class__.run != Item.run:
-            warnoldtestrun(function=self.run)
-        elif self.__class__.execute != Item.execute:
-            warnoldtestrun(function=self.execute)
-        else:
-            return False
-        self.run()
-        return True
-
-    def run(self):
-        """ deprecated, here because subclasses might call it. """
-        return self.execute(self.obj)
-
-    def execute(self, obj):
-        """ deprecated, here because subclasses might call it. """
-        return obj()
-
-    def reportinfo(self):
-        return self.fspath, None, ""
-        
-def warnoldcollect(function=None):
-    py.log._apiwarn("1.0", 
-        "implement collector.collect() instead of "
-        "collector.run() and collector.join()",
-        stacklevel=2, function=function)
-
-def warnoldtestrun(function=None):
-    py.log._apiwarn("1.0", 
-        "implement item.runtest() instead of "
-        "item.run() and item.execute()",
-        stacklevel=2, function=function)
-
-
-    
-class RootCollector(Directory):
-    def __init__(self, config):
-        Directory.__init__(self, config.topdir, parent=None, config=config)
-        self.name = None
-
-    def __repr__(self):
-        return "<RootCollector fspath=%r>" %(self.fspath,)
-        
-    def getbynames(self, names):
-        current = self.consider(self.config.topdir)
-        while names:
-            name = names.pop(0)
-            if name == ".": # special "identity" name
-                continue 
-            l = []
-            for x in current._memocollect():
-                if x.name == name:
-                    l.append(x)
-                elif x.fspath == current.fspath.join(name):
-                    l.append(x)
-                elif x.name == "()":
-                    names.insert(0, name)
-                    l.append(x)
-                    break
-            if not l:
-                raise ValueError("no node named %r below %r" %(name, current))
-            current = l[0]
-        return current
-
-    def totrail(self, node):
-        chain = node.listchain()
-        names = [self._getrelpath(chain[0].fspath)] 
-        names += [x.name for x in chain[1:]]
-        return names
-
-    def fromtrail(self, trail):
-        return self.config._rootcol.getbynames(trail)
-
-    def _getrelpath(self, fspath):
-        topdir = self.config.topdir
-        relpath = fspath.relto(topdir)
-        if not relpath:
-            if fspath == topdir:
-                relpath = "."
-            else:
-                raise ValueError("%r not relative to topdir %s" 
-                        %(self.fspath, topdir))
-        return relpath
-
-    def __getstate__(self):
-        return self.config
-
-    def __setstate__(self, config):
-        self.__init__(config)

diff --git a/pypy/doc/config/objspace.honor__builtins__.txt b/pypy/doc/config/objspace.honor__builtins__.txt
deleted file mode 100644

diff --git a/pypy/doc/config/objspace.std.withrangelist.txt b/pypy/doc/config/objspace.std.withrangelist.txt
deleted file mode 100644
--- a/pypy/doc/config/objspace.std.withrangelist.txt
+++ /dev/null
@@ -1,11 +0,0 @@
-Enable "range list" objects. They are an additional implementation of the Python
-``list`` type, indistinguishable for the normal user. Whenever the ``range``
-builtin is called, an range list is returned. As long as this list is not
-mutated (and for example only iterated over), it uses only enough memory to
-store the start, stop and step of the range. This makes using ``range`` as
-efficient as ``xrange``, as long as the result is only used in a ``for``-loop.
-
-See the section in `Standard Interpreter Optimizations`_ for more details.
-
-.. _`Standard Interpreter Optimizations`: ../interpreter-optimizations.html#range-lists
-

diff --git a/pypy/doc/config/objspace.std.optimized_comparison_op.txt b/pypy/doc/config/objspace.std.optimized_comparison_op.txt
deleted file mode 100644
--- a/pypy/doc/config/objspace.std.optimized_comparison_op.txt
+++ /dev/null
@@ -1,1 +0,0 @@
-Optimize the comparison of two integers a bit.

diff --git a/pypy/doc/config/objspace.soabi.txt b/pypy/doc/config/objspace.soabi.txt
deleted file mode 100644
--- a/pypy/doc/config/objspace.soabi.txt
+++ /dev/null
@@ -1,14 +0,0 @@
-This option controls the tag included into extension module file names.  The
-default is something like `pypy-14`, which means that `import foo` will look for
-a file named `foo.pypy-14.so` (or `foo.pypy-14.pyd` on Windows).
-
-This is an implementation of PEP3149_, with two differences:
-
- * the filename without tag `foo.so` is not considered.
- * the feature is also available on Windows.
-
-When set to the empty string (with `--soabi=`), the interpreter will only look
-for a file named `foo.so`, and will crash if this file was compiled for another
-Python interpreter.
-
-.. _PEP3149: http://www.python.org/dev/peps/pep-3149/

diff --git a/py/bin/py.cleanup b/py/bin/py.cleanup
deleted file mode 100755
--- a/py/bin/py.cleanup
+++ /dev/null
@@ -1,3 +0,0 @@
-#!/usr/bin/env python
-from _findpy import py
-py.cmdline.pycleanup()
\ No newline at end of file

diff --git a/py/_path/gateway/channeltest2.py b/py/_path/gateway/channeltest2.py
deleted file mode 100644
--- a/py/_path/gateway/channeltest2.py
+++ /dev/null
@@ -1,21 +0,0 @@
-import py
-from remotepath import RemotePath
-
-
-SRC = open('channeltest.py', 'r').read()
-
-SRC += '''
-import py
-srv = PathServer(channel.receive())
-channel.send(srv.p2c(py.path.local("/tmp")))
-'''
-
-
-#gw = execnet.SshGateway('codespeak.net')
-gw = execnet.PopenGateway()
-gw.remote_init_threads(5)
-c = gw.remote_exec(SRC, stdout=py.std.sys.stdout, stderr=py.std.sys.stderr)
-subchannel = gw._channelfactory.new()
-c.send(subchannel)
-
-p = RemotePath(subchannel, c.receive())

diff --git a/pypy/rpython/lltypesystem/rffi.py b/pypy/rpython/lltypesystem/rffi.py
--- a/pypy/rpython/lltypesystem/rffi.py
+++ b/pypy/rpython/lltypesystem/rffi.py
@@ -185,6 +185,15 @@
                     # XXX leaks if a unicode2wcharp() fails with MemoryError
                     # and was not the first in this function
                     freeme = arg
+            elif TARGET is VOIDP:
+                if arg is None:
+                    arg = lltype.nullptr(VOIDP.TO)
+                elif isinstance(arg, str):
+                    arg = str2charp(arg)
+                    freeme = arg
+                elif isinstance(arg, unicode):
+                    arg = unicode2wcharp(arg)
+                    freeme = arg
             elif _isfunctype(TARGET) and not _isllptr(arg):
                 # XXX pass additional arguments
                 if invoke_around_handlers:
@@ -550,9 +559,8 @@
 r_singlefloat = rarithmetic.r_singlefloat
 
 # void *   - for now, represented as char *
-VOIDP = lltype.Ptr(lltype.Array(lltype.Char, hints={'nolength': True}))
-VOIDP_real = lltype.Ptr(lltype.Array(lltype.Char, hints={'nolength': True, 'render_as_void': True}))
-NULL = lltype.nullptr(VOIDP.TO)
+VOIDP = lltype.Ptr(lltype.Array(lltype.Char, hints={'nolength': True, 'render_as_void': True}))
+NULL = None
 
 # void **
 VOIDPP = CArrayPtr(VOIDP)
@@ -640,6 +648,7 @@
             data_start = cast_ptr_to_adr(llstrtype(data)) + \
                 offsetof(STRTYPE, 'chars') + itemoffsetof(STRTYPE.chars, 0)
             return cast(TYPEP, data_start)
+    get_nonmovingbuffer._annenforceargs_ = [strtype]
 
     # (str, char*) -> None
     def free_nonmovingbuffer(data, buf):
@@ -658,6 +667,7 @@
         keepalive_until_here(data)
         if not followed_2nd_path:
             lltype.free(buf, flavor='raw')
+    free_nonmovingbuffer._annenforceargs_ = [strtype, None]
 
     # int -> (char*, str)
     def alloc_buffer(count):
@@ -672,6 +682,7 @@
         raw_buf = lltype.malloc(TYPEP.TO, count, flavor='raw')
         return raw_buf, lltype.nullptr(STRTYPE)
     alloc_buffer._always_inline_ = True # to get rid of the returned tuple
+    alloc_buffer._annenforceargs_ = [int]
 
     # (char*, str, int, int) -> None
     def str_from_buffer(raw_buf, gc_buf, allocated_size, needed_size):

diff --git a/pypy/doc/config/objspace.std.withropeunicode.txt b/pypy/doc/config/objspace.std.withropeunicode.txt
deleted file mode 100644
--- a/pypy/doc/config/objspace.std.withropeunicode.txt
+++ /dev/null
@@ -1,7 +0,0 @@
-Use ropes to implement unicode strings (and also normal strings).
-
-See the section in `Standard Interpreter Optimizations`_ for more details.
-
-.. _`Standard Interpreter Optimizations`: ../interpreter-optimizations.html#ropes
-
-

diff --git a/pypy/doc/config/objspace.usemodules.termios.txt b/pypy/doc/config/objspace.usemodules.termios.txt
deleted file mode 100644
--- a/pypy/doc/config/objspace.usemodules.termios.txt
+++ /dev/null
@@ -1,2 +0,0 @@
-Use the 'termios' module. 
-This module is expected to be fully working.

diff --git a/pypy/doc/config/objspace.std.prebuiltintto.txt b/pypy/doc/config/objspace.std.prebuiltintto.txt
deleted file mode 100644
--- a/pypy/doc/config/objspace.std.prebuiltintto.txt
+++ /dev/null
@@ -1,1 +0,0 @@
-See :config:`objspace.std.withprebuiltint`.

diff --git a/pypy/doc/config/objspace.std.multimethods.txt b/pypy/doc/config/objspace.std.multimethods.txt
deleted file mode 100644
--- a/pypy/doc/config/objspace.std.multimethods.txt
+++ /dev/null
@@ -1,8 +0,0 @@
-Choose the multimethod implementation.
-
-* ``doubledispatch`` turns
-  a multimethod call into a sequence of normal method calls.
-
-* ``mrd`` uses a technique known as Multiple Row Displacement
-  which precomputes a few compact tables of numbers and
-  function pointers.

diff --git a/py/_plugin/hookspec.py b/py/_plugin/hookspec.py
deleted file mode 100644
--- a/py/_plugin/hookspec.py
+++ /dev/null
@@ -1,172 +0,0 @@
-"""
-hook specifications for py.test plugins 
-"""
-
-# -------------------------------------------------------------------------
-# Command line and configuration 
-# -------------------------------------------------------------------------
-
-def pytest_namespace():
-    "return dict of name->object which will get stored at py.test. namespace"
-
-def pytest_addoption(parser):
-    "add optparse-style options via parser.addoption."
-
-def pytest_addhooks(pluginmanager):
-    "add hooks via pluginmanager.registerhooks(module)"
-
-def pytest_configure(config):
-    """ called after command line options have been parsed. 
-        and all plugins and initial conftest files been loaded. 
-    """
-
-def pytest_unconfigure(config):
-    """ called before test process is exited.  """
-
-# -------------------------------------------------------------------------
-# collection hooks
-# -------------------------------------------------------------------------
-
-def pytest_ignore_collect(path, config):
-    """ return true value to prevent considering this path for collection. 
-    This hook is consulted for all files and directories prior to considering
-    collection hooks. 
-    """
-pytest_ignore_collect.firstresult = True
-
-def pytest_collect_directory(path, parent):
-    """ return Collection node or None for the given path. """
-pytest_collect_directory.firstresult = True
-
-def pytest_collect_file(path, parent):
-    """ return Collection node or None for the given path. """
-
-def pytest_collectstart(collector):
-    """ collector starts collecting. """
-
-def pytest_collectreport(report):
-    """ collector finished collecting. """
-
-def pytest_deselected(items):
-    """ called for test items deselected by keyword. """
-
-def pytest_make_collect_report(collector):
-    """ perform a collection and return a collection. """ 
-pytest_make_collect_report.firstresult = True
-
-# XXX rename to item_collected()?  meaning in distribution context? 
-def pytest_itemstart(item, node=None):
-    """ test item gets collected. """
-
-# -------------------------------------------------------------------------
-# Python test function related hooks
-# -------------------------------------------------------------------------
-
-def pytest_pycollect_makemodule(path, parent):
-    """ return a Module collector or None for the given path. 
-    This hook will be called for each matching test module path. 
-    The pytest_collect_file hook needs to be used if you want to 
-    create test modules for files that do not match as a test module.
-    """
-pytest_pycollect_makemodule.firstresult = True
-
-def pytest_pycollect_makeitem(collector, name, obj):
-    """ return custom item/collector for a python object in a module, or None.  """
-pytest_pycollect_makeitem.firstresult = True
-
-def pytest_pyfunc_call(pyfuncitem):
-    """ call underlying test function. """
-pytest_pyfunc_call.firstresult = True
-
-def pytest_generate_tests(metafunc):
-    """ generate (multiple) parametrized calls to a test function."""
-
-# -------------------------------------------------------------------------
-# generic runtest related hooks 
-# -------------------------------------------------------------------------
-
-def pytest_runtest_protocol(item):
-    """ implement fixture, run and report about the given test item. """
-pytest_runtest_protocol.firstresult = True
-
-def pytest_runtest_setup(item):
-    """ called before pytest_runtest_call(). """ 
-
-def pytest_runtest_call(item):
-    """ execute test item. """ 
-
-def pytest_runtest_teardown(item):
-    """ called after pytest_runtest_call(). """ 
-
-def pytest_runtest_makereport(item, call):
-    """ make a test report for the given item and call outcome. """
-pytest_runtest_makereport.firstresult = True
-
-def pytest_runtest_logreport(report):
-    """ process item test report. """ 
-
-# special handling for final teardown - somewhat internal for now
-def pytest__teardown_final(session):
-    """ called before test session finishes. """
-pytest__teardown_final.firstresult = True
-
-def pytest__teardown_final_logerror(report):
-    """ called if runtest_teardown_final failed. """ 
-
-# -------------------------------------------------------------------------
-# test session related hooks 
-# -------------------------------------------------------------------------
-
-def pytest_sessionstart(session):
-    """ before session.main() is called. """
-
-def pytest_sessionfinish(session, exitstatus):
-    """ whole test run finishes. """
-
-# -------------------------------------------------------------------------
-# hooks for influencing reporting (invoked from pytest_terminal)
-# -------------------------------------------------------------------------
-
-def pytest_report_header(config):
-    """ return a string to be displayed as header info for terminal reporting."""
-
-def pytest_report_teststatus(report):
-    """ return result-category, shortletter and verbose word for reporting."""
-pytest_report_teststatus.firstresult = True
-
-def pytest_terminal_summary(terminalreporter):
-    """ add additional section in terminal summary reporting. """
-
-def pytest_report_iteminfo(item):
-    """ return (fspath, lineno, name) for the item.
-        the information is used for result display and to sort tests
-    """
-pytest_report_iteminfo.firstresult = True
-
-# -------------------------------------------------------------------------
-# doctest hooks 
-# -------------------------------------------------------------------------
-
-def pytest_doctest_prepare_content(content):
-    """ return processed content for a given doctest"""
-pytest_doctest_prepare_content.firstresult = True
-
-
-# -------------------------------------------------------------------------
-# error handling and internal debugging hooks 
-# -------------------------------------------------------------------------
-
-def pytest_plugin_registered(plugin, manager):
-    """ a new py lib plugin got registered. """
-
-def pytest_plugin_unregistered(plugin):
-    """ a py lib plugin got unregistered. """
-
-def pytest_internalerror(excrepr):
-    """ called for internal errors. """
-
-def pytest_keyboard_interrupt(excinfo):
-    """ called for keyboard interrupt. """
-
-def pytest_trace(category, msg):
-    """ called for debug info. """ 

diff --git a/py/_plugin/pytest_genscript.py b/py/_plugin/pytest_genscript.py
deleted file mode 100755
--- a/py/_plugin/pytest_genscript.py
+++ /dev/null
@@ -1,69 +0,0 @@
-#! /usr/bin/env python
-"""
-generate standalone test script to be distributed along with an application. 
-"""
-
-import os
-import sys
-def pytest_addoption(parser):
-    group = parser.getgroup("debugconfig")
-    group.addoption("--genscript", action="store", default=None, 
-        dest="genscript", metavar="path", 
-        help="create standalone py.test script at given target path.")
-
-def pytest_configure(config):
-    genscript = config.getvalue("genscript")
-    if genscript:
-        import py
-        mydir = py.path.local(__file__).dirpath()
-        infile = mydir.join("standalonetemplate.py")
-        pybasedir = py.path.local(py.__file__).dirpath().dirpath()
-        genscript = py.path.local(genscript)
-        main(pybasedir, outfile=genscript, infile=infile)
-        raise SystemExit(0)
-
-def main(pybasedir, outfile, infile):
-    import base64
-    import zlib
-    try:
-        import pickle
-    except Importerror:
-        import cPickle as pickle
-
-    outfile = str(outfile)
-    infile = str(infile)
-    assert os.path.isabs(outfile)
-    os.chdir(str(pybasedir))
-    files = []
-    for dirpath, dirnames, filenames in os.walk("py"):
-        for f in filenames:
-            if not f.endswith(".py"):
-                continue
-                
-            fn = os.path.join(dirpath, f)
-            files.append(fn)
-
-    name2src = {}
-    for f in files:
-        k = f.replace(os.sep, ".")[:-3]
-        name2src[k] = open(f, "r").read()
-
-    data = pickle.dumps(name2src, 2)
-    data = zlib.compress(data, 9)
-    data = base64.encodestring(data)
-    data = data.decode("ascii")
-
-    exe = open(infile, "r").read()
-    exe = exe.replace("@SOURCES@", data)
-
-    open(outfile, "w").write(exe)
-    os.chmod(outfile, 493)  # 0755
-    sys.stdout.write("generated standalone py.test at %r, have fun!\n" % outfile)
-
-if __name__=="__main__":
-    dn = os.path.dirname
-    here = os.path.abspath(dn(__file__)) # py/plugin/
-    pybasedir = dn(dn(here))
-    outfile = os.path.join(os.getcwd(), "py.test-standalone")
-    infile = os.path.join(here, 'standalonetemplate.py')
-    main(pybasedir, outfile, infile)

diff --git a/py/bin/py.convert_unittest b/py/bin/py.convert_unittest
deleted file mode 100755
--- a/py/bin/py.convert_unittest
+++ /dev/null
@@ -1,3 +0,0 @@
-#!/usr/bin/env python
-from _findpy import py
-py.cmdline.pyconvert_unittest()
\ No newline at end of file

diff --git a/pypy/doc/config/objspace.usemodules.signal.txt b/pypy/doc/config/objspace.usemodules.signal.txt
deleted file mode 100644
--- a/pypy/doc/config/objspace.usemodules.signal.txt
+++ /dev/null
@@ -1,2 +0,0 @@
-Use the 'signal' module. 
-This module is expected to be fully working.

diff --git a/py/_test/__init__.py b/py/_test/__init__.py
deleted file mode 100644
--- a/py/_test/__init__.py
+++ /dev/null
@@ -1,1 +0,0 @@
-""" assertion and py.test helper API."""

diff --git a/pypy/doc/config/objspace.usemodules._io.txt b/pypy/doc/config/objspace.usemodules._io.txt
deleted file mode 100644
--- a/pypy/doc/config/objspace.usemodules._io.txt
+++ /dev/null
@@ -1,2 +0,0 @@
-Use the '_io module.
-Used by the 'io' standard lib module. This module is expected to be working and is included by default.

diff --git a/py/_code/oldmagic2.py b/py/_code/oldmagic2.py
deleted file mode 100644
--- a/py/_code/oldmagic2.py
+++ /dev/null
@@ -1,6 +0,0 @@
-
-import py
-
-py.log._apiwarn("1.1", "py.magic.AssertionError is deprecated, use py.code._AssertionError", stacklevel=2)
-
-from py.code import _AssertionError as AssertionError

diff --git a/py/_plugin/pytest_default.py b/py/_plugin/pytest_default.py
deleted file mode 100644
--- a/py/_plugin/pytest_default.py
+++ /dev/null
@@ -1,131 +0,0 @@
-""" default hooks and general py.test options. """ 
-
-import sys
-import py
-
-def pytest_pyfunc_call(__multicall__, pyfuncitem):
-    if not __multicall__.execute():
-        testfunction = pyfuncitem.obj 
-        if pyfuncitem._isyieldedfunction():
-            testfunction(*pyfuncitem._args)
-        else:
-            funcargs = pyfuncitem.funcargs
-            testfunction(**funcargs)
-
-def pytest_collect_file(path, parent):
-    ext = path.ext 
-    pb = path.purebasename
-    if pb.startswith("test_") or pb.endswith("_test") or \
-       path in parent.config._argfspaths:
-        if ext == ".py":
-            return parent.ihook.pytest_pycollect_makemodule(
-                path=path, parent=parent)
-
-def pytest_pycollect_makemodule(path, parent):
-    return parent.Module(path, parent)
-
-def pytest_funcarg__pytestconfig(request):
-    """ the pytest config object with access to command line opts."""
-    return request.config
-
-def pytest_ignore_collect(path, config):
-    ignore_paths = config.getconftest_pathlist("collect_ignore", path=path) 
-    ignore_paths = ignore_paths or []
-    excludeopt = config.getvalue("ignore")
-    if excludeopt:
-        ignore_paths.extend([py.path.local(x) for x in excludeopt])
-    return path in ignore_paths
-    # XXX more refined would be: 
-    if ignore_paths:
-        for p in ignore_paths:
-            if path == p or path.relto(p):
-                return True
-
-
-def pytest_collect_directory(path, parent):
-    # XXX reconsider the following comment 
-    # not use parent.Directory here as we generally 
-    # want dir/conftest.py to be able to 
-    # define Directory(dir) already 
-    if not parent.recfilter(path): # by default special ".cvs", ... 
-        # check if cmdline specified this dir or a subdir directly
-        for arg in parent.config._argfspaths:
-            if path == arg or arg.relto(path):
-                break
-        else:
-            return 
-    Directory = parent.config._getcollectclass('Directory', path) 
-    return Directory(path, parent=parent)
-
-def pytest_report_iteminfo(item):
-    return item.reportinfo()
-
-def pytest_addoption(parser):
-    group = parser.getgroup("general", "running and selection options")
-    group._addoption('-x', '--exitfirst', action="store_true", default=False,
-               dest="exitfirst", 
-               help="exit instantly on first error or failed test."),
-    group._addoption('--maxfail', metavar="num",
-               action="store", type="int", dest="maxfail", default=0,
-               help="exit after first num failures or errors.")
-    group._addoption('-k',
-        action="store", dest="keyword", default='',
-        help="only run test items matching the given "
-             "space separated keywords.  precede a keyword with '-' to negate. "
-             "Terminate the expression with ':' to treat a match as a signal "
-             "to run all subsequent tests. ")
-
-    group = parser.getgroup("collect", "collection")
-    group.addoption('--collectonly',
-        action="store_true", dest="collectonly",
-        help="only collect tests, don't execute them."),
-    group.addoption("--ignore", action="append", metavar="path", 
-        help="ignore path during collection (multi-allowed).")
-    group.addoption('--confcutdir', dest="confcutdir", default=None, 
-        metavar="dir",
-        help="only load conftest.py's relative to specified dir.")
-
-    group = parser.getgroup("debugconfig", 
-        "test process debugging and configuration")
-    group.addoption('--basetemp', dest="basetemp", default=None, metavar="dir",
-               help="base temporary directory for this test run.")
-
-def pytest_configure(config):
-    setsession(config)
-    # compat
-    if config.getvalue("exitfirst"):
-        config.option.maxfail = 1
-
-def setsession(config):
-    val = config.getvalue
-    if val("collectonly"):
-        from py._test.session import Session
-        config.setsessionclass(Session)
-      
-# pycollect related hooks and code, should move to pytest_pycollect.py
- 
-def pytest_pycollect_makeitem(__multicall__, collector, name, obj):
-    res = __multicall__.execute()
-    if res is not None:
-        return res
-    if collector._istestclasscandidate(name, obj):
-        res = collector._deprecated_join(name)
-        if res is not None:
-            return res 
-        return collector.Class(name, parent=collector)
-    elif collector.funcnamefilter(name) and hasattr(obj, '__call__'):
-        res = collector._deprecated_join(name)
-        if res is not None:
-            return res 
-        if is_generator(obj):
-            # XXX deprecation warning 
-            return collector.Generator(name, parent=collector)
-        else:
-            return collector._genfunctions(name, obj) 
-
-def is_generator(func):
-    try:
-        return py.code.getrawcode(func).co_flags & 32 # generator function 
-    except AttributeError: # builtin functions have no bytecode
-        # assume them to not be generators
-        return False 

diff --git a/pypy/rpython/lltypesystem/opimpl.py b/pypy/rpython/lltypesystem/opimpl.py
--- a/pypy/rpython/lltypesystem/opimpl.py
+++ b/pypy/rpython/lltypesystem/opimpl.py
@@ -227,6 +227,14 @@
     assert isinstance(y, int)
     return x | y
 
+def op_int_xor(x, y):
+    # used in computing hashes
+    if isinstance(x, AddressAsInt): x = llmemory.cast_adr_to_int(x.adr)
+    if isinstance(y, AddressAsInt): y = llmemory.cast_adr_to_int(y.adr)
+    assert isinstance(x, int)
+    assert isinstance(y, int)
+    return x ^ y
+
 def op_int_mul(x, y):
     assert isinstance(x, (int, llmemory.AddressOffset))
     assert isinstance(y, (int, llmemory.AddressOffset))

diff --git a/py/_path/gateway/remotepath.py b/py/_path/gateway/remotepath.py
deleted file mode 100644
--- a/py/_path/gateway/remotepath.py
+++ /dev/null
@@ -1,47 +0,0 @@
-import py, itertools
-from py._path import common
-
-COUNTER = itertools.count()
-
-class RemotePath(common.PathBase):
-    sep = '/'
-
-    def __init__(self, channel, id, basename=None):
-        self._channel = channel
-        self._id = id
-        self._basename = basename
-        self._specs = {}
-
-    def __del__(self):
-        self._channel.send(('DEL', self._id))
-
-    def __repr__(self):
-        return 'RemotePath(%s)' % self.basename
-
-    def listdir(self, *args):
-        self._channel.send(('LIST', self._id) + args)
-        return [RemotePath(self._channel, id, basename)
-                for (id, basename) in self._channel.receive()]
-
-    def dirpath(self):
-        id = ~COUNTER.next()
-        self._channel.send(('DIRPATH', self._id, id))
-        return RemotePath(self._channel, id)
-
-    def join(self, *args):
-        id = ~COUNTER.next()
-        self._channel.send(('JOIN', self._id, id) + args)
-        return RemotePath(self._channel, id)
-
-    def _getbyspec(self, spec):
-        parts = spec.split(',')
-        ask = [x for x in parts  if x not in self._specs]
-        if ask:
-            self._channel.send(('GET', self._id, ",".join(ask)))
-            for part, value in zip(ask, self._channel.receive()):
-                self._specs[part] = value
-        return [self._specs[x] for x in parts]
-
-    def read(self):
-        self._channel.send(('READ', self._id))
-        return self._channel.receive()

diff --git a/pypy/doc/config/objspace.usemodules.parser.txt b/pypy/doc/config/objspace.usemodules.parser.txt
deleted file mode 100644
--- a/pypy/doc/config/objspace.usemodules.parser.txt
+++ /dev/null
@@ -1,4 +0,0 @@
-Use the 'parser' module. 
-This is PyPy implementation of the standard library 'parser' module (e.g. if
-this option is enabled and you say ``import parser`` you get this module).
-It is enabled by default.

diff --git a/py/bin/win32/py.test.cmd b/py/bin/win32/py.test.cmd
deleted file mode 100644
--- a/py/bin/win32/py.test.cmd
+++ /dev/null
@@ -1,2 +0,0 @@
- at echo off
-python "%~dp0\..\py.test" %*
\ No newline at end of file

diff --git a/pypy/doc/cli-backend.txt b/pypy/doc/cli-backend.txt
deleted file mode 100644
--- a/pypy/doc/cli-backend.txt
+++ /dev/null
@@ -1,455 +0,0 @@
-===============
-The CLI backend
-===============
-
-The goal of GenCLI is to compile RPython programs to the CLI virtual
-machine.
-
-
-Target environment and language
-===============================
-
-The target of GenCLI is the Common Language Infrastructure environment
-as defined by the `Standard Ecma 335`_.
-
-While in an ideal world we might suppose GenCLI to run fine with
-every implementation conforming to that standard, we know the world we
-live in is far from ideal, so extra efforts can be needed to maintain
-compatibility with more than one implementation.
-
-At the moment of writing the two most popular implementations of the
-standard are supported: Microsoft Common Language Runtime (CLR) and
-Mono.
-
-Then we have to choose how to generate the real executables. There are
-two main alternatives: generating source files in some high level
-language (such as C#) or generating assembly level code in
-Intermediate Language (IL).
-
-The IL approach is much faster during the code generation
-phase, because it doesn't need to call a compiler. By contrast the
-high level approach has two main advantages:
-
-  - the code generation part could be easier because the target
-    language supports high level control structures such as
-    structured loops;
-  
-  - the generated executables take advantage of compiler's
-    optimizations.
-
-In reality the first point is not an advantage in the PyPy context,
-because the `flow graph`_ we start from is quite low level and Python
-loops are already expressed in terms of branches (i.e., gotos).
-
-About the compiler optimizations we must remember that the flow graph
-we receive from earlier stages is already optimized: PyPy implements
-a number of optimizations such a constant propagation and
-dead code removal, so it's not obvious if the compiler could
-do more.
-
-Moreover by emitting IL instruction we are not constrained to rely on
-compiler choices but can directly choose how to map CLI opcodes: since
-the backend often know more than the compiler about the context, we
-might expect to produce more efficient code by selecting the most
-appropriate instruction; e.g., we can check for arithmetic overflow
-only when strictly necessary.
-
-The last but not least reason for choosing the low level approach is
-flexibility in how to get an executable starting from the IL code we
-generate:
-
-  - write IL code to a file, then call the ilasm assembler;
-  
-  - directly generate code on the fly by accessing the facilities
-    exposed by the System.Reflection.Emit API.
-
-
-Handling platform differences
-=============================
-
-Since our goal is to support both Microsoft CLR we have to handle the
-differences between the twos; in particular the main differences are
-in the name of the helper tools we need to call:
-
-=============== ======== ======
-Tool            CLR      Mono
-=============== ======== ======
-IL assembler    ilasm    ilasm2
-C# compiler     csc      gmcs
-Runtime         ...      mono
-=============== ======== ======
-
-The code that handles these differences is located in the sdk.py
-module: it defines an abstract class which exposes some methods
-returning the name of the helpers and one subclass for each of the two
-supported platforms.
-
-Since Microsoft ``ilasm`` is not capable of compiling the PyPy
-standard interpreter due to its size, on Windows machines we also look
-for an existing Mono installation: if present, we use CLR for
-everything except the assembling phase, for which we use Mono's
-``ilasm2``.
-
-
-Targeting the CLI Virtual Machine
-=================================
-
-In order to write a CLI backend we have to take a number of decisions.
-First, we have to choose the typesystem to use: given that CLI
-natively supports primitives like classes and instances,
-ootypesystem is the most natural choice.
-
-Once the typesystem has been chosen there is a number of steps we have
-to do for completing the backend:
-
-  - map ootypesystem's types to CLI Common Type System's
-    types;
-  
-  - map ootypesystem's low level operation to CLI instructions;
-  
-  - map Python exceptions to CLI exceptions;
-  
-  - write a code generator that translates a flow graph
-    into a list of CLI instructions;
-  
-  - write a class generator that translates ootypesystem
-    classes into CLI classes.
-
-
-Mapping primitive types
------------------------
-
-The `rtyper`_ give us a flow graph annotated with types belonging to
-ootypesystem: in order to produce CLI code we need to translate these
-types into their Common Type System equivalents.
-
-For numeric types the conversion is straightforward, since
-there is a one-to-one mapping between the two typesystems, so that
-e.g. Float maps to float64.
-
-For character types the choice is more difficult: RPython has two
-distinct types for plain ASCII and Unicode characters (named UniChar),
-while .NET only supports Unicode with the char type. There are at
-least two ways to map plain Char to CTS:
-
-  - map UniChar to char, thus maintaining the original distinction
-    between the two types: this has the advantage of being a
-    one-to-one translation, but has the disadvantage that RPython
-    strings will not be recognized as .NET strings, since they only
-    would be sequences of bytes;
-  
-  - map both char, so that Python strings will be treated as strings
-    also by .NET: in this case there could be problems with existing
-    Python modules that use strings as sequences of byte, such as the
-    built-in struct module, so we need to pay special attention.
-
-We think that mapping Python strings to .NET strings is
-fundamental, so we chose the second option.
-
-Mapping built-in types
-----------------------
-
-As we saw in section ootypesystem defines a set of types that take
-advantage of built-in types offered by the platform.
-
-For the sake of simplicity we decided to write wrappers
-around .NET classes in order to match the signatures required by
-pypylib.dll:
-
-=================== ===========================================
-ootype              CLI
-=================== ===========================================
-String              System.String
-StringBuilder       System.Text.StringBuilder
-List                System.Collections.Generic.List<T>
-Dict                System.Collections.Generic.Dictionary<K, V>
-CustomDict          pypy.runtime.Dict
-DictItemsIterator   pypy.runtime.DictItemsIterator
-=================== ===========================================
-
-Wrappers exploit inheritance for wrapping the original classes, so,
-for example, pypy.runtime.List<T> is a subclass of
-System.Collections.Generic.List<T> that provides methods whose names
-match those found in the _GENERIC_METHODS of ootype.List
-
-The only exception to this rule is the String class, which is not
-wrapped since in .NET we can not subclass System.String.  Instead, we
-provide a bunch of static methods in pypylib.dll that implement the
-methods declared by ootype.String._GENERIC_METHODS, then we call them
-by explicitly passing the string object in the argument list.
-
-
-Mapping instructions
---------------------
-
-PyPy's low level operations are expressed in Static Single Information
-(SSI) form, such as this::
-
-    v2 = int_add(v0, v1)
-
-By contrast the CLI virtual machine is stack based, which means the
-each operation pops its arguments from the top of the stacks and
-pushes its result there. The most straightforward way to translate SSI
-operations into stack based operations is to explicitly load the
-arguments and store the result into the appropriate places::
-
-    LOAD v0
-    LOAD v1
-    int_add
-    STORE v2
-
-The code produced works correctly but has some inefficiency issue that
-can be addressed during the optimization phase.
-
-The CLI Virtual Machine is fairly expressive, so the conversion
-between PyPy's low level operations and CLI instruction is relatively
-simple: many operations maps directly to the correspondent
-instruction, e.g int_add and sub.
-
-By contrast some instructions do not have a direct correspondent and
-have to be rendered as a sequence of CLI instructions: this is the
-case of the "less-equal" and "greater-equal" family of instructions,
-that are rendered as "greater" or "less" followed by a boolean "not",
-respectively.
-
-Finally, there are some instructions that cannot be rendered directly
-without increasing the complexity of the code generator, such as
-int_abs (which returns the absolute value of its argument).  These
-operations are translated by calling some helper function written in
-C#.
-
-The code that implements the mapping is in the modules opcodes.py.
-
-Mapping exceptions
-------------------
-
-Both RPython and CLI have its own set of exception classes: some of
-these are pretty similar; e.g., we have OverflowError,
-ZeroDivisionError and IndexError on the first side and
-OverflowException, DivideByZeroException and IndexOutOfRangeException
-on the other side.
-
-The first attempt was to map RPython classes to their corresponding
-CLI ones: this worked for simple cases, but it would have triggered
-subtle bugs in more complex ones, because the two exception
-hierarchies don't completely overlap.
-
-At the moment we've chosen to build an RPython exception hierarchy
-completely independent from the CLI one, but this means that we can't
-rely on exceptions raised by built-in operations.  The currently
-implemented solution is to do an exception translation on-the-fly.
-
-As an example consider the RPython int_add_ovf operation, that sums
-two integers and raises an OverflowError exception in case of
-overflow. For implementing it we can use the built-in add.ovf CLI
-instruction that raises System.OverflowException when the result
-overflows, catch that exception and throw a new one::
-
-    .try 
-    { 
-        ldarg 'x_0'
-        ldarg 'y_0'
-        add.ovf 
-        stloc 'v1'
-        leave __check_block_2 
-    } 
-    catch [mscorlib]System.OverflowException 
-    { 
-        newobj instance void class OverflowError::.ctor() 
-        throw 
-    } 
-
-
-Translating flow graphs
------------------------
-
-As we saw previously in PyPy function and method bodies are
-represented by flow graphs that we need to translate CLI IL code. Flow
-graphs are expressed in a format that is very suitable for being
-translated to low level code, so that phase is quite straightforward,
-though the code is a bit involved because we need to take care of three
-different types of blocks.
-
-The code doing this work is located in the Function.render
-method in the file function.py.
-
-First of all it searches for variable names and types used by
-each block; once they are collected it emits a .local IL
-statement used for indicating the virtual machine the number and type
-of local variables used.
-
-Then it sequentially renders all blocks in the graph, starting from the
-start block; special care is taken for the return block which is
-always rendered at last to meet CLI requirements.
-
-Each block starts with an unique label that is used for jumping
-across, followed by the low level instructions the block is composed
-of; finally there is some code that jumps to the appropriate next
-block.
-
-Conditional and unconditional jumps are rendered with their
-corresponding IL instructions: brtrue, brfalse.
-
-Blocks that needs to catch exceptions use the native facilities
-offered by the CLI virtual machine: the entire block is surrounded by
-a .try statement followed by as many catch as needed: each catching
-sub-block then branches to the appropriate block::
-
-
-  # RPython
-  try:
-      # block0
-      ...
-  except ValueError:
-      # block1
-      ...
-  except TypeError:
-      # block2
-      ...
-
-  // IL
-  block0: 
-    .try {
-        ...
-        leave block3
-     }
-     catch ValueError {
-        ...
-        leave block1
-      }
-      catch TypeError {
-        ...
-        leave block2
-      }
-  block1:
-      ...
-      br block3
-  block2:
-      ...
-      br block3
-  block3:
-      ...
-
-There is also an experimental feature that makes GenCLI to use its own
-exception handling mechanism instead of relying on the .NET
-one. Surprisingly enough, benchmarks are about 40% faster with our own
-exception handling machinery.
-
-
-Translating classes
--------------------
-
-As we saw previously, the semantic of ootypesystem classes
-is very similar to the .NET one, so the translation is mostly
-straightforward.
-
-The related code is located in the module class\_.py.  Rendered classes
-are composed of four parts:
-
-  - fields;
-  - user defined methods;
-  - default constructor;
-  - the ToString method, mainly for testing purposes
-
-Since ootype implicitly assumes all method calls to be late bound, as
-an optimization before rendering the classes we search for methods
-that are not overridden in subclasses, and declare as "virtual" only
-the one that needs to.
-
-The constructor does nothing more than calling the base class
-constructor and initializing class fields to their default value.
-
-Inheritance is straightforward too, as it is natively supported by
-CLI. The only noticeable thing is that we map ootypesystem's ROOT
-class to the CLI equivalent System.Object.
-
-The Runtime Environment
------------------------
-
-The runtime environment is a collection of helper classes and
-functions used and referenced by many of the GenCLI submodules. It is
-written in C#, compiled to a DLL (Dynamic Link Library), then linked
-to generated code at compile-time.
-
-The DLL is called pypylib and is composed of three parts:
-
-  - a set of helper functions used to implements complex RPython
-    low-level instructions such as runtimenew and ooparse_int;
-
-  - a set of helper classes wrapping built-in types
-
-  - a set of helpers used by the test framework
-
-
-The first two parts are contained in the pypy.runtime namespace, while
-the third is in the pypy.test one.
-
-
-Testing GenCLI
-==============
-
-As the rest of PyPy, GenCLI is a test-driven project: there is at
-least one unit test for almost each single feature of the
-backend. This development methodology allowed us to early discover
-many subtle bugs and to do some big refactoring of the code with the
-confidence not to break anything.
-
-The core of the testing framework is in the module
-pypy.translator.cli.test.runtest; one of the most important function
-of this module is compile_function(): it takes a Python function,
-compiles it to CLI and returns a Python object that runs the just
-created executable when called.
-
-This way we can test GenCLI generated code just as if it were a simple
-Python function; we can also directly run the generated executable,
-whose default name is main.exe, from a shell: the function parameters
-are passed as command line arguments, and the return value is printed
-on the standard output::
-
-    # Python source: foo.py
-    from pypy.translator.cli.test.runtest import compile_function
-
-    def foo(x, y):
-        return x+y, x*y
-
-    f = compile_function(foo, [int, int])
-    assert f(3, 4) == (7, 12)
-
-
-    # shell
-    $ mono main.exe 3 4
-    (7, 12)
-
-GenCLI supports only few RPython types as parameters: int, r_uint,
-r_longlong, r_ulonglong, bool, float and one-length strings (i.e.,
-chars). By contrast, most types are fine for being returned: these
-include all primitive types, list, tuples and instances.
-
-Installing Python for .NET on Linux
-===================================
-
-With the CLI backend, you can access .NET libraries from RPython;
-programs using .NET libraries will always run when translated, but you
-might also want to test them on top of CPython.
-
-To do so, you can install `Python for .NET`_. Unfortunately, it does
-not work out of the box under Linux.
-
-To make it working, download and unpack the source package of Python
-for .NET; the only version tested with PyPy is the 1.0-rc2, but it
-might work also with others. Then, you need to create a file named
-Python.Runtime.dll.config at the root of the unpacked archive; put the
-following lines inside the file (assuming you are using Python 2.4)::
-
-  <configuration>
-    <dllmap dll="python24" target="libpython2.4.so.1.0" os="!windows"/>
-  </configuration>
-
-The installation should be complete now. To run Python for .NET,
-simply type ``mono python.exe``.
-
-
-.. _`Standard Ecma 335`: http://www.ecma-international.org/publications/standards/Ecma-335.htm
-.. _`flow graph`: translation.html#the-flow-model
-.. _`rtyper`: rtyper.html
-.. _`Python for .NET`: http://pythonnet.sourceforge.net/

diff --git a/pypy/doc/config/translation.backendopt.none.txt b/pypy/doc/config/translation.backendopt.none.txt
deleted file mode 100644
--- a/pypy/doc/config/translation.backendopt.none.txt
+++ /dev/null
@@ -1,1 +0,0 @@
-Do not run any backend optimizations.

diff --git a/pypy/doc/config/objspace.usemodules.bz2.txt b/pypy/doc/config/objspace.usemodules.bz2.txt
deleted file mode 100644
--- a/pypy/doc/config/objspace.usemodules.bz2.txt
+++ /dev/null
@@ -1,2 +0,0 @@
-Use the 'bz2' module. 
-This module is expected to be working and is included by default.

diff --git a/py/_plugin/pytest__pytest.py b/py/_plugin/pytest__pytest.py
deleted file mode 100644
--- a/py/_plugin/pytest__pytest.py
+++ /dev/null
@@ -1,101 +0,0 @@
-import py
-
-from py._test.pluginmanager import HookRelay
-
-def pytest_funcarg___pytest(request):
-    return PytestArg(request)
-
-class PytestArg:
-    def __init__(self, request):
-        self.request = request 
-
-    def gethookrecorder(self, hook):
-        hookrecorder = HookRecorder(hook._registry)
-        hookrecorder.start_recording(hook._hookspecs)
-        self.request.addfinalizer(hookrecorder.finish_recording)
-        return hookrecorder 
-
-class ParsedCall:
-    def __init__(self, name, locals):
-        assert '_name' not in locals 
-        self.__dict__.update(locals)
-        self.__dict__.pop('self')
-        self._name = name 
-
-    def __repr__(self):
-        d = self.__dict__.copy()
-        del d['_name']
-        return "<ParsedCall %r(**%r)>" %(self._name, d)
-
-class HookRecorder:
-    def __init__(self, registry):
-        self._registry = registry
-        self.calls = []
-        self._recorders = {}
-        
-    def start_recording(self, hookspecs):
-        if not isinstance(hookspecs, (list, tuple)):
-            hookspecs = [hookspecs]
-        for hookspec in hookspecs:
-            assert hookspec not in self._recorders 
-            class RecordCalls: 
-                _recorder = self 
-            for name, method in vars(hookspec).items():
-                if name[0] != "_":
-                    setattr(RecordCalls, name, self._makecallparser(method))
-            recorder = RecordCalls()
-            self._recorders[hookspec] = recorder
-            self._registry.register(recorder)
-        self.hook = HookRelay(hookspecs, registry=self._registry, 
-            prefix="pytest_")
-
-    def finish_recording(self):
-        for recorder in self._recorders.values():
-            self._registry.unregister(recorder)
-        self._recorders.clear()
-
-    def _makecallparser(self, method):
-        name = method.__name__
-        args, varargs, varkw, default = py.std.inspect.getargspec(method)
-        if not args or args[0] != "self":
-            args.insert(0, 'self') 
-        fspec = py.std.inspect.formatargspec(args, varargs, varkw, default)
-        # we use exec because we want to have early type
-        # errors on wrong input arguments, using
-        # *args/**kwargs delays this and gives errors
-        # elsewhere
-        exec (py.code.compile("""
-            def %(name)s%(fspec)s: 
-                        self._recorder.calls.append(
-                            ParsedCall(%(name)r, locals()))
-        """ % locals()))
-        return locals()[name]
-
-    def getcalls(self, names):
-        if isinstance(names, str):
-            names = names.split()
-        for name in names:
-            for cls in self._recorders:
-                if name in vars(cls):
-                    break
-            else:
-                raise ValueError("callname %r not found in %r" %(
-                name, self._recorders.keys()))
-        l = []
-        for call in self.calls:
-            if call._name in names:
-                l.append(call)
-        return l
-
-    def popcall(self, name):
-        for i, call in enumerate(self.calls):
-            if call._name == name:
-                del self.calls[i]
-                return call 
-        raise ValueError("could not find call %r" %(name, ))
-
-    def getcall(self, name):
-        l = self.getcalls(name)
-        assert len(l) == 1, (name, l)
-        return l[0]
-

diff --git a/pypy/doc/config/objspace.usemodules.txt b/pypy/doc/config/objspace.usemodules.txt
deleted file mode 100644
--- a/pypy/doc/config/objspace.usemodules.txt
+++ /dev/null
@@ -1,1 +0,0 @@
-..  intentionally empty

diff --git a/pypy/doc/config/objspace.usemodules.clr.txt b/pypy/doc/config/objspace.usemodules.clr.txt
deleted file mode 100644
--- a/pypy/doc/config/objspace.usemodules.clr.txt
+++ /dev/null
@@ -1,1 +0,0 @@
-Use the 'clr' module. 

diff --git a/pypy/doc/config/objspace.usemodules._warnings.txt b/pypy/doc/config/objspace.usemodules._warnings.txt
deleted file mode 100644
--- a/pypy/doc/config/objspace.usemodules._warnings.txt
+++ /dev/null
@@ -1,1 +0,0 @@
-Use the '_warning' module. This module is expected to be working and is included by default.

diff --git a/py/bin/win32/py.countloc.cmd b/py/bin/win32/py.countloc.cmd
deleted file mode 100644
--- a/py/bin/win32/py.countloc.cmd
+++ /dev/null
@@ -1,2 +0,0 @@
- at echo off
-python "%~dp0\..\py.countloc" %*
\ No newline at end of file

diff --git a/pypy/doc/config/objspace.usemodules._demo.txt b/pypy/doc/config/objspace.usemodules._demo.txt
deleted file mode 100644
--- a/pypy/doc/config/objspace.usemodules._demo.txt
+++ /dev/null
@@ -1,3 +0,0 @@
-Use the '_demo' module. 
-
-This is the demo module for mixed modules. Not enabled by default.

diff --git a/pypy/doc/config/objspace.std.withcelldict.txt b/pypy/doc/config/objspace.std.withcelldict.txt
deleted file mode 100644
--- a/pypy/doc/config/objspace.std.withcelldict.txt
+++ /dev/null
@@ -1,2 +0,0 @@
-Enable cell-dicts. This optimization is not helpful without the JIT. In the
-presence of the JIT, it greatly helps looking up globals.

diff --git a/pypy/doc/config/translation.backendopt.clever_malloc_removal_heuristic.txt b/pypy/doc/config/translation.backendopt.clever_malloc_removal_heuristic.txt
deleted file mode 100644
--- a/pypy/doc/config/translation.backendopt.clever_malloc_removal_heuristic.txt
+++ /dev/null
@@ -1,4 +0,0 @@
-Internal option. Switch to a different weight heuristic for inlining.
-This is for clever malloc removal (:config:`translation.backendopt.clever_malloc_removal`).
-
-.. internal

diff --git a/pypy/doc/config/objspace.usemodules._pickle_support.txt b/pypy/doc/config/objspace.usemodules._pickle_support.txt
deleted file mode 100644
--- a/pypy/doc/config/objspace.usemodules._pickle_support.txt
+++ /dev/null
@@ -1,6 +0,0 @@
-Use the '_pickle_support' module. 
-Internal helpers for pickling runtime builtin types (frames, cells, etc)
-for `stackless`_ tasklet pickling support.
-.. _`stackless`: ../stackless.html
-
-.. internal

diff --git a/pypy/doc/clr-module.txt b/pypy/doc/clr-module.txt
deleted file mode 100644
--- a/pypy/doc/clr-module.txt
+++ /dev/null
@@ -1,143 +0,0 @@
-===============================
-The ``clr`` module for PyPy.NET
-===============================
-
-PyPy.NET give you access to the surrounding .NET environment via the
-``clr`` module. This module is still experimental: some features are
-still missing and its interface might change in next versions, but
-it's still useful to experiment a bit with PyPy.NET.
-
-PyPy.NET provides an import hook that lets you to import .NET namespaces
-seamlessly as they were normal Python modules.  Then, 
-
-PyPY.NET native classes try to behave as much as possible in the
-"expected" way both for the developers used to .NET and for the ones
-used to Python.
-
-In particular, the following features are mapped one to one because
-they exist in both worlds:
-
-  - .NET constructors are mapped to the Python __init__ method;
-
-  - .NET instance methods are mapped to Python methods;
-
-  - .NET static methods are mapped to Python static methods (belonging
-    to the class);
-
-  - .NET properties are mapped to property-like Python objects (very
-    similar to the Python ``property`` built-in);
-
-  - .NET indexers are mapped to Python __getitem__ and __setitem__;
-
-  - .NET enumerators are mapped to Python iterators.
-
-Moreover, all the usual Python features such as bound and unbound
-methods are available as well.
-
-Example of usage
-================
-
-Here is an example of interactive session using the ``clr`` module::
-
-    >>>> from System.Collections import ArrayList
-    >>>> obj = ArrayList()
-    >>>> obj.Add(1)
-    0
-    >>>> obj.Add(2)
-    1
-    >>>> obj.Add("foo")
-    2
-    >>>> print obj[0], obj[1], obj[2]
-    1 2 foo
-    >>>> print obj.Count
-    3
-
-Conversion of parameters
-========================
-
-When calling a .NET method Python objects are converted to .NET
-objects.  Lots of effort have been taken to make the conversion as
-much transparent as possible; in particular, all the primitive types
-such as int, float and string are converted to the corresponding .NET
-types (e.g., ``System.Int32``, ``System.Float64`` and
-``System.String``).
-
-Python objects without a corresponding .NET types (e.g., instances of
-user classes) are passed as "black boxes", for example to be stored in
-some sort of collection.
-
-The opposite .NET to Python conversions happens for the values returned
-by the methods. Again, primitive types are converted in a
-straightforward way; non-primitive types are wrapped in a Python object, 
-so that they can be treated as usual.
-
-Overload resolution
-===================
-
-When calling an overloaded method, PyPy.NET tries to find the best
-overload for the given arguments; for example, consider the
-``System.Math.Abs`` method::
-
-
-    >>>> from System import Math
-    >>>> Math.Abs(-42)
-    42
-    >>>> Math.Abs(-42.0)
-    42.0
-
-``System.Math.Abs`` has got overloadings both for integers and floats:
-in the first case we call the method ``System.Math.Abs(int32)``, while
-in the second one we call the method ``System.Math.Abs(float64)``.
-
-If the system can't find a best overload for the given parameters, a
-TypeError exception is raised.
-
-
-Generic classes
-================
-
-Generic classes are fully supported.  To instantiate a generic class, you need
-to use the ``[]`` notation::
-
-    >>>> from System.Collections.Generic import List
-    >>>> mylist = List[int]()
-    >>>> mylist.Add(42)
-    >>>> mylist.Add(43)
-    >>>> mylist.Add("foo")
-    Traceback (most recent call last):
-      File "<console>", line 1, in <interactive>
-    TypeError: No overloads for Add could match
-    >>>> mylist[0]
-    42
-    >>>> for item in mylist: print item
-    42
-    43
-
-
-External assemblies and Windows Forms
-=====================================
-
-By default, you can only import .NET namespaces that belongs to already loaded
-assemblies.  To load additional .NET assemblies, you can use
-``clr.AddReferenceByPartialName``.  The following example loads
-``System.Windows.Forms`` and ``System.Drawing`` to display a simple Windows
-Form displaying the usual "Hello World" message::
-
-    >>>> import clr
-    >>>> clr.AddReferenceByPartialName("System.Windows.Forms")
-    >>>> clr.AddReferenceByPartialName("System.Drawing")
-    >>>> from System.Windows.Forms import Application, Form, Label
-    >>>> from System.Drawing import Point
-    >>>>
-    >>>> frm = Form()
-    >>>> frm.Text = "The first pypy-cli Windows Forms app ever"
-    >>>> lbl = Label()
-    >>>> lbl.Text = "Hello World!"
-    >>>> lbl.AutoSize = True
-    >>>> lbl.Location = Point(100, 100)
-    >>>> frm.Controls.Add(lbl)
-    >>>> Application.Run(frm)
-
-Unfortunately at the moment you can't do much more than this with Windows
-Forms, because we still miss support for delegates and so it's not possible
-to handle events.

diff --git a/pypy/doc/config/objspace.allworkingmodules.txt b/pypy/doc/config/objspace.allworkingmodules.txt
deleted file mode 100644
--- a/pypy/doc/config/objspace.allworkingmodules.txt
+++ /dev/null
@@ -1,6 +0,0 @@
-This option enables the usage of all modules that are known to be working well
-and that translate without problems.
-
-Note that this option defaults to True (except when running
-``py.py`` because it takes a long time to start).  To force it
-to False, use ``--no-allworkingmodules``.

diff --git a/pypy/translator/c/funcgen.py b/pypy/translator/c/funcgen.py
--- a/pypy/translator/c/funcgen.py
+++ b/pypy/translator/c/funcgen.py
@@ -299,7 +299,6 @@
 
     def gen_link(self, link):
         "Generate the code to jump across the given Link."
-        is_alive = {}
         assignments = []
         for a1, a2 in zip(link.args, link.target.inputargs):
             a2type, a2typename = self.illtypes[a2]
@@ -644,9 +643,17 @@
         return '%s = %s;' % (self.expr(op.result), items)
 
     def OP_DIRECT_PTRADD(self, op):
-        return '%s = %s + %s;' % (self.expr(op.result),
-                                  self.expr(op.args[0]),
-                                  self.expr(op.args[1]))
+        ARRAY = self.lltypemap(op.args[0]).TO
+        if ARRAY._hints.get("render_as_void"):
+            return '%s = (char *)%s + %s;' % (
+                self.expr(op.result), 
+                self.expr(op.args[0]),
+                self.expr(op.args[1]))
+        else:
+            return '%s = %s + %s;' % (
+                self.expr(op.result),
+                self.expr(op.args[0]),
+                self.expr(op.args[1]))
 
     def OP_CAST_POINTER(self, op):
         TYPE = self.lltypemap(op.result)
@@ -819,7 +826,6 @@
         from pypy.rpython.lltypesystem.rstr import STR
         msg = op.args[0]
         assert msg.concretetype == Ptr(STR)
-        argv = []
         if isinstance(msg, Constant):
             msg = c_string_constant(''.join(msg.value.chars))
         else:

diff --git a/pypy/doc/config/translation.noprofopt.txt b/pypy/doc/config/translation.noprofopt.txt
deleted file mode 100644

diff --git a/py/bin/win32/py.which.cmd b/py/bin/win32/py.which.cmd
deleted file mode 100644
--- a/py/bin/win32/py.which.cmd
+++ /dev/null
@@ -1,2 +0,0 @@
- at echo off
-python "%~dp0\..\py.which" %*
\ No newline at end of file

diff --git a/pypy/doc/config/objspace.txt b/pypy/doc/config/objspace.txt
deleted file mode 100644
--- a/pypy/doc/config/objspace.txt
+++ /dev/null
@@ -1,1 +0,0 @@
-..  intentionally empty

diff --git a/pypy/doc/config/objspace.usemodules.array.txt b/pypy/doc/config/objspace.usemodules.array.txt
deleted file mode 100644
--- a/pypy/doc/config/objspace.usemodules.array.txt
+++ /dev/null
@@ -1,1 +0,0 @@
-Use interpreter-level version of array module (on by default).

diff --git a/pypy/doc/config/translation.cli.exception_transformer.txt b/pypy/doc/config/translation.cli.exception_transformer.txt
deleted file mode 100644
--- a/pypy/doc/config/translation.cli.exception_transformer.txt
+++ /dev/null
@@ -1,3 +0,0 @@
-Use the exception transformer instead of the native .NET exceptions to
-implement RPython exceptions. Enable this option only if you know what
-you are doing.

diff --git a/py/_plugin/pytest_tmpdir.py b/py/_plugin/pytest_tmpdir.py
deleted file mode 100644
--- a/py/_plugin/pytest_tmpdir.py
+++ /dev/null
@@ -1,22 +0,0 @@
-"""provide temporary directories to test functions. 
-
-usage example::
-
-    def test_plugin(tmpdir):
-        tmpdir.join("hello").write("hello")
-
-.. _`py.path.local`: ../../path.html
-
-"""
-import py
-
-def pytest_funcarg__tmpdir(request):
-    """return a temporary directory path object
-    unique to each test function invocation,
-    created as a sub directory of the base temporary
-    directory.  The returned object is a `py.path.local`_
-    path object. 
-    """
-    name = request.function.__name__ 
-    x = request.config.mktemp(name, numbered=True)
-    return x.realpath()

diff --git a/pypy/doc/config/translation.builtins_can_raise_exceptions.txt b/pypy/doc/config/translation.builtins_can_raise_exceptions.txt
deleted file mode 100644
--- a/pypy/doc/config/translation.builtins_can_raise_exceptions.txt
+++ /dev/null
@@ -1,3 +0,0 @@
-Internal option.
-
-.. internal

diff --git a/py/_test/session.py b/py/_test/session.py
deleted file mode 100644
--- a/py/_test/session.py
+++ /dev/null
@@ -1,135 +0,0 @@
-""" basic test session implementation. 
-
-* drives collection of tests 
-* triggers executions of tests   
-* produces events used by reporting 
-"""
-
-import py
-
-# exitcodes for the command line
-EXIT_OK = 0
-EXIT_TESTSFAILED = 1
-EXIT_INTERRUPTED = 2
-EXIT_INTERNALERROR = 3
-EXIT_NOHOSTS = 4
-
-# imports used for genitems()
-Item = py.test.collect.Item
-Collector = py.test.collect.Collector
-
-class Session(object): 
-    nodeid = ""
-    class Interrupted(KeyboardInterrupt):
-        """ signals an interrupted test run. """
-        __module__ = 'builtins' # for py3
-        
-    def __init__(self, config):
-        self.config = config
-        self.pluginmanager = config.pluginmanager # shortcut 
-        self.pluginmanager.register(self)
-        self._testsfailed = 0
-        self._nomatch = False
-        self.shouldstop = False
-
-    def genitems(self, colitems, keywordexpr=None):
-        """ yield Items from iterating over the given colitems. """
-        if colitems:
-            colitems = list(colitems)
-        while colitems: 
-            next = colitems.pop(0)
-            if isinstance(next, (tuple, list)):
-                colitems[:] = list(next) + colitems 
-                continue
-            assert self.pluginmanager is next.config.pluginmanager
-            if isinstance(next, Item):
-                remaining = self.filteritems([next])
-                if remaining:
-                    self.config.hook.pytest_itemstart(item=next)
-                    yield next 
-            else:
-                assert isinstance(next, Collector)
-                self.config.hook.pytest_collectstart(collector=next)
-                rep = self.config.hook.pytest_make_collect_report(collector=next)
-                if rep.passed:
-                    for x in self.genitems(rep.result, keywordexpr):
-                        yield x 
-                self.config.hook.pytest_collectreport(report=rep)
-            if self.shouldstop:
-                raise self.Interrupted(self.shouldstop)
-
-    def filteritems(self, colitems):
-        """ return items to process (some may be deselected)"""
-        keywordexpr = self.config.option.keyword 
-        if not keywordexpr or self._nomatch:
-            return colitems
-        if keywordexpr[-1] == ":": 
-            keywordexpr = keywordexpr[:-1]
-        remaining = []
-        deselected = []
-        for colitem in colitems:
-            if isinstance(colitem, Item):
-                if colitem._skipbykeyword(keywordexpr):
-                    deselected.append(colitem)
-                    continue
-            remaining.append(colitem)
-        if deselected: 
-            self.config.hook.pytest_deselected(items=deselected)
-            if self.config.option.keyword.endswith(":"):
-                self._nomatch = True
-        return remaining 
-
-    def collect(self, colitems): 
-        keyword = self.config.option.keyword
-        for x in self.genitems(colitems, keyword):
-            yield x
-
-    def sessionstarts(self):
-        """ setup any neccessary resources ahead of the test run. """
-        self.config.hook.pytest_sessionstart(session=self)
-        
-    def pytest_runtest_logreport(self, report):
-        if report.failed:
-            self._testsfailed += 1
-            maxfail = self.config.getvalue("maxfail")
-            if maxfail and self._testsfailed >= maxfail:
-                self.shouldstop = "stopping after %d failures" % (
-                    self._testsfailed)
-    pytest_collectreport = pytest_runtest_logreport
-
-    def sessionfinishes(self, exitstatus):
-        """ teardown any resources after a test run. """ 
-        self.config.hook.pytest_sessionfinish(
-            session=self, 
-            exitstatus=exitstatus, 
-        )
-
-    def main(self, colitems):
-        """ main loop for running tests. """
-        self.shouldstop = False 
-        self.sessionstarts()
-        exitstatus = EXIT_OK
-        try:
-            self._mainloop(colitems)
-            if self._testsfailed:
-                exitstatus = EXIT_TESTSFAILED
-            self.sessionfinishes(exitstatus=exitstatus)
-        except KeyboardInterrupt:
-            excinfo = py.code.ExceptionInfo()
-            self.config.hook.pytest_keyboard_interrupt(excinfo=excinfo)
-            exitstatus = EXIT_INTERRUPTED
-        except:
-            excinfo = py.code.ExceptionInfo()
-            self.config.pluginmanager.notify_exception(excinfo)
-            exitstatus = EXIT_INTERNALERROR
-        if exitstatus in (EXIT_INTERNALERROR, EXIT_INTERRUPTED):
-            self.sessionfinishes(exitstatus=exitstatus)
-        return exitstatus
-
-    def _mainloop(self, colitems):
-        for item in self.collect(colitems): 
-            if not self.config.option.collectonly: 
-                item.config.hook.pytest_runtest_protocol(item=item)
-            if self.shouldstop:
-                raise self.Interrupted(self.shouldstop)
-

diff --git a/pypy/config/translationoption.py b/pypy/config/translationoption.py
--- a/pypy/config/translationoption.py
+++ b/pypy/config/translationoption.py
@@ -117,7 +117,6 @@
     ChoiceOption("jit_profiler", "integrate profiler support into the JIT",
                  ["off", "oprofile"],
                  default="off"),
-    BoolOption("jit_ffi", "optimize libffi calls", default=False),
 
     # misc
     BoolOption("verbose", "Print extra information", default=False),

diff --git a/pypy/doc/config/objspace.usemodules._winreg.txt b/pypy/doc/config/objspace.usemodules._winreg.txt
deleted file mode 100644
--- a/pypy/doc/config/objspace.usemodules._winreg.txt
+++ /dev/null
@@ -1,2 +0,0 @@
-Use the built-in '_winreg' module, provides access to the Windows registry.
-This module is expected to be working and is included by default on Windows.

diff --git a/pypy/doc/config/objspace.usemodules._minimal_curses.txt b/pypy/doc/config/objspace.usemodules._minimal_curses.txt
deleted file mode 100644
--- a/pypy/doc/config/objspace.usemodules._minimal_curses.txt
+++ /dev/null
@@ -1,2 +0,0 @@
-Use the '_curses' module.
-This module is just a stub.  It only implements a few functions.

diff --git a/py/bin/py.lookup b/py/bin/py.lookup
deleted file mode 100755
--- a/py/bin/py.lookup
+++ /dev/null
@@ -1,3 +0,0 @@
-#!/usr/bin/env python
-from _findpy import py
-py.cmdline.pylookup()
\ No newline at end of file

diff --git a/pypy/doc/config/translation.ootype.mangle.txt b/pypy/doc/config/translation.ootype.mangle.txt
deleted file mode 100644
--- a/pypy/doc/config/translation.ootype.mangle.txt
+++ /dev/null
@@ -1,3 +0,0 @@
-Mangle the names of user defined attributes of the classes, in order
-to ensure that every name is unique. Default is true, and it should
-not be turned off unless you know what you are doing.

diff --git a/py/_plugin/pytest_junitxml.py b/py/_plugin/pytest_junitxml.py
deleted file mode 100644
--- a/py/_plugin/pytest_junitxml.py
+++ /dev/null
@@ -1,171 +0,0 @@
-"""
-   logging of test results in JUnit-XML format, for use with Hudson 
-   and build integration servers.  Based on initial code from Ross Lawley.
-"""
-
-import py
-import time
-
-def pytest_addoption(parser):
-    group = parser.getgroup("terminal reporting")
-    group.addoption('--junitxml', action="store", dest="xmlpath", 
-           metavar="path", default=None,
-           help="create junit-xml style report file at given path.")
-
-def pytest_configure(config):
-    xmlpath = config.option.xmlpath
-    if xmlpath:
-        config._xml = LogXML(xmlpath)
-        config.pluginmanager.register(config._xml)
-
-def pytest_unconfigure(config):
-    xml = getattr(config, '_xml', None)
-    if xml:
-        del config._xml 
-        config.pluginmanager.unregister(xml)
-
-class LogXML(object):
-    def __init__(self, logfile):
-        self.logfile = logfile
-        self.test_logs = []
-        self.passed = self.skipped = 0
-        self.failed = self.errors = 0
-        self._durations = {}
-  
-    def _opentestcase(self, report):
-        node = report.item 
-        d = {'time': self._durations.pop(report.item, "0")}
-        names = [x.replace(".py", "") for x in node.listnames() if x != "()"]
-        d['classname'] = ".".join(names[:-1])
-        d['name'] = names[-1]
-        attrs = ['%s="%s"' % item for item in sorted(d.items())]
-        self.test_logs.append("\n<testcase %s>" % " ".join(attrs))
-
-    def _closetestcase(self):
-        self.test_logs.append("</testcase>")
-
-    def appendlog(self, fmt, *args):
-        args = tuple([py.xml.escape(arg) for arg in args])
-        self.test_logs.append(fmt % args)
-         
-    def append_pass(self, report):
-        self.passed += 1
-        self._opentestcase(report)
-        self._closetestcase()
-
-    def append_failure(self, report):
-        self._opentestcase(report)
-        #msg = str(report.longrepr.reprtraceback.extraline)
-        if "xfail" in report.keywords:
-            self.appendlog(
-                '<skipped message="xfail-marked test passes unexpectedly"/>')
-            self.skipped += 1
-        else:
-            self.appendlog('<failure message="test failure">%s</failure>', 
-                report.longrepr)
-            self.failed += 1
-        self._closetestcase()
-
-    def _opentestcase_collectfailure(self, report):
-        node = report.collector
-        d = {'time': '???'}
-        names = [x.replace(".py", "") for x in node.listnames() if x != "()"]
-        d['classname'] = ".".join(names[:-1])
-        d['name'] = names[-1]
-        attrs = ['%s="%s"' % item for item in sorted(d.items())]
-        self.test_logs.append("\n<testcase %s>" % " ".join(attrs))
-
-    def append_collect_failure(self, report):
-        self._opentestcase_collectfailure(report)
-        #msg = str(report.longrepr.reprtraceback.extraline)
-        self.appendlog('<failure message="collection failure">%s</failure>', 
-            report.longrepr)
-        self._closetestcase()
-        self.errors += 1
-
-    def append_collect_skipped(self, report):
-        self._opentestcase_collectfailure(report)
-        #msg = str(report.longrepr.reprtraceback.extraline)
-        self.appendlog('<skipped message="collection skipped">%s</skipped>',
-            report.longrepr)
-        self._closetestcase()
-        self.skipped += 1
-
-    def append_error(self, report):
-        self._opentestcase(report)
-        self.appendlog('<error message="test setup failure">%s</error>', 
-            report.longrepr)
-        self._closetestcase()
-        self.errors += 1
-
-    def append_skipped(self, report):
-        self._opentestcase(report)
-        if "xfail" in report.keywords:
-            self.appendlog(
-                '<skipped message="expected test failure">%s</skipped>', 
-                report.keywords['xfail'])
-        else:
-            self.appendlog("<skipped/>")
-        self._closetestcase()
-        self.skipped += 1
-
-    def pytest_runtest_logreport(self, report):
-        if report.passed:
-            self.append_pass(report)
-        elif report.failed:
-            if report.when != "call":
-                self.append_error(report)
-            else:
-                self.append_failure(report)
-        elif report.skipped:
-            self.append_skipped(report)
-        
-    def pytest_runtest_call(self, item, __multicall__):
-        start = time.time()
-        try:
-            return __multicall__.execute()
-        finally:
-            self._durations[item] = time.time() - start
-    
-    def pytest_collectreport(self, report):
-        if not report.passed:
-            if report.failed:
-                self.append_collect_failure(report)
-            else:
-                self.append_collect_skipped(report)
-
-    def pytest_internalerror(self, excrepr):
-        self.errors += 1
-        data = py.xml.escape(excrepr)
-        self.test_logs.append(
-            '\n<testcase classname="pytest" name="internal">'
-            '    <error message="internal error">'
-            '%s</error></testcase>' % data)
-
-    def pytest_sessionstart(self, session):
-        self.suite_start_time = time.time()
-
-    def pytest_sessionfinish(self, session, exitstatus, __multicall__):
-        if py.std.sys.version_info[0] < 3:
-            logfile = py.std.codecs.open(self.logfile, 'w', encoding='utf-8')
-        else:
-            logfile = open(self.logfile, 'w', encoding='utf-8')
-            
-        suite_stop_time = time.time()
-        suite_time_delta = suite_stop_time - self.suite_start_time
-        numtests = self.passed + self.failed
-        logfile.write('<?xml version="1.0" encoding="utf-8"?>')
-        logfile.write('<testsuite ')
-        logfile.write('name="" ')
-        logfile.write('errors="%i" ' % self.errors)
-        logfile.write('failures="%i" ' % self.failed)
-        logfile.write('skips="%i" ' % self.skipped)
-        logfile.write('tests="%i" ' % numtests)
-        logfile.write('time="%.3f"' % suite_time_delta)
-        logfile.write(' >')
-        logfile.writelines(self.test_logs)
-        logfile.write('</testsuite>')
-        logfile.close()
-        tw = session.config.pluginmanager.getplugin("terminalreporter")._tw
-        tw.line()
-        tw.sep("-", "generated xml file: %s" %(self.logfile))

diff --git a/pypy/doc/config/objspace.usemodules.zipimport.txt b/pypy/doc/config/objspace.usemodules.zipimport.txt
deleted file mode 100644
--- a/pypy/doc/config/objspace.usemodules.zipimport.txt
+++ /dev/null
@@ -1,3 +0,0 @@
-This module implements zipimport mechanism described
-in PEP 302. It's supposed to work and translate, so it's included
-by default
\ No newline at end of file

diff --git a/pypy/doc/config/translation.jit_ffi.txt b/pypy/doc/config/translation.jit_ffi.txt
deleted file mode 100644
--- a/pypy/doc/config/translation.jit_ffi.txt
+++ /dev/null
@@ -1,1 +0,0 @@
-Internal option: enable OptFfiCall in the jit optimizations.

diff --git a/pypy/doc/config/objspace.usemodules.cpyext.txt b/pypy/doc/config/objspace.usemodules.cpyext.txt
deleted file mode 100644
--- a/pypy/doc/config/objspace.usemodules.cpyext.txt
+++ /dev/null
@@ -1,1 +0,0 @@
-Use (experimental) cpyext module, that tries to load and run CPython extension modules

diff --git a/pypy/objspace/std/test/test_mapdict.py b/pypy/objspace/std/test/test_mapdict.py
--- a/pypy/objspace/std/test/test_mapdict.py
+++ b/pypy/objspace/std/test/test_mapdict.py
@@ -52,6 +52,13 @@
 
     assert aa.get_terminator() is aa.back.back
 
+def test_huge_chain():
+    current = Terminator(space, "cls")
+    for i in range(20000):
+        current = PlainAttribute((str(i), DICT), current)
+    assert current.index(("0", DICT)) == 0
+
+
 def test_search():
     aa = PlainAttribute(("b", DICT), PlainAttribute(("a", DICT), Terminator(None, None)))
     assert aa.search(DICT) is aa
@@ -224,8 +231,8 @@
     obj.setdictvalue(space, "a", 51)
     obj.setdictvalue(space, "b", 61)
     obj.setdictvalue(space, "c", 71)
-    assert obj.getdict() is obj.getdict()
-    assert obj.getdict().length() == 3
+    assert obj.getdict(space) is obj.getdict(space)
+    assert obj.getdict(space).length() == 3
 
 
 def test_materialize_r_dict():
@@ -283,7 +290,7 @@
 def get_impl(self):
     cls = Class()
     w_obj = cls.instantiate(self.fakespace)
-    return w_obj.getdict()
+    return w_obj.getdict(self.fakespace)
 class TestMapDictImplementation(BaseTestRDictImplementation):
     ImplementionClass = MapDictImplementation
     get_impl = get_impl
@@ -294,8 +301,8 @@
 # ___________________________________________________________
 # tests that check the obj interface after the dict has devolved
 
-def devolve_dict(obj):
-    w_d = obj.getdict()
+def devolve_dict(space, obj):
+    w_d = obj.getdict(space)
     w_d._as_rdict()
 
 def test_get_setdictvalue_after_devolve():
@@ -311,7 +318,7 @@
     obj.setdictvalue(space, "b", 6)
     obj.setdictvalue(space, "c", 7)
     obj.setdictvalue(space, "weakref", 42)
-    devolve_dict(obj)
+    devolve_dict(space, obj)
     assert obj.getdictvalue(space, "a") == 5
     assert obj.getdictvalue(space, "b") == 6
     assert obj.getdictvalue(space, "c") == 7
@@ -349,10 +356,10 @@
     obj.setdictvalue(space, "a", 5)
     obj.setdictvalue(space, "b", 6)
     obj.setdictvalue(space, "c", 7)
-    w_d = obj.getdict()
+    w_d = obj.getdict(space)
     obj2 = cls.instantiate()
     obj2.setdictvalue(space, "d", 8)
-    obj.setdict(space, obj2.getdict())
+    obj.setdict(space, obj2.getdict(space))
     assert obj.getdictvalue(space, "a") is None
     assert obj.getdictvalue(space, "b") is None
     assert obj.getdictvalue(space, "c") is None
@@ -387,7 +394,7 @@
         obj.user_setup(space, cls)
         obj.setdictvalue(space, "a", w1)
         if objectcls._nmin1 == 0 and not compressptr:
-            assert rerased.unerase(obj._value0, W_Root) is w1
+            assert unerase_item(obj._value0) is w1
         else:
             assert obj._value0 is w1
         assert obj.getdictvalue(space, "a") is w1
@@ -395,7 +402,7 @@
         assert obj.getdictvalue(space, "c") is None
         obj.setdictvalue(space, "a", w2)
         if objectcls._nmin1 == 0 and not compressptr:
-            assert rerased.unerase(obj._value0, W_Root) is w2
+            assert unerase_item(obj._value0) is w2
         else:
             assert obj._value0 is w2
         assert obj.getdictvalue(space, "a") == w2
@@ -416,7 +423,7 @@
         res = obj.deldictvalue(space, "a")
         assert res
         if objectcls._nmin1 == 0 and not compressptr:
-            assert rerased.unerase(obj._value0, W_Root) is w4
+            assert unerase_item(obj._value0) is w4
         else:
             assert obj._value0 is w4
         assert obj.getdictvalue(space, "a") is None
@@ -885,6 +892,38 @@
         res = self.check(f, 'm')
         assert res == (0, 2, 1)
 
+    def test_dont_keep_class_alive(self):
+        import weakref
+        import gc
+        def f():
+            class C(object):
+                def m(self):
+                    pass
+            r = weakref.ref(C)
+            # Trigger cache.
+            C().m()
+            del C
+            gc.collect(); gc.collect(); gc.collect()
+            assert r() is None
+            return 42
+        f()
+
+    def test_instance_keeps_class_alive(self):
+        import weakref
+        import gc
+        def f():
+            class C(object):
+                def m(self):
+                    return 42
+            r = weakref.ref(C)
+            c = C()
+            del C
+            gc.collect(); gc.collect(); gc.collect()
+            return c.m()
+        val = f()
+        assert val == 42
+        f() 
+
 class AppTestGlobalCaching(AppTestWithMapDict):
     def setup_class(cls):
         cls.space = gettestobjspace(

diff --git a/pypy/doc/config/objspace.std.withmethodcachecounter.txt b/pypy/doc/config/objspace.std.withmethodcachecounter.txt
deleted file mode 100644
--- a/pypy/doc/config/objspace.std.withmethodcachecounter.txt
+++ /dev/null
@@ -1,1 +0,0 @@
-Testing/debug option for :config:`objspace.std.withmethodcache`.

diff --git a/pypy/doc/config/translation.list_comprehension_operations.txt b/pypy/doc/config/translation.list_comprehension_operations.txt
deleted file mode 100644
--- a/pypy/doc/config/translation.list_comprehension_operations.txt
+++ /dev/null
@@ -1,2 +0,0 @@
-Experimental optimization for list comprehensions in RPython.
-

diff --git a/py/_plugin/pytest_capture.py b/py/_plugin/pytest_capture.py
deleted file mode 100644
--- a/py/_plugin/pytest_capture.py
+++ /dev/null
@@ -1,288 +0,0 @@
-"""
-configurable per-test stdout/stderr capturing mechanisms. 
-
-This plugin captures stdout/stderr output for each test separately. 
-In case of test failures this captured output is shown grouped 
-together with the test. 
-
-The plugin also provides test function arguments that help to
-assert stdout/stderr output from within your tests, see the 
-`funcarg example`_. 
-
-
-Capturing of input/output streams during tests 
----------------------------------------------------
-
-By default ``sys.stdout`` and ``sys.stderr`` are substituted with
-temporary streams during the execution of tests and setup/teardown code.  
-During the whole testing process it will re-use the same temporary 
-streams allowing to play well with the logging module which easily
-takes ownership on these streams. 
-
-Also, 'sys.stdin' is substituted with a file-like "null" object that 
-does not return any values.  This is to immediately error out
-on tests that wait on reading something from stdin. 
-
-You can influence output capturing mechanisms from the command line::
-
-    py.test -s            # disable all capturing
-    py.test --capture=sys # replace sys.stdout/stderr with in-mem files
-    py.test --capture=fd  # point filedescriptors 1 and 2 to temp file
-
-If you set capturing values in a conftest file like this::
-
-    # conftest.py
-    option_capture = 'fd'
-
-then all tests in that directory will execute with "fd" style capturing. 
-
-sys-level capturing 
-------------------------------------------
-
-Capturing on 'sys' level means that ``sys.stdout`` and ``sys.stderr`` 
-will be replaced with in-memory files (``py.io.TextIO`` to be precise)  
-that capture writes and decode non-unicode strings to a unicode object
-(using a default, usually, UTF-8, encoding). 
-
-FD-level capturing and subprocesses
-------------------------------------------
-
-The ``fd`` based method means that writes going to system level files
-based on the standard file descriptors will be captured, for example 
-writes such as ``os.write(1, 'hello')`` will be captured properly. 
-Capturing on fd-level will include output generated from 
-any subprocesses created during a test. 
-
-.. _`funcarg example`:
-
-Example Usage of the capturing Function arguments
----------------------------------------------------
-
-You can use the `capsys funcarg`_ and `capfd funcarg`_ to 
-capture writes to stdout and stderr streams.  Using the
-funcargs frees your test from having to care about setting/resetting 
-the old streams and also interacts well with py.test's own 
-per-test capturing.  Here is an example test function:
-
-.. sourcecode:: python
-
-    def test_myoutput(capsys):
-        print ("hello")
-        sys.stderr.write("world\\n")
-        out, err = capsys.readouterr()
-        assert out == "hello\\n"
-        assert err == "world\\n"
-        print "next"
-        out, err = capsys.readouterr()
-        assert out == "next\\n" 
-
-The ``readouterr()`` call snapshots the output so far - 
-and capturing will be continued.  After the test 
-function finishes the original streams will 
-be restored.  If you want to capture on 
-the filedescriptor level you can use the ``capfd`` function
-argument which offers the same interface. 
-"""
-
-import py
-import os
-
-def pytest_addoption(parser):
-    group = parser.getgroup("general")
-    group._addoption('--capture', action="store", default=None,
-        metavar="method", type="choice", choices=['fd', 'sys', 'no'],
-        help="per-test capturing method: one of fd (default)|sys|no.")
-    group._addoption('-s', action="store_const", const="no", dest="capture", 
-        help="shortcut for --capture=no.")
-
-def addouterr(rep, outerr):
-    repr = getattr(rep, 'longrepr', None)
-    if not hasattr(repr, 'addsection'):
-        return
-    for secname, content in zip(["out", "err"], outerr):
-        if content:
-            repr.addsection("Captured std%s" % secname, content.rstrip())
-
-def pytest_configure(config):
-    config.pluginmanager.register(CaptureManager(), 'capturemanager')
-
-class NoCapture:
-    def startall(self):
-        pass
-    def resume(self):
-        pass
-    def suspend(self):
-        return "", ""
-
-class CaptureManager:
-    def __init__(self):
-        self._method2capture = {}
-
-    def _maketempfile(self):
-        f = py.std.tempfile.TemporaryFile()
-        newf = py.io.dupfile(f, encoding="UTF-8") 
-        return newf
-
-    def _makestringio(self):
-        return py.io.TextIO() 
-
-    def _getcapture(self, method):
-        if method == "fd": 
-            return py.io.StdCaptureFD(now=False,
-                out=self._maketempfile(), err=self._maketempfile()
-            )
-        elif method == "sys":
-            return py.io.StdCapture(now=False,
-                out=self._makestringio(), err=self._makestringio()
-            )
-        elif method == "no":
-            return NoCapture()
-        else:
-            raise ValueError("unknown capturing method: %r" % method)
-
-    def _getmethod(self, config, fspath):
-        if config.option.capture:
-            method = config.option.capture
-        else:
-            try: 
-                method = config._conftest.rget("option_capture", path=fspath)
-            except KeyError:
-                method = "fd"
-        if method == "fd" and not hasattr(os, 'dup'): # e.g. jython 
-            method = "sys" 
-        return method
-
-    def resumecapture_item(self, item):
-        method = self._getmethod(item.config, item.fspath)
-        if not hasattr(item, 'outerr'):
-            item.outerr = ('', '') # we accumulate outerr on the item
-        return self.resumecapture(method)
-
-    def resumecapture(self, method):
-        if hasattr(self, '_capturing'):
-            raise ValueError("cannot resume, already capturing with %r" % 
-                (self._capturing,))
-        cap = self._method2capture.get(method)
-        self._capturing = method 
-        if cap is None:
-            self._method2capture[method] = cap = self._getcapture(method)
-            cap.startall()
-        else:
-            cap.resume()
-
-    def suspendcapture(self, item=None):
-        self.deactivate_funcargs()
-        if hasattr(self, '_capturing'):
-            method = self._capturing
-            cap = self._method2capture.get(method)
-            if cap is not None:
-                outerr = cap.suspend()
-            del self._capturing
-            if item:
-                outerr = (item.outerr[0] + outerr[0], 
-                          item.outerr[1] + outerr[1])
-            return outerr 
-        return "", ""
-
-    def activate_funcargs(self, pyfuncitem):
-        if not hasattr(pyfuncitem, 'funcargs'):
-            return
-        assert not hasattr(self, '_capturing_funcargs')
-        self._capturing_funcargs = capturing_funcargs = []
-        for name, capfuncarg in pyfuncitem.funcargs.items():
-            if name in ('capsys', 'capfd'):
-                capturing_funcargs.append(capfuncarg)
-                capfuncarg._start()
-
-    def deactivate_funcargs(self):
-        capturing_funcargs = getattr(self, '_capturing_funcargs', None)
-        if capturing_funcargs is not None:
-            while capturing_funcargs:
-                capfuncarg = capturing_funcargs.pop()
-                capfuncarg._finalize()
-            del self._capturing_funcargs
-
-    def pytest_make_collect_report(self, __multicall__, collector):
-        method = self._getmethod(collector.config, collector.fspath)
-        self.resumecapture(method)
-        try:
-            rep = __multicall__.execute()
-        finally:
-            outerr = self.suspendcapture()
-        addouterr(rep, outerr)
-        return rep
-
-    def pytest_runtest_setup(self, item):
-        self.resumecapture_item(item)
-
-    def pytest_runtest_call(self, item):
-        self.resumecapture_item(item)
-        self.activate_funcargs(item)
-
-    def pytest_runtest_teardown(self, item):
-        self.resumecapture_item(item)
-
-    def pytest__teardown_final(self, __multicall__, session):
-        method = self._getmethod(session.config, None)
-        self.resumecapture(method)
-        try:
-            rep = __multicall__.execute()
-        finally:
-            outerr = self.suspendcapture()
-        if rep:
-            addouterr(rep, outerr)
-        return rep
-
-    def pytest_keyboard_interrupt(self, excinfo):
-        if hasattr(self, '_capturing'):
-            self.suspendcapture()
-
-    def pytest_runtest_makereport(self, __multicall__, item, call):
-        self.deactivate_funcargs()
-        rep = __multicall__.execute()
-        outerr = self.suspendcapture(item)
-        if not rep.passed:
-            addouterr(rep, outerr)
-        if not rep.passed or rep.when == "teardown":
-            outerr = ('', '')
-        item.outerr = outerr 
-        return rep
-
-def pytest_funcarg__capsys(request):
-    """captures writes to sys.stdout/sys.stderr and makes 
-    them available successively via a ``capsys.readouterr()`` method 
-    which returns a ``(out, err)`` tuple of captured snapshot strings. 
-    """ 
-    return CaptureFuncarg(request, py.io.StdCapture)
-
-def pytest_funcarg__capfd(request):
-    """captures writes to file descriptors 1 and 2 and makes 
-    snapshotted ``(out, err)`` string tuples available 
-    via the ``capsys.readouterr()`` method.  If the underlying
-    platform does not have ``os.dup`` (e.g. Jython) tests using
-    this funcarg will automatically skip. 
-    """ 
-    if not hasattr(os, 'dup'):
-        py.test.skip("capfd funcarg needs os.dup")
-    return CaptureFuncarg(request, py.io.StdCaptureFD)
-
-
-class CaptureFuncarg:
-    def __init__(self, request, captureclass):
-        self._cclass = captureclass
-        self.capture = self._cclass(now=False)
-        #request.addfinalizer(self._finalize)
-
-    def _start(self):
-        self.capture.startall()
-
-    def _finalize(self):
-        if hasattr(self, 'capture'):
-            self.capture.reset()
-            del self.capture 
-
-    def readouterr(self):
-        return self.capture.readouterr()
-
-    def close(self):
-        self._finalize()

diff --git a/py/_plugin/pytest_doctest.py b/py/_plugin/pytest_doctest.py
deleted file mode 100644
--- a/py/_plugin/pytest_doctest.py
+++ /dev/null
@@ -1,100 +0,0 @@
-"""
-collect and execute doctests from modules and test files. 
-
-Usage
--------------
-
-By default all files matching the ``test*.txt`` pattern will 
-be run through the python standard ``doctest`` module.  Issue::
-
-    py.test --doctest-glob='*.rst'
-
-to change the pattern.  Additionally you can trigger running of
-tests in all python modules (including regular python test modules)::
-
-    py.test --doctest-modules
-
-You can also make these changes permanent in your project by 
-putting them into a conftest.py file like this::
-
-    # content of conftest.py 
-    option_doctestmodules = True
-    option_doctestglob = "*.rst"
-"""
-
-import py
-from py._code.code import TerminalRepr, ReprFileLocation
-import doctest
-
-def pytest_addoption(parser):
-    group = parser.getgroup("collect")
-    group.addoption("--doctest-modules", 
-        action="store_true", default=False, 
-        help="run doctests in all .py modules",
-        dest="doctestmodules")
-    group.addoption("--doctest-glob",
-        action="store", default="test*.txt", metavar="pat",
-        help="doctests file matching pattern, default: test*.txt",
-        dest="doctestglob")
-
-def pytest_collect_file(path, parent):
-    config = parent.config
-    if path.ext == ".py":
-        if config.getvalue("doctestmodules"):
-            return DoctestModule(path, parent)
-    elif path.check(fnmatch=config.getvalue("doctestglob")):
-        return DoctestTextfile(path, parent)
-
-class ReprFailDoctest(TerminalRepr):
-    def __init__(self, reprlocation, lines):
-        self.reprlocation = reprlocation
-        self.lines = lines
-    def toterminal(self, tw):
-        for line in self.lines:
-            tw.line(line)
-        self.reprlocation.toterminal(tw)
-             
-class DoctestItem(py.test.collect.Item):
-    def __init__(self, path, parent):
-        name = self.__class__.__name__ + ":" + path.basename
-        super(DoctestItem, self).__init__(name=name, parent=parent)
-        self.fspath = path 
-
-    def repr_failure(self, excinfo):
-        if excinfo.errisinstance(doctest.DocTestFailure):
-            doctestfailure = excinfo.value
-            example = doctestfailure.example
-            test = doctestfailure.test
-            filename = test.filename 
-            lineno = test.lineno + example.lineno + 1
-            message = excinfo.type.__name__
-            reprlocation = ReprFileLocation(filename, lineno, message)
-            checker = doctest.OutputChecker() 
-            REPORT_UDIFF = doctest.REPORT_UDIFF
-            filelines = py.path.local(filename).readlines(cr=0)
-            i = max(test.lineno, max(0, lineno - 10)) # XXX? 
-            lines = []
-            for line in filelines[i:lineno]:
-                lines.append("%03d %s" % (i+1, line))
-                i += 1
-            lines += checker.output_difference(example, 
-                    doctestfailure.got, REPORT_UDIFF).split("\n")
-            return ReprFailDoctest(reprlocation, lines)
-        elif excinfo.errisinstance(doctest.UnexpectedException):
-            excinfo = py.code.ExceptionInfo(excinfo.value.exc_info)
-            return super(DoctestItem, self).repr_failure(excinfo)
-        else: 
-            return super(DoctestItem, self).repr_failure(excinfo)
-
-class DoctestTextfile(DoctestItem):
-    def runtest(self):
-        if not self._deprecated_testexecution():
-            failed, tot = doctest.testfile(
-                str(self.fspath), module_relative=False, 
-                raise_on_error=True, verbose=0)
-
-class DoctestModule(DoctestItem):
-    def runtest(self):
-        module = self.fspath.pyimport()
-        failed, tot = doctest.testmod(
-            module, raise_on_error=True, verbose=0)

diff --git a/pypy/doc/config/translation.instrumentctl.txt b/pypy/doc/config/translation.instrumentctl.txt
deleted file mode 100644
--- a/pypy/doc/config/translation.instrumentctl.txt
+++ /dev/null
@@ -1,3 +0,0 @@
-Internal option.
-
-.. internal

diff --git a/pypy/doc/config/translation.cc.txt b/pypy/doc/config/translation.cc.txt
deleted file mode 100644
--- a/pypy/doc/config/translation.cc.txt
+++ /dev/null
@@ -1,1 +0,0 @@
-Specify which C compiler to use.

diff --git a/pypy/doc/config/translation.backendopt.stack_optimization.txt b/pypy/doc/config/translation.backendopt.stack_optimization.txt
deleted file mode 100644
--- a/pypy/doc/config/translation.backendopt.stack_optimization.txt
+++ /dev/null
@@ -1,1 +0,0 @@
-Enable the optimized code generation for stack-based machines, if the backend supports it

diff --git a/pypy/doc/config/objspace.std.prebuiltintfrom.txt b/pypy/doc/config/objspace.std.prebuiltintfrom.txt
deleted file mode 100644
--- a/pypy/doc/config/objspace.std.prebuiltintfrom.txt
+++ /dev/null
@@ -1,1 +0,0 @@
-see :config:`objspace.std.withprebuiltint`.

diff --git a/pypy/doc/config/objspace.usemodules.operator.txt b/pypy/doc/config/objspace.usemodules.operator.txt
deleted file mode 100644
--- a/pypy/doc/config/objspace.usemodules.operator.txt
+++ /dev/null
@@ -1,2 +0,0 @@
-Use the 'operator' module. 
-This module is expected to be working and is included by default.

diff --git a/pypy/doc/config/objspace.std.txt b/pypy/doc/config/objspace.std.txt
deleted file mode 100644
--- a/pypy/doc/config/objspace.std.txt
+++ /dev/null
@@ -1,1 +0,0 @@
-..  intentionally empty

diff --git a/py/_plugin/pytest_skipping.py b/py/_plugin/pytest_skipping.py
deleted file mode 100644
--- a/py/_plugin/pytest_skipping.py
+++ /dev/null
@@ -1,347 +0,0 @@
-"""
-advanced skipping for python test functions, classes or modules.
-
-With this plugin you can mark test functions for conditional skipping 
-or as "xfail", expected-to-fail.  Skipping a test will avoid running it
-while xfail-marked tests will run and result in an inverted outcome:
-a pass becomes a failure and a fail becomes a semi-passing one. 
-
-The need for skipping a test is usually connected to a condition.  
-If a test fails under all conditions then it's probably better
-to mark your test as 'xfail'. 
-
-By passing ``-rxs`` to the terminal reporter you will see extra
-summary information on skips and xfail-run tests at the end of a test run. 
-
-.. _skipif:
-
-Skipping a single function 
--------------------------------------------
-
-Here is an example for marking a test function to be skipped
-when run on a Python3 interpreter::
-
-    @py.test.mark.skipif("sys.version_info >= (3,0)")
-    def test_function():
-        ...
-
-During test function setup the skipif condition is 
-evaluated by calling ``eval(expr, namespace)``.  The namespace
-contains the  ``sys`` and ``os`` modules and the test 
-``config`` object.  The latter allows you to skip based 
-on a test configuration value e.g. like this::
-
-    @py.test.mark.skipif("not config.getvalue('db')")
-    def test_function(...):
-        ...
-
-Create a shortcut for your conditional skip decorator 
-at module level like this::
-
-    win32only = py.test.mark.skipif("sys.platform != 'win32'")
-
-    @win32only
-    def test_function():
-        ...
-
-
-skip groups of test functions 
---------------------------------------
-
-As with all metadata function marking you can do it at
-`whole class- or module level`_.  Here is an example 
-for skipping all methods of a test class based on platform::
-
-    class TestPosixCalls:
-        pytestmark = py.test.mark.skipif("sys.platform == 'win32'")
-    
-        def test_function(self):
-            # will not be setup or run under 'win32' platform
-            #
-
-The ``pytestmark`` decorator will be applied to each test function.
-If your code targets python2.6 or above you can equivalently use 
-the skipif decorator on classes::
-
-    @py.test.mark.skipif("sys.platform == 'win32'")
-    class TestPosixCalls:
-    
-        def test_function(self):
-            # will not be setup or run under 'win32' platform
-            #
-
-It is fine in general to apply multiple "skipif" decorators
-on a single function - this means that if any of the conditions
-apply the function will be skipped. 
-
-.. _`whole class- or module level`: mark.html#scoped-marking
-
-
-mark a test function as **expected to fail**
--------------------------------------------------------
-
-You can use the ``xfail`` marker to indicate that you
-expect the test to fail:: 
-
-    @py.test.mark.xfail
-    def test_function():
-        ...
-
-This test will be run but no traceback will be reported
-when it fails. Instead terminal reporting will list it in the
-"expected to fail" or "unexpectedly passing" sections.
-
-Same as with skipif_ you can also selectively expect a failure
-depending on platform::
-
-    @py.test.mark.xfail("sys.version_info >= (3,0)")
-    def test_function():
-        ...
-
-To not run a test and still regard it as "xfailed"::
-
-    @py.test.mark.xfail(..., run=False)
-
-To specify an explicit reason to be shown with xfailure detail::
-
-    @py.test.mark.xfail(..., reason="my reason")
-
-imperative xfail from within a test or setup function
-------------------------------------------------------
-
-If you cannot declare xfail-conditions at import time
-you can also imperatively produce an XFail-outcome from 
-within test or setup code.  Example::
-
-    def test_function():
-        if not valid_config():
-            py.test.xfail("unsuppored configuration")
-
-
-skipping on a missing import dependency
---------------------------------------------------
-
-You can use the following import helper at module level 
-or within a test or test setup function::
-
-    docutils = py.test.importorskip("docutils")
-
-If ``docutils`` cannot be imported here, this will lead to a
-skip outcome of the test.  You can also skip depending on
-whether a library comes with a high enough version::
-
-    docutils = py.test.importorskip("docutils", minversion="0.3")
-
-The version will be read from the specified module's ``__version__`` attribute.
-
-imperative skip from within a test or setup function
-------------------------------------------------------
-
-If for some reason you cannot declare skip-conditions
-you can also imperatively produce a Skip-outcome from 
-within test or setup code.  Example::
-
-    def test_function():
-        if not valid_config():
-            py.test.skip("unsuppored configuration")
-
-"""
-
-import py
-
-def pytest_addoption(parser):
-    group = parser.getgroup("general")
-    group.addoption('--runxfail', 
-           action="store_true", dest="runxfail", default=False,
-           help="run tests even if they are marked xfail")
-
-class MarkEvaluator:
-    def __init__(self, item, name):
-        self.item = item
-        self.name = name
-        self.holder = getattr(item.obj, name, None)
-
-    def __bool__(self):
-        return bool(self.holder)
-    __nonzero__ = __bool__
-
-    def istrue(self):
-        if self.holder:
-            d = {'os': py.std.os, 'sys': py.std.sys, 'config': self.item.config}
-            if self.holder.args:
-                self.result = False
-                for expr in self.holder.args:
-                    self.expr = expr
-                    if isinstance(expr, str):
-                        result = cached_eval(self.item.config, expr, d)
-                    else:
-                        result = expr
-                    if result:
-                        self.result = True
-                        self.expr = expr
-                        break
-            else:
-                self.result = True
-        return getattr(self, 'result', False)
-
-    def get(self, attr, default=None):
-        return self.holder.kwargs.get(attr, default)
-
-    def getexplanation(self):
-        expl = self.get('reason', None)
-        if not expl:
-            if not hasattr(self, 'expr'):
-                return ""
-            else:
-                return "condition: " + self.expr
-        return expl
-        
-
-def pytest_runtest_setup(item):
-    if not isinstance(item, py.test.collect.Function):
-        return
-    evalskip = MarkEvaluator(item, 'skipif')
-    if evalskip.istrue():
-        py.test.skip(evalskip.getexplanation())
-    item._evalxfail = MarkEvaluator(item, 'xfail')
-    if not item.config.getvalue("runxfail"):
-        if item._evalxfail.istrue():
-            if not item._evalxfail.get('run', True):
-                py.test.skip("xfail")
-
-def pytest_runtest_makereport(__multicall__, item, call):
-    if not isinstance(item, py.test.collect.Function):
-        return
-    if not (call.excinfo and 
-        call.excinfo.errisinstance(py.test.xfail.Exception)):
-        evalxfail = getattr(item, '_evalxfail', None)
-        if not evalxfail:
-            return
-    if call.excinfo and call.excinfo.errisinstance(py.test.xfail.Exception):
-        if not item.config.getvalue("runxfail"):
-            rep = __multicall__.execute()
-            rep.keywords['xfail'] = "reason: " + call.excinfo.value.msg
-            rep.skipped = True
-            rep.failed = False
-            return rep
-    if call.when == "setup":
-        rep = __multicall__.execute()
-        if rep.skipped and evalxfail.istrue():
-            expl = evalxfail.getexplanation()
-            if not evalxfail.get("run", True):
-                expl = "[NOTRUN] " + expl
-            rep.keywords['xfail'] = expl
-        return rep
-    elif call.when == "call":
-        rep = __multicall__.execute()
-        if not item.config.getvalue("runxfail") and evalxfail.istrue():
-            if call.excinfo:
-                rep.skipped = True
-                rep.failed = rep.passed = False
-            else:
-                rep.skipped = rep.passed = False
-                rep.failed = True
-            rep.keywords['xfail'] = evalxfail.getexplanation()
-        else:
-            if 'xfail' in rep.keywords:
-                del rep.keywords['xfail']
-        return rep
-
-# called by terminalreporter progress reporting
-def pytest_report_teststatus(report):
-    if 'xfail' in report.keywords:
-        if report.skipped:
-            return "xfailed", "x", "xfail"
-        elif report.failed:
-            return "xpassed", "X", "XPASS"
-
-# called by the terminalreporter instance/plugin
-def pytest_terminal_summary(terminalreporter):
-    tr = terminalreporter
-    if not tr.reportchars:
-        #for name in "xfailed skipped failed xpassed":
-        #    if not tr.stats.get(name, 0):
-        #        tr.write_line("HINT: use '-r' option to see extra "
-        #              "summary info about tests")
-        #        break
-        return
-
-    lines = []
-    for char in tr.reportchars:
-        if char == "x":
-            show_xfailed(terminalreporter, lines)
-        elif char == "X":
-            show_xpassed(terminalreporter, lines)
-        elif char == "f":
-            show_failed(terminalreporter, lines)
-        elif char == "s":
-            show_skipped(terminalreporter, lines)
-    if lines:
-        tr._tw.sep("=", "short test summary info")
-        for line in lines:
-            tr._tw.line(line)
-
-def show_failed(terminalreporter, lines):
-    tw = terminalreporter._tw
-    failed = terminalreporter.stats.get("failed")
-    if failed:
-        for rep in failed:
-            pos = terminalreporter.gettestid(rep.item)
-            lines.append("FAIL %s" %(pos, ))
-
-def show_xfailed(terminalreporter, lines):
-    xfailed = terminalreporter.stats.get("xfailed")
-    if xfailed:
-        for rep in xfailed:
-            pos = terminalreporter.gettestid(rep.item)
-            reason = rep.keywords['xfail']
-            lines.append("XFAIL %s %s" %(pos, reason))
-
-def show_xpassed(terminalreporter, lines):
-    xpassed = terminalreporter.stats.get("xpassed")
-    if xpassed:
-        for rep in xpassed:
-            pos = terminalreporter.gettestid(rep.item)
-            reason = rep.keywords['xfail']
-            lines.append("XPASS %s %s" %(pos, reason))
-
-def cached_eval(config, expr, d):
-    if not hasattr(config, '_evalcache'):
-        config._evalcache = {}
-    try:
-        return config._evalcache[expr]
-    except KeyError:
-        #import sys
-        #print >>sys.stderr, ("cache-miss: %r" % expr)
-        config._evalcache[expr] = x = eval(expr, d)
-        return x
-
-
-def folded_skips(skipped):
-    d = {}
-    for event in skipped:
-        entry = event.longrepr.reprcrash 
-        key = entry.path, entry.lineno, entry.message
-        d.setdefault(key, []).append(event)
-    l = []
-    for key, events in d.items(): 
-        l.append((len(events),) + key)
-    return l 
-
-def show_skipped(terminalreporter, lines):
-    tr = terminalreporter
-    skipped = tr.stats.get('skipped', [])
-    if skipped:
-        #if not tr.hasopt('skipped'):
-        #    tr.write_line(
-        #        "%d skipped tests, specify -rs for more info" %
-        #        len(skipped))
-        #    return
-        fskips = folded_skips(skipped)
-        if fskips:
-            #tr.write_sep("_", "skipped test summary")
-            for num, fspath, lineno, reason in fskips:
-                if reason.startswith("Skipped: "):
-                    reason = reason[9:]
-                lines.append("SKIP [%d] %s:%d: %s" %
-                    (num, fspath, lineno, reason))

diff --git a/pypy/doc/config/objspace.usemodules.__pypy__.txt b/pypy/doc/config/objspace.usemodules.__pypy__.txt
deleted file mode 100644
--- a/pypy/doc/config/objspace.usemodules.__pypy__.txt
+++ /dev/null
@@ -1,9 +0,0 @@
-Use the '__pypy__' module. 
-This module is expected to be working and is included by default.
-It contains special PyPy-specific functionality.
-For example most of the special functions described in the `object space proxies`_
-document are in the module.
-See the `__pypy__ module documentation`_ for more details.
-
-.. _`object space proxies`: ../objspace-proxies.html
-.. _`__pypy__ module documentation`: ../__pypy__-module.html

diff --git a/py/_plugin/pytest_nose.py b/py/_plugin/pytest_nose.py
deleted file mode 100644
--- a/py/_plugin/pytest_nose.py
+++ /dev/null
@@ -1,98 +0,0 @@
-"""nose-compatibility plugin: allow to run nose test suites natively. 
-
-This is an experimental plugin for allowing to run tests written 
-in 'nosetests' style with py.test.
-
-Usage
--------------
-
-type::
-
-    py.test  # instead of 'nosetests'
-
-and you should be able to run nose style tests and at the same
-time can make full use of py.test's capabilities.  
-
-Supported nose Idioms
-----------------------
-
-* setup and teardown at module/class/method level
-* SkipTest exceptions and markers 
-* setup/teardown decorators
-* yield-based tests and their setup 
-* general usage of nose utilities 
-
-Unsupported idioms / issues
-----------------------------------
-
-- nose-style doctests are not collected and executed correctly,
-  also fixtures don't work. 
-
-- no nose-configuration is recognized 
-
-If you find other issues or have suggestions please run:: 
-
-    py.test --pastebin=all 
-
-and send the resulting URL to a py.test contact channel,
-at best to the mailing list. 
-"""
-import py
-import inspect
-import sys
-
-def pytest_runtest_makereport(__multicall__, item, call):
-    SkipTest = getattr(sys.modules.get('nose', None), 'SkipTest', None)
-    if SkipTest:
-        if call.excinfo and call.excinfo.errisinstance(SkipTest):
-            # let's substitute the excinfo with a py.test.skip one 
-            call2 = call.__class__(lambda: py.test.skip(str(call.excinfo.value)), call.when)
-            call.excinfo = call2.excinfo 
-
-def pytest_report_iteminfo(item):
-    # nose 0.11.1 uses decorators for "raises" and other helpers. 
-    # for reporting progress by filename we fish for the filename 
-    if isinstance(item, py.test.collect.Function):
-        obj = item.obj
-        if hasattr(obj, 'compat_co_firstlineno'):
-            fn = sys.modules[obj.__module__].__file__ 
-            if fn.endswith(".pyc"):
-                fn = fn[:-1]
-            #assert 0
-            #fn = inspect.getsourcefile(obj) or inspect.getfile(obj)
-            lineno = obj.compat_co_firstlineno    
-            return py.path.local(fn), lineno, obj.__module__
-    
-def pytest_runtest_setup(item):
-    if isinstance(item, (py.test.collect.Function)):
-        if isinstance(item.parent, py.test.collect.Generator):
-            gen = item.parent 
-            if not hasattr(gen, '_nosegensetup'):
-                call_optional(gen.obj, 'setup')
-                if isinstance(gen.parent, py.test.collect.Instance):
-                    call_optional(gen.parent.obj, 'setup')
-                gen._nosegensetup = True
-        if not call_optional(item.obj, 'setup'):
-            # call module level setup if there is no object level one
-            call_optional(item.parent.obj, 'setup')
-
-def pytest_runtest_teardown(item):
-    if isinstance(item, py.test.collect.Function):
-        if not call_optional(item.obj, 'teardown'):
-            call_optional(item.parent.obj, 'teardown')
-        #if hasattr(item.parent, '_nosegensetup'):
-        #    #call_optional(item._nosegensetup, 'teardown')
-        #    del item.parent._nosegensetup
-
-def pytest_make_collect_report(collector):
-    if isinstance(collector, py.test.collect.Generator):
-        call_optional(collector.obj, 'setup')
-
-def call_optional(obj, name):
-    method = getattr(obj, name, None)
-    if method:
-        ismethod = inspect.ismethod(method)
-        rawcode = py.code.getrawcode(method)
-        if not rawcode.co_varnames[ismethod:]:
-            method()
-            return True

diff --git a/pypy/doc/config/objspace.usemodules.micronumpy.txt b/pypy/doc/config/objspace.usemodules.micronumpy.txt
deleted file mode 100644
--- a/pypy/doc/config/objspace.usemodules.micronumpy.txt
+++ /dev/null
@@ -1,3 +0,0 @@
-Use the micronumpy module.
-This module provides a very basic numpy-like interface. Major use-case
-is to show how jit scales for other code.

diff --git a/pypy/doc/config/objspace.usemodules._ast.txt b/pypy/doc/config/objspace.usemodules._ast.txt
deleted file mode 100644
--- a/pypy/doc/config/objspace.usemodules._ast.txt
+++ /dev/null
@@ -1,2 +0,0 @@
-Use the '_ast' module. 
-This module is expected to be working and is included by default.

diff --git a/pypy/doc/config/translation.backendopt.merge_if_blocks.txt b/pypy/doc/config/translation.backendopt.merge_if_blocks.txt
deleted file mode 100644
--- a/pypy/doc/config/translation.backendopt.merge_if_blocks.txt
+++ /dev/null
@@ -1,26 +0,0 @@
-This optimization converts parts of flow graphs that result from
-chains of ifs and elifs like this into merged blocks.
-
-By default flow graphing this kind of code::
-
-    if x == 0:
-        f()
-    elif x == 1:
-        g()
-    elif x == 4:
-        h()
-    else:
-        j()
-
-will result in a chain of blocks with two exits, somewhat like this:
-
-.. image:: unmergedblocks.png
-
-(reflecting how Python would interpret this code).  Running this
-optimization will transform the block structure to contain a single
-"choice block" with four exits:
-
-.. image:: mergedblocks.png
-
-This can then be turned into a switch by the C backend, allowing the C
-compiler to produce more efficient code.

diff --git a/pypy/doc/config/translation.fork_before.txt b/pypy/doc/config/translation.fork_before.txt
deleted file mode 100644
--- a/pypy/doc/config/translation.fork_before.txt
+++ /dev/null
@@ -1,4 +0,0 @@
-This is an option mostly useful when working on the PyPy toolchain. If you use
-it, translate.py will fork before the specified phase. If the translation
-crashes after that fork, you can fix the bug in the toolchain, and continue
-translation at the fork-point.

diff --git a/py/_plugin/pytest_mark.py b/py/_plugin/pytest_mark.py
deleted file mode 100644
--- a/py/_plugin/pytest_mark.py
+++ /dev/null
@@ -1,174 +0,0 @@
-"""
-generic mechanism for marking python functions. 
-
-By using the ``py.test.mark`` helper you can instantiate
-decorators that will set named meta data on test functions. 
-
-Marking a single function 
-----------------------------------------------------
-
-You can "mark" a test function with meta data like this::
-
-    @py.test.mark.webtest
-    def test_send_http():
-        ... 
-
-This will set a "Marker" instance as a function attribute named "webtest". 
-You can also specify parametrized meta data like this::
-
-    @py.test.mark.webtest(firefox=30)
-    def test_receive():
-        ...
-
-The named marker can be accessed like this later::
-
-    test_receive.webtest.kwargs['firefox'] == 30
-
-In addition to set key-value pairs you can also use positional arguments::
-
-    @py.test.mark.webtest("triangular")
-    def test_receive():
-        ...
-
-and later access it with ``test_receive.webtest.args[0] == 'triangular'``.
-
-.. _`scoped-marking`:
-
-Marking whole classes or modules 
-----------------------------------------------------
-
-If you are programming with Python2.6 you may use ``py.test.mark`` decorators
-with classes to apply markers to all its test methods::
-
-    @py.test.mark.webtest
-    class TestClass:
-        def test_startup(self):
-            ...
-        def test_startup_and_more(self):
-            ...
-
-This is equivalent to directly applying the decorator to the
-two test functions. 
-
-To remain compatible with Python2.5 you can also set a 
-``pytestmark`` attribute on a TestClass like this::
-
-    import py
-
-    class TestClass:
-        pytestmark = py.test.mark.webtest
-
-or if you need to use multiple markers you can use a list::
-
-    import py
-
-    class TestClass:
-        pytestmark = [py.test.mark.webtest, pytest.mark.slowtest]
-
-You can also set a module level marker::
-
-    import py
-    pytestmark = py.test.mark.webtest
-
-in which case it will be applied to all functions and 
-methods defined in the module.  
-
-Using "-k MARKNAME" to select tests
-----------------------------------------------------
-
-You can use the ``-k`` command line option to select
-tests::
-
-    py.test -k webtest  # will only run tests marked as webtest
-
-"""
-import py
-
-def pytest_namespace():
-    return {'mark': MarkGenerator()}
-
-class MarkGenerator:
-    """ non-underscore attributes of this object can be used as decorators for 
-    marking test functions. Example: @py.test.mark.slowtest in front of a 
-    function will set the 'slowtest' marker object on it. """
-    def __getattr__(self, name):
-        if name[0] == "_":
-            raise AttributeError(name)
-        return MarkDecorator(name)
-
-class MarkDecorator:
-    """ decorator for setting function attributes. """
-    def __init__(self, name):
-        self.markname = name
-        self.kwargs = {}
-        self.args = []
-
-    def __repr__(self):
-        d = self.__dict__.copy()
-        name = d.pop('markname')
-        return "<MarkDecorator %r %r>" %(name, d)
-
-    def __call__(self, *args, **kwargs):
-        """ if passed a single callable argument: decorate it with mark info. 
-            otherwise add *args/**kwargs in-place to mark information. """
-        if args:
-            func = args[0]
-            if len(args) == 1 and hasattr(func, '__call__') or \
-               hasattr(func, '__bases__'):
-                if hasattr(func, '__bases__'):
-                    if hasattr(func, 'pytestmark'):
-                        l = func.pytestmark
-                        if not isinstance(l, list):
-                           func.pytestmark = [l, self]
-                        else: 
-                           l.append(self)
-                    else:
-                       func.pytestmark = [self]
-                else:
-                    holder = getattr(func, self.markname, None)
-                    if holder is None:
-                        holder = MarkInfo(self.markname, self.args, self.kwargs)
-                        setattr(func, self.markname, holder)
-                    else:
-                        holder.kwargs.update(self.kwargs)
-                        holder.args.extend(self.args)
-                return func
-            else:
-                self.args.extend(args)
-        self.kwargs.update(kwargs)
-        return self
-        
-class MarkInfo:
-    def __init__(self, name, args, kwargs):
-        self._name = name
-        self.args = args
-        self.kwargs = kwargs
-
-    def __getattr__(self, name):
-        if name[0] != '_' and name in self.kwargs:
-            py.log._apiwarn("1.1", "use .kwargs attribute to access key-values")
-            return self.kwargs[name]
-        raise AttributeError(name)
-
-    def __repr__(self):
-        return "<MarkInfo %r args=%r kwargs=%r>" % (
-                self._name, self.args, self.kwargs)
-            
-
-def pytest_pycollect_makeitem(__multicall__, collector, name, obj):
-    item = __multicall__.execute()
-    if isinstance(item, py.test.collect.Function):
-        cls = collector.getparent(py.test.collect.Class)
-        mod = collector.getparent(py.test.collect.Module)
-        func = item.obj
-        func = getattr(func, '__func__', func) # py3
-        func = getattr(func, 'im_func', func)  # py2
-        for parent in [x for x in (mod, cls) if x]:
-            marker = getattr(parent.obj, 'pytestmark', None)
-            if marker is not None:
-                if not isinstance(marker, list):
-                    marker = [marker]
-                for mark in marker:
-                    if isinstance(mark, MarkDecorator):
-                        mark(func)
-    return item

diff --git a/pypy/doc/config/objspace.std.withstrbuf.txt b/pypy/doc/config/objspace.std.withstrbuf.txt
deleted file mode 100644
--- a/pypy/doc/config/objspace.std.withstrbuf.txt
+++ /dev/null
@@ -1,4 +0,0 @@
-Enable "string buffer" objects.
-
-Similar to "string join" objects, but using a StringBuilder to represent
-a string built by repeated application of ``+=``.

diff --git a/pypy/doc/config/objspace.usemodules._rawffi.txt b/pypy/doc/config/objspace.usemodules._rawffi.txt
deleted file mode 100644
--- a/pypy/doc/config/objspace.usemodules._rawffi.txt
+++ /dev/null
@@ -1,3 +0,0 @@
-An experimental module providing very low-level interface to
-C-level libraries, for use when implementing ctypes, not
-intended for a direct use at all.
\ No newline at end of file

diff --git a/pypy/translator/c/node.py b/pypy/translator/c/node.py
--- a/pypy/translator/c/node.py
+++ b/pypy/translator/c/node.py
@@ -12,7 +12,7 @@
 from pypy.translator.c.support import c_char_array_constant, barebonearray
 from pypy.translator.c.primitive import PrimitiveType, name_signed
 from pypy.rlib import exports
-from pypy.rlib.rarithmetic import isinf, isnan
+from pypy.rlib.rfloat import isinf, isnan
 from pypy.rlib.rstackovf import _StackOverflow
 from pypy.translator.c import extfunc
 from pypy.translator.tool.cbuild import ExternalCompilationInfo
@@ -338,12 +338,15 @@
         self.varlength = varlength
         self.dependencies = {}
         contained_type = ARRAY.OF
-        if ARRAY._hints.get("render_as_void"):
-            contained_type = Void
+        # There is no such thing as an array of voids:
+        # we use an array of chars instead; only the pointer can be void*.
         self.itemtypename = db.gettype(contained_type, who_asks=self)
         self.fulltypename = self.itemtypename.replace('@', '(@)[%d]' %
                                                       (self.varlength,))
-        self.fullptrtypename = self.itemtypename.replace('@', '*@')
+        if ARRAY._hints.get("render_as_void"):
+            self.fullptrtypename = 'void *@'
+        else:
+            self.fullptrtypename = self.itemtypename.replace('@', '*@')
 
     def setup(self):
         """Array loops are forbidden by ForwardReference.become() because
@@ -364,7 +367,10 @@
         return self.itemindex_access_expr(baseexpr, index)
 
     def itemindex_access_expr(self, baseexpr, indexexpr):
-        return 'RPyBareItem(%s, %s)' % (baseexpr, indexexpr)
+        if self.ARRAY._hints.get("render_as_void"):
+            return 'RPyBareItem((char*)%s, %s)' % (baseexpr, indexexpr)
+        else:
+            return 'RPyBareItem(%s, %s)' % (baseexpr, indexexpr)
 
     def definition(self):
         return []    # no declaration is needed

diff --git a/pypy/doc/config/objspace.std.withmethodcache.txt b/pypy/doc/config/objspace.std.withmethodcache.txt
deleted file mode 100644
--- a/pypy/doc/config/objspace.std.withmethodcache.txt
+++ /dev/null
@@ -1,2 +0,0 @@
-Enable method caching. See the section "Method Caching" in `Standard
-Interpreter Optimizations <../interpreter-optimizations.html#method-caching>`__.

diff --git a/pypy/doc/config/objspace.usemodules._random.txt b/pypy/doc/config/objspace.usemodules._random.txt
deleted file mode 100644
--- a/pypy/doc/config/objspace.usemodules._random.txt
+++ /dev/null
@@ -1,2 +0,0 @@
-Use the '_random' module. It is necessary to use the module "random" from the standard library.
-This module is expected to be working and is included by default.

diff --git a/py/_plugin/pytest_restdoc.py b/py/_plugin/pytest_restdoc.py
deleted file mode 100644
--- a/py/_plugin/pytest_restdoc.py
+++ /dev/null
@@ -1,429 +0,0 @@
-"""
-perform ReST syntax, local and remote reference tests on .rst/.txt files. 
-"""
-import py
-import sys, os, re
-
-def pytest_addoption(parser):
-    group = parser.getgroup("ReST", "ReST documentation check options")
-    group.addoption('-R', '--urlcheck',
-           action="store_true", dest="urlcheck", default=False, 
-           help="urlopen() remote links found in ReST text files.") 
-    group.addoption('--urltimeout', action="store", metavar="secs",
-        type="int", dest="urlcheck_timeout", default=5,
-        help="timeout in seconds for remote urlchecks")
-    group.addoption('--forcegen',
-           action="store_true", dest="forcegen", default=False,
-           help="force generation of html files.")
-
-def pytest_collect_file(path, parent):
-    if path.ext in (".txt", ".rst"):
-        project = getproject(path)
-        if project is not None:
-            return ReSTFile(path, parent=parent, project=project)
-
-def getproject(path):
-    for parent in path.parts(reverse=True):
-        confrest = parent.join("confrest.py")
-        if confrest.check():
-            Project = confrest.pyimport().Project
-            return Project(parent)
-
-class ReSTFile(py.test.collect.File):
-    def __init__(self, fspath, parent, project):
-        super(ReSTFile, self).__init__(fspath=fspath, parent=parent)
-        self.project = project
-
-    def collect(self):
-        return [
-            ReSTSyntaxTest("ReSTSyntax", parent=self, project=self.project),
-            LinkCheckerMaker("checklinks", parent=self),
-            DoctestText("doctest", parent=self),
-        ]
-
-def deindent(s, sep='\n'):
-    leastspaces = -1
-    lines = s.split(sep)
-    for line in lines:
-        if not line.strip():
-            continue
-        spaces = len(line) - len(line.lstrip())
-        if leastspaces == -1 or spaces < leastspaces:
-            leastspaces = spaces
-    if leastspaces == -1:
-        return s
-    for i, line in enumerate(lines):
-        if not line.strip():
-            lines[i] = ''
-        else:
-            lines[i] = line[leastspaces:]
-    return sep.join(lines)
-
-class ReSTSyntaxTest(py.test.collect.Item): 
-    def __init__(self, name, parent, project):
-        super(ReSTSyntaxTest, self).__init__(name=name, parent=parent)
-        self.project = project
-
-    def reportinfo(self):
-        return self.fspath, None, "syntax check"
-
-    def runtest(self):
-        self.restcheck(py.path.svnwc(self.fspath))
-
-    def restcheck(self, path):
-        py.test.importorskip("docutils")
-        self.register_linkrole()
-        from docutils.utils import SystemMessage
-        try: 
-            self._checkskip(path, self.project.get_htmloutputpath(path))
-            self.project.process(path)
-        except KeyboardInterrupt: 
-            raise 
-        except SystemMessage: 
-            # we assume docutils printed info on stdout 
-            py.test.fail("docutils processing failed, see captured stderr") 
-
-    def register_linkrole(self):
-        #directive.register_linkrole('api', self.resolve_linkrole)
-        #directive.register_linkrole('source', self.resolve_linkrole)
-#
-#        # XXX fake sphinx' "toctree" and refs
-#        directive.register_linkrole('ref', self.resolve_linkrole)
-        
-        from docutils.parsers.rst import directives
-        def toctree_directive(name, arguments, options, content, lineno,
-                      content_offset, block_text, state, state_machine):
-            return []
-        toctree_directive.content = 1
-        toctree_directive.options = {'maxdepth': int, 'glob': directives.flag,
-                             'hidden': directives.flag}
-        directives.register_directive('toctree', toctree_directive)
-        self.register_pygments()
-
-    def register_pygments(self):
-        # taken from pygments-main/external/rst-directive.py 
-        from docutils.parsers.rst import directives
-        try:
-            from pygments.formatters import HtmlFormatter
-        except ImportError:
-            def pygments_directive(name, arguments, options, content, lineno,
-                                   content_offset, block_text, state, state_machine):
-                return []
-            pygments_directive.options = {}
-        else:
-            # The default formatter
-            DEFAULT = HtmlFormatter(noclasses=True)
-            # Add name -> formatter pairs for every variant you want to use
-            VARIANTS = {
-                # 'linenos': HtmlFormatter(noclasses=INLINESTYLES, linenos=True),
-            }
-
-            from docutils import nodes
-
-            from pygments import highlight
-            from pygments.lexers import get_lexer_by_name, TextLexer
-
-            def pygments_directive(name, arguments, options, content, lineno,
-                                   content_offset, block_text, state, state_machine):
-                try:
-                    lexer = get_lexer_by_name(arguments[0])
-                except ValueError:
-                    # no lexer found - use the text one instead of an exception
-                    lexer = TextLexer()
-                # take an arbitrary option if more than one is given
-                formatter = options and VARIANTS[options.keys()[0]] or DEFAULT
-                parsed = highlight('\n'.join(content), lexer, formatter)
-                return [nodes.raw('', parsed, format='html')]
-
-            pygments_directive.options = dict([(key, directives.flag) for key in VARIANTS])
-
-        pygments_directive.arguments = (1, 0, 1)
-        pygments_directive.content = 1
-        directives.register_directive('sourcecode', pygments_directive)
-
-    def resolve_linkrole(self, name, text, check=True):
-        apigen_relpath = self.project.apigen_relpath
-    
-        if name == 'api':
-            if text == 'py':
-                return ('py', apigen_relpath + 'api/index.html')
-            else:
-                assert text.startswith('py.'), (
-                    'api link "%s" does not point to the py package') % (text,)
-                dotted_name = text
-                if dotted_name.find('(') > -1:
-                    dotted_name = dotted_name[:text.find('(')]
-                # remove pkg root
-                path = dotted_name.split('.')[1:]
-                dotted_name = '.'.join(path)
-                obj = py
-                if check:
-                    for chunk in path:
-                        try:
-                            obj = getattr(obj, chunk)
-                        except AttributeError:
-                            raise AssertionError(
-                                'problem with linkrole :api:`%s`: can not resolve '
-                                'dotted name %s' % (text, dotted_name,))
-                return (text, apigen_relpath + 'api/%s.html' % (dotted_name,))
-        elif name == 'source':
-            assert text.startswith('py/'), ('source link "%s" does not point '
-                                            'to the py package') % (text,)
-            relpath = '/'.join(text.split('/')[1:])
-            if check:
-                pkgroot = py._pydir
-                abspath = pkgroot.join(relpath)
-                assert pkgroot.join(relpath).check(), (
-                        'problem with linkrole :source:`%s`: '
-                        'path %s does not exist' % (text, relpath))
-            if relpath.endswith('/') or not relpath:
-                relpath += 'index.html'
-            else:
-                relpath += '.html'
-            return (text, apigen_relpath + 'source/%s' % (relpath,))
-        elif name == 'ref':
-            return ("", "") 
-
-    def _checkskip(self, lpath, htmlpath=None):
-        if not self.config.getvalue("forcegen"):
-            lpath = py.path.local(lpath)
-            if htmlpath is not None:
-                htmlpath = py.path.local(htmlpath)
-            if lpath.ext == '.txt': 
-                htmlpath = htmlpath or lpath.new(ext='.html')
-                if htmlpath.check(file=1) and htmlpath.mtime() >= lpath.mtime(): 
-                    py.test.skip("html file is up to date, use --forcegen to regenerate")
-                    #return [] # no need to rebuild 
-
-class DoctestText(py.test.collect.Item): 
-    def reportinfo(self):
-        return self.fspath, None, "doctest"
-
-    def runtest(self): 
-        content = self._normalize_linesep()
-        newcontent = self.config.hook.pytest_doctest_prepare_content(content=content)
-        if newcontent is not None:
-            content = newcontent 
-        s = content 
-        l = []
-        prefix = '.. >>> '
-        mod = py.std.types.ModuleType(self.fspath.purebasename) 
-        skipchunk = False
-        for line in deindent(s).split('\n'):
-            stripped = line.strip()
-            if skipchunk and line.startswith(skipchunk):
-                py.builtin.print_("skipping", line)
-                continue
-            skipchunk = False 
-            if stripped.startswith(prefix):
-                try:
-                    py.builtin.exec_(py.code.Source(
-                            stripped[len(prefix):]).compile(),  mod.__dict__)
-                except ValueError:
-                    e = sys.exc_info()[1]
-                    if e.args and e.args[0] == "skipchunk":
-                        skipchunk = " " * (len(line) - len(line.lstrip()))
-                    else:
-                        raise
-            else:
-                l.append(line)
-        docstring = "\n".join(l)
-        mod.__doc__ = docstring 
-        failed, tot = py.std.doctest.testmod(mod, verbose=1)
-        if failed: 
-            py.test.fail("doctest %s: %s failed out of %s" %(
-                         self.fspath, failed, tot))
-
-    def _normalize_linesep(self):
-        # XXX quite nasty... but it works (fixes win32 issues)
-        s = self.fspath.read()
-        linesep = '\n'
-        if '\r' in s:
-            if '\n' not in s:
-                linesep = '\r'
-            else:
-                linesep = '\r\n'
-        s = s.replace(linesep, '\n')
-        return s
-        
-class LinkCheckerMaker(py.test.collect.Collector): 
-    def collect(self):
-        return list(self.genlinkchecks())
-
-    def genlinkchecks(self):
-        path = self.fspath
-        # generating functions + args as single tests 
-        timeout = self.config.getvalue("urlcheck_timeout")
-        for lineno, line in enumerate(path.readlines()): 
-            line = line.strip()
-            if line.startswith('.. _'): 
-                if line.startswith('.. _`'):
-                    delim = '`:'
-                else:
-                    delim = ':'
-                l = line.split(delim, 1)
-                if len(l) != 2: 
-                    continue
-                tryfn = l[1].strip() 
-                name = "%s:%d" %(tryfn, lineno)
-                if tryfn.startswith('http:') or tryfn.startswith('https'): 
-                    if self.config.getvalue("urlcheck"):
-                        yield CheckLink(name, parent=self, 
-                            args=(tryfn, path, lineno, timeout), checkfunc=urlcheck)
-                elif tryfn.startswith('webcal:'):
-                    continue
-                else: 
-                    i = tryfn.find('#') 
-                    if i != -1: 
-                        checkfn = tryfn[:i]
-                    else: 
-                        checkfn = tryfn 
-                    if checkfn.strip() and (1 or checkfn.endswith('.html')): 
-                        yield CheckLink(name, parent=self, 
-                            args=(tryfn, path, lineno), checkfunc=localrefcheck)
-        
-class CheckLink(py.test.collect.Item):
-    def __init__(self, name, parent, args, checkfunc):
-        super(CheckLink, self).__init__(name, parent)
-        self.args = args
-        self.checkfunc = checkfunc
-
-    def runtest(self):
-        return self.checkfunc(*self.args)
-
-    def reportinfo(self, basedir=None):
-        return (self.fspath, self.args[2], "checklink: %s" % self.args[0])
-
-def urlcheck(tryfn, path, lineno, TIMEOUT_URLOPEN): 
-    old = py.std.socket.getdefaulttimeout()
-    py.std.socket.setdefaulttimeout(TIMEOUT_URLOPEN)
-    try:
-        try: 
-            py.builtin.print_("trying remote", tryfn)
-            py.std.urllib2.urlopen(tryfn)
-        finally:
-            py.std.socket.setdefaulttimeout(old)
-    except (py.std.urllib2.URLError, py.std.urllib2.HTTPError): 
-        e = sys.exc_info()[1]
-        if getattr(e, 'code', None) in (401, 403): # authorization required, forbidden
-            py.test.skip("%s: %s" %(tryfn, str(e)))
-        else:
-            py.test.fail("remote reference error %r in %s:%d\n%s" %(
-                         tryfn, path.basename, lineno+1, e))
-
-def localrefcheck(tryfn, path, lineno): 
-    # assume it should be a file 
-    i = tryfn.find('#')
-    if tryfn.startswith('javascript:'):
-        return # don't check JS refs
-    if i != -1: 
-        anchor = tryfn[i+1:]
-        tryfn = tryfn[:i]
-    else: 
-        anchor = ''
-    fn = path.dirpath(tryfn) 
-    ishtml = fn.ext == '.html' 
-    fn = ishtml and fn.new(ext='.txt') or fn
-    py.builtin.print_("filename is", fn)
-    if not fn.check(): # not ishtml or not fn.check(): 
-        if not py.path.local(tryfn).check(): # the html could be there 
-            py.test.fail("reference error %r in %s:%d" %(
-                          tryfn, path.basename, lineno+1))
-    if anchor: 
-        source = unicode(fn.read(), 'latin1')
-        source = source.lower().replace('-', ' ') # aehem
-
-        anchor = anchor.replace('-', ' ') 
-        match2 = ".. _`%s`:" % anchor 
-        match3 = ".. _%s:" % anchor 
-        candidates = (anchor, match2, match3)
-        py.builtin.print_("candidates", repr(candidates))
-        for line in source.split('\n'): 
-            line = line.strip()
-            if line in candidates: 
-                break 
-        else: 
-            py.test.fail("anchor reference error %s#%s in %s:%d" %(
-                tryfn, anchor, path.basename, lineno+1))
-
-if hasattr(sys.stdout, 'fileno') and os.isatty(sys.stdout.fileno()):
-    def log(msg):
-        print(msg)
-else:
-    def log(msg):
-        pass
-
-def convert_rest_html(source, source_path, stylesheet=None, encoding='latin1'):
-    """ return html latin1-encoded document for the given input. 
-        source  a ReST-string
-        sourcepath where to look for includes (basically)
-        stylesheet path (to be used if any)
-    """
-    from docutils.core import publish_string
-    kwargs = {
-        'stylesheet' : stylesheet, 
-        'stylesheet_path': None,
-        'traceback' : 1, 
-        'embed_stylesheet': 0,
-        'output_encoding' : encoding, 
-        #'halt' : 0, # 'info',
-        'halt_level' : 2, 
-    }
-    # docutils uses os.getcwd() :-(
-    source_path = os.path.abspath(str(source_path))
-    prevdir = os.getcwd()
-    try:
-        #os.chdir(os.path.dirname(source_path))
-        return publish_string(source, source_path, writer_name='html',
-                              settings_overrides=kwargs)
-    finally:
-        os.chdir(prevdir)
-
-def process(txtpath, encoding='latin1'):
-    """ process a textfile """
-    log("processing %s" % txtpath)
-    assert txtpath.check(ext='.txt')
-    if isinstance(txtpath, py.path.svnwc):
-        txtpath = txtpath.localpath
-    htmlpath = txtpath.new(ext='.html')
-    #svninfopath = txtpath.localpath.new(ext='.svninfo')
-
-    style = txtpath.dirpath('style.css')
-    if style.check():
-        stylesheet = style.basename
-    else:
-        stylesheet = None
-    content = unicode(txtpath.read(), encoding)
-    doc = convert_rest_html(content, txtpath, stylesheet=stylesheet, encoding=encoding)
-    htmlpath.open('wb').write(doc)
-    #log("wrote %r" % htmlpath)
-    #if txtpath.check(svnwc=1, versioned=1): 
-    #    info = txtpath.info()
-    #    svninfopath.dump(info) 
-
-if sys.version_info > (3, 0):
-    def _uni(s): return s
-else:
-    def _uni(s):
-        return unicode(s)
-
-rex1 = re.compile(r'.*<body>(.*)</body>.*', re.MULTILINE | re.DOTALL)
-rex2 = re.compile(r'.*<div class="document">(.*)</div>.*', re.MULTILINE | re.DOTALL)
-
-def strip_html_header(string, encoding='utf8'):
-    """ return the content of the body-tag """ 
-    uni = unicode(string, encoding)
-    for rex in rex1,rex2: 
-        match = rex.search(uni) 
-        if not match: 
-            break 
-        uni = match.group(1) 
-    return uni 
-
-class Project: # used for confrest.py files 
-    def __init__(self, sourcepath):
-        self.sourcepath = sourcepath
-    def process(self, path):
-        return process(path)
-    def get_htmloutputpath(self, path):
-        return path.new(ext='html')

diff --git a/pypy/doc/config/translation.backendopt.txt b/pypy/doc/config/translation.backendopt.txt
deleted file mode 100644
--- a/pypy/doc/config/translation.backendopt.txt
+++ /dev/null
@@ -1,5 +0,0 @@
-This group contains options about various backend optimization passes. Most of
-them are described in the `EU report about optimization`_
-
-.. _`EU report about optimization`: http://codespeak.net/pypy/extradoc/eu-report/D07.1_Massive_Parallelism_and_Translation_Aspects-2007-02-28.pdf
-

diff --git a/pypy/doc/config/index.txt b/pypy/doc/config/index.txt
deleted file mode 100644
--- a/pypy/doc/config/index.txt
+++ /dev/null
@@ -1,52 +0,0 @@
-==============================
-Configuration Options for PyPy
-==============================
-
-This directory contains documentation for the many `configuration`_
-options that can be used to affect PyPy's behaviour.  There are two
-main classes of option, `object space options`_ and `translation
-options`_.
-
-There are two main entry points that accept options: ``py.py``, which
-implements Python on top of another Python interpreter and accepts all
-the `object space options`_:
-
-.. parsed-literal::
-
-    ./py.py <`objspace options`_>
-
-and the ``translate.py`` translation entry
-point which takes arguments of this form:
-
-.. parsed-literal::
-
-    ./translate.py <`translation options`_> <target>
-
-For the common case of ``<target>`` being ``targetpypystandalone.py``,
-you can then pass the `object space options`_ after
-``targetpypystandalone.py``, i.e. like this:
-
-.. parsed-literal::
-
-    ./translate.py <`translation options`_> targetpypystandalone.py <`objspace options`_>
-
-There is an `overview`_ of all command line arguments that can be
-passed in either position.
-
-Many of the more interesting object space options enable optimizations,
-which are described in `Standard Interpreter Optimizations`_, or allow
-the creation of objects that can barely be imagined in CPython, which
-are documented in `What PyPy can do for your objects`_.
-
-The following diagram gives some hints about which PyPy features work together
-with which other PyPy features:
-
-.. image:: ../image/compat-matrix.png
-
-.. _`configuration`: ../configuration.html
-.. _`objspace options`: commandline.html#objspace
-.. _`object space options`: commandline.html#objspace
-.. _`translation options`: commandline.html#translation
-.. _`overview`: commandline.html
-.. _`Standard Interpreter Optimizations`: ../interpreter-optimizations.html
-.. _`What PyPy can do for your objects`: ../objspace-proxies.html

diff --git a/pypy/doc/config/translation.jit_profiler.txt b/pypy/doc/config/translation.jit_profiler.txt
deleted file mode 100644
--- a/pypy/doc/config/translation.jit_profiler.txt
+++ /dev/null
@@ -1,1 +0,0 @@
-Integrate profiler support into the JIT

diff --git a/py/_plugin/pytest_helpconfig.py b/py/_plugin/pytest_helpconfig.py
deleted file mode 100644
--- a/py/_plugin/pytest_helpconfig.py
+++ /dev/null
@@ -1,164 +0,0 @@
-""" provide version info, conftest/environment config names. 
-"""
-import py
-import inspect, sys
-
-def pytest_addoption(parser):
-    group = parser.getgroup('debugconfig')
-    group.addoption('--version', action="store_true", 
-            help="display py lib version and import information.")
-    group._addoption('-p', action="append", dest="plugins", default = [],
-               metavar="name", 
-               help="early-load given plugin (multi-allowed).")
-    group.addoption('--traceconfig',
-               action="store_true", dest="traceconfig", default=False,
-               help="trace considerations of conftest.py files."),
-    group._addoption('--nomagic',
-               action="store_true", dest="nomagic", default=False,
-               help="don't reinterpret asserts, no traceback cutting. ")
-    group.addoption('--debug',
-               action="store_true", dest="debug", default=False,
-               help="generate and show internal debugging information.")
-    group.addoption("--help-config", action="store_true", dest="helpconfig", 
-            help="show available conftest.py and ENV-variable names.")
-
-
-def pytest_configure(__multicall__, config):
-    if config.option.version:
-        p = py.path.local(py.__file__).dirpath()
-        sys.stderr.write("This is py.test version %s, imported from %s\n" % 
-            (py.__version__, p))
-        sys.exit(0)
-    if not config.option.helpconfig:
-        return
-    __multicall__.execute()
-    options = []
-    for group in config._parser._groups:
-        options.extend(group.options)
-    widths = [0] * 10 
-    tw = py.io.TerminalWriter()
-    tw.sep("-")
-    tw.line("%-13s | %-18s | %-25s | %s" %(
-            "cmdline name", "conftest.py name", "ENV-variable name", "help"))
-    tw.sep("-")
-
-    options = [opt for opt in options if opt._long_opts]
-    options.sort(key=lambda x: x._long_opts)
-    for opt in options:
-        if not opt._long_opts or not opt.dest:
-            continue
-        optstrings = list(opt._long_opts) # + list(opt._short_opts)
-        optstrings = filter(None, optstrings)
-        optstring = "|".join(optstrings)
-        line = "%-13s | %-18s | %-25s | %s" %(
-            optstring, 
-            "option_%s" % opt.dest, 
-            "PYTEST_OPTION_%s" % opt.dest.upper(),
-            opt.help and opt.help or "", 
-            )
-        tw.line(line[:tw.fullwidth])
-    for name, help in conftest_options:
-        line = "%-13s | %-18s | %-25s | %s" %(
-            "", 
-            name, 
-            "",
-            help, 
-            )
-        tw.line(line[:tw.fullwidth])
-        
-    tw.sep("-")
-    sys.exit(0)
-
-conftest_options = (
-    ('pytest_plugins', 'list of plugin names to load'),
-    ('collect_ignore', '(relative) paths ignored during collection'), 
-    ('rsyncdirs', 'to-be-rsynced directories for dist-testing'), 
-)
-
-def pytest_report_header(config):
-    lines = []
-    if config.option.debug or config.option.traceconfig:
-        lines.append("using py lib: %s" % (py.path.local(py.__file__).dirpath()))
-    if config.option.traceconfig:
-        lines.append("active plugins:")
-        plugins = []
-        items = config.pluginmanager._name2plugin.items()
-        for name, plugin in items:
-            lines.append("    %-20s: %s" %(name, repr(plugin)))
-    return lines
-
-
-# =====================================================
-# validate plugin syntax and hooks 
-# =====================================================
-
-def pytest_plugin_registered(manager, plugin):
-    methods = collectattr(plugin)
-    hooks = {}
-    for hookspec in manager.hook._hookspecs:
-        hooks.update(collectattr(hookspec))
-    
-    stringio = py.io.TextIO()
-    def Print(*args):
-        if args:
-            stringio.write(" ".join(map(str, args)))
-        stringio.write("\n")
-
-    fail = False
-    while methods:
-        name, method = methods.popitem()
-        #print "checking", name
-        if isgenerichook(name):
-            continue
-        if name not in hooks: 
-            if not getattr(method, 'optionalhook', False):
-                Print("found unknown hook:", name)
-                fail = True
-        else:
-            #print "checking", method
-            method_args = getargs(method)
-            #print "method_args", method_args
-            if '__multicall__' in method_args:
-                method_args.remove('__multicall__')
-            hook = hooks[name]
-            hookargs = getargs(hook)
-            for arg in method_args:
-                if arg not in hookargs:
-                    Print("argument %r not available"  %(arg, ))
-                    Print("actual definition: %s" %(formatdef(method)))
-                    Print("available hook arguments: %s" % 
-                            ", ".join(hookargs))
-                    fail = True
-                    break 
-            #if not fail:
-            #    print "matching hook:", formatdef(method)
-        if fail:
-            name = getattr(plugin, '__name__', plugin)
-            raise PluginValidationError("%s:\n%s" %(name, stringio.getvalue()))
-
-class PluginValidationError(Exception):
-    """ plugin failed validation. """
-
-def isgenerichook(name):
-    return name == "pytest_plugins" or \
-           name.startswith("pytest_funcarg__")
-
-def getargs(func):
-    args = inspect.getargs(py.code.getrawcode(func))[0]
-    startindex = inspect.ismethod(func) and 1 or 0
-    return args[startindex:]
-
-def collectattr(obj, prefixes=("pytest_",)):
-    methods = {}
-    for apiname in dir(obj):
-        for prefix in prefixes:
-            if apiname.startswith(prefix):
-                methods[apiname] = getattr(obj, apiname) 
-    return methods 
-
-def formatdef(func):
-    return "%s%s" %(
-        func.__name__, 
-        inspect.formatargspec(*inspect.getargspec(func))
-    )
-

diff --git a/py/bin/py.which b/py/bin/py.which
deleted file mode 100755
--- a/py/bin/py.which
+++ /dev/null
@@ -1,3 +0,0 @@
-#!/usr/bin/env python
-from _findpy import py
-py.cmdline.pywhich()
\ No newline at end of file

diff --git a/pypy/doc/config/objspace.usemodules.cmath.txt b/pypy/doc/config/objspace.usemodules.cmath.txt
deleted file mode 100644
--- a/pypy/doc/config/objspace.usemodules.cmath.txt
+++ /dev/null
@@ -1,2 +0,0 @@
-Use the 'cmath' module. 
-This module is expected to be working and is included by default.

diff --git a/pypy/doc/config/objspace.usemodules.mmap.txt b/pypy/doc/config/objspace.usemodules.mmap.txt
deleted file mode 100644
--- a/pypy/doc/config/objspace.usemodules.mmap.txt
+++ /dev/null
@@ -1,2 +0,0 @@
-Use the 'mmap' module. 
-This module is expected to be fully working.

diff --git a/pypy/rpython/lltypesystem/rstr.py b/pypy/rpython/lltypesystem/rstr.py
--- a/pypy/rpython/lltypesystem/rstr.py
+++ b/pypy/rpython/lltypesystem/rstr.py
@@ -515,7 +515,6 @@
         return count
 
     @classmethod
-    @purefunction
     def ll_find(cls, s1, s2, start, end):
         if start < 0:
             start = 0
@@ -529,11 +528,10 @@
             return start
         elif m == 1:
             return cls.ll_find_char(s1, s2.chars[0], start, end)
-        
+
         return cls.ll_search(s1, s2, start, end, FAST_FIND)
 
     @classmethod
-    @purefunction
     def ll_rfind(cls, s1, s2, start, end):
         if start < 0:
             start = 0
@@ -547,11 +545,10 @@
             return end
         elif m == 1:
             return cls.ll_rfind_char(s1, s2.chars[0], start, end)
-        
+
         return cls.ll_search(s1, s2, start, end, FAST_RFIND)
 
     @classmethod
-    @purefunction
     def ll_count(cls, s1, s2, start, end):
         if start < 0:
             start = 0
@@ -565,7 +562,7 @@
             return end - start + 1
         elif m == 1:
             return cls.ll_count_char(s1, s2.chars[0], start, end)
-            
+
         res = cls.ll_search(s1, s2, start, end, FAST_COUNT)
         # For a few cases ll_search can return -1 to indicate an "impossible"
         # condition for a string match, count just returns 0 in these cases.

diff --git a/pypy/doc/config/translation.backend.txt b/pypy/doc/config/translation.backend.txt
deleted file mode 100644
--- a/pypy/doc/config/translation.backend.txt
+++ /dev/null
@@ -1,3 +0,0 @@
-Which backend to use when translating, see `translation documentation`_.
-
-.. _`translation documentation`: ../translation.html

diff --git a/py/bin/py.countloc b/py/bin/py.countloc
deleted file mode 100755
--- a/py/bin/py.countloc
+++ /dev/null
@@ -1,3 +0,0 @@
-#!/usr/bin/env python
-from _findpy import py
-py.cmdline.pycountloc()
\ No newline at end of file

diff --git a/pypy/doc/config/objspace.usemodules.oracle.txt b/pypy/doc/config/objspace.usemodules.oracle.txt
deleted file mode 100644
--- a/pypy/doc/config/objspace.usemodules.oracle.txt
+++ /dev/null
@@ -1,2 +0,0 @@
-Use the 'oracle' module.
-This module is off by default, requires oracle client installed.

diff --git a/pypy/doc/config/objspace.usemodules._bisect.txt b/pypy/doc/config/objspace.usemodules._bisect.txt
deleted file mode 100644
--- a/pypy/doc/config/objspace.usemodules._bisect.txt
+++ /dev/null
@@ -1,4 +0,0 @@
-Use the '_bisect' module.
-Used, optionally,  by the 'bisect' standard lib module. This module is expected to be working and is included by default.
-
-

diff --git a/pypy/doc/config/translation.jit_backend.txt b/pypy/doc/config/translation.jit_backend.txt
deleted file mode 100644
--- a/pypy/doc/config/translation.jit_backend.txt
+++ /dev/null
@@ -1,2 +0,0 @@
-Choose the backend to use for the JIT.
-By default, this is the best backend for the current platform.

diff --git a/pypy/doc/config/commandline.txt b/pypy/doc/config/commandline.txt
deleted file mode 100644
--- a/pypy/doc/config/commandline.txt
+++ /dev/null
@@ -1,33 +0,0 @@
-
-.. contents::
-    
-
-.. _objspace:
-.. _`overview-of-command-line-options-for-objspace`:
-
--------------------------------
-PyPy Python interpreter options
--------------------------------
-
-The following options can be used after ``translate.py
-targetpypystandalone`` or as options to ``py.py``.
-
-.. GENERATE: objspace
-
-
-.. _translation:
-.. _`overview-of-command-line-options-for-translation`:
-
----------------------------
-General translation options
----------------------------
-
-The following are options of ``translate.py``.  They must be
-given before the ``targetxxx`` on the command line.
-
-* `--opt -O:`__ set the optimization level `[0, 1, size, mem, 2, 3]`
-
-.. __: opt.html
-
-.. GENERATE: translation
-

diff --git a/pypy/doc/config/translation.backendopt.profile_based_inline_heuristic.txt b/pypy/doc/config/translation.backendopt.profile_based_inline_heuristic.txt
deleted file mode 100644
--- a/pypy/doc/config/translation.backendopt.profile_based_inline_heuristic.txt
+++ /dev/null
@@ -1,4 +0,0 @@
-Internal option. Switch to a different weight heuristic for inlining.
-This is for profile-based inlining (:config:`translation.backendopt.profile_based_inline`).
-
-.. internal

diff --git a/py/bin/env.cmd b/py/bin/env.cmd
deleted file mode 100644
--- a/py/bin/env.cmd
+++ /dev/null
@@ -1,2 +0,0 @@
- at echo off
-for /F "usebackq delims=" %%i in (`python "%~dp0\env.py"`) do %%i

diff --git a/pypy/doc/config/objspace.usemodules.time.txt b/pypy/doc/config/objspace.usemodules.time.txt
deleted file mode 100644
--- a/pypy/doc/config/objspace.usemodules.time.txt
+++ /dev/null
@@ -1,4 +0,0 @@
-Use the 'time' module. 
-
-Obsolete; use :config:`objspace.usemodules.rctime` for our up-to-date version
-of the application-level 'time' module.

diff --git a/pypy/doc/config/objspace.usemodules._socket.txt b/pypy/doc/config/objspace.usemodules._socket.txt
deleted file mode 100644
--- a/pypy/doc/config/objspace.usemodules._socket.txt
+++ /dev/null
@@ -1,7 +0,0 @@
-Use the '_socket' module. 
-
-This is our implementation of '_socket', the Python builtin module
-exposing socket primitives, which is wrapped and used by the standard
-library 'socket.py' module. It is based on `rffi`_.
-
-.. _`rffi`: ../rffi.html

diff --git a/py/_path/gateway/__init__.py b/py/_path/gateway/__init__.py
deleted file mode 100644
--- a/py/_path/gateway/__init__.py
+++ /dev/null
@@ -1,1 +0,0 @@
-#

diff --git a/pypy/doc/config/objspace.usemodules._hashlib.txt b/pypy/doc/config/objspace.usemodules._hashlib.txt
deleted file mode 100644
--- a/pypy/doc/config/objspace.usemodules._hashlib.txt
+++ /dev/null
@@ -1,2 +0,0 @@
-Use the '_hashlib' module.
-Used by the 'hashlib' standard lib module, and indirectly by the various cryptographic libs. This module is expected to be working and is included by default.

diff --git a/pypy/doc/config/translation.backendopt.inline_threshold.txt b/pypy/doc/config/translation.backendopt.inline_threshold.txt
deleted file mode 100644
--- a/pypy/doc/config/translation.backendopt.inline_threshold.txt
+++ /dev/null
@@ -1,2 +0,0 @@
-Weight threshold used to decide whether to inline flowgraphs.
-This is for basic inlining (:config:`translation.backendopt.inline`).

diff --git a/pypy/doc/config/objspace.usemodules.gc.txt b/pypy/doc/config/objspace.usemodules.gc.txt
deleted file mode 100644
--- a/pypy/doc/config/objspace.usemodules.gc.txt
+++ /dev/null
@@ -1,5 +0,0 @@
-Use the 'gc' module. 
-This module is expected to be working and is included by default.
-Note that since the gc module is highly implementation specific, it contains
-only the ``collect`` function in PyPy, which forces a collection when compiled
-with the framework or with Boehm.

diff --git a/pypy/doc/config/objspace.usemodules._multiprocessing.txt b/pypy/doc/config/objspace.usemodules._multiprocessing.txt
deleted file mode 100644
--- a/pypy/doc/config/objspace.usemodules._multiprocessing.txt
+++ /dev/null
@@ -1,2 +0,0 @@
-Use the '_multiprocessing' module.
-Used by the 'multiprocessing' standard lib module. This module is expected to be working and is included by default.

diff --git a/pypy/doc/config/objspace.std.withsmalllong.txt b/pypy/doc/config/objspace.std.withsmalllong.txt
deleted file mode 100644
--- a/pypy/doc/config/objspace.std.withsmalllong.txt
+++ /dev/null
@@ -1,5 +0,0 @@
-Enable "small longs", an additional implementation of the Python
-type "long", implemented with a C long long.  It is mostly useful
-on 32-bit; on 64-bit, a C long long is the same as a C long, so
-its usefulness is limited to Python objects of type "long" that
-would anyway fit in an "int".

diff --git a/pypy/doc/config/objspace.usemodules._weakref.txt b/pypy/doc/config/objspace.usemodules._weakref.txt
deleted file mode 100644
--- a/pypy/doc/config/objspace.usemodules._weakref.txt
+++ /dev/null
@@ -1,6 +0,0 @@
-Use the '_weakref' module, necessary for the standard lib 'weakref' module.
-PyPy's weakref implementation is not completely stable yet. The first
-difference to CPython is that weak references only go away after the next
-garbage collection, not immediately. The other problem seems to be that under
-certain circumstances (that we have not determined) weak references keep the
-object alive.

diff --git a/py/_plugin/pytest_recwarn.py b/py/_plugin/pytest_recwarn.py
deleted file mode 100644
--- a/py/_plugin/pytest_recwarn.py
+++ /dev/null
@@ -1,121 +0,0 @@
-"""
-helpers for asserting deprecation and other warnings. 
-
-Example usage 
----------------------
-
-You can use the ``recwarn`` funcarg to track 
-warnings within a test function:
-
-.. sourcecode:: python
-
-    def test_hello(recwarn):
-        from warnings import warn
-        warn("hello", DeprecationWarning)
-        w = recwarn.pop(DeprecationWarning)
-        assert issubclass(w.category, DeprecationWarning)
-        assert 'hello' in str(w.message)
-        assert w.filename
-        assert w.lineno
-
-You can also call a global helper for checking
-taht a certain function call yields a Deprecation
-warning:
-
-.. sourcecode:: python
-
-    import py
-            
-    def test_global():
-        py.test.deprecated_call(myfunction, 17)
-        
-        
-"""
-
-import py
-import os
-
-def pytest_funcarg__recwarn(request):
-    """Return a WarningsRecorder instance that provides these methods:
-
-    * ``pop(category=None)``: return last warning matching the category.
-    * ``clear()``: clear list of warnings 
-    """
-    warnings = WarningsRecorder()
-    request.addfinalizer(warnings.finalize)
-    return warnings
-
-def pytest_namespace():
-    return {'deprecated_call': deprecated_call}
-
-def deprecated_call(func, *args, **kwargs):
-    """ assert that calling func(*args, **kwargs)
-        triggers a DeprecationWarning. 
-    """ 
-    warningmodule = py.std.warnings
-    l = []
-    oldwarn_explicit = getattr(warningmodule, 'warn_explicit')
-    def warn_explicit(*args, **kwargs): 
-        l.append(args) 
-        oldwarn_explicit(*args, **kwargs)
-    oldwarn = getattr(warningmodule, 'warn')
-    def warn(*args, **kwargs): 
-        l.append(args) 
-        oldwarn(*args, **kwargs)
-        
-    warningmodule.warn_explicit = warn_explicit
-    warningmodule.warn = warn
-    try:
-        ret = func(*args, **kwargs)
-    finally:
-        warningmodule.warn_explicit = warn_explicit
-        warningmodule.warn = warn
-    if not l:
-        #print warningmodule
-        __tracebackhide__ = True
-        raise AssertionError("%r did not produce DeprecationWarning" %(func,))
-    return ret
-
-
-class RecordedWarning:
-    def __init__(self, message, category, filename, lineno, line):
-        self.message = message
-        self.category = category
-        self.filename = filename
-        self.lineno = lineno
-        self.line = line
-
-class WarningsRecorder:
-    def __init__(self):
-        warningmodule = py.std.warnings
-        self.list = []
-        def showwarning(message, category, filename, lineno, line=0):
-            self.list.append(RecordedWarning(
-                message, category, filename, lineno, line))
-            try:
-                self.old_showwarning(message, category, 
-                    filename, lineno, line=line)
-            except TypeError:
-                # < python2.6 
-                self.old_showwarning(message, category, filename, lineno)
-        self.old_showwarning = warningmodule.showwarning
-        warningmodule.showwarning = showwarning
-
-    def pop(self, cls=Warning):
-        """ pop the first recorded warning, raise exception if not exists."""
-        for i, w in enumerate(self.list):
-            if issubclass(w.category, cls):
-                return self.list.pop(i)
-        __tracebackhide__ = True
-        assert 0, "%r not found in %r" %(cls, self.list)
-
-    #def resetregistry(self):
-    #    import warnings
-    #    warnings.onceregistry.clear()
-    #    warnings.__warningregistry__.clear()
-
-    def clear(self): 
-        self.list[:] = []
-
-    def finalize(self):
-        py.std.warnings.showwarning = self.old_showwarning

diff --git a/pypy/doc/config/translation.backendopt.really_remove_asserts.txt b/pypy/doc/config/translation.backendopt.really_remove_asserts.txt
deleted file mode 100644

diff --git a/pypy/doc/config/translation.force_make.txt b/pypy/doc/config/translation.force_make.txt
deleted file mode 100644
--- a/pypy/doc/config/translation.force_make.txt
+++ /dev/null
@@ -1,1 +0,0 @@
-Force executing makefile instead of using platform.

diff --git a/pypy/doc/config/objspace.usemodules._md5.txt b/pypy/doc/config/objspace.usemodules._md5.txt
deleted file mode 100644
--- a/pypy/doc/config/objspace.usemodules._md5.txt
+++ /dev/null
@@ -1,5 +0,0 @@
-Use the built-in '_md5' module.
-This module is expected to be working and is included by default.
-There is also a pure Python version in lib_pypy which is used
-if the built-in is disabled, but it is several orders of magnitude 
-slower.

diff --git a/py/bin/py.svnwcrevert b/py/bin/py.svnwcrevert
deleted file mode 100755
--- a/py/bin/py.svnwcrevert
+++ /dev/null
@@ -1,3 +0,0 @@
-#!/usr/bin/env python
-from _findpy import py
-py.cmdline.pysvnwcrevert()
\ No newline at end of file

diff --git a/pypy/doc/config/translation.backendopt.clever_malloc_removal.txt b/pypy/doc/config/translation.backendopt.clever_malloc_removal.txt
deleted file mode 100644
--- a/pypy/doc/config/translation.backendopt.clever_malloc_removal.txt
+++ /dev/null
@@ -1,10 +0,0 @@
-Try to inline flowgraphs based on whether doing so would enable malloc
-removal (:config:`translation.backendopt.mallocs`.) by eliminating
-calls that result in escaping. This is an experimental optimization,
-also right now some eager inlining is necessary for helpers doing
-malloc itself to be inlined first for this to be effective.
-This option enable also an extra subsequent malloc removal phase.
-
-Callee flowgraphs are considered candidates based on a weight heuristic like
-for basic inlining. (see :config:`translation.backendopt.inline`,
-:config:`translation.backendopt.clever_malloc_removal_threshold` ).

diff --git a/py/_plugin/pytest_pdb.py b/py/_plugin/pytest_pdb.py
deleted file mode 100644
--- a/py/_plugin/pytest_pdb.py
+++ /dev/null
@@ -1,105 +0,0 @@
-"""
-interactive debugging with the Python Debugger.
-"""
-import py
-import pdb, sys, linecache
-
-def pytest_addoption(parser):
-    group = parser.getgroup("general") 
-    group._addoption('--pdb',
-               action="store_true", dest="usepdb", default=False,
-               help="start the interactive Python debugger on errors.")
-
-def pytest_configure(config):
-    if config.getvalue("usepdb"):
-        config.pluginmanager.register(PdbInvoke(), 'pdb')
-
-class PdbInvoke:
-    def pytest_runtest_makereport(self, item, call):
-        if call.excinfo and not \
-           call.excinfo.errisinstance(py.test.skip.Exception): 
-            # play well with capturing, slightly hackish
-            capman = item.config.pluginmanager.getplugin('capturemanager')
-            capman.suspendcapture() 
-
-            tw = py.io.TerminalWriter()
-            repr = call.excinfo.getrepr()
-            repr.toterminal(tw) 
-            post_mortem(call.excinfo._excinfo[2])
-
-            capman.resumecapture_item(item)
-
-class Pdb(py.std.pdb.Pdb):
-    def do_list(self, arg):
-        self.lastcmd = 'list'
-        last = None
-        if arg:
-            try:
-                x = eval(arg, {}, {})
-                if type(x) == type(()):
-                    first, last = x
-                    first = int(first)
-                    last = int(last)
-                    if last < first:
-                        # Assume it's a count
-                        last = first + last
-                else:
-                    first = max(1, int(x) - 5)
-            except:
-                print ('*** Error in argument: %s' % repr(arg))
-                return
-        elif self.lineno is None:
-            first = max(1, self.curframe.f_lineno - 5)
-        else:
-            first = self.lineno + 1
-        if last is None:
-            last = first + 10
-        filename = self.curframe.f_code.co_filename
-        breaklist = self.get_file_breaks(filename)
-        try:
-            for lineno in range(first, last+1):
-                # start difference from normal do_line
-                line = self._getline(filename, lineno)
-                # end difference from normal do_line
-                if not line:
-                    print ('[EOF]')
-                    break
-                else:
-                    s = repr(lineno).rjust(3)
-                    if len(s) < 4: s = s + ' '
-                    if lineno in breaklist: s = s + 'B'
-                    else: s = s + ' '
-                    if lineno == self.curframe.f_lineno:
-                        s = s + '->'
-                    sys.stdout.write(s + '\t' + line)
-                    self.lineno = lineno
-        except KeyboardInterrupt:
-            pass
-    do_l = do_list
-
-    def _getline(self, filename, lineno):
-        if hasattr(filename, "__source__"):
-            try:
-                return filename.__source__.lines[lineno - 1] + "\n"
-            except IndexError:
-                return None
-        return linecache.getline(filename, lineno)
-
-    def get_stack(self, f, t):
-        # Modified from bdb.py to be able to walk the stack beyond generators,
-        # which does not work in the normal pdb :-(
-        stack, i = pdb.Pdb.get_stack(self, f, t)
-        if f is None:
-            i = max(0, len(stack) - 1)
-            while i and stack[i][0].f_locals.get("__tracebackhide__", False):
-                i-=1
-        return stack, i
-
-def post_mortem(t):
-    p = Pdb()
-    p.reset()
-    p.interaction(None, t)
-
-def set_trace():
-    # again, a copy of the version in pdb.py
-    Pdb().set_trace(sys._getframe().f_back)

diff --git a/pypy/doc/config/objspace.lonepycfiles.txt b/pypy/doc/config/objspace.lonepycfiles.txt
deleted file mode 100644
--- a/pypy/doc/config/objspace.lonepycfiles.txt
+++ /dev/null
@@ -1,16 +0,0 @@
-If turned on, PyPy accepts to import a module ``x`` if it finds a
-file ``x.pyc`` even if there is no file ``x.py``.
-
-This is the way that CPython behaves, but it is disabled by
-default for PyPy because it is a common cause of issues: most
-typically, the ``x.py`` file is removed (manually or by a
-version control system) but the ``x`` module remains
-accidentally importable because the ``x.pyc`` file stays
-around.
-
-The usual reason for wanting this feature is to distribute
-non-open-source Python programs by distributing ``pyc`` files
-only, but this use case is not practical for PyPy at the
-moment because multiple versions of PyPy compiled with various
-optimizations might be unable to load each other's ``pyc``
-files.

diff --git a/pypy/interpreter/pycode.py b/pypy/interpreter/pycode.py
--- a/pypy/interpreter/pycode.py
+++ b/pypy/interpreter/pycode.py
@@ -9,8 +9,7 @@
 from pypy.interpreter import eval
 from pypy.interpreter.argument import Signature
 from pypy.interpreter.error import OperationError
-from pypy.interpreter.gateway import NoneNotWrapped 
-from pypy.interpreter.baseobjspace import ObjSpace, W_Root
+from pypy.interpreter.gateway import NoneNotWrapped, unwrap_spec
 from pypy.interpreter.astcompiler.consts import (CO_OPTIMIZED,
     CO_OPTIMIZED, CO_NEWLOCALS, CO_VARARGS, CO_VARKEYWORDS, CO_NESTED,
     CO_GENERATOR, CO_CONTAINSGLOBALS)
@@ -87,7 +86,7 @@
         self._init_flags()
         # Precompute what arguments need to be copied into cellvars
         self._args_as_cellvars = []
-        
+
         if self.co_cellvars:
             argcount = self.co_argcount
             assert argcount >= 0     # annotator hint
@@ -147,7 +146,7 @@
 
     def signature(self):
         return self._signature
-    
+
     @classmethod
     def _from_code(cls, space, code, hidden_applevel=False, code_hook=None):
         """ Initialize the code object from a real (CPython) one.
@@ -183,7 +182,7 @@
                       list(code.co_cellvars),
                       hidden_applevel, cpython_magic)
 
-    
+
     def _compute_flatcall(self):
         # Speed hack!
         self.fast_natural_arity = eval.Code.HOPELESS
@@ -193,7 +192,7 @@
             return
         if self.co_argcount > 0xff:
             return
-        
+
         self.fast_natural_arity = eval.Code.FLATPYCALL | self.co_argcount
 
     def funcrun(self, func, args):
@@ -205,7 +204,7 @@
                                       fresh_virtualizable=True)
         args_matched = args.parse_into_scope(None, fresh_frame.fastlocals_w,
                                              func.name,
-                                             sig, func.defs_w)
+                                             sig, func.defs)
         fresh_frame.init_cells()
         return frame.run()
 
@@ -215,10 +214,10 @@
         sig = self._signature
         # speed hack
         fresh_frame = jit.hint(frame, access_directly=True,
-                                      fresh_virtualizable=True)        
+                                      fresh_virtualizable=True)
         args_matched = args.parse_into_scope(w_obj, fresh_frame.fastlocals_w,
                                              func.name,
-                                             sig, func.defs_w)
+                                             sig, func.defs)
         fresh_frame.init_cells()
         return frame.run()
 
@@ -268,20 +267,20 @@
         co = self._to_code()
         dis.dis(co)
 
-    def fget_co_consts(space, self):
+    def fget_co_consts(self, space):
         return space.newtuple(self.co_consts_w)
-    
-    def fget_co_names(space, self):
+
+    def fget_co_names(self, space):
         return space.newtuple(self.co_names_w)
 
-    def fget_co_varnames(space, self):
+    def fget_co_varnames(self, space):
         return space.newtuple([space.wrap(name) for name in self.co_varnames])
 
-    def fget_co_cellvars(space, self):
+    def fget_co_cellvars(self, space):
         return space.newtuple([space.wrap(name) for name in self.co_cellvars])
 
-    def fget_co_freevars(space, self):
-        return space.newtuple([space.wrap(name) for name in self.co_freevars])    
+    def fget_co_freevars(self, space):
+        return space.newtuple([space.wrap(name) for name in self.co_freevars])
 
     def descr_code__eq__(self, w_other):
         space = self.space
@@ -330,14 +329,10 @@
             w_result = space.xor(w_result, space.hash(w_const))
         return w_result
 
-    unwrap_spec =        [ObjSpace, W_Root, 
-                          int, int, int, int,
-                          str, W_Root, W_Root, 
-                          W_Root, str, str, int, 
-                          str, W_Root, 
-                          W_Root, int]
-
-
+    @unwrap_spec(argcount=int, nlocals=int, stacksize=int, flags=int,
+                 codestring=str,
+                 filename=str, name=str, firstlineno=int,
+                 lnotab=str, magic=int)
     def descr_code__new__(space, w_subtype,
                           argcount, nlocals, stacksize, flags,
                           codestring, w_constants, w_names,
@@ -369,7 +364,6 @@
         PyCode.__init__(code, space, argcount, nlocals, stacksize, flags, codestring, consts_w[:], names,
                       varnames, filename, name, firstlineno, lnotab, freevars, cellvars, magic=magic)
         return space.wrap(code)
-    descr_code__new__.unwrap_spec = unwrap_spec 
 
     def descr__reduce__(self, space):
         from pypy.interpreter.mixedmodule import MixedModule
@@ -378,18 +372,18 @@
         new_inst = mod.get('code_new')
         w        = space.wrap
         tup      = [
-            w(self.co_argcount), 
-            w(self.co_nlocals), 
-            w(self.co_stacksize), 
+            w(self.co_argcount),
+            w(self.co_nlocals),
+            w(self.co_stacksize),
             w(self.co_flags),
-            w(self.co_code), 
-            space.newtuple(self.co_consts_w), 
-            space.newtuple(self.co_names_w), 
-            space.newtuple([w(v) for v in self.co_varnames]), 
+            w(self.co_code),
+            space.newtuple(self.co_consts_w),
+            space.newtuple(self.co_names_w),
+            space.newtuple([w(v) for v in self.co_varnames]),
             w(self.co_filename),
-            w(self.co_name), 
+            w(self.co_name),
             w(self.co_firstlineno),
-            w(self.co_lnotab), 
+            w(self.co_lnotab),
             space.newtuple([w(v) for v in self.co_freevars]),
             space.newtuple([w(v) for v in self.co_cellvars]),
             w(self.magic),
@@ -402,4 +396,3 @@
 
     def repr(self, space):
         return space.wrap(self.get_repr())
-    repr.unwrap_spec = ['self', ObjSpace]

diff --git a/pypy/doc/config/objspace.usemodules.binascii.txt b/pypy/doc/config/objspace.usemodules.binascii.txt
deleted file mode 100644
--- a/pypy/doc/config/objspace.usemodules.binascii.txt
+++ /dev/null
@@ -1,1 +0,0 @@
-Use the RPython 'binascii' module.

diff --git a/pypy/doc/config/translation.backendopt.raisingop2direct_call.txt b/pypy/doc/config/translation.backendopt.raisingop2direct_call.txt
deleted file mode 100644
--- a/pypy/doc/config/translation.backendopt.raisingop2direct_call.txt
+++ /dev/null
@@ -1,3 +0,0 @@
-Internal option. Transformation required by the LLVM backend.
-
-.. internal

diff --git a/pypy/objspace/std/mapdict.py b/pypy/objspace/std/mapdict.py
--- a/pypy/objspace/std/mapdict.py
+++ b/pypy/objspace/std/mapdict.py
@@ -1,3 +1,4 @@
+import weakref
 from pypy.rlib import jit, objectmodel, debug
 from pypy.rlib.rarithmetic import intmask, r_uint
 
@@ -92,6 +93,10 @@
         return index
 
     def _index(self, selector):
+        while isinstance(self, PlainAttribute):
+            if selector == self.selector:
+                return self.position
+            self = self.back
         return -1
 
     def copy(self, obj):
@@ -215,15 +220,15 @@
 class DevolvedDictTerminator(Terminator):
     def _read_terminator(self, obj, selector):
         if selector[1] == DICT:
-            w_dict = obj.getdict()
             space = self.space
+            w_dict = obj.getdict(space)
             return space.finditem_str(w_dict, selector[0])
         return Terminator._read_terminator(self, obj, selector)
 
     def _write_terminator(self, obj, selector, w_value):
         if selector[1] == DICT:
-            w_dict = obj.getdict()
             space = self.space
+            w_dict = obj.getdict(space)
             space.setitem_str(w_dict, selector[0], w_value)
             return True
         return Terminator._write_terminator(self, obj, selector, w_value)
@@ -231,8 +236,8 @@
     def delete(self, obj, selector):
         from pypy.interpreter.error import OperationError
         if selector[1] == DICT:
-            w_dict = obj.getdict()
             space = self.space
+            w_dict = obj.getdict(space)
             try:
                 space.delitem(w_dict, space.wrap(selector[0]))
             except OperationError, ex:
@@ -272,11 +277,6 @@
             self._copy_attr(obj, new_obj)
         return new_obj
 
-    def _index(self, selector):
-        if selector == self.selector:
-            return self.position
-        return self.back._index(selector)
-
     def copy(self, obj):
         new_obj = self.back.copy(obj)
         self._copy_attr(obj, new_obj)
@@ -375,12 +375,12 @@
         self._become(new_obj)
         return True
 
-    def getdict(self):
+    def getdict(self, space):
         w_dict = self._get_mapdict_map().read(self, ("dict", SPECIAL))
         if w_dict is not None:
             assert isinstance(w_dict, W_DictMultiObject)
             return w_dict
-        w_dict = MapDictImplementation(self.space, self)
+        w_dict = MapDictImplementation(space, self)
         flag = self._get_mapdict_map().write(self, ("dict", SPECIAL), w_dict)
         assert flag
         return w_dict
@@ -388,7 +388,7 @@
     def setdict(self, space, w_dict):
         from pypy.interpreter.typedef import check_new_dictionary
         w_dict = check_new_dictionary(space, w_dict)
-        w_olddict = self.getdict()
+        w_olddict = self.getdict(space)
         assert isinstance(w_dict, W_DictMultiObject)
         if w_olddict.r_dict_content is None:
             w_olddict._as_rdict()
@@ -506,6 +506,9 @@
 memo_get_subclass_of_correct_size._annspecialcase_ = "specialize:memo"
 _subclass_cache = {}
 
+erase_item, unerase_item = rerased.new_erasing_pair("mapdict storage item")
+erase_list, unerase_list = rerased.new_erasing_pair("mapdict storage list")
+
 def _make_subclass_size_n(supercls, n, use_erased=True):
     from pypy.rlib import unroll
     rangen = unroll.unrolling_iterable(range(n))
@@ -535,19 +538,22 @@
         def _mapdict_get_storage_list(self):
             erased = getattr(self, "_value%s" % nmin1)
             if use_erased:
-                return rerased.unerase_fixedsizelist(erased, W_Root)
+                return unerase_list(erased)
             else:
                 assert isinstance(erased, ExtraAttributes)
                 return erased.storage
 
         def _mapdict_read_storage(self, index):
-            for i in rangenmin1:
-                if index == i:
-                    return getattr(self, "_value%s" % i)
+            assert index >= 0
+            if index < nmin1:
+                for i in rangenmin1:
+                    if index == i:
+                        erased = getattr(self, "_value%s" % i)
+                        return unerase_item(erased)
             if self._has_storage_list():
                 return self._mapdict_get_storage_list()[index - nmin1]
             erased = getattr(self, "_value%s" % nmin1)
-            return unerase(erased, W_Root)
+            return unerase_item(erased)
 
         def _mapdict_write_storage(self, index, value):
             for i in rangenmin1:
@@ -557,7 +563,7 @@
             if self._has_storage_list():
                 self._mapdict_get_storage_list()[index - nmin1] = value
                 return
-            erased = erase(value)
+            erased = erase_item(value)
             setattr(self, "_value%s" % nmin1, erased)
 
         def _mapdict_storage_length(self):
@@ -577,21 +583,21 @@
             has_storage_list = self._has_storage_list()
             if len_storage < n:
                 assert not has_storage_list
-                erased = erase(None)
+                erased = erase_item(None)
             elif len_storage == n:
                 assert not has_storage_list
-                erased = erase(storage[nmin1])
+                erased = erase_item(storage[nmin1])
             elif not has_storage_list:
                 # storage is longer than self.map.length() only due to
                 # overallocation
-                erased = erase(storage[nmin1])
+                erased = erase_item(storage[nmin1])
                 # in theory, we should be ultra-paranoid and check all entries,
                 # but checking just one should catch most problems anyway:
                 assert storage[n] is None
             else:
                 storage_list = storage[nmin1:]
                 if use_erased:
-                    erased = rerased.erase_fixedsizelist(storage_list, W_Root)
+                    erased = erase_list(storage_list)
                 else:
                     erased = ExtraAttributes(storage_list)
             setattr(self, "_value%s" % nmin1, erased)
@@ -680,7 +686,7 @@
 
 def materialize_r_dict(space, obj, w_d):
     map = obj._get_mapdict_map()
-    assert obj.getdict() is w_d
+    assert obj.getdict(space) is w_d
     new_obj = map.materialize_r_dict(space, obj, w_d)
     _become(obj, new_obj)
 
@@ -709,7 +715,6 @@
 # Magic caching
 
 class CacheEntry(object):
-    map = None
     version_tag = None
     index = 0
     w_method = None # for callmethod
@@ -720,8 +725,11 @@
         map = w_obj._get_mapdict_map()
         return self.is_valid_for_map(map)
 
+    @jit.dont_look_inside
     def is_valid_for_map(self, map):
-        if map is self.map:
+        # note that 'map' can be None here
+        mymap = self.map_wref()
+        if mymap is not None and mymap is map:
             version_tag = map.terminator.w_cls.version_tag()
             if version_tag is self.version_tag:
                 # everything matches, it's incredibly fast
@@ -730,22 +738,23 @@
                 return True
         return False
 
+_invalid_cache_entry_map = objectmodel.instantiate(AbstractAttribute)
+_invalid_cache_entry_map.terminator = None
 INVALID_CACHE_ENTRY = CacheEntry()
-INVALID_CACHE_ENTRY.map = objectmodel.instantiate(AbstractAttribute)
-                             # different from any real map ^^^
-INVALID_CACHE_ENTRY.map.terminator = None
-
+INVALID_CACHE_ENTRY.map_wref = weakref.ref(_invalid_cache_entry_map)
+                                 # different from any real map ^^^
 
 def init_mapdict_cache(pycode):
     num_entries = len(pycode.co_names_w)
     pycode._mapdict_caches = [INVALID_CACHE_ENTRY] * num_entries
 
+ at jit.dont_look_inside
 def _fill_cache(pycode, nameindex, map, version_tag, index, w_method=None):
     entry = pycode._mapdict_caches[nameindex]
     if entry is INVALID_CACHE_ENTRY:
         entry = CacheEntry()
         pycode._mapdict_caches[nameindex] = entry
-    entry.map = map
+    entry.map_wref = weakref.ref(map)
     entry.version_tag = version_tag
     entry.index = index
     entry.w_method = w_method

diff --git a/pypy/doc/config/objspace.geninterp.txt b/pypy/doc/config/objspace.geninterp.txt
deleted file mode 100644
--- a/pypy/doc/config/objspace.geninterp.txt
+++ /dev/null
@@ -1,4 +0,0 @@
-This option enables `geninterp`_. This will usually make the PyPy interpreter
-significantly faster (but also a bit bigger).
-
-.. _`geninterp`: ../geninterp.html

diff --git a/py/_plugin/pytest_unittest.py b/py/_plugin/pytest_unittest.py
deleted file mode 100644
--- a/py/_plugin/pytest_unittest.py
+++ /dev/null
@@ -1,81 +0,0 @@
-"""
-automatically discover and run traditional "unittest.py" style tests. 
-
-Usage
-----------------
-
-This plugin collects and runs Python `unittest.py style`_ tests. 
-It will automatically collect ``unittest.TestCase`` subclasses 
-and their ``test`` methods from the test modules of a project
-(usually following the ``test_*.py`` pattern). 
-
-This plugin is enabled by default. 
-
-.. _`unittest.py style`: http://docs.python.org/library/unittest.html
-"""
-import py
-import sys
-
-def pytest_pycollect_makeitem(collector, name, obj):
-    if 'unittest' not in sys.modules:
-        return # nobody derived unittest.TestCase
-    try:
-        isunit = issubclass(obj, py.std.unittest.TestCase)
-    except KeyboardInterrupt:
-        raise
-    except Exception:
-        pass
-    else:
-        if isunit:
-            return UnitTestCase(name, parent=collector)
-
-class UnitTestCase(py.test.collect.Class):
-    def collect(self):
-        return [UnitTestCaseInstance("()", self)]
-
-    def setup(self):
-        pass
-
-    def teardown(self):
-        pass
-
-_dummy = object()
-class UnitTestCaseInstance(py.test.collect.Instance):
-    def collect(self):
-        loader = py.std.unittest.TestLoader()
-        names = loader.getTestCaseNames(self.obj.__class__)
-        l = []
-        for name in names:
-            callobj = getattr(self.obj, name)
-            if py.builtin.callable(callobj):
-                l.append(UnitTestFunction(name, parent=self))
-        return l
-
-    def _getobj(self):
-        x = self.parent.obj
-        return self.parent.obj(methodName='run')
-        
-class UnitTestFunction(py.test.collect.Function):
-    def __init__(self, name, parent, args=(), obj=_dummy, sort_value=None):
-        super(UnitTestFunction, self).__init__(name, parent)
-        self._args = args
-        if obj is not _dummy:
-            self._obj = obj
-        self._sort_value = sort_value
-        if hasattr(self.parent, 'newinstance'):
-            self.parent.newinstance()
-            self.obj = self._getobj()
-
-    def runtest(self):
-        target = self.obj
-        args = self._args
-        target(*args)
-
-    def setup(self):
-        instance = py.builtin._getimself(self.obj)
-        instance.setUp()
-
-    def teardown(self):
-        instance = py.builtin._getimself(self.obj)
-        instance.tearDown()
-

diff --git a/pypy/doc/config/objspace.logbytecodes.txt b/pypy/doc/config/objspace.logbytecodes.txt
deleted file mode 100644
--- a/pypy/doc/config/objspace.logbytecodes.txt
+++ /dev/null
@@ -1,3 +0,0 @@
-Internal option.
-
-.. internal

diff --git a/pypy/doc/config/objspace.std.withtypeversion.txt b/pypy/doc/config/objspace.std.withtypeversion.txt
deleted file mode 100644
--- a/pypy/doc/config/objspace.std.withtypeversion.txt
+++ /dev/null
@@ -1,6 +0,0 @@
-This (mostly internal) option enables "type versions": Every type object gets an
-(only internally visible) version that is updated when the type's dict is
-changed. This is e.g. used for invalidating caches. It does not make sense to
-enable this option alone.
-
-.. internal

diff --git a/pypy/doc/config/translation.cli.trace_calls.txt b/pypy/doc/config/translation.cli.trace_calls.txt
deleted file mode 100644
--- a/pypy/doc/config/translation.cli.trace_calls.txt
+++ /dev/null
@@ -1,3 +0,0 @@
-Internal. Debugging aid for the CLI backend.
-
-.. internal

diff --git a/pypy/doc/config/objspace.usemodules.struct.txt b/pypy/doc/config/objspace.usemodules.struct.txt
deleted file mode 100644
--- a/pypy/doc/config/objspace.usemodules.struct.txt
+++ /dev/null
@@ -1,5 +0,0 @@
-Use the built-in 'struct' module.
-This module is expected to be working and is included by default.
-There is also a pure Python version in lib_pypy which is used
-if the built-in is disabled, but it is several orders of magnitude
-slower.

diff --git a/pypy/doc/architecture.txt b/pypy/doc/architecture.txt
deleted file mode 100644
--- a/pypy/doc/architecture.txt
+++ /dev/null
@@ -1,264 +0,0 @@
-==================================================
-PyPy - Goals and Architecture Overview 
-==================================================
-
-.. contents::
-.. sectnum::
-
-This document gives an overview of the goals and architecture of PyPy.
-See `getting started`_ for a practical introduction and starting points. 
-
-Mission statement 
-====================
-
-We aim to provide:
-
- * a common translation and support framework for producing
-   implementations of dynamic languages, emphasizing a clean
-   separation between language specification and implementation
-   aspects.
-
- * a compliant, flexible and fast implementation of the Python_ Language 
-   using the above framework to enable new advanced features without having
-   to encode low level details into it.
-
-By separating concerns in this way, we intend for our implementation
-of Python - and other dynamic languages - to become robust against almost 
-all implementation decisions, including target platform, memory and 
-threading models, optimizations applied, up to to the point of being able to
-automatically *generate* Just-in-Time compilers for dynamic languages.
-
-Conversely, our implementation techniques, including the JIT compiler 
-generator, should become robust against changes in the languages 
-implemented. 
-
-
-High Level Goals
-=============================
-
-PyPy - the Translation Framework 
------------------------------------------------
-
-Traditionally, language interpreters are written in a target platform language
-like C/Posix, Java or C#.  Each such implementation fundamentally provides 
-a mapping from application source code to the target environment.  One of 
-the goals of the "all-encompassing" environments, like the .NET framework
-and to some extent the Java virtual machine, is to provide standardized
-and higher level functionalities in order to support language implementers
-for writing language implementations. 
-
-PyPy is experimenting with a more ambitious approach.  We are using a
-subset of the high-level language Python, called RPython_, in which we
-write languages as simple interpreters with few references to and
-dependencies on lower level details.  Our translation framework then
-produces a concrete virtual machine for the platform of our choice by
-inserting appropriate lower level aspects.  The result can be customized
-by selecting other feature and platform configurations.
-
-Our goal is to provide a possible solution to the problem of language
-implementers: having to write ``l * o * p`` interpreters for ``l``
-dynamic languages and ``p`` platforms with ``o`` crucial design
-decisions.  PyPy aims at having any one of these parameters changeable
-independently from each other:
-
-* ``l``: the language that we analyze can be evolved or entirely replaced;
-
-* ``o``: we can tweak and optimize the translation process to produce 
-  platform specific code based on different models and trade-offs;
-
-* ``p``: we can write new translator back-ends to target different
-  physical and virtual platforms.
-
-By contrast, a standardized target environment - say .NET -
-enforces ``p=1`` as far as it's concerned.  This helps making ``o`` a
-bit smaller by providing a higher-level base to build upon.  Still,
-we believe that enforcing the use of one common environment 
-is not necessary.  PyPy's goal is to give weight to this claim - at least 
-as far as language implementation is concerned - showing an approach
-to the ``l * o * p`` problem that does not rely on standardization.
-
-The most ambitious part of this goal is to `generate Just-In-Time
-Compilers`_ in a language-independent way, instead of only translating
-the source interpreter into an interpreter for the target platform.
-This is an area of language implementation that is commonly considered
-very challenging because of the involved complexity.
-
-
-PyPy - the Python Interpreter 
---------------------------------------------
-
-Our main motivation for developing the translation framework is to
-provide a full featured, customizable, fast_ and `very compliant`_ Python
-implementation, working on and interacting with a large variety of
-platforms and allowing the quick introduction of new advanced language
-features.
-
-This Python implementation is written in RPython as a relatively simple
-interpreter, in some respects easier to understand than CPython, the C
-reference implementation of Python.  We are using its high level and
-flexibility to quickly experiment with features or implementation
-techniques in ways that would, in a traditional approach, require
-pervasive changes to the source code.  For example, PyPy's Python
-interpreter can optionally provide lazily computed objects - a small
-extension that would require global changes in CPython.  Another example
-is the garbage collection technique: changing CPython to use a garbage
-collector not based on reference counting would be a major undertaking,
-whereas in PyPy it is an issue localized in the translation framework,
-and fully orthogonal to the interpreter source code.
-
-
-PyPy Architecture 
-===========================
-
-As you would expect from a project implemented using ideas from the world
-of `Extreme Programming`_, the architecture of PyPy has evolved over time
-and continues to evolve.  Nevertheless, the high level architecture is 
-stable. As described above, there are two rather independent basic
-subsystems: the `Python Interpreter`_ and the `Translation Framework`_.
-
-.. _`translation framework`:
-
-The Translation Framework
--------------------------
-
-The job of the translation tool chain is to translate RPython_ programs
-into an efficient version of that program for one of various target
-platforms, generally one that is considerably lower-level than Python.
-
-The approach we have taken is to reduce the level of abstraction of the
-source RPython program in several steps, from the high level down to the
-level of the target platform, whatever that may be.  Currently we
-support two broad flavours of target platforms: the ones that assume a
-C-like memory model with structures and pointers, and the ones that
-assume an object-oriented model with classes, instances and methods (as,
-for example, the Java and .NET virtual machines do).
-
-The translation tool chain never sees the RPython source code or syntax
-trees, but rather starts with the *code objects* that define the
-behaviour of the function objects one gives it as input.  It can be
-considered as "freezing" a pre-imported RPython program into an
-executable form suitable for the target platform.
-
-The steps of the translation process can be summarized as follows:
-
-* The code object of each source functions is converted to a `control
-  flow graph` by the `Flow Object Space`_.
-
-* The control flow graphs are processed by the Annotator_, which
-  performs whole-program type inference to annotate each variable of
-  the control flow graph with the types it may take at run-time.
-
-* The information provided by the annotator is used by the RTyper_ to
-  convert the high level operations of the control flow graphs into
-  operations closer to the abstraction level of the target platform.
-
-* Optionally, `various transformations`_ can then be applied which, for
-  example, perform optimizations such as inlining, add capabilities
-  such as stackless_-style concurrency, or insert code for the
-  `garbage collector`_.
-
-* Then, the graphs are converted to source code for the target platform
-  and compiled into an executable.
-
-This process is described in much more detail in the `document about
-the translation process`_ and in the paper `Compiling dynamic language
-implementations`_.
-
-.. _`control flow graph`: translation.html#the-flow-model
-.. _`Flow Object Space`: objspace.html#the-flow-object-space
-.. _Annotator: translation.html#the-annotation-pass
-.. _RTyper: rtyper.html#overview
-.. _`various transformations`: translation.html#the-optional-transformations
-.. _`document about the translation process`: translation.html
-.. _`garbage collector`: garbage_collection.html
-
-
-.. _`standard interpreter`: 
-.. _`python interpreter`: 
-
-The Python Interpreter
--------------------------------------
-
-PyPy's *Python Interpreter* is written in RPython and implements the
-full Python language.  This interpreter very closely emulates the
-behavior of CPython.  It contains the following key components:
-
-- a bytecode compiler responsible for producing Python code objects 
-  from the source code of a user application;
-
-- a `bytecode evaluator`_ responsible for interpreting 
-  Python code objects;
-
-- a `standard object space`_, responsible for creating and manipulating
-  the Python objects seen by the application.
-
-The *bytecode compiler* is the preprocessing phase that produces a
-compact bytecode format via a chain of flexible passes (tokenizer,
-lexer, parser, abstract syntax tree builder, bytecode generator).  The
-*bytecode evaluator* interprets this bytecode.  It does most of its work
-by delegating all actual manipulations of user objects to the *object
-space*.  The latter can be thought of as the library of built-in types.
-It defines the implementation of the user objects, like integers and
-lists, as well as the operations between them, like addition or
-truth-value-testing.
-
-This division between bytecode evaluator and object space is very
-important, as it gives a lot of flexibility.  One can plug in 
-different `object spaces`_ to get different or enriched behaviours 
-of the Python objects.  Additionally, a special more abstract object
-space, the `flow object space`_, allows us to reuse the bytecode
-evaluator for our translation framework.
-
-.. _`bytecode evaluator`: interpreter.html
-.. _`standard object space`: objspace.html#the-standard-object-space
-.. _`object spaces`: objspace.html
-.. _`flow object space`: objspace.html#the-flow-object-space
-
-.. _`the translation framework`:
-
-
-Further reading
-===============
-
-All of PyPy's documentation can be reached from the `documentation
-index`_.  Of particular interest after reading this document might be:
-
- * `getting-started`_: a hands-on guide to getting involved with the
-   PyPy source code.
-
- * `PyPy's approach to virtual machine construction`_: a paper
-   presented to the Dynamic Languages Symposium attached to OOPSLA
-   2006.
-
- * `The translation document`_: a detailed description of our
-   translation process.
-
- * All our `Technical reports`_, including `Compiling dynamic language
-   implementations`_.
-
- * `JIT Generation in PyPy`_, describing how we produce a Just-in-time
-   Compiler from an interpreter.
-
-.. _`documentation index`: docindex.html
-.. _`getting-started`: getting-started.html
-.. _`PyPy's approach to virtual machine construction`: http://codespeak.net/svn/pypy/extradoc/talk/dls2006/pypy-vm-construction.pdf
-.. _`the translation document`: translation.html
-.. _`Compiling dynamic language implementations`: http://codespeak.net/svn/pypy/extradoc/eu-report/D05.1_Publish_on_translating_a_very-high-level_description.pdf
-.. _`Technical reports`: index-report.html
-
-.. _`getting started`: getting-started.html
-.. _`Extreme Programming`: http://www.extremeprogramming.org/
-
-.. _fast: faq.html#how-fast-is-pypy
-.. _`very compliant`: cpython_differences.html
-
-.. _`RPython`: coding-guide.html#rpython
-
-.. _Python: http://docs.python.org/ref
-.. _Psyco: http://psyco.sourceforge.net
-.. _stackless: stackless.html
-.. _`generate Just-In-Time Compilers`: jit/index.html
-.. _`JIT Generation in PyPy`: jit/index.html
-
-.. include:: _ref.txt
-

diff --git a/pypy/rpython/lltypesystem/lltype.py b/pypy/rpython/lltypesystem/lltype.py
--- a/pypy/rpython/lltypesystem/lltype.py
+++ b/pypy/rpython/lltypesystem/lltype.py
@@ -13,6 +13,33 @@
 
 TLS = tlsobject()
 
+class WeakValueDictionary(weakref.WeakValueDictionary):
+    """A subclass of weakref.WeakValueDictionary
+    which resets the 'nested_hash_level' when keys are being deleted.
+    """
+    def __init__(self, *args, **kwargs):
+        weakref.WeakValueDictionary.__init__(self, *args, **kwargs)
+        remove_base = self._remove
+        def remove(*args):
+            if safe_equal is None:
+                # The interpreter is shutting down, and the comparison
+                # function is already gone.
+                return
+            if TLS is None: # Happens when the interpreter is shutting down
+                return remove_base(*args)
+            nested_hash_level = TLS.nested_hash_level
+            try:
+                # The 'remove' function is called when an object dies.  This
+                # can happen anywhere when there are reference cycles,
+                # especially when we are already computing another __hash__
+                # value.  It's not really a recursion in this case, so we
+                # reset the counter; otherwise the hash value may be
+                # incorrect and the key won't be deleted.
+                TLS.nested_hash_level = 0
+                remove_base(*args)
+            finally:
+                TLS.nested_hash_level = nested_hash_level
+        self._remove = remove
 
 class _uninitialized(object):
     def __init__(self, TYPE):
@@ -368,6 +395,8 @@
                 return "{ %s }" % of._str_fields()
             else:
                 return "%s { %s }" % (of._name, of._str_fields())
+        elif self._hints.get('render_as_void'):
+            return 'void'
         else:
             return str(self.OF)
     _str_fields = saferecursive(_str_fields, '...')
@@ -397,7 +426,7 @@
     # behaves more or less like a Struct with fields item0, item1, ...
     # but also supports __getitem__(), __setitem__(), __len__().
 
-    _cache = weakref.WeakValueDictionary() # cache the length-1 FixedSizeArrays
+    _cache = WeakValueDictionary() # cache the length-1 FixedSizeArrays
     def __new__(cls, OF, length, **kwds):
         if length == 1 and not kwds:
             try:
@@ -633,7 +662,7 @@
 class Ptr(LowLevelType):
     __name__ = property(lambda self: '%sPtr' % self.TO.__name__)
 
-    _cache = weakref.WeakValueDictionary()  # cache the Ptrs
+    _cache = WeakValueDictionary()  # cache the Ptrs
     def __new__(cls, TO, use_cache=True):
         if not isinstance(TO, ContainerType):
             raise TypeError, ("can only point to a Container type, "
@@ -796,6 +825,8 @@
                 return cast_pointer(TGT, value)
         elif ORIG == llmemory.Address:
             return llmemory.cast_adr_to_ptr(value, TGT)
+        elif ORIG == Signed:
+            return cast_int_to_ptr(TGT, value)
     elif TGT == llmemory.Address and isinstance(ORIG, Ptr):
         return llmemory.cast_ptr_to_adr(value)
     elif TGT == Signed and isinstance(ORIG, Ptr) and ORIG.TO._gckind == 'raw':
@@ -1125,6 +1156,11 @@
                 raise TypeError("cannot directly assign to container array items")
             T2 = typeOf(val)
             if T2 != T1:
+                from pypy.rpython.lltypesystem import rffi
+                if T1 is rffi.VOIDP and isinstance(T2, Ptr):
+                    # Any pointer is convertible to void*
+                    val = rffi.cast(rffi.VOIDP, val)
+                else:
                     raise TypeError("%r items:\n"
                                     "expect %r\n"
                                     "   got %r" % (self._T, T1, T2))
@@ -1164,6 +1200,7 @@
             return '* %s' % (self._obj0,)
 
     def __call__(self, *args):
+        from pypy.rpython.lltypesystem import rffi
         if isinstance(self._T, FuncType):
             if len(args) != len(self._T.ARGS):
                 raise TypeError,"calling %r with wrong argument number: %r" % (self._T, args)
@@ -1177,11 +1214,19 @@
                             pass
                         else:
                             assert a == value
+                    # None is acceptable for any pointer
+                    elif isinstance(ARG, Ptr) and a is None:
+                        pass
+                    # Any pointer is convertible to void*
+                    elif ARG is rffi.VOIDP and isinstance(typeOf(a), Ptr):
+                        pass
                     # special case: ARG can be a container type, in which
                     # case a should be a pointer to it.  This must also be
                     # special-cased in the backends.
-                    elif not (isinstance(ARG, ContainerType)
-                            and typeOf(a) == Ptr(ARG)):
+                    elif (isinstance(ARG, ContainerType) and
+                          typeOf(a) == Ptr(ARG)):
+                        pass
+                    else:
                         args_repr = [typeOf(arg) for arg in args]
                         raise TypeError, ("calling %r with wrong argument "
                                           "types: %r" % (self._T, args_repr))

diff --git a/pypy/doc/config/objspace.usemodules.__builtin__.txt b/pypy/doc/config/objspace.usemodules.__builtin__.txt
deleted file mode 100644
--- a/pypy/doc/config/objspace.usemodules.__builtin__.txt
+++ /dev/null
@@ -1,2 +0,0 @@
-Use the '__builtin__' module. 
-This module is essential, included by default and should not be removed.

diff --git a/pypy/doc/config/objspace.usemodules._lsprof.txt b/pypy/doc/config/objspace.usemodules._lsprof.txt
deleted file mode 100644
--- a/pypy/doc/config/objspace.usemodules._lsprof.txt
+++ /dev/null
@@ -1,1 +0,0 @@
-Use the '_lsprof' module. 

diff --git a/pypy/doc/config/translation.compilerflags.txt b/pypy/doc/config/translation.compilerflags.txt
deleted file mode 100644
--- a/pypy/doc/config/translation.compilerflags.txt
+++ /dev/null
@@ -1,1 +0,0 @@
-Experimental. Specify extra flags to pass to the C compiler.

diff --git a/py/_compat/__init__.py b/py/_compat/__init__.py
deleted file mode 100644
--- a/py/_compat/__init__.py
+++ /dev/null
@@ -1,2 +0,0 @@
-""" compatibility modules (taken from 2.4.4) """
-

diff --git a/py/_compat/dep_subprocess.py b/py/_compat/dep_subprocess.py
deleted file mode 100644
--- a/py/_compat/dep_subprocess.py
+++ /dev/null
@@ -1,5 +0,0 @@
-
-import py
-py.log._apiwarn("1.1", "py.compat.subprocess deprecated, use standard library version.", 
-stacklevel="apipkg")
-subprocess = py.std.subprocess

diff --git a/pypy/doc/config/objspace.usemodules._codecs.txt b/pypy/doc/config/objspace.usemodules._codecs.txt
deleted file mode 100644
--- a/pypy/doc/config/objspace.usemodules._codecs.txt
+++ /dev/null
@@ -1,2 +0,0 @@
-Use the '_codecs' module. 
-Used by the 'codecs' standard lib module. This module is expected to be working and is included by default.

diff --git a/pypy/doc/config/objspace.usemodules.unicodedata.txt b/pypy/doc/config/objspace.usemodules.unicodedata.txt
deleted file mode 100644
--- a/pypy/doc/config/objspace.usemodules.unicodedata.txt
+++ /dev/null
@@ -1,2 +0,0 @@
-Use the 'unicodedata' module. 
-This module is expected to be fully working.

diff --git a/pypy/doc/config/translation.no__thread.txt b/pypy/doc/config/translation.no__thread.txt
deleted file mode 100644
--- a/pypy/doc/config/translation.no__thread.txt
+++ /dev/null
@@ -1,4 +0,0 @@
-Don't use gcc __thread attribute for fast thread local storage
-implementation . Increases the chance that moving the resulting
-executable to another same processor Linux machine will work. (see
-:config:`translation.vanilla`).

diff --git a/pypy/doc/config/translation.backendopt.inline.txt b/pypy/doc/config/translation.backendopt.inline.txt
deleted file mode 100644
--- a/pypy/doc/config/translation.backendopt.inline.txt
+++ /dev/null
@@ -1,10 +0,0 @@
-Inline flowgraphs based on an heuristic, the default one considers
-essentially the a weight for the flowgraph based on the number of
-low-level operations in them (see
-:config:`translation.backendopt.inline_threshold` ).
-
-Some amount of inlining in order to have RPython builtin type helpers
-inlined is needed for malloc removal
-(:config:`translation.backendopt.mallocs`) to be effective.
-
-This optimization is used by default.

diff --git a/pypy/doc/config/translation.countmallocs.txt b/pypy/doc/config/translation.countmallocs.txt
deleted file mode 100644
--- a/pypy/doc/config/translation.countmallocs.txt
+++ /dev/null
@@ -1,4 +0,0 @@
-Internal; used by some of the C backend tests to check that the number of
-allocations matches the number of frees.
-
-.. internal

diff --git a/pypy/doc/config/objspace.std.newshortcut.txt b/pypy/doc/config/objspace.std.newshortcut.txt
deleted file mode 100644
--- a/pypy/doc/config/objspace.std.newshortcut.txt
+++ /dev/null
@@ -1,1 +0,0 @@
-Performance only: cache and shortcut calling __new__ from builtin types

diff --git a/pypy/doc/config/objspace.std.withprebuiltchar.txt b/pypy/doc/config/objspace.std.withprebuiltchar.txt
deleted file mode 100644

diff --git a/pypy/translator/c/primitive.py b/pypy/translator/c/primitive.py
--- a/pypy/translator/c/primitive.py
+++ b/pypy/translator/c/primitive.py
@@ -1,7 +1,8 @@
 import sys
 from pypy.rlib.objectmodel import Symbolic, ComputedIntSymbolic
 from pypy.rlib.objectmodel import CDefinedIntSymbolic
-from pypy.rlib.rarithmetic import r_longlong, isinf, isnan
+from pypy.rlib.rarithmetic import r_longlong
+from pypy.rlib.rfloat import isinf, isnan
 from pypy.rpython.lltypesystem.lltype import *
 from pypy.rpython.lltypesystem import rffi, llgroup
 from pypy.rpython.lltypesystem.llmemory import Address, \
@@ -122,7 +123,7 @@
     if ' ' <= value < '\x7f':
         return "'%s'" % (value.replace("\\", r"\\").replace("'", r"\'"),)
     else:
-        return '%d' % ord(value)
+        return '((char)%d)' % ord(value)
 
 def name_bool(value, db):
     return '%d' % value
@@ -132,7 +133,7 @@
 
 def name_unichar(value, db):
     assert type(value) is unicode and len(value) == 1
-    return '%d' % ord(value)
+    return '((wchar_t)%d)' % ord(value)
 
 def name_address(value, db):
     if value:

diff --git a/py/_plugin/pytest_pytester.py b/py/_plugin/pytest_pytester.py
deleted file mode 100644
--- a/py/_plugin/pytest_pytester.py
+++ /dev/null
@@ -1,500 +0,0 @@
-"""
-funcargs and support code for testing py.test's own functionality. 
-"""
-
-import py
-import sys, os
-import re
-import inspect
-import time
-from py._test.config import Config as pytestConfig
-from py.builtin import print_
-
-def pytest_addoption(parser):
-    group = parser.getgroup("pylib")
-    group.addoption('--tools-on-path',
-           action="store_true", dest="toolsonpath", default=False,
-           help=("discover tools on PATH instead of going through py.cmdline.")
-    )
-
-pytest_plugins = '_pytest'
-
-def pytest_funcarg__linecomp(request):
-    return LineComp()
-
-def pytest_funcarg__LineMatcher(request):
-    return LineMatcher
-
-def pytest_funcarg__testdir(request):
-    tmptestdir = TmpTestdir(request)
-    return tmptestdir
-
-rex_outcome = re.compile("(\d+) (\w+)")
-class RunResult:
-    def __init__(self, ret, outlines, errlines, duration):
-        self.ret = ret
-        self.outlines = outlines
-        self.errlines = errlines
-        self.stdout = LineMatcher(outlines)
-        self.stderr = LineMatcher(errlines)
-        self.duration = duration
-
-    def parseoutcomes(self):
-        for line in reversed(self.outlines):
-            if 'seconds' in line:
-                outcomes = rex_outcome.findall(line)
-                if outcomes:
-                    d = {}
-                    for num, cat in outcomes:
-                        d[cat] = int(num)
-                    return d
-
-class TmpTestdir:
-    def __init__(self, request):
-        self.request = request
-        self._pytest = request.getfuncargvalue("_pytest")
-        # XXX remove duplication with tmpdir plugin 
-        basetmp = request.config.ensuretemp("testdir")
-        name = request.function.__name__
-        for i in range(100):
-            try:
-                tmpdir = basetmp.mkdir(name + str(i))
-            except py.error.EEXIST:
-                continue
-            break
-        # we need to create another subdir
-        # because Directory.collect() currently loads
-        # conftest.py from sibling directories
-        self.tmpdir = tmpdir.mkdir(name)
-        self.plugins = []
-        self._syspathremove = []
-        self.chdir() # always chdir
-        self.request.addfinalizer(self.finalize)
-
-    def __repr__(self):
-        return "<TmpTestdir %r>" % (self.tmpdir,)
-
-    def Config(self, topdir=None):
-        if topdir is None:
-            topdir = self.tmpdir.dirpath()
-        return pytestConfig(topdir=topdir)
-
-    def finalize(self):
-        for p in self._syspathremove:
-            py.std.sys.path.remove(p)
-        if hasattr(self, '_olddir'):
-            self._olddir.chdir()
-        # delete modules that have been loaded from tmpdir
-        for name, mod in list(sys.modules.items()):
-            if mod:
-                fn = getattr(mod, '__file__', None)
-                if fn and fn.startswith(str(self.tmpdir)):
-                    del sys.modules[name]
-
-    def getreportrecorder(self, obj):
-        if hasattr(obj, 'config'):
-            obj = obj.config
-        if hasattr(obj, 'hook'):
-            obj = obj.hook
-        assert hasattr(obj, '_hookspecs'), obj
-        reprec = ReportRecorder(obj)
-        reprec.hookrecorder = self._pytest.gethookrecorder(obj)
-        reprec.hook = reprec.hookrecorder.hook
-        return reprec
-
-    def chdir(self):
-        old = self.tmpdir.chdir()
-        if not hasattr(self, '_olddir'):
-            self._olddir = old 
-
-    def _makefile(self, ext, args, kwargs):
-        items = list(kwargs.items())
-        if args:
-            source = "\n".join(map(str, args)) + "\n"
-            basename = self.request.function.__name__
-            items.insert(0, (basename, source))
-        ret = None
-        for name, value in items:
-            p = self.tmpdir.join(name).new(ext=ext)
-            source = str(py.code.Source(value)).lstrip()
-            p.write(source.encode("utf-8"), "wb")
-            if ret is None:
-                ret = p
-        return ret 
-
-
-    def makefile(self, ext, *args, **kwargs):
-        return self._makefile(ext, args, kwargs)
-
-    def makeconftest(self, source):
-        return self.makepyfile(conftest=source)
-
-    def makepyfile(self, *args, **kwargs):
-        return self._makefile('.py', args, kwargs)
-
-    def maketxtfile(self, *args, **kwargs):
-        return self._makefile('.txt', args, kwargs)
-
-    def syspathinsert(self, path=None):
-        if path is None:
-            path = self.tmpdir
-        py.std.sys.path.insert(0, str(path))
-        self._syspathremove.append(str(path))
-            
-    def mkdir(self, name):
-        return self.tmpdir.mkdir(name)
-
-    def mkpydir(self, name):
-        p = self.mkdir(name)
-        p.ensure("__init__.py")
-        return p
-
-    def genitems(self, colitems):
-        return list(self.session.genitems(colitems))
-
-    def inline_genitems(self, *args):
-        #config = self.parseconfig(*args)
-        config = self.parseconfig(*args)
-        session = config.initsession()
-        rec = self.getreportrecorder(config)
-        colitems = [config.getnode(arg) for arg in config.args]
-        items = list(session.genitems(colitems))
-        return items, rec 
-
-    def runitem(self, source):
-        # used from runner functional tests 
-        item = self.getitem(source)
-        # the test class where we are called from wants to provide the runner 
-        testclassinstance = py.builtin._getimself(self.request.function)
-        runner = testclassinstance.getrunner()
-        return runner(item)
-
-    def inline_runsource(self, source, *cmdlineargs):
-        p = self.makepyfile(source)
-        l = list(cmdlineargs) + [p]
-        return self.inline_run(*l)
-
-    def inline_runsource1(self, *args):
-        args = list(args)
-        source = args.pop()
-        p = self.makepyfile(source)
-        l = list(args) + [p]
-        reprec = self.inline_run(*l)
-        reports = reprec.getreports("pytest_runtest_logreport")
-        assert len(reports) == 1, reports 
-        return reports[0]
-
-    def inline_run(self, *args):
-        args = ("-s", ) + args # otherwise FD leakage
-        config = self.parseconfig(*args)
-        config.pluginmanager.do_configure(config)
-        session = config.initsession()
-        reprec = self.getreportrecorder(config)
-        colitems = config.getinitialnodes()
-        session.main(colitems)
-        config.pluginmanager.do_unconfigure(config)
-        return reprec 
-
-    def config_preparse(self):
-        config = self.Config()
-        for plugin in self.plugins:
-            if isinstance(plugin, str):
-                config.pluginmanager.import_plugin(plugin)
-            else:
-                if isinstance(plugin, dict):
-                    plugin = PseudoPlugin(plugin) 
-                if not config.pluginmanager.isregistered(plugin):
-                    config.pluginmanager.register(plugin)
-        return config
-
-    def parseconfig(self, *args):
-        if not args:
-            args = (self.tmpdir,)
-        config = self.config_preparse()
-        args = list(args) + ["--basetemp=%s" % self.tmpdir.dirpath('basetemp')]
-        config.parse(args)
-        return config 
-
-    def reparseconfig(self, args=None):
-        """ this is used from tests that want to re-invoke parse(). """
-        if not args:
-            args = [self.tmpdir]
-        from py._test import config 
-        oldconfig = config.config_per_process # py.test.config
-        try:
-            c = config.config_per_process = py.test.config = pytestConfig()
-            c.basetemp = oldconfig.mktemp("reparse", numbered=True)
-            c.parse(args) 
-            return c
-        finally: 
-            config.config_per_process = py.test.config = oldconfig 
-
-    def parseconfigure(self, *args):
-        config = self.parseconfig(*args)
-        config.pluginmanager.do_configure(config)
-        return config
-
-    def getitem(self,  source, funcname="test_func"):
-        modcol = self.getmodulecol(source)
-        moditems = modcol.collect()
-        for item in modcol.collect():
-            if item.name == funcname:
-                return item 
-        else:
-            assert 0, "%r item not found in module:\n%s" %(funcname, source)
-
-    def getitems(self,  source):
-        modcol = self.getmodulecol(source)
-        return list(modcol.config.initsession().genitems([modcol]))
-        #assert item is not None, "%r item not found in module:\n%s" %(funcname, source)
-        #return item 
-
-    def getfscol(self,  path, configargs=()):
-        self.config = self.parseconfig(path, *configargs)
-        self.session = self.config.initsession()
-        return self.config.getnode(path)
-
-    def getmodulecol(self,  source, configargs=(), withinit=False):
-        kw = {self.request.function.__name__: py.code.Source(source).strip()}
-        path = self.makepyfile(**kw)
-        if withinit:
-            self.makepyfile(__init__ = "#")
-        self.config = self.parseconfig(path, *configargs)
-        self.session = self.config.initsession()
-        #self.config.pluginmanager.do_configure(config=self.config)
-        # XXX 
-        self.config.pluginmanager.import_plugin("runner") 
-        plugin = self.config.pluginmanager.getplugin("runner") 
-        plugin.pytest_configure(config=self.config)
-
-        return self.config.getnode(path)
-
-    def popen(self, cmdargs, stdout, stderr, **kw):
-        if not hasattr(py.std, 'subprocess'):
-            py.test.skip("no subprocess module")
-        env = os.environ.copy()
-        env['PYTHONPATH'] = ":".join(filter(None, [
-            str(os.getcwd()), env.get('PYTHONPATH', '')]))
-        kw['env'] = env
-        #print "env", env
-        return py.std.subprocess.Popen(cmdargs, stdout=stdout, stderr=stderr, **kw)
-
-    def run(self, *cmdargs):
-        return self._run(*cmdargs)
-
-    def _run(self, *cmdargs):
-        cmdargs = [str(x) for x in cmdargs]
-        p1 = self.tmpdir.join("stdout")
-        p2 = self.tmpdir.join("stderr")
-        print_("running", cmdargs, "curdir=", py.path.local())
-        f1 = p1.open("wb")
-        f2 = p2.open("wb")
-        now = time.time()
-        popen = self.popen(cmdargs, stdout=f1, stderr=f2, 
-            close_fds=(sys.platform != "win32"))
-        ret = popen.wait()
-        f1.close()
-        f2.close()
-        out = p1.read("rb")
-        out = getdecoded(out).splitlines()
-        err = p2.read("rb")
-        err = getdecoded(err).splitlines()
-        def dump_lines(lines, fp):
-            try:
-                for line in lines:
-                    py.builtin.print_(line, file=fp)
-            except UnicodeEncodeError:
-                print("couldn't print to %s because of encoding" % (fp,))
-        dump_lines(out, sys.stdout)
-        dump_lines(err, sys.stderr)
-        return RunResult(ret, out, err, time.time()-now)
-
-    def runpybin(self, scriptname, *args):
-        fullargs = self._getpybinargs(scriptname) + args
-        return self.run(*fullargs)
-
-    def _getpybinargs(self, scriptname):
-        if self.request.config.getvalue("toolsonpath"):
-            script = py.path.local.sysfind(scriptname)
-            assert script, "script %r not found" % scriptname
-            return (script,)
-        else:
-            cmdlinename = scriptname.replace(".", "")
-            assert hasattr(py.cmdline, cmdlinename), cmdlinename
-            source = ("import sys;sys.path.insert(0,%r);"
-                      "import py;py.cmdline.%s()" % 
-                (str(py._pydir.dirpath()), cmdlinename))
-            return (sys.executable, "-c", source,)
-
-    def runpython(self, script):
-        s = self._getsysprepend()
-        if s:
-            script.write(s + "\n" + script.read())
-        return self.run(sys.executable, script)
-
-    def _getsysprepend(self):
-        if not self.request.config.getvalue("toolsonpath"):
-            s = "import sys;sys.path.insert(0,%r);" % str(py._pydir.dirpath())
-        else:
-            s = ""
-        return s
-
-    def runpython_c(self, command):
-        command = self._getsysprepend() + command
-        return self.run(py.std.sys.executable, "-c", command)
-
-    def runpytest(self, *args):
-        p = py.path.local.make_numbered_dir(prefix="runpytest-", 
-            keep=None, rootdir=self.tmpdir)
-        args = ('--basetemp=%s' % p, ) + args 
-        plugins = [x for x in self.plugins if isinstance(x, str)]
-        if plugins:
-            args = ('-p', plugins[0]) + args
-        return self.runpybin("py.test", *args)
-
-    def spawn_pytest(self, string, expect_timeout=10.0):
-        pexpect = py.test.importorskip("pexpect", "2.4")
-        if not self.request.config.getvalue("toolsonpath"):
-            py.test.skip("need --tools-on-path to run py.test script")
-        basetemp = self.tmpdir.mkdir("pexpect")
-        invoke = self._getpybinargs("py.test")[0]
-        cmd = "%s --basetemp=%s %s" % (invoke, basetemp, string)
-        child = pexpect.spawn(cmd, logfile=basetemp.join("spawn.out").open("w"))
-        child.timeout = expect_timeout
-        return child
-
-def getdecoded(out):
-        try:
-            return out.decode("utf-8")
-        except UnicodeDecodeError:
-            return "INTERNAL not-utf8-decodeable, truncated string:\n%s" % (
-                    py.io.saferepr(out),)
-
-class PseudoPlugin:
-    def __init__(self, vars):
-        self.__dict__.update(vars) 
-
-class ReportRecorder(object):
-    def __init__(self, hook):
-        self.hook = hook
-        self.registry = hook._registry
-        self.registry.register(self)
-
-    def getcall(self, name):
-        return self.hookrecorder.getcall(name)
-
-    def popcall(self, name):
-        return self.hookrecorder.popcall(name)
-
-    def getcalls(self, names):
-        """ return list of ParsedCall instances matching the given eventname. """
-        return self.hookrecorder.getcalls(names)
-
-    # functionality for test reports 
-
-    def getreports(self, names="pytest_runtest_logreport pytest_collectreport"):
-        return [x.report for x in self.getcalls(names)]
-
-    def matchreport(self, inamepart="", names="pytest_runtest_logreport pytest_collectreport"):
-        """ return a testreport whose dotted import path matches """
-        l = []
-        for rep in self.getreports(names=names):
-            colitem = rep.getnode()
-            if not inamepart or inamepart in colitem.listnames():
-                l.append(rep)
-        if not l:
-            raise ValueError("could not find test report matching %r: no test reports at all!" %
-                (inamepart,))
-        if len(l) > 1:
-            raise ValueError("found more than one testreport matching %r: %s" %(
-                             inamepart, l))
-        return l[0]
-
-    def getfailures(self, names='pytest_runtest_logreport pytest_collectreport'):
-        return [rep for rep in self.getreports(names) if rep.failed]
-
-    def getfailedcollections(self):
-        return self.getfailures('pytest_collectreport')
-
-    def listoutcomes(self):
-        passed = []
-        skipped = []
-        failed = []
-        for rep in self.getreports("pytest_runtest_logreport"):
-            if rep.passed: 
-                if rep.when == "call": 
-                    passed.append(rep) 
-            elif rep.skipped: 
-                skipped.append(rep) 
-            elif rep.failed:
-                failed.append(rep) 
-        return passed, skipped, failed 
-
-    def countoutcomes(self):
-        return [len(x) for x in self.listoutcomes()]
-
-    def assertoutcome(self, passed=0, skipped=0, failed=0):
-        realpassed, realskipped, realfailed = self.listoutcomes()
-        assert passed == len(realpassed)
-        assert skipped == len(realskipped)
-        assert failed == len(realfailed)
-
-    def clear(self):
-        self.hookrecorder.calls[:] = []
-
-    def unregister(self):
-        self.registry.unregister(self)
-        self.hookrecorder.finish_recording()
-
-class LineComp:
-    def __init__(self):
-        self.stringio = py.io.TextIO()
-
-    def assert_contains_lines(self, lines2):
-        """ assert that lines2 are contained (linearly) in lines1. 
-            return a list of extralines found.
-        """
-        __tracebackhide__ = True
-        val = self.stringio.getvalue()
-        self.stringio.truncate(0)
-        self.stringio.seek(0)
-        lines1 = val.split("\n")
-        return LineMatcher(lines1).fnmatch_lines(lines2)
-            
-class LineMatcher:
-    def __init__(self,  lines):
-        self.lines = lines
-
-    def str(self):
-        return "\n".join(self.lines)
-
-    def fnmatch_lines(self, lines2):
-        if isinstance(lines2, str):
-            lines2 = py.code.Source(lines2)
-        if isinstance(lines2, py.code.Source):
-            lines2 = lines2.strip().lines
-
-        from fnmatch import fnmatch
-        lines1 = self.lines[:]
-        nextline = None
-        extralines = []
-        __tracebackhide__ = True
-        for line in lines2:
-            nomatchprinted = False
-            while lines1:
-                nextline = lines1.pop(0)
-                if line == nextline:
-                    print_("exact match:", repr(line))
-                    break 
-                elif fnmatch(nextline, line):
-                    print_("fnmatch:", repr(line))
-                    print_("   with:", repr(nextline))
-                    break
-                else:
-                    if not nomatchprinted:
-                        print_("nomatch:", repr(line))
-                        nomatchprinted = True
-                    print_("    and:", repr(nextline))
-                extralines.append(nextline)
-            else:
-                assert line == nextline

diff --git a/py/_plugin/pytest_monkeypatch.py b/py/_plugin/pytest_monkeypatch.py
deleted file mode 100644
--- a/py/_plugin/pytest_monkeypatch.py
+++ /dev/null
@@ -1,141 +0,0 @@
-"""
-safely patch object attributes, dicts and environment variables. 
-
-Usage 
-----------------
-
-Use the `monkeypatch funcarg`_ to tweak your global test environment 
-for running a particular test.  You can safely set/del an attribute, 
-dictionary item or environment variable by respective methods
-on the monkeypatch funcarg.  If you want e.g. to set an ENV1 variable 
-and have os.path.expanduser return a particular directory, you can 
-write it down like this:
-
-.. sourcecode:: python 
-
-    def test_mytest(monkeypatch):
-        monkeypatch.setenv('ENV1', 'myval')
-        monkeypatch.setattr(os.path, 'expanduser', lambda x: '/tmp/xyz')
-        ... # your test code that uses those patched values implicitely
-
-After the test function finished all modifications will be undone, 
-because the ``monkeypatch.undo()`` method is registered as a finalizer. 
-
-``monkeypatch.setattr/delattr/delitem/delenv()`` all 
-by default raise an Exception if the target does not exist. 
-Pass ``raising=False`` if you want to skip this check. 
-
-prepending to PATH or other environment variables 
----------------------------------------------------------
-
-To prepend a value to an already existing environment parameter:
-
-.. sourcecode:: python 
-
-    def test_mypath_finding(monkeypatch):
-        monkeypatch.setenv('PATH', 'x/y', prepend=":")
-        # in bash language: export PATH=x/y:$PATH 
-
-calling "undo" finalization explicitely
------------------------------------------
-
-At the end of function execution py.test invokes
-a teardown hook which undoes all monkeypatch changes. 
-If you do not want to wait that long you can call 
-finalization explicitely::
-
-    monkeypatch.undo()  
-
-This will undo previous changes.  This call consumes the
-undo stack.  Calling it a second time has no effect unless
-you  start monkeypatching after the undo call. 
-
-.. _`monkeypatch blog post`: http://tetamap.wordpress.com/2009/03/03/monkeypatching-in-unit-tests-done-right/
-"""
-
-import py, os, sys
-
-def pytest_funcarg__monkeypatch(request):
-    """The returned ``monkeypatch`` funcarg provides these 
-    helper methods to modify objects, dictionaries or os.environ::
-
-        monkeypatch.setattr(obj, name, value, raising=True)  
-        monkeypatch.delattr(obj, name, raising=True)
-        monkeypatch.setitem(mapping, name, value) 
-        monkeypatch.delitem(obj, name, raising=True)
-        monkeypatch.setenv(name, value, prepend=False) 
-        monkeypatch.delenv(name, value, raising=True)
-        monkeypatch.syspath_prepend(path)
-
-    All modifications will be undone when the requesting 
-    test function finished its execution.  The ``raising`` 
-    parameter determines if a KeyError or AttributeError 
-    will be raised if the set/deletion operation has no target. 
-    """
-    monkeypatch = MonkeyPatch()
-    request.addfinalizer(monkeypatch.undo)
-    return monkeypatch
-
-notset = object()
-
-class MonkeyPatch:
-    def __init__(self):
-        self._setattr = []
-        self._setitem = []
-
-    def setattr(self, obj, name, value, raising=True):
-        oldval = getattr(obj, name, notset)
-        if raising and oldval is notset:
-            raise AttributeError("%r has no attribute %r" %(obj, name))
-        self._setattr.insert(0, (obj, name, oldval))
-        setattr(obj, name, value)
-
-    def delattr(self, obj, name, raising=True):
-        if not hasattr(obj, name):
-            if raising:
-                raise AttributeError(name) 
-        else:
-            self._setattr.insert(0, (obj, name, getattr(obj, name, notset)))
-            delattr(obj, name)
-
-    def setitem(self, dic, name, value):
-        self._setitem.insert(0, (dic, name, dic.get(name, notset)))
-        dic[name] = value
-
-    def delitem(self, dic, name, raising=True):
-        if name not in dic:
-            if raising:
-                raise KeyError(name) 
-        else:    
-            self._setitem.insert(0, (dic, name, dic.get(name, notset)))
-            del dic[name]
-
-    def setenv(self, name, value, prepend=None):
-        value = str(value)
-        if prepend and name in os.environ:
-            value = value + prepend + os.environ[name]
-        self.setitem(os.environ, name, value)
-
-    def delenv(self, name, raising=True):
-        self.delitem(os.environ, name, raising=raising)
-
-    def syspath_prepend(self, path):
-        if not hasattr(self, '_savesyspath'):
-            self._savesyspath = sys.path[:]
-        sys.path.insert(0, str(path))
-
-    def undo(self):
-        for obj, name, value in self._setattr:
-            if value is not notset:
-                setattr(obj, name, value)
-            else:
-                delattr(obj, name)
-        self._setattr[:] = []
-        for dictionary, name, value in self._setitem:
-            if value is notset:
-                del dictionary[name]
-            else:
-                dictionary[name] = value
-        self._setitem[:] = []
-        if hasattr(self, '_savesyspath'):
-            sys.path[:] = self._savesyspath

diff --git a/py/_code/oldmagic.py b/py/_code/oldmagic.py
deleted file mode 100644
--- a/py/_code/oldmagic.py
+++ /dev/null
@@ -1,62 +0,0 @@
-""" deprecated module for turning on/off some features. """ 
-
-import py 
-
-from py.builtin import builtins as cpy_builtin
-
-def invoke(assertion=False, compile=False):
-    """ (deprecated) invoke magic, currently you can specify:
-
-        assertion  patches the builtin AssertionError to try to give
-                   more meaningful AssertionErrors, which by means
-                   of deploying a mini-interpreter constructs
-                   a useful error message.
-    """
-    py.log._apiwarn("1.1", 
-        "py.magic.invoke() is deprecated, use py.code.patch_builtins()",
-        stacklevel=2, 
-    )
-    py.code.patch_builtins(assertion=assertion, compile=compile)
-
-def revoke(assertion=False, compile=False):
-    """ (deprecated) revoke previously invoked magic (see invoke())."""
-    py.log._apiwarn("1.1", 
-        "py.magic.revoke() is deprecated, use py.code.unpatch_builtins()",
-        stacklevel=2, 
-    )
-    py.code.unpatch_builtins(assertion=assertion, compile=compile)
-
-patched = {}
-
-def patch(namespace, name, value):
-    """ (deprecated) rebind the 'name' on the 'namespace'  to the 'value',
-        possibly and remember the original value. Multiple
-        invocations to the same namespace/name pair will
-        remember a list of old values.
-    """
-    py.log._apiwarn("1.1", 
-        "py.magic.patch() is deprecated, in tests use monkeypatch funcarg.", 
-        stacklevel=2, 
-    )
-    nref = (namespace, name)
-    orig = getattr(namespace, name)
-    patched.setdefault(nref, []).append(orig)
-    setattr(namespace, name, value)
-    return orig
-
-def revert(namespace, name):
-    """ (deprecated) revert to the orginal value the last patch modified.
-        Raise ValueError if no such original value exists.
-    """
-    py.log._apiwarn("1.1", 
-        "py.magic.revert() is deprecated, in tests use monkeypatch funcarg.",
-        stacklevel=2, 
-    )
-    nref = (namespace, name)
-    if nref not in patched or not patched[nref]:
-        raise ValueError("No original value stored for %s.%s" % nref)
-    current = getattr(namespace, name)
-    orig = patched[nref].pop()
-    setattr(namespace, name, orig)
-    return current
-

diff --git a/py/bin/win32/py.lookup.cmd b/py/bin/win32/py.lookup.cmd
deleted file mode 100644
--- a/py/bin/win32/py.lookup.cmd
+++ /dev/null
@@ -1,2 +0,0 @@
- at echo off
-python "%~dp0\..\py.lookup" %*
\ No newline at end of file

diff --git a/pypy/doc/config/objspace.opcodes.CALL_LIKELY_BUILTIN.txt b/pypy/doc/config/objspace.opcodes.CALL_LIKELY_BUILTIN.txt
deleted file mode 100644
--- a/pypy/doc/config/objspace.opcodes.CALL_LIKELY_BUILTIN.txt
+++ /dev/null
@@ -1,12 +0,0 @@
-Introduce a new opcode called ``CALL_LIKELY_BUILTIN``. It is used when something
-is called, that looks like a builtin function (but could in reality be shadowed
-by a name in the module globals). For all module globals dictionaries it is
-then tracked which builtin name is shadowed in this module. If the
-``CALL_LIKELY_BUILTIN`` opcode is executed, it is checked whether the builtin is
-shadowed. If not, the corresponding builtin is called. Otherwise the object that
-is shadowing it is called instead. If no shadowing is happening, this saves two
-dictionary lookups on calls to builtins.
-
-For more information, see the section in `Standard Interpreter Optimizations`_.
-
-.. _`Standard Interpreter Optimizations`: ../interpreter-optimizations.html#call-likely-builtin

diff --git a/pypy/doc/config/translation.backendopt.storesink.txt b/pypy/doc/config/translation.backendopt.storesink.txt
deleted file mode 100644
--- a/pypy/doc/config/translation.backendopt.storesink.txt
+++ /dev/null
@@ -1,1 +0,0 @@
-Store sinking optimization. On by default.

diff --git a/pypy/doc/carbonpython.txt b/pypy/doc/carbonpython.txt
deleted file mode 100644
--- a/pypy/doc/carbonpython.txt
+++ /dev/null
@@ -1,230 +0,0 @@
-==================================================
-CarbonPython, aka C# considered harmful
-==================================================
-
-CarbonPython overview
-=====================
-
-CarbonPython is an experimental RPython to .NET compiler. Its main
-focus is to produce DLLs to be used by other .NET programs, not
-standalone executables; if you want to compile an RPython standalone
-program, have a look at `translate.py`_.
-
-Compiled RPython programs are much faster (up to 250x) than
-interpreted IronPython programs, hence it might be a convenient
-replacement for C# when more speed is needed. RPython programs can be
-as fast as C# programs.
-
-RPython is a restricted subset of Python, static enough to be analyzed
-and compiled efficiently to lower level languages.  To read more about
-the RPython limitations read the `RPython description`_.
-
-**Disclaimer**: RPython is a much less convenient language than Python
-to program with. If you do not need speed, there is no reason to look
-at RPython.
-
-**Big disclaimer**: CarbonPython is still in a pre-alpha stage: it's
-not meant to be used for production code, and the API might change in
-the future. Despite this, it might be useful in some situations and
-you are encouraged to try it by yourself. Suggestions, bug-reports and
-even better patches are welcome.
-
-.. _`RPython description`: coding-guide.html#restricted-python
-.. _`translate.py`: faq.html#how-do-i-compile-my-own-interpreters
-
-
-Quick start
-===========
-
-Suppose you want to write a little DLL in RPython and call its
-function from C#.
-
-Here is the file mylibrary.py::
-
-    from pypy.translator.cli.carbonpython import export
-
-    @export(int, int)
-    def add(x, y):
-        return x+y
-
-    @export(int, int)
-    def sub(x, y):
-        return x-y
-
-
-And here the C# program main.cs::
-
-    using System;
-    public class CarbonPythonTest
-    {
-        public static void Main()
-        {
-            Console.WriteLine(mylibrary.add(40, 2));
-            Console.WriteLine(mylibrary.sub(44, 2));
-        }
-    }
-
-Once the files have been created, you can compile ``mylibrary.py``
-with CarbonPython to get the corresponding DLL::
-
-    $ python carbonpython.py mylibrary.py
-    ... lot of stuff
-
-Then, we compile main.cs into an executable, being sure to add a
-reference to the newly created ``mylibrary.dll``::
-
-    # with mono on linux
-    $ gmcs /r:mylibrary.dll main.cs
-
-    # with Microsoft CLR on windows
-    c:\> csc /r:mylibrary main.cs
-
-Now we can run the executable to see whether the answers are right::
-
-    $ mono main.exe
-    42
-    42
-
-
-Multiple entry-points
-=====================
-
-In RPython, the type of each variable is inferred by the `Annotator`_:
-the annotator analyzes the whole program top-down starting from an
-entry-point, i.e. a function whose parameter types we have specified
-explicitly.
-
-This approach works for standalone executables, but not for a
-library, which by definition is composed of more than one
-entry-point. Thus, you need to explicitly specify which functions you
-want to include in your DLL, together with the expected input types.
-
-To mark a function as an entry-point, you use the ``@export``
-decorator, which is defined in ``pypy.translator.cli.carbonpython``,
-as shown by the previous example.  Note that you do not need to
-specify the return type, because it is automatically inferred by the
-annotator.
-
-.. _`Annotator`: translation.html#annotator
-
-
-Namespaces
-==========
-
-Since `CLS`_ (Common Language Specification) does not support module
-level static methods, RPython functions marked as entry-points are
-compiled to static methods of a class, in order to be accessible by
-every CLS-compliant language such as C# or VB.NET.
-
-The class which each function is placed in depends on its
-**namespace**; for example, if the namespace of a function ``foo`` is
-``A.B.C``, the function will be rendered as a static method of the
-``C`` class inside the ``A.B`` namespace. This allows C# and
-IronPython code to call the function using the intuitive ``A.B.C.foo``
-syntax.
-
-By default, the default namespace for exported functions is the same as
-the name of the module. Thus in the previous example the default
-namespace is ``mylibrary`` and the functions are placed inside the
-corresponding class in the global namespace.
-
-You can change the default namespace by setting the ``_namespace_``
-variable in the module you are compiling::
-
-    _namespace_ = 'Foo.Bar'
-
-    @export(int, int)
-    def f(x, y):
-        pass
-
-Finally, you can also set a specific namespace on a per-function
-basis, using the appropriate keyword argument of the ``@export``
-decorator::
-
-    @export(int, int, namespace='Foo.Bar')
-    def f(x, y):
-        pass
-
-
-.. _`CLS`: http://www.ecma-international.org/publications/files/ECMA-ST/Ecma-335.pdf
-
-
-Exporting classes
-=================
-
-RPython libraries can also export classes: to export a class, add the
-``@export`` decorator to its ``__init__`` method; similarly, you can
-also export any methods of the class::
-
-    class MyClass:
-
-        @export(int)
-        def __init__(self, x):
-            self.x = x
-
-        @export
-        def getx(self):
-            return self.x
-
-
-Note that the type of ``self`` must not be specified: it will
-automatically be assumed to be ``MyClass``.
-
-The ``__init__`` method is not automatically mapped to the .NET
-constructor; to properly initialize an RPython object from C# or
-IronPython code you need to explicitly call ``__init__``; for example,
-in C#::
-
-    MyClass obj = new MyClass();
-    obj.__init__(x);
-
-Note that this is needed only when calling RPython code from 
-outside; the RPython compiler automatically calls ``__init__``
-whenever an RPython class is instantiated.
-
-In the future this discrepancy will be fixed and the ``__init__``
-method will be automatically mapped to the constructor.
-
-
-Accessing .NET libraries
-========================
-
-**Warning**: the API for accessing .NET classes from RPython is highly
-experimental and will probably change in the future.
-
-In RPython you can access native .NET classes through the ``CLR``
-object defined in ``translator.cli.dotnet``: from there, you can
-navigate through namespaces using the usual dot notation; for example,
-``CLR.System.Collections.ArrayList`` refers to the ``ArrayList`` class
-in the ``System.Collections`` namespace.
-
-To instantiate a .NET class, simply call it::
-
-    ArrayList = CLR.System.Collections.ArrayList
-    def foo():
-        obj = ArrayList()
-        obj.Add(42)
-        return obj
-
-At the moment there is no special syntax support for indexers and
-properties: for example, you can't access ArrayList's elements using
-the square bracket notation, but you have to call the
-``get_Item`` and ``set_Item`` methods; similarly, to access a property
-``XXX`` you need to call ``get_XXX`` and ``set_XXX``::
-
-    def foo():
-        obj = ArrayList()
-        obj.Add(42)
-        print obj.get_Item(0)
-        print obj.get_Count()
-
-Static methods are also supported, as well as overloading::
-
-    Math = CLR.System.Math
-    def foo():
-        print Math.Abs(-42)
-        print Math.Abs(-42.0)
-
-
-At the moment, it is not possible to reference assemblies other than
-mscorlib. This will be fixed soon.

diff --git a/pypy/doc/__pypy__-module.txt b/pypy/doc/__pypy__-module.txt
deleted file mode 100644
--- a/pypy/doc/__pypy__-module.txt
+++ /dev/null
@@ -1,86 +0,0 @@
-=======================
-The ``__pypy__`` module
-=======================
-
-The ``__pypy__`` module is the main entry point to special features provided
-by PyPy's standard interpreter. Its content depends on `configuration options`_ 
-which may add new functionality and functions whose existence or non-existence 
-indicates the presence of such features. 
-
-.. _`configuration options`: config/index.html
-
-Generally available functionality
-=================================
-
- - ``internal_repr(obj)``: return the interpreter-level representation of an
-   object.
- - ``bytebuffer(length)``: return a new read-write buffer of the given length.
-   It works like a simplified array of characters (actually, depending on the
-   configuration the ``array`` module internally uses this).
-
-Thunk Object Space Functionality
-================================
-
-When the thunk object space is used (choose with :config:`objspace.name`),
-the following functions are put into ``__pypy__``:
-
- - ``thunk``
- - ``is_thunk``
- - ``become``
- - ``lazy``
-
-Those are all described in the `interface section of the thunk object space
-docs`_.
-
-For explanations and examples see the `thunk object space docs`_.
-
-.. _`thunk object space docs`: objspace-proxies.html#thunk
-.. _`interface section of the thunk object space docs`: objspace-proxies.html#thunk-interface
-
-Taint Object Space Functionality
-================================
-
-When the taint object space is used (choose with :config:`objspace.name`),
-the following names are put into ``__pypy__``:
-
- - ``taint``
- - ``is_tainted``
- - ``untaint``
- - ``taint_atomic``
- - ``_taint_debug``
- - ``_taint_look``
- - ``TaintError``
-
-Those are all described in the `interface section of the taint object space
-docs`_.
-
-For more detailed explanations and examples see the `taint object space docs`_.
-
-.. _`taint object space docs`: objspace-proxies.html#taint
-.. _`interface section of the taint object space docs`: objspace-proxies.html#taint-interface
-
-Transparent Proxy Functionality
-===============================
-
-If `transparent proxies`_ are enabled (with :config:`objspace.std.withtproxy`)
-the following functions are put into ``__pypy__``:
-
- - ``tproxy(typ, controller)``: Return something that looks like it is of type
-   typ. Its behaviour is completely controlled by the controller. See the docs
-   about `transparent proxies`_ for detail.
-
- - ``get_tproxy_controller(obj)``: If obj is really a transparent proxy, return
-   its controller. Otherwise return None.
-
-.. _`transparent proxies`: objspace-proxies.html#tproxy
-
-
-Functionality available on py.py (not after translation)
-========================================================
-
- - ``isfake(obj)``: returns True if ``obj`` is faked.
-
- - ``interp_pdb()``: start a pdb at interpreter-level.
-
-
-

diff --git a/pypy/rlib/rmmap.py b/pypy/rlib/rmmap.py
--- a/pypy/rlib/rmmap.py
+++ b/pypy/rlib/rmmap.py
@@ -565,7 +565,11 @@
                     charp = rffi.cast(LPCSTR, data)
                     self.setdata(charp, newsize)
                     return
-            raise rwin32.lastWindowsError()
+            winerror = rwin32.lastWindowsError()
+            if self.map_handle:
+                rwin32.CloseHandle(self.map_handle)
+            self.map_handle = INVALID_HANDLE
+            raise winerror
 
     def len(self):
         self.check_valid()
@@ -788,13 +792,17 @@
 
         if m.map_handle:
             data = MapViewOfFile(m.map_handle, dwDesiredAccess,
-                                 offset_hi, offset_lo, 0)
+                                 offset_hi, offset_lo, length)
             if data:
                 # XXX we should have a real LPVOID which must always be casted
                 charp = rffi.cast(LPCSTR, data)
                 m.setdata(charp, map_size)
                 return m
-        raise rwin32.lastWindowsError()
+        winerror = rwin32.lastWindowsError()
+        if m.map_handle:
+            rwin32.CloseHandle(m.map_handle)
+        m.map_handle = INVALID_HANDLE
+        raise winerror
 
     def alloc(map_size):
         """Allocate memory.  This is intended to be used by the JIT,

diff --git a/pypy/doc/config/objspace.opcodes.txt b/pypy/doc/config/objspace.opcodes.txt
deleted file mode 100644
--- a/pypy/doc/config/objspace.opcodes.txt
+++ /dev/null
@@ -1,1 +0,0 @@
-..  intentionally empty

diff --git a/pypy/doc/config/objspace.usemodules.errno.txt b/pypy/doc/config/objspace.usemodules.errno.txt
deleted file mode 100644
--- a/pypy/doc/config/objspace.usemodules.errno.txt
+++ /dev/null
@@ -1,2 +0,0 @@
-Use the 'errno' module. 
-This module is expected to be working and is included by default.

diff --git a/pypy/doc/config/objspace.usemodules._sha.txt b/pypy/doc/config/objspace.usemodules._sha.txt
deleted file mode 100644
--- a/pypy/doc/config/objspace.usemodules._sha.txt
+++ /dev/null
@@ -1,5 +0,0 @@
-Use the built-in _'sha' module.
-This module is expected to be working and is included by default.
-There is also a pure Python version in lib_pypy which is used
-if the built-in is disabled, but it is several orders of magnitude 
-slower.

diff --git a/pypy/doc/config/objspace.usemodules.sys.txt b/pypy/doc/config/objspace.usemodules.sys.txt
deleted file mode 100644
--- a/pypy/doc/config/objspace.usemodules.sys.txt
+++ /dev/null
@@ -1,2 +0,0 @@
-Use the 'sys' module. 
-This module is essential, included by default and should not be removed.

diff --git a/py/_plugin/pytest_hooklog.py b/py/_plugin/pytest_hooklog.py
deleted file mode 100644
--- a/py/_plugin/pytest_hooklog.py
+++ /dev/null
@@ -1,33 +0,0 @@
-""" log invocations of extension hooks to a file. """ 
-import py
-
-def pytest_addoption(parser):
-    parser.addoption("--hooklog", dest="hooklog", default=None, 
-        help="write hook calls to the given file.")
-
-def pytest_configure(config):
-    hooklog = config.getvalue("hooklog")
-    if hooklog:
-        config._hooklogfile = open(hooklog, 'w')
-        config._hooklog_oldperformcall = config.hook._performcall
-        config.hook._performcall = (lambda name, multicall: 
-            logged_call(name=name, multicall=multicall, config=config))
-
-def logged_call(name, multicall, config):
-    f = config._hooklogfile
-    f.write("%s(**%s)\n" % (name, multicall.kwargs))
-    try:
-        res = config._hooklog_oldperformcall(name=name, multicall=multicall)
-    except:
-        f.write("-> exception")
-        raise
-    f.write("-> %r" % (res,))
-    return res
-
-def pytest_unconfigure(config):
-    try:
-        del config.hook.__dict__['_performcall'] 
-    except KeyError:
-        pass
-    else:
-        config._hooklogfile.close()

diff --git a/py/_cmdline/pycleanup.py b/py/_cmdline/pycleanup.py
deleted file mode 100755
--- a/py/_cmdline/pycleanup.py
+++ /dev/null
@@ -1,86 +0,0 @@
-#!/usr/bin/env python 
-
-"""\
-py.cleanup [PATH] ...
-
-Delete typical python development related files recursively under the specified PATH (which defaults to the current working directory). Don't follow links and don't recurse into directories with a dot.  Optionally remove setup.py related files and empty
-directories. 
-
-"""
-import py
-import sys, subprocess
-
-def main():
-    parser = py.std.optparse.OptionParser(usage=__doc__)
-    parser.add_option("-e", metavar="ENDING", 
-        dest="endings", default=[".pyc", "$py.class"], action="append", 
-        help=("(multi) recursively remove files with the given ending." 
-             " '.pyc' and '$py.class' are in the default list."))
-    parser.add_option("-d", action="store_true", dest="removedir",
-                      help="remove empty directories.")
-    parser.add_option("-s", action="store_true", dest="setup",
-                      help="remove 'build' and 'dist' directories next to setup.py files")
-    parser.add_option("-a", action="store_true", dest="all",
-                      help="synonym for '-S -d -e pip-log.txt'")
-    parser.add_option("-n", "--dryrun", dest="dryrun", default=False, 
-        action="store_true", 
-        help="don't actually delete but display would-be-removed filenames.")
-    (options, args) = parser.parse_args()
-
-    Cleanup(options, args).main()
-
-class Cleanup:
-    def __init__(self, options, args):
-        if not args:
-            args = ["."]
-        self.options = options
-        self.args = [py.path.local(x) for x in args]
-        if options.all:
-            options.setup = True
-            options.removedir = True
-            options.endings.append("pip-log.txt")
-
-    def main(self):
-        if self.options.setup:
-            for arg in self.args:
-                self.setupclean(arg)
-        
-        for path in self.args:
-            py.builtin.print_("cleaning path", path, 
-                "of extensions", self.options.endings)
-            for x in path.visit(self.shouldremove, self.recursedir):
-                self.remove(x)
-        if self.options.removedir:
-            for x in path.visit(lambda x: x.check(dir=1), self.recursedir):
-                if not x.listdir():
-                    self.remove(x)
-
-    def shouldremove(self, p):
-        for ending in self.options.endings:
-            if p.basename.endswith(ending):
-                return True
-
-    def recursedir(self, path):
-        return path.check(dotfile=0, link=0)
-
-    def remove(self, path):
-        if not path.check():
-            return
-        if self.options.dryrun:
-            py.builtin.print_("would remove", path)
-        else:
-            py.builtin.print_("removing", path)
-            path.remove()
-
-    def XXXcallsetup(self, setup, *args):
-        old = setup.dirpath().chdir()
-        try:
-            subprocess.call([sys.executable, str(setup)] + list(args))
-        finally:
-            old.chdir()
-            
-    def setupclean(self, path):
-        for x in path.visit("setup.py", self.recursedir):
-            basepath = x.dirpath()
-            self.remove(basepath / "build")
-            self.remove(basepath / "dist")

diff --git a/pypy/doc/config/translation.backendopt.remove_asserts.txt b/pypy/doc/config/translation.backendopt.remove_asserts.txt
deleted file mode 100644
--- a/pypy/doc/config/translation.backendopt.remove_asserts.txt
+++ /dev/null
@@ -1,1 +0,0 @@
-Remove raising of assertions from the flowgraphs, which might give small speedups.

diff --git a/pypy/doc/config/translation.dump_static_data_info.txt b/pypy/doc/config/translation.dump_static_data_info.txt
deleted file mode 100644
--- a/pypy/doc/config/translation.dump_static_data_info.txt
+++ /dev/null
@@ -1,3 +0,0 @@
-Dump information about static prebuilt constants, to the file
-TARGETNAME.staticdata.info in the /tmp/usession-... directory.  This file can
-be later inspected using the script ``bin/reportstaticdata.py``.

diff --git a/pypy/doc/config/objspace.usemodules.fcntl.txt b/pypy/doc/config/objspace.usemodules.fcntl.txt
deleted file mode 100644
--- a/pypy/doc/config/objspace.usemodules.fcntl.txt
+++ /dev/null
@@ -1,2 +0,0 @@
-Use the 'fcntl' module. 
-This module is expected to be fully working.

diff --git a/pypy/doc/config/objspace.usemodules.pyexpat.txt b/pypy/doc/config/objspace.usemodules.pyexpat.txt
deleted file mode 100644
--- a/pypy/doc/config/objspace.usemodules.pyexpat.txt
+++ /dev/null
@@ -1,2 +0,0 @@
-Use (experimental) pyexpat module written in RPython, instead of CTypes
-version which is used by default.

diff --git a/pypy/doc/config/translation.ootype.txt b/pypy/doc/config/translation.ootype.txt
deleted file mode 100644
--- a/pypy/doc/config/translation.ootype.txt
+++ /dev/null
@@ -1,1 +0,0 @@
-This group contains options specific for ootypesystem.

diff --git a/pypy/doc/config/translation.cli.txt b/pypy/doc/config/translation.cli.txt
deleted file mode 100644
--- a/pypy/doc/config/translation.cli.txt
+++ /dev/null
@@ -1,1 +0,0 @@
-..  intentionally empty

diff --git a/py/_plugin/pytest_assertion.py b/py/_plugin/pytest_assertion.py
deleted file mode 100644
--- a/py/_plugin/pytest_assertion.py
+++ /dev/null
@@ -1,28 +0,0 @@
-import py
-import sys
-
-def pytest_addoption(parser):
-    group = parser.getgroup("debugconfig")
-    group._addoption('--no-assert', action="store_true", default=False, 
-        dest="noassert", 
-        help="disable python assert expression reinterpretation."),
-
-def pytest_configure(config):
-    if not config.getvalue("noassert") and not config.getvalue("nomagic"):
-        warn_about_missing_assertion()
-        config._oldassertion = py.builtin.builtins.AssertionError
-        py.builtin.builtins.AssertionError = py.code._AssertionError 
-
-def pytest_unconfigure(config):
-    if hasattr(config, '_oldassertion'):
-        py.builtin.builtins.AssertionError = config._oldassertion
-        del config._oldassertion
-
-def warn_about_missing_assertion():
-    try:
-        assert False
-    except AssertionError:
-        pass
-    else:
-        py.std.warnings.warn("Assertions are turned off!"
-                             " (are you using python -O?)")

diff --git a/pypy/doc/config/objspace.usemodules.cStringIO.txt b/pypy/doc/config/objspace.usemodules.cStringIO.txt
deleted file mode 100644
--- a/pypy/doc/config/objspace.usemodules.cStringIO.txt
+++ /dev/null
@@ -1,4 +0,0 @@
-Use the built-in cStringIO module.
-
-If not enabled, importing cStringIO gives you the app-level
-implementation from the standard library StringIO module.

diff --git a/pypy/doc/config/objspace.usemodules.token.txt b/pypy/doc/config/objspace.usemodules.token.txt
deleted file mode 100644
--- a/pypy/doc/config/objspace.usemodules.token.txt
+++ /dev/null
@@ -1,2 +0,0 @@
-Use the 'token' module. 
-This module is expected to be working and is included by default.

diff --git a/pypy/doc/config/objspace.std.logspaceoptypes.txt b/pypy/doc/config/objspace.std.logspaceoptypes.txt
deleted file mode 100644
--- a/pypy/doc/config/objspace.std.logspaceoptypes.txt
+++ /dev/null
@@ -1,4 +0,0 @@
-.. internal
-
-Wrap "simple" bytecode implementations like BINARY_ADD with code that collects
-information about which types these bytecodes receive as arguments.

diff --git a/pypy/doc/config/translation.backendopt.clever_malloc_removal_threshold.txt b/pypy/doc/config/translation.backendopt.clever_malloc_removal_threshold.txt
deleted file mode 100644
--- a/pypy/doc/config/translation.backendopt.clever_malloc_removal_threshold.txt
+++ /dev/null
@@ -1,2 +0,0 @@
-Weight threshold used to decide whether to inline flowgraphs.  
-This is for clever malloc removal (:config:`translation.backendopt.clever_malloc_removal`).

diff --git a/py/_plugin/pytest_resultlog.py b/py/_plugin/pytest_resultlog.py
deleted file mode 100644
--- a/py/_plugin/pytest_resultlog.py
+++ /dev/null
@@ -1,98 +0,0 @@
-"""non-xml machine-readable logging of test results. 
-   Useful for buildbot integration code.  See the `PyPy-test`_ 
-   web page for post-processing. 
-
-.. _`PyPy-test`: http://codespeak.net:8099/summary
- 
-""" 
-
-import py
-from py.builtin import print_
-
-def pytest_addoption(parser):
-    group = parser.getgroup("resultlog", "resultlog plugin options")
-    group.addoption('--resultlog', action="store", dest="resultlog", metavar="path", default=None,
-           help="path for machine-readable result log.")
-
-def pytest_configure(config):
-    resultlog = config.option.resultlog
-    if resultlog:
-        logfile = open(resultlog, 'w', 1) # line buffered
-        config._resultlog = ResultLog(config, logfile) 
-        config.pluginmanager.register(config._resultlog)
-
-def pytest_unconfigure(config):
-    resultlog = getattr(config, '_resultlog', None)
-    if resultlog:
-        resultlog.logfile.close()
-        del config._resultlog 
-        config.pluginmanager.unregister(resultlog)
-
-def generic_path(item):
-    chain = item.listchain()
-    gpath = [chain[0].name]
-    fspath = chain[0].fspath
-    fspart = False
-    for node in chain[1:]:
-        newfspath = node.fspath
-        if newfspath == fspath:
-            if fspart:
-                gpath.append(':')
-                fspart = False
-            else:
-                gpath.append('.')            
-        else:
-            gpath.append('/')
-            fspart = True
-        name = node.name
-        if name[0] in '([':
-            gpath.pop()
-        gpath.append(name)
-        fspath = newfspath
-    return ''.join(gpath)
-        
-class ResultLog(object):
-    def __init__(self, config, logfile):
-        self.config = config
-        self.logfile = logfile # preferably line buffered
-
-    def write_log_entry(self, testpath, shortrepr, longrepr):
-        print_("%s %s" % (shortrepr, testpath), file=self.logfile)
-        for line in longrepr.splitlines():
-            print_(" %s" % line, file=self.logfile)
-
-    def log_outcome(self, node, shortrepr, longrepr):
-        testpath = generic_path(node)
-        self.write_log_entry(testpath, shortrepr, longrepr) 
-
-    def pytest_runtest_logreport(self, report):
-        res = self.config.hook.pytest_report_teststatus(report=report)
-        if res is not None:
-            code = res[1]
-        else:
-            code = report.shortrepr
-        if code == 'x':
-            longrepr = str(report.longrepr)
-        elif code == 'X':
-            longrepr = ''
-        elif report.passed:
-            longrepr = ""
-        elif report.failed:
-            longrepr = str(report.longrepr) 
-        elif report.skipped:
-            longrepr = str(report.longrepr.reprcrash.message)
-        self.log_outcome(report.item, code, longrepr) 
-
-    def pytest_collectreport(self, report):
-        if not report.passed:
-            if report.failed: 
-                code = "F"
-            else:
-                assert report.skipped
-                code = "S"
-            longrepr = str(report.longrepr.reprcrash)
-            self.log_outcome(report.collector, code, longrepr)    
-
-    def pytest_internalerror(self, excrepr):
-        path = excrepr.reprcrash.path 
-        self.write_log_entry(path, '!', str(excrepr))

diff --git a/pypy/doc/config/objspace.std.builtinshortcut.txt b/pypy/doc/config/objspace.std.builtinshortcut.txt
deleted file mode 100644
--- a/pypy/doc/config/objspace.std.builtinshortcut.txt
+++ /dev/null
@@ -1,5 +0,0 @@
-A shortcut speeding up primitive operations between built-in types.
-
-This is a space-time trade-off: at the moment, this option makes a
-translated pypy-c executable bigger by about 1.7 MB.  (This can probably
-be improved with careful analysis.)

diff --git a/pypy/jit/backend/x86/test/test_zrpy_gc.py b/pypy/jit/backend/x86/test/test_zrpy_gc.py
--- a/pypy/jit/backend/x86/test/test_zrpy_gc.py
+++ b/pypy/jit/backend/x86/test/test_zrpy_gc.py
@@ -11,7 +11,7 @@
 from pypy.rlib import rgc
 from pypy.rpython.lltypesystem import lltype, llmemory, rffi
 from pypy.rpython.lltypesystem.lloperation import llop
-from pypy.rlib.jit import JitDriver, OPTIMIZER_SIMPLE, dont_look_inside
+from pypy.rlib.jit import JitDriver, dont_look_inside
 from pypy.rlib.jit import purefunction, unroll_safe
 from pypy.jit.backend.x86.runner import CPU386
 from pypy.jit.backend.llsupport.gc import GcRefList, GcRootMap_asmgcc
@@ -87,7 +87,7 @@
     ann.build_types(f, [s_list_of_strings], main_entry_point=True)
     t.buildrtyper().specialize()
     if kwds['jit']:
-        apply_jit(t, optimizer=OPTIMIZER_SIMPLE)
+        apply_jit(t, enable_opts='')
     cbuilder = genc.CStandaloneBuilder(t, f, t.config)
     cbuilder.generate_source()
     cbuilder.compile()
@@ -159,7 +159,7 @@
             x.foo = 5
             return weakref.ref(x)
         def main_allfuncs(name, n, x):
-            num = name_to_func[name]            
+            num = name_to_func[name]
             n, x, x0, x1, x2, x3, x4, x5, x6, x7, l, s = funcs[num][0](n, x)
             while n > 0:
                 myjitdriver.can_enter_jit(num=num, n=n, x=x, x0=x0, x1=x1,
@@ -428,7 +428,7 @@
     def define_compile_framework_external_exception_handling(cls):
         def before(n, x):
             x = X(0)
-            return n, x, None, None, None, None, None, None, None, None, None, None        
+            return n, x, None, None, None, None, None, None, None, None, None, None
 
         @dont_look_inside
         def g(x):
@@ -460,7 +460,7 @@
 
     def test_compile_framework_external_exception_handling(self):
         self.run('compile_framework_external_exception_handling')
-            
+
     def define_compile_framework_bug1(self):
         @purefunction
         def nonmoving():

diff --git a/py/_test/pluginmanager.py b/py/_test/pluginmanager.py
deleted file mode 100644
--- a/py/_test/pluginmanager.py
+++ /dev/null
@@ -1,353 +0,0 @@
-"""
-managing loading and interacting with pytest plugins. 
-"""
-import py
-import inspect
-from py._plugin import hookspec
-
-default_plugins = (
-    "default runner capture mark terminal skipping tmpdir monkeypatch "
-    "recwarn pdb pastebin unittest helpconfig nose assertion genscript "
-    "junitxml doctest").split()
-
-def check_old_use(mod, modname):
-    clsname = modname[len('pytest_'):].capitalize() + "Plugin" 
-    assert not hasattr(mod, clsname), (mod, clsname)
-
-class PluginManager(object):
-    def __init__(self):
-        self.registry = Registry()
-        self._name2plugin = {}
-        self._hints = []
-        self.hook = HookRelay([hookspec], registry=self.registry) 
-        self.register(self)
-        for spec in default_plugins:
-            self.import_plugin(spec)
-
-    def _getpluginname(self, plugin, name):
-        if name is None:
-            if hasattr(plugin, '__name__'):
-                name = plugin.__name__.split(".")[-1]
-            else:
-                name = id(plugin) 
-        return name 
-
-    def register(self, plugin, name=None):
-        assert not self.isregistered(plugin), plugin
-        assert not self.registry.isregistered(plugin), plugin
-        name = self._getpluginname(plugin, name)
-        if name in self._name2plugin:
-            return False
-        self._name2plugin[name] = plugin
-        self.call_plugin(plugin, "pytest_addhooks", {'pluginmanager': self})
-        self.hook.pytest_plugin_registered(manager=self, plugin=plugin)
-        self.registry.register(plugin)
-        return True
-
-    def unregister(self, plugin):
-        self.hook.pytest_plugin_unregistered(plugin=plugin)
-        self.registry.unregister(plugin)
-        for name, value in list(self._name2plugin.items()):
-            if value == plugin:
-                del self._name2plugin[name]
-
-    def isregistered(self, plugin, name=None):
-        if self._getpluginname(plugin, name) in self._name2plugin:
-            return True
-        for val in self._name2plugin.values():
-            if plugin == val:
-                return True
-
-    def addhooks(self, spec):
-        self.hook._addhooks(spec, prefix="pytest_")
-
-    def getplugins(self):
-        return list(self.registry)
-
-    def skipifmissing(self, name):
-        if not self.hasplugin(name):
-            py.test.skip("plugin %r is missing" % name)
-
-    def hasplugin(self, name):
-        try:
-            self.getplugin(name)
-        except KeyError:
-            return False
-        else:
-            return True
-
-    def getplugin(self, name):
-        try:
-            return self._name2plugin[name]
-        except KeyError:
-            impname = canonical_importname(name)
-            return self._name2plugin[impname]
-
-    # API for bootstrapping 
-    #
-    def _envlist(self, varname):
-        val = py.std.os.environ.get(varname, None)
-        if val is not None:
-            return val.split(',')
-        return ()
-    
-    def consider_env(self):
-        for spec in self._envlist("PYTEST_PLUGINS"):
-            self.import_plugin(spec)
-
-    def consider_setuptools_entrypoints(self):
-        try:
-            from pkg_resources import iter_entry_points
-        except ImportError:
-            return # XXX issue a warning 
-        for ep in iter_entry_points('pytest11'):
-            name = canonical_importname(ep.name)
-            if name in self._name2plugin:
-                continue
-            plugin = ep.load()
-            self.register(plugin, name=name)
-
-    def consider_preparse(self, args):
-        for opt1,opt2 in zip(args, args[1:]):
-            if opt1 == "-p": 
-                self.import_plugin(opt2)
-
-    def consider_conftest(self, conftestmodule):
-        cls = getattr(conftestmodule, 'ConftestPlugin', None)
-        if cls is not None:
-            raise ValueError("%r: 'ConftestPlugins' only existed till 1.0.0b1, "
-                "were removed in 1.0.0b2" % (cls,))
-        if self.register(conftestmodule, name=conftestmodule.__file__):
-            self.consider_module(conftestmodule)
-
-    def consider_module(self, mod):
-        attr = getattr(mod, "pytest_plugins", ())
-        if attr:
-            if not isinstance(attr, (list, tuple)):
-                attr = (attr,)
-            for spec in attr:
-                self.import_plugin(spec) 
-
-    def import_plugin(self, spec):
-        assert isinstance(spec, str)
-        modname = canonical_importname(spec)
-        if modname in self._name2plugin:
-            return
-        try:
-            mod = importplugin(modname)
-        except KeyboardInterrupt:
-            raise
-        except py.test.skip.Exception:
-            e = py.std.sys.exc_info()[1]
-            self._hints.append("skipped plugin %r: %s" %((modname, e.msg)))
-        else:
-            check_old_use(mod, modname) 
-            self.register(mod)
-            self.consider_module(mod)
-
-    def pytest_terminal_summary(self, terminalreporter):
-        tw = terminalreporter._tw
-        if terminalreporter.config.option.traceconfig:
-            for hint in self._hints:
-                tw.line("hint: %s" % hint)
-
-    # 
-    #
-    # API for interacting with registered and instantiated plugin objects 
-    #
-    # 
-    def listattr(self, attrname, plugins=None):
-        return self.registry.listattr(attrname, plugins=plugins)
-
-    def notify_exception(self, excinfo=None):
-        if excinfo is None:
-            excinfo = py.code.ExceptionInfo()
-        excrepr = excinfo.getrepr(funcargs=True, showlocals=True)
-        return self.hook.pytest_internalerror(excrepr=excrepr)
-
-    def do_addoption(self, parser):
-        mname = "pytest_addoption"
-        methods = self.registry.listattr(mname, reverse=True)
-        mc = MultiCall(methods, {'parser': parser})
-        mc.execute()
-
-    def pytest_plugin_registered(self, plugin):
-        dic = self.call_plugin(plugin, "pytest_namespace", {}) or {}
-        for name, value in dic.items():
-            setattr(py.test, name, value)
-            py.test.__all__.append(name)
-        if hasattr(self, '_config'):
-            self.call_plugin(plugin, "pytest_addoption", 
-                {'parser': self._config._parser})
-            self.call_plugin(plugin, "pytest_configure", 
-                {'config': self._config})
-
-    def call_plugin(self, plugin, methname, kwargs):
-        return MultiCall(
-                methods=self.listattr(methname, plugins=[plugin]), 
-                kwargs=kwargs, firstresult=True).execute()
-
-    def do_configure(self, config):
-        assert not hasattr(self, '_config')
-        self._config = config
-        config.hook.pytest_configure(config=self._config)
-
-    def do_unconfigure(self, config):
-        config = self._config 
-        del self._config 
-        config.hook.pytest_unconfigure(config=config)
-        config.pluginmanager.unregister(self)
-
-def canonical_importname(name):
-    name = name.lower()
-    modprefix = "pytest_"
-    if not name.startswith(modprefix):
-        name = modprefix + name 
-    return name 
-
-def importplugin(importspec):
-    try:
-        return __import__(importspec) 
-    except ImportError:
-        e = py.std.sys.exc_info()[1]
-        if str(e).find(importspec) == -1:
-            raise
-        try:
-            return __import__("py._plugin.%s" %(importspec), 
-                None, None, '__doc__')
-        except ImportError:
-            e = py.std.sys.exc_info()[1]
-            if str(e).find(importspec) == -1:
-                raise
-            # show the original exception, not the failing internal one
-            return __import__(importspec)  
-
-
-class MultiCall:
-    """ execute a call into multiple python functions/methods.  """
-
-    def __init__(self, methods, kwargs, firstresult=False):
-        self.methods = methods[:]
-        self.kwargs = kwargs.copy()
-        self.kwargs['__multicall__'] = self
-        self.results = []
-        self.firstresult = firstresult
-
-    def __repr__(self):
-        status = "%d results, %d meths" % (len(self.results), len(self.methods))
-        return "<MultiCall %s, kwargs=%r>" %(status, self.kwargs)
-
-    def execute(self):
-        while self.methods:
-            method = self.methods.pop()
-            kwargs = self.getkwargs(method)
-            res = method(**kwargs)
-            if res is not None:
-                self.results.append(res) 
-                if self.firstresult:
-                    return res
-        if not self.firstresult:
-            return self.results 
-
-    def getkwargs(self, method):
-        kwargs = {}
-        for argname in varnames(method):
-            try:
-                kwargs[argname] = self.kwargs[argname]
-            except KeyError:
-                pass # might be optional param
-        return kwargs 
-
-def varnames(func):
-    ismethod = inspect.ismethod(func)
-    rawcode = py.code.getrawcode(func)
-    try:
-        return rawcode.co_varnames[ismethod:]
-    except AttributeError:
-        return ()
-
-class Registry:
-    """
-        Manage Plugins: register/unregister call calls to plugins. 
-    """
-    def __init__(self, plugins=None):
-        if plugins is None:
-            plugins = []
-        self._plugins = plugins
-
-    def register(self, plugin):
-        assert not isinstance(plugin, str)
-        assert not plugin in self._plugins
-        self._plugins.append(plugin)
-
-    def unregister(self, plugin):
-        self._plugins.remove(plugin)
-
-    def isregistered(self, plugin):
-        return plugin in self._plugins 
-
-    def __iter__(self):
-        return iter(self._plugins)
-
-    def listattr(self, attrname, plugins=None, reverse=False):
-        l = []
-        if plugins is None:
-            plugins = self._plugins
-        for plugin in plugins:
-            try:
-                l.append(getattr(plugin, attrname))
-            except AttributeError:
-                continue 
-        if reverse:
-            l.reverse()
-        return l
-
-class HookRelay: 
-    def __init__(self, hookspecs, registry, prefix="pytest_"):
-        if not isinstance(hookspecs, list):
-            hookspecs = [hookspecs]
-        self._hookspecs = []
-        self._registry = registry
-        for hookspec in hookspecs:
-            self._addhooks(hookspec, prefix)
-
-    def _addhooks(self, hookspecs, prefix):
-        self._hookspecs.append(hookspecs)
-        added = False
-        for name, method in vars(hookspecs).items():
-            if name.startswith(prefix):
-                if not method.__doc__:
-                    raise ValueError("docstring required for hook %r, in %r"
-                        % (method, hookspecs))
-                firstresult = getattr(method, 'firstresult', False)
-                hc = HookCaller(self, name, firstresult=firstresult)
-                setattr(self, name, hc)
-                added = True
-                #print ("setting new hook", name)
-        if not added:
-            raise ValueError("did not find new %r hooks in %r" %(
-                prefix, hookspecs,))
-            
-
-    def _performcall(self, name, multicall):
-        return multicall.execute()
-        
-class HookCaller:
-    def __init__(self, hookrelay, name, firstresult):
-        self.hookrelay = hookrelay 
-        self.name = name 
-        self.firstresult = firstresult 
-
-    def __repr__(self):
-        return "<HookCaller %r>" %(self.name,)
-
-    def __call__(self, **kwargs):
-        methods = self.hookrelay._registry.listattr(self.name)
-        mc = MultiCall(methods, kwargs, firstresult=self.firstresult)
-        return self.hookrelay._performcall(self.name, mc)
-
-    def pcall(self, plugins, **kwargs):
-        methods = self.hookrelay._registry.listattr(self.name, plugins=plugins)
-        mc = MultiCall(methods, kwargs, firstresult=self.firstresult)
-        return self.hookrelay._performcall(self.name, mc)
-   

diff --git a/pypy/doc/config/objspace.disable_call_speedhacks.txt b/pypy/doc/config/objspace.disable_call_speedhacks.txt
deleted file mode 100644
--- a/pypy/doc/config/objspace.disable_call_speedhacks.txt
+++ /dev/null
@@ -1,2 +0,0 @@
-disable the speed hacks that the interpreter normally does. Usually you don't
-want to set this to False, but some object spaces require it.

diff --git a/pypy/doc/config/objspace.std.optimized_list_getitem.txt b/pypy/doc/config/objspace.std.optimized_list_getitem.txt
deleted file mode 100644
--- a/pypy/doc/config/objspace.std.optimized_list_getitem.txt
+++ /dev/null
@@ -1,1 +0,0 @@
-Optimized list[int] a bit.

diff --git a/py/_compat/dep_optparse.py b/py/_compat/dep_optparse.py
deleted file mode 100644
--- a/py/_compat/dep_optparse.py
+++ /dev/null
@@ -1,4 +0,0 @@
-import py
-py.log._apiwarn("1.1", "py.compat.optparse deprecated, use standard library version.", stacklevel="apipkg")
-
-optparse = py.std.optparse 

diff --git a/py/bin/win32/py.cleanup.cmd b/py/bin/win32/py.cleanup.cmd
deleted file mode 100644
--- a/py/bin/win32/py.cleanup.cmd
+++ /dev/null
@@ -1,2 +0,0 @@
- at echo off
-python "%~dp0\..\py.cleanup" %*
\ No newline at end of file

diff --git a/py/_plugin/pytest_terminal.py b/py/_plugin/pytest_terminal.py
deleted file mode 100644
--- a/py/_plugin/pytest_terminal.py
+++ /dev/null
@@ -1,540 +0,0 @@
-"""
-Implements terminal reporting of the full testing process.
-
-This is a good source for looking at the various reporting hooks. 
-"""
-import py
-import sys
-
-optionalhook = py.test.mark.optionalhook
-
-def pytest_addoption(parser):
-    group = parser.getgroup("terminal reporting", "reporting", after="general")
-    group._addoption('-v', '--verbose', action="count", 
-               dest="verbose", default=0, help="increase verbosity."),
-    group._addoption('-r',
-         action="store", dest="reportchars", default=None, metavar="chars",
-         help="show extra test summary info as specified by chars (f)ailed, "
-              "(s)skipped, (x)failed, (X)passed.")
-    group._addoption('-l', '--showlocals',
-         action="store_true", dest="showlocals", default=False,
-         help="show locals in tracebacks (disabled by default).")
-    group._addoption('--report',
-         action="store", dest="report", default=None, metavar="opts",
-         help="(deprecated, use -r)")
-    group._addoption('--tb', metavar="style", 
-               action="store", dest="tbstyle", default='long',
-               type="choice", choices=['long', 'short', 'no', 'line'],
-               help="traceback print mode (long/short/line/no).")
-    group._addoption('--fulltrace',
-               action="store_true", dest="fulltrace", default=False,
-               help="don't cut any tracebacks (default is to cut).")
-    group._addoption('--funcargs',
-               action="store_true", dest="showfuncargs", default=False,
-               help="show available function arguments, sorted by plugin")
-
-def pytest_configure(config):
-    if config.option.collectonly:
-        reporter = CollectonlyReporter(config)
-    elif config.option.showfuncargs:
-        config.setsessionclass(ShowFuncargSession)
-        reporter = None
-    else:
-        reporter = TerminalReporter(config)
-    if reporter:
-        # XXX see remote.py's XXX 
-        for attr in 'pytest_terminal_hasmarkup', 'pytest_terminal_fullwidth':
-            if hasattr(config, attr):
-                #print "SETTING TERMINAL OPTIONS", attr, getattr(config, attr)
-                name = attr.split("_")[-1]
-                assert hasattr(self.reporter._tw, name), name
-                setattr(reporter._tw, name, getattr(config, attr))
-        config.pluginmanager.register(reporter, 'terminalreporter')
-
-def getreportopt(config):
-    reportopts = ""
-    optvalue = config.getvalue("report")
-    if optvalue:
-        py.builtin.print_("DEPRECATED: use -r instead of --report option.", 
-            file=py.std.sys.stderr)
-        if optvalue:
-            for setting in optvalue.split(","):
-                setting = setting.strip()
-                if setting == "skipped":
-                    reportopts += "s"
-                elif setting == "xfailed":
-                    reportopts += "x"
-    reportchars = config.getvalue("reportchars")
-    if reportchars:
-        for char in reportchars:
-            if char not in reportopts:
-                reportopts += char
-    return reportopts
-
-class TerminalReporter:
-    def __init__(self, config, file=None):
-        self.config = config 
-        self.stats = {}       
-        self.curdir = py.path.local()
-        if file is None:
-            file = py.std.sys.stdout
-        self._tw = py.io.TerminalWriter(file)
-        self.currentfspath = None 
-        self.gateway2info = {}
-        self.reportchars = getreportopt(config)
-
-    def hasopt(self, char):
-        char = {'xfailed': 'x', 'skipped': 's'}.get(char,char)
-        return char in self.reportchars
-
-    def write_fspath_result(self, fspath, res):
-        fspath = self.curdir.bestrelpath(fspath)
-        if fspath != self.currentfspath:
-            self._tw.line()
-            relpath = self.curdir.bestrelpath(fspath)
-            self._tw.write(relpath + " ")
-            self.currentfspath = fspath
-        self._tw.write(res)
-
-    def write_ensure_prefix(self, prefix, extra="", **kwargs):
-        if self.currentfspath != prefix:
-            self._tw.line()
-            self.currentfspath = prefix 
-            self._tw.write(prefix)
-        if extra:
-            self._tw.write(extra, **kwargs)
-            self.currentfspath = -2
-
-    def ensure_newline(self):
-        if self.currentfspath: 
-            self._tw.line()
-            self.currentfspath = None
-
-    def write_line(self, line, **markup):
-        line = str(line)
-        self.ensure_newline()
-        self._tw.line(line, **markup)
-
-    def write_sep(self, sep, title=None, **markup):
-        self.ensure_newline()
-        self._tw.sep(sep, title, **markup)
-
-    def getcategoryletterword(self, rep):
-        res = self.config.hook.pytest_report_teststatus(report=rep)
-        if res:
-            return res
-        for cat in 'skipped failed passed ???'.split():
-            if getattr(rep, cat, None):
-                break 
-        return cat, self.getoutcomeletter(rep), self.getoutcomeword(rep)
-
-    def getoutcomeletter(self, rep):
-        return rep.shortrepr 
-
-    def getoutcomeword(self, rep):
-        if rep.passed: 
-            return "PASS", dict(green=True)
-        elif rep.failed: 
-            return "FAIL", dict(red=True)
-        elif rep.skipped: 
-            return "SKIP"
-        else: 
-            return "???", dict(red=True)
-
-    def gettestid(self, item, relative=True):
-        fspath = item.fspath
-        chain = [x for x in item.listchain() if x.fspath == fspath]
-        chain = chain[1:]
-        names = [x.name for x in chain if x.name != "()"]
-        path = item.fspath
-        if relative:
-            relpath = path.relto(self.curdir)
-            if relpath:
-                path = relpath
-        names.insert(0, str(path))
-        return "::".join(names)
-
-
-    def pytest_internalerror(self, excrepr):
-        for line in str(excrepr).split("\n"):
-            self.write_line("INTERNALERROR> " + line)
-
-    def pytest_plugin_registered(self, plugin):
-        if self.config.option.traceconfig: 
-            msg = "PLUGIN registered: %s" %(plugin,)
-            # XXX this event may happen during setup/teardown time 
-            #     which unfortunately captures our output here 
-            #     which garbles our output if we use self.write_line 
-            self.write_line(msg)
-
-    @optionalhook
-    def pytest_gwmanage_newgateway(self, gateway, platinfo):
-        #self.write_line("%s instantiated gateway from spec %r" %(gateway.id, gateway.spec._spec))
-        d = {}
-        d['version'] = repr_pythonversion(platinfo.version_info)
-        d['id'] = gateway.id
-        d['spec'] = gateway.spec._spec 
-        d['platform'] = platinfo.platform 
-        if self.config.option.verbose:
-            d['extra'] = "- " + platinfo.executable
-        else:
-            d['extra'] = ""
-        d['cwd'] = platinfo.cwd
-        infoline = ("[%(id)s] %(spec)s -- platform %(platform)s, "
-                        "Python %(version)s "
-                        "cwd: %(cwd)s"
-                        "%(extra)s" % d)
-        self.write_line(infoline)
-        self.gateway2info[gateway] = infoline
-
-    @optionalhook
-    def pytest_testnodeready(self, node):
-        self.write_line("[%s] txnode ready to receive tests" %(node.gateway.id,))
-
-    @optionalhook
-    def pytest_testnodedown(self, node, error):
-        if error:
-            self.write_line("[%s] node down, error: %s" %(node.gateway.id, error))
-
-    @optionalhook
-    def pytest_rescheduleitems(self, items):
-        if self.config.option.debug:
-            self.write_sep("!", "RESCHEDULING %s " %(items,))
-
-    @optionalhook
-    def pytest_looponfailinfo(self, failreports, rootdirs):
-        if failreports:
-            self.write_sep("#", "LOOPONFAILING", red=True)
-            for report in failreports:
-                loc = self._getcrashline(report)
-                self.write_line(loc, red=True)
-        self.write_sep("#", "waiting for changes")
-        for rootdir in rootdirs:
-            self.write_line("### Watching:   %s" %(rootdir,), bold=True)
-
-
-    def pytest_trace(self, category, msg):
-        if self.config.option.debug or \
-           self.config.option.traceconfig and category.find("config") != -1:
-            self.write_line("[%s] %s" %(category, msg))
-
-    def pytest_deselected(self, items):
-        self.stats.setdefault('deselected', []).append(items)
-
-    def pytest_itemstart(self, item, node=None):
-        if getattr(self.config.option, 'dist', 'no') != "no":
-            # for dist-testing situations itemstart means we 
-            # queued the item for sending, not interesting (unless debugging) 
-            if self.config.option.debug:
-                line = self._reportinfoline(item)
-                extra = ""
-                if node:
-                    extra = "-> [%s]" % node.gateway.id
-                self.write_ensure_prefix(line, extra)
-        else:
-            if self.config.option.verbose:
-                line = self._reportinfoline(item)
-                self.write_ensure_prefix(line, "") 
-            else:
-                # ensure that the path is printed before the 
-                # 1st test of a module starts running
-
-                self.write_fspath_result(self._getfspath(item), "")
-
-    def pytest__teardown_final_logerror(self, report):
-        self.stats.setdefault("error", []).append(report)
- 
-    def pytest_runtest_logreport(self, report):
-        rep = report
-        cat, letter, word = self.getcategoryletterword(rep)
-        if not letter and not word:
-            # probably passed setup/teardown
-            return
-        if isinstance(word, tuple):
-            word, markup = word
-        else:
-            markup = {}
-        self.stats.setdefault(cat, []).append(rep)
-        if not self.config.option.verbose:
-            self.write_fspath_result(self._getfspath(rep.item), letter)
-        else:
-            line = self._reportinfoline(rep.item)
-            if not hasattr(rep, 'node'):
-                self.write_ensure_prefix(line, word, **markup)
-            else:
-                self.ensure_newline()
-                if hasattr(rep, 'node'):
-                    self._tw.write("[%s] " % rep.node.gateway.id)
-                self._tw.write(word, **markup)
-                self._tw.write(" " + line)
-                self.currentfspath = -2
-
-    def pytest_collectreport(self, report):
-        if not report.passed:
-            if report.failed:
-                self.stats.setdefault("error", []).append(report)
-                msg = report.longrepr.reprcrash.message 
-                self.write_fspath_result(report.collector.fspath, "E")
-            elif report.skipped:
-                self.stats.setdefault("skipped", []).append(report)
-                self.write_fspath_result(report.collector.fspath, "S")
-
-    def pytest_sessionstart(self, session):
-        self.write_sep("=", "test session starts", bold=True)
-        self._sessionstarttime = py.std.time.time()
-
-        verinfo = ".".join(map(str, sys.version_info[:3]))
-        msg = "platform %s -- Python %s" % (sys.platform, verinfo)
-        msg += " -- pytest-%s" % (py.__version__)
-        if self.config.option.verbose or self.config.option.debug or getattr(self.config.option, 'pastebin', None):
-            msg += " -- " + str(sys.executable)
-        self.write_line(msg)
-        lines = self.config.hook.pytest_report_header(config=self.config)
-        lines.reverse()
-        for line in flatten(lines):
-            self.write_line(line)
-        for i, testarg in enumerate(self.config.args):
-            self.write_line("test object %d: %s" %(i+1, testarg))
-
-    def pytest_sessionfinish(self, exitstatus, __multicall__):
-        __multicall__.execute() 
-        self._tw.line("")
-        if exitstatus in (0, 1, 2):
-            self.summary_errors()
-            self.summary_failures()
-            self.config.hook.pytest_terminal_summary(terminalreporter=self)
-        if exitstatus == 2:
-            self._report_keyboardinterrupt()
-        self.summary_deselected()
-        self.summary_stats()
-
-    def pytest_keyboard_interrupt(self, excinfo):
-        self._keyboardinterrupt_memo = excinfo.getrepr(funcargs=True)
-
-    def _report_keyboardinterrupt(self):
-        excrepr = self._keyboardinterrupt_memo
-        msg = excrepr.reprcrash.message
-        self.write_sep("!", msg)
-        if "KeyboardInterrupt" in msg:
-            if self.config.getvalue("fulltrace"):
-                excrepr.toterminal(self._tw)
-            else:
-                excrepr.reprcrash.toterminal(self._tw)
-
-    def _getcrashline(self, report):
-        try:
-            return report.longrepr.reprcrash
-        except AttributeError:
-            return str(report.longrepr)[:50]
-
-    def _reportinfoline(self, item):
-        collect_fspath = self._getfspath(item)
-        fspath, lineno, msg = self._getreportinfo(item)
-        if fspath and fspath != collect_fspath:
-            fspath = "%s <- %s" % (
-                self.curdir.bestrelpath(collect_fspath),
-                self.curdir.bestrelpath(fspath))
-        elif fspath:
-            fspath = self.curdir.bestrelpath(fspath)
-        if lineno is not None:
-            lineno += 1
-        if fspath and lineno and msg:
-            line = "%(fspath)s:%(lineno)s: %(msg)s"
-        elif fspath and msg:
-            line = "%(fspath)s: %(msg)s"
-        elif fspath and lineno:
-            line = "%(fspath)s:%(lineno)s %(extrapath)s"
-        else:
-            line = "[noreportinfo]"
-        return line % locals() + " "
-        
-    def _getfailureheadline(self, rep):
-        if hasattr(rep, "collector"):
-            return str(rep.collector.fspath)
-        elif hasattr(rep, 'item'):
-            fspath, lineno, msg = self._getreportinfo(rep.item)
-            return msg
-        else:
-            return "test session" 
-
-    def _getreportinfo(self, item):
-        try:
-            return item.__reportinfo
-        except AttributeError:
-            pass
-        reportinfo = item.config.hook.pytest_report_iteminfo(item=item)
-        # cache on item
-        item.__reportinfo = reportinfo
-        return reportinfo
-
-    def _getfspath(self, item):
-        try:
-            return item.fspath
-        except AttributeError:
-            fspath, lineno, msg = self._getreportinfo(item)
-            return fspath
-
-    #
-    # summaries for sessionfinish 
-    #
-
-    def summary_failures(self):
-        tbstyle = self.config.getvalue("tbstyle")
-        if 'failed' in self.stats and tbstyle != "no":
-            self.write_sep("=", "FAILURES")
-            for rep in self.stats['failed']:
-                if tbstyle == "line":
-                    line = self._getcrashline(rep)
-                    self.write_line(line)
-                else:    
-                    msg = self._getfailureheadline(rep)
-                    self.write_sep("_", msg)
-                    self.write_platinfo(rep)
-                    rep.toterminal(self._tw)
-
-    def summary_errors(self):
-        if 'error' in self.stats and self.config.option.tbstyle != "no":
-            self.write_sep("=", "ERRORS")
-            for rep in self.stats['error']:
-                msg = self._getfailureheadline(rep)
-                if not hasattr(rep, 'when'):
-                    # collect
-                    msg = "ERROR during collection " + msg
-                elif rep.when == "setup":
-                    msg = "ERROR at setup of " + msg 
-                elif rep.when == "teardown":
-                    msg = "ERROR at teardown of " + msg 
-                self.write_sep("_", msg)
-                self.write_platinfo(rep)
-                rep.toterminal(self._tw)
-
-    def write_platinfo(self, rep):
-        if hasattr(rep, 'node'):
-            self.write_line(self.gateway2info.get(
-                rep.node.gateway, 
-                "node %r (platinfo not found? strange)")
-                    [:self._tw.fullwidth-1])
-
-    def summary_stats(self):
-        session_duration = py.std.time.time() - self._sessionstarttime
-
-        keys = "failed passed skipped deselected".split()
-        for key in self.stats.keys():
-            if key not in keys:
-                keys.append(key)
-        parts = []
-        for key in keys:
-            val = self.stats.get(key, None)
-            if val:
-                parts.append("%d %s" %(len(val), key))
-        line = ", ".join(parts)
-        # XXX coloring
-        self.write_sep("=", "%s in %.2f seconds" %(line, session_duration))
-
-    def summary_deselected(self):
-        if 'deselected' in self.stats:
-            self.write_sep("=", "%d tests deselected by %r" %(
-                len(self.stats['deselected']), self.config.option.keyword), bold=True)
-
-
-class CollectonlyReporter:
-    INDENT = "  "
-
-    def __init__(self, config, out=None):
-        self.config = config 
-        if out is None:
-            out = py.std.sys.stdout
-        self.out = py.io.TerminalWriter(out)
-        self.indent = ""
-        self._failed = []
-
-    def outindent(self, line):
-        self.out.line(self.indent + str(line))
-
-    def pytest_internalerror(self, excrepr):
-        for line in str(excrepr).split("\n"):
-            self.out.line("INTERNALERROR> " + line)
-
-    def pytest_collectstart(self, collector):
-        self.outindent(collector)
-        self.indent += self.INDENT 
-    
-    def pytest_itemstart(self, item, node=None):
-        self.outindent(item)
-
-    def pytest_collectreport(self, report):
-        if not report.passed:
-            self.outindent("!!! %s !!!" % report.longrepr.reprcrash.message)
-            self._failed.append(report)
-        self.indent = self.indent[:-len(self.INDENT)]
-
-    def pytest_sessionfinish(self, session, exitstatus):
-        if self._failed:
-            self.out.sep("!", "collection failures")
-        for rep in self._failed:
-            rep.toterminal(self.out)
-                
-
-def repr_pythonversion(v=None):
-    if v is None:
-        v = sys.version_info
-    try:
-        return "%s.%s.%s-%s-%s" % v
-    except (TypeError, ValueError):
-        return str(v)
-
-def flatten(l):
-    for x in l:
-        if isinstance(x, (list, tuple)):
-            for y in flatten(x):
-                yield y
-        else:
-            yield x
-
-from py._test.session import Session
-class ShowFuncargSession(Session):
-    def main(self, colitems):
-        self.fspath = py.path.local()
-        self.sessionstarts()
-        try:
-            self.showargs(colitems[0])
-        finally:
-            self.sessionfinishes(exitstatus=1)
-
-    def showargs(self, colitem):
-        tw = py.io.TerminalWriter()
-        from py._test.funcargs import getplugins
-        from py._test.funcargs import FuncargRequest
-        plugins = getplugins(colitem, withpy=True)
-        verbose = self.config.getvalue("verbose")
-        for plugin in plugins:
-            available = []
-            for name, factory in vars(plugin).items():
-                if name.startswith(FuncargRequest._argprefix):
-                    name = name[len(FuncargRequest._argprefix):]
-                    if name not in available:
-                        available.append([name, factory]) 
-            if available:
-                pluginname = plugin.__name__
-                for name, factory in available:
-                    loc = self.getlocation(factory)
-                    if verbose:
-                        funcargspec = "%s -- %s" %(name, loc,)
-                    else:
-                        funcargspec = name
-                    tw.line(funcargspec, green=True)
-                    doc = factory.__doc__ or ""
-                    if doc:
-                        for line in doc.split("\n"):
-                            tw.line("    " + line.strip())
-                    else:
-                        tw.line("    %s: no docstring available" %(loc,), 
-                            red=True)
-
-    def getlocation(self, function):
-        import inspect
-        fn = py.path.local(inspect.getfile(function))
-        lineno = py.builtin._getcode(function).co_firstlineno
-        if fn.relto(self.fspath):
-            fn = fn.relto(self.fspath)
-        return "%s:%d" %(fn, lineno+1)

diff --git a/pypy/doc/config/objspace.usemodules._stackless.txt b/pypy/doc/config/objspace.usemodules._stackless.txt
deleted file mode 100644
--- a/pypy/doc/config/objspace.usemodules._stackless.txt
+++ /dev/null
@@ -1,6 +0,0 @@
-Use the '_stackless' module. 
-
-Exposes the `stackless` primitives, and also implies a stackless build. 
-See also :config:`translation.stackless`.
-
-.. _`stackless`: ../stackless.html

diff --git a/pypy/doc/config/objspace.usemodules._testing.txt b/pypy/doc/config/objspace.usemodules._testing.txt
deleted file mode 100644
--- a/pypy/doc/config/objspace.usemodules._testing.txt
+++ /dev/null
@@ -1,3 +0,0 @@
-Use the '_testing' module. This module exists only for PyPy own testing purposes.
- 
-This module is expected to be working and is included by default.

diff --git a/pypy/doc/config/translation.gc.txt b/pypy/doc/config/translation.gc.txt
deleted file mode 100644
--- a/pypy/doc/config/translation.gc.txt
+++ /dev/null
@@ -1,13 +0,0 @@
-Choose the Garbage Collector used by the translated program:
-
-  - "ref": reference counting. Takes very long to translate and the result is
-    slow.
-
-  - "marksweep": naive mark & sweep.
-
-  - "semispace": a copying semi-space GC.
-
-  - "generation": a generational GC using the semi-space GC for the
-    older generation.
-
-  - "boehm": use the Boehm conservative GC.

diff --git a/pypy/doc/config/translation.instrument.txt b/pypy/doc/config/translation.instrument.txt
deleted file mode 100644
--- a/pypy/doc/config/translation.instrument.txt
+++ /dev/null
@@ -1,3 +0,0 @@
-Internal option.
-
-.. internal

diff --git a/pypy/doc/config/objspace.usemodules.imp.txt b/pypy/doc/config/objspace.usemodules.imp.txt
deleted file mode 100644
--- a/pypy/doc/config/objspace.usemodules.imp.txt
+++ /dev/null
@@ -1,2 +0,0 @@
-Use the 'imp' module.
-This module is included by default.

diff --git a/py/_test/cmdline.py b/py/_test/cmdline.py
deleted file mode 100644
--- a/py/_test/cmdline.py
+++ /dev/null
@@ -1,24 +0,0 @@
-import py
-import sys
-
-#
-# main entry point
-#
-
-def main(args=None):
-    if args is None:
-        args = sys.argv[1:]
-    config = py.test.config
-    try:
-        config.parse(args) 
-        config.pluginmanager.do_configure(config)
-        session = config.initsession()
-        colitems = config.getinitialnodes()
-        exitstatus = session.main(colitems)
-        config.pluginmanager.do_unconfigure(config)
-    except config.Error:
-        e = sys.exc_info()[1]
-        sys.stderr.write("ERROR: %s\n" %(e.args[0],))
-        exitstatus = 3
-    py.test.config = py.test.config.__class__()
-    return exitstatus

diff --git a/pypy/doc/config/translation.backendopt.profile_based_inline.txt b/pypy/doc/config/translation.backendopt.profile_based_inline.txt
deleted file mode 100644
--- a/pypy/doc/config/translation.backendopt.profile_based_inline.txt
+++ /dev/null
@@ -1,10 +0,0 @@
-Inline flowgraphs only for call-sites for which there was a minimal
-number of calls during an instrumented run of the program. Callee
-flowgraphs are considered candidates based on a weight heuristic like
-for basic inlining. (see :config:`translation.backendopt.inline`,
-:config:`translation.backendopt.profile_based_inline_threshold` ).
-
-The option takes as value a string which is the arguments to pass to
-the program for the instrumented run.
-
-This optimization is not used by default.
\ No newline at end of file

diff --git a/pypy/doc/config/objspace.usemodules.pypyjit.txt b/pypy/doc/config/objspace.usemodules.pypyjit.txt
deleted file mode 100644
--- a/pypy/doc/config/objspace.usemodules.pypyjit.txt
+++ /dev/null
@@ -1,1 +0,0 @@
-Use the 'pypyjit' module. 

diff --git a/py/_cmdline/pywhich.py b/py/_cmdline/pywhich.py
deleted file mode 100755
--- a/py/_cmdline/pywhich.py
+++ /dev/null
@@ -1,23 +0,0 @@
-#!/usr/bin/env python 
-
-"""\
-py.which [name]
-
-print the location of the given python module or package name 
-"""
-
-import sys
-
-def main():
-    name = sys.argv[1]
-    try:
-        mod = __import__(name)
-    except ImportError:
-        sys.stderr.write("could not import: " +  name + "\n")
-    else:
-        try:
-            location = mod.__file__ 
-        except AttributeError:
-            sys.stderr.write("module (has no __file__): " + str(mod))
-        else:
-            print(location)

diff --git a/pypy/doc/config/translation.insist.txt b/pypy/doc/config/translation.insist.txt
deleted file mode 100644
--- a/pypy/doc/config/translation.insist.txt
+++ /dev/null
@@ -1,4 +0,0 @@
-Don't stop on the first `rtyping`_ error. Instead, try to rtype as much as
-possible and show the collected error messages in the end.
-
-.. _`rtyping`: ../rtyper.html

diff --git a/py/bin/win32/py.convert_unittest.cmd b/py/bin/win32/py.convert_unittest.cmd
deleted file mode 100644
--- a/py/bin/win32/py.convert_unittest.cmd
+++ /dev/null
@@ -1,2 +0,0 @@
- at echo off
-python "%~dp0\..\py.convert_unittest" %*
\ No newline at end of file

diff --git a/pypy/jit/metainterp/resume.py b/pypy/jit/metainterp/resume.py
--- a/pypy/jit/metainterp/resume.py
+++ b/pypy/jit/metainterp/resume.py
@@ -10,6 +10,7 @@
 from pypy.rlib.objectmodel import we_are_translated, specialize
 from pypy.rlib.debug import have_debug_prints, ll_assert
 from pypy.rlib.debug import debug_start, debug_stop, debug_print
+from pypy.jit.metainterp.optimizeutil import InvalidLoop
 
 # Logic to encode the chain of frames and the state of the boxes at a
 # guard operation, and to decode it again.  This is a bit advanced,
@@ -427,12 +428,24 @@
     #    raise NotImplementedError
     def equals(self, fieldnums):
         return tagged_list_eq(self.fieldnums, fieldnums)
+
     def set_content(self, fieldnums):
         self.fieldnums = fieldnums
 
     def debug_prints(self):
         raise NotImplementedError
 
+    def generalization_of(self, other):
+        raise NotImplementedError
+
+    def generate_guards(self, other, box, cpu, extra_guards):
+        if self.generalization_of(other):
+            return
+        self._generate_guards(other, box, cpu, extra_guards)
+
+    def _generate_guards(self, other, box, cpu, extra_guards):
+        raise InvalidLoop
+        
 class AbstractVirtualStructInfo(AbstractVirtualInfo):
     def __init__(self, fielddescrs):
         self.fielddescrs = fielddescrs
@@ -452,6 +465,26 @@
                         str(self.fielddescrs[i]),
                         str(untag(self.fieldnums[i])))
 
+    def generalization_of(self, other):
+        if not self._generalization_of(other):
+            return False
+        assert len(self.fielddescrs) == len(self.fieldstate)
+        assert len(other.fielddescrs) == len(other.fieldstate)
+        if len(self.fielddescrs) != len(other.fielddescrs):
+            return False
+        
+        for i in range(len(self.fielddescrs)):
+            if other.fielddescrs[i] is not self.fielddescrs[i]:
+                return False
+            if not self.fieldstate[i].generalization_of(other.fieldstate[i]):
+                return False
+
+        return True
+
+    def _generalization_of(self, other):
+        raise NotImplementedError
+
+
 class VirtualInfo(AbstractVirtualStructInfo):
     def __init__(self, known_class, fielddescrs):
         AbstractVirtualStructInfo.__init__(self, fielddescrs)
@@ -467,6 +500,14 @@
         debug_print("\tvirtualinfo", self.known_class.repr_rpython())
         AbstractVirtualStructInfo.debug_prints(self)
 
+    def _generalization_of(self, other):        
+        if not isinstance(other, VirtualInfo):
+            return False
+        if not self.known_class.same_constant(other.known_class):
+            return False
+        return True
+        
+
 class VStructInfo(AbstractVirtualStructInfo):
     def __init__(self, typedescr, fielddescrs):
         AbstractVirtualStructInfo.__init__(self, fielddescrs)
@@ -482,6 +523,14 @@
         debug_print("\tvstructinfo", self.typedescr.repr_rpython())
         AbstractVirtualStructInfo.debug_prints(self)
 
+    def _generalization_of(self, other):        
+        if not isinstance(other, VStructInfo):
+            return False
+        if self.typedescr is not other.typedescr:
+            return False
+        return True
+        
+
 class VArrayInfo(AbstractVirtualInfo):
     def __init__(self, arraydescr):
         self.arraydescr = arraydescr
@@ -513,6 +562,16 @@
         for i in self.fieldnums:
             debug_print("\t\t", str(untag(i)))
 
+    def generalization_of(self, other):
+        if self.arraydescr is not other.arraydescr:
+            return False
+        if len(self.fieldstate) != len(other.fieldstate):
+            return False
+        for i in range(len(self.fieldstate)):
+            if not self.fieldstate[i].generalization_of(other.fieldstate[i]):
+                return False
+        return True
+
 
 class VStrPlainInfo(AbstractVirtualInfo):
     """Stands for the string made out of the characters of all fieldnums."""
@@ -647,6 +706,7 @@
         # Note that this may be called recursively; that's why the
         # allocate() methods must fill in the cache as soon as they
         # have the object, before they fill its fields.
+        assert self.virtuals_cache is not None
         v = self.virtuals_cache[index]
         if not v:
             v = self.rd_virtuals[index].allocate(self, index)

diff --git a/py/_plugin/pytest_runner.py b/py/_plugin/pytest_runner.py
deleted file mode 100644
--- a/py/_plugin/pytest_runner.py
+++ /dev/null
@@ -1,417 +0,0 @@
-""" 
-collect and run test items and create reports. 
-"""
-
-import py, sys
-
-def pytest_namespace():
-    return {
-        'raises'       : raises, 
-        'skip'         : skip,
-        'importorskip' : importorskip,
-        'fail'         : fail, 
-        'xfail'        : xfail, 
-        'exit'         : exit, 
-    }
-
-#
-# pytest plugin hooks 
-
-# XXX move to pytest_sessionstart and fix py.test owns tests 
-def pytest_configure(config):
-    config._setupstate = SetupState()
-
-def pytest_sessionfinish(session, exitstatus):
-    if hasattr(session.config, '_setupstate'):
-        hook = session.config.hook
-        rep = hook.pytest__teardown_final(session=session)
-        if rep:
-            hook.pytest__teardown_final_logerror(report=rep)
-
-def pytest_make_collect_report(collector):
-    result = excinfo = None
-    try:
-        result = collector._memocollect()
-    except KeyboardInterrupt:
-        raise
-    except:
-        excinfo = py.code.ExceptionInfo()
-    return CollectReport(collector, result, excinfo)
-
-def pytest_runtest_protocol(item):
-    runtestprotocol(item)
-    return True
-
-def runtestprotocol(item, log=True):
-    rep = call_and_report(item, "setup", log)
-    reports = [rep]
-    if rep.passed:
-        reports.append(call_and_report(item, "call", log))
-    reports.append(call_and_report(item, "teardown", log))
-    return reports
-
-def pytest_runtest_setup(item):
-    item.config._setupstate.prepare(item)
-
-def pytest_runtest_call(item):
-    if not item._deprecated_testexecution():
-        item.runtest()
-
-def pytest_runtest_makereport(item, call):
-    return ItemTestReport(item, call.excinfo, call.when)
-
-def pytest_runtest_teardown(item):
-    item.config._setupstate.teardown_exact(item)
-
-def pytest__teardown_final(session):
-    call = CallInfo(session.config._setupstate.teardown_all, when="teardown")
-    if call.excinfo:
-        ntraceback = call.excinfo.traceback .cut(excludepath=py._pydir)
-        call.excinfo.traceback = ntraceback.filter()
-        rep = TeardownErrorReport(call.excinfo)
-        return rep 
-
-def pytest_report_teststatus(report):
-    if report.when in ("setup", "teardown"):
-        if report.failed:
-            #      category, shortletter, verbose-word 
-            return "error", "E", "ERROR"
-        elif report.skipped:
-            return "skipped", "s", "SKIPPED"
-        else:
-            return "", "", ""
-#
-# Implementation
-
-def call_and_report(item, when, log=True):
-    call = call_runtest_hook(item, when)
-    hook = item.ihook
-    report = hook.pytest_runtest_makereport(item=item, call=call)
-    if log and (when == "call" or not report.passed):
-        hook.pytest_runtest_logreport(report=report) 
-    return report
-
-def call_runtest_hook(item, when):
-    hookname = "pytest_runtest_" + when 
-    ihook = getattr(item.ihook, hookname)
-    return CallInfo(lambda: ihook(item=item), when=when)
-
-class CallInfo:
-    excinfo = None 
-    def __init__(self, func, when):
-        self.when = when 
-        try:
-            self.result = func()
-        except KeyboardInterrupt:
-            raise
-        except:
-            self.excinfo = py.code.ExceptionInfo()
-
-    def __repr__(self):
-        if self.excinfo:
-            status = "exception: %s" % str(self.excinfo.value)
-        else:
-            status = "result: %r" % (self.result,)
-        return "<CallInfo when=%r %s>" % (self.when, status)
-
-class BaseReport(object):
-    def __repr__(self):
-        l = ["%s=%s" %(key, value)
-           for key, value in self.__dict__.items()]
-        return "<%s %s>" %(self.__class__.__name__, " ".join(l),)
-
-    def toterminal(self, out):
-        longrepr = self.longrepr 
-        if hasattr(longrepr, 'toterminal'):
-            longrepr.toterminal(out)
-        else:
-            out.line(str(longrepr))
-   
-class ItemTestReport(BaseReport):
-    failed = passed = skipped = False
-
-    def __init__(self, item, excinfo=None, when=None):
-        self.item = item 
-        self.when = when
-        if item and when != "setup":
-            self.keywords = item.readkeywords() 
-        else:
-            # if we fail during setup it might mean 
-            # we are not able to access the underlying object
-            # this might e.g. happen if we are unpickled 
-            # and our parent collector did not collect us 
-            # (because it e.g. skipped for platform reasons)
-            self.keywords = {}  
-        if not excinfo:
-            self.passed = True
-            self.shortrepr = "." 
-        else:
-            if not isinstance(excinfo, py.code.ExceptionInfo):
-                self.failed = True
-                shortrepr = "?"
-                longrepr = excinfo 
-            elif excinfo.errisinstance(py.test.skip.Exception):
-                self.skipped = True 
-                shortrepr = "s"
-                longrepr = self.item._repr_failure_py(excinfo)
-            else:
-                self.failed = True
-                shortrepr = self.item.shortfailurerepr
-                if self.when == "call":
-                    longrepr = self.item.repr_failure(excinfo)
-                else: # exception in setup or teardown 
-                    longrepr = self.item._repr_failure_py(excinfo)
-                    shortrepr = shortrepr.lower()
-            self.shortrepr = shortrepr 
-            self.longrepr = longrepr 
-
-    def __repr__(self):
-        status = (self.passed and "passed" or 
-                  self.skipped and "skipped" or 
-                  self.failed and "failed" or 
-                  "CORRUPT")
-        l = [repr(self.item.name), "when=%r" % self.when, "outcome %r" % status,]
-        if hasattr(self, 'node'):
-            l.append("txnode=%s" % self.node.gateway.id)
-        info = " " .join(map(str, l))
-        return "<ItemTestReport %s>" % info 
-
-    def getnode(self):
-        return self.item 
-
-class CollectReport(BaseReport):
-    skipped = failed = passed = False 
-
-    def __init__(self, collector, result, excinfo=None):
-        self.collector = collector 
-        if not excinfo:
-            self.passed = True
-            self.result = result 
-        else:
-            style = "short"
-            if collector.config.getvalue("fulltrace"):
-                style = "long"
-            self.longrepr = self.collector._repr_failure_py(excinfo, 
-                style=style)
-            if excinfo.errisinstance(py.test.skip.Exception):
-                self.skipped = True
-                self.reason = str(excinfo.value)
-            else:
-                self.failed = True
-
-    def getnode(self):
-        return self.collector 
-
-class TeardownErrorReport(BaseReport):
-    skipped = passed = False 
-    failed = True
-    when = "teardown"
-    def __init__(self, excinfo):
-        self.longrepr = excinfo.getrepr(funcargs=True)
-
-class SetupState(object):
-    """ shared state for setting up/tearing down test items or collectors. """
-    def __init__(self):
-        self.stack = []
-        self._finalizers = {}
-
-    def addfinalizer(self, finalizer, colitem):
-        """ attach a finalizer to the given colitem. 
-        if colitem is None, this will add a finalizer that 
-        is called at the end of teardown_all(). 
-        """
-        assert hasattr(finalizer, '__call__')
-        #assert colitem in self.stack
-        self._finalizers.setdefault(colitem, []).append(finalizer)
-
-    def _pop_and_teardown(self):
-        colitem = self.stack.pop()
-        self._teardown_with_finalization(colitem)
-
-    def _callfinalizers(self, colitem):
-        finalizers = self._finalizers.pop(colitem, None)
-        while finalizers:
-            fin = finalizers.pop()
-            fin()
-
-    def _teardown_with_finalization(self, colitem): 
-        self._callfinalizers(colitem) 
-        if colitem: 
-            colitem.teardown()
-        for colitem in self._finalizers:
-            assert colitem is None or colitem in self.stack
-
-    def teardown_all(self): 
-        while self.stack: 
-            self._pop_and_teardown()
-        self._teardown_with_finalization(None)
-        assert not self._finalizers
-
-    def teardown_exact(self, item):
-        if self.stack and item == self.stack[-1]:
-            self._pop_and_teardown()
-        else:
-            self._callfinalizers(item)
-     
-    def prepare(self, colitem): 
-        """ setup objects along the collector chain to the test-method
-            and teardown previously setup objects."""
-        needed_collectors = colitem.listchain() 
-        while self.stack: 
-            if self.stack == needed_collectors[:len(self.stack)]: 
-                break 
-            self._pop_and_teardown()
-        # check if the last collection node has raised an error 
-        for col in self.stack:
-            if hasattr(col, '_prepare_exc'):
-                py.builtin._reraise(*col._prepare_exc) 
-        for col in needed_collectors[len(self.stack):]: 
-            self.stack.append(col) 
-            try:
-                col.setup() 
-            except Exception:
-                col._prepare_exc = sys.exc_info()
-                raise
-
-# =============================================================
-# Test OutcomeExceptions and helpers for creating them. 
-
-
-class OutcomeException(Exception): 
-    """ OutcomeException and its subclass instances indicate and 
-        contain info about test and collection outcomes. 
-    """ 
-    def __init__(self, msg=None, excinfo=None): 
-        self.msg = msg 
-        self.excinfo = excinfo
-
-    def __repr__(self):
-        if self.msg: 
-            return repr(self.msg) 
-        return "<%s instance>" %(self.__class__.__name__,)
-    __str__ = __repr__
-
-class Skipped(OutcomeException): 
-    # XXX hackish: on 3k we fake to live in the builtins 
-    # in order to have Skipped exception printing shorter/nicer
-    __module__ = 'builtins'
-
-class Failed(OutcomeException): 
-    """ raised from an explicit call to py.test.fail() """
-    __module__ = 'builtins'
-
-class XFailed(OutcomeException): 
-    """ raised from an explicit call to py.test.xfail() """
-    __module__ = 'builtins'
-
-class ExceptionFailure(Failed): 
-    """ raised by py.test.raises on an exception-assertion mismatch. """
-    def __init__(self, expr, expected, msg=None, excinfo=None): 
-        Failed.__init__(self, msg=msg, excinfo=excinfo) 
-        self.expr = expr 
-        self.expected = expected
-
-class Exit(KeyboardInterrupt):
-    """ raised by py.test.exit for immediate program exits without tracebacks and reporter/summary. """
-    def __init__(self, msg="unknown reason"):
-        self.msg = msg 
-        KeyboardInterrupt.__init__(self, msg)
-
-# exposed helper methods 
-
-def exit(msg): 
-    """ exit testing process as if KeyboardInterrupt was triggered. """ 
-    __tracebackhide__ = True
-    raise Exit(msg)
-
-exit.Exception = Exit
-
-def skip(msg=""):
-    """ skip an executing test with the given message.  Note: it's usually
-    better use the py.test.mark.skipif marker to declare a test to be
-    skipped under certain conditions like mismatching platforms or 
-    dependencies.  See the pytest_skipping plugin for details. 
-    """
-    __tracebackhide__ = True
-    raise Skipped(msg=msg) 
-
-skip.Exception = Skipped
-
-def fail(msg=""):
-    """ explicitely fail an currently-executing test with the given Message. """
-    __tracebackhide__ = True
-    raise Failed(msg=msg) 
-
-fail.Exception = Failed
-
-def xfail(reason=""):
-    """ xfail an executing test or setup functions, taking an optional 
-    reason string.
-    """
-    __tracebackhide__ = True
-    raise XFailed(reason)
-xfail.Exception = XFailed
-
-def raises(ExpectedException, *args, **kwargs):
-    """ if args[0] is callable: raise AssertionError if calling it with 
-        the remaining arguments does not raise the expected exception.  
-        if args[0] is a string: raise AssertionError if executing the
-        the string in the calling scope does not raise expected exception. 
-        for examples:
-        x = 5
-        raises(TypeError, lambda x: x + 'hello', x=x)
-        raises(TypeError, "x + 'hello'")
-    """
-    __tracebackhide__ = True 
-    assert args
-    if isinstance(args[0], str):
-        code, = args
-        assert isinstance(code, str)
-        frame = sys._getframe(1)
-        loc = frame.f_locals.copy()
-        loc.update(kwargs)
-        #print "raises frame scope: %r" % frame.f_locals
-        try:
-            code = py.code.Source(code).compile()
-            py.builtin.exec_(code, frame.f_globals, loc)
-            # XXX didn'T mean f_globals == f_locals something special?
-            #     this is destroyed here ...
-        except ExpectedException:
-            return py.code.ExceptionInfo()
-    else:
-        func = args[0]
-        try:
-            func(*args[1:], **kwargs)
-        except ExpectedException:
-            return py.code.ExceptionInfo()
-        k = ", ".join(["%s=%r" % x for x in kwargs.items()])
-        if k:
-            k = ', ' + k
-        expr = '%s(%r%s)' %(getattr(func, '__name__', func), args, k)
-    raise ExceptionFailure(msg="DID NOT RAISE", 
-                           expr=args, expected=ExpectedException) 
-
-raises.Exception = ExceptionFailure
-
-def importorskip(modname, minversion=None):
-    """ return imported module if it has a higher __version__ than the 
-    optionally specified 'minversion' - otherwise call py.test.skip() 
-    with a message detailing the mismatch. 
-    """
-    compile(modname, '', 'eval') # to catch syntaxerrors
-    try:
-        mod = __import__(modname, None, None, ['__doc__'])
-    except ImportError:
-        py.test.skip("could not import %r" %(modname,))
-    if minversion is None:
-        return mod
-    verattr = getattr(mod, '__version__', None)
-    if isinstance(minversion, str):
-        minver = minversion.split(".")
-    else:
-        minver = list(minversion)
-    if verattr is None or verattr.split(".") < minver:
-        py.test.skip("module %r has __version__ %r, required is: %r" %(
-                     modname, verattr, minversion))
-    return mod
-

diff --git a/pypy/doc/coding-guide.txt b/pypy/doc/coding-guide.txt
deleted file mode 100644
--- a/pypy/doc/coding-guide.txt
+++ /dev/null
@@ -1,1088 +0,0 @@
-=====================================
-PyPy - Coding Guide
-=====================================
-
-.. contents::
-.. sectnum::
-
-
-This document describes coding requirements and conventions for
-working with the PyPy code base.  Please read it carefully and
-ask back any questions you might have. The document does not talk
-very much about coding style issues. We mostly follow `PEP 8`_ though.
-If in doubt, follow the style that is already present in the code base.
-
-.. _`PEP 8`: http://www.python.org/dev/peps/pep-0008/
-
-.. _`RPython`:
-
-Overview and motivation
-========================
-
-We are writing a Python interpreter in Python, using Python's well known
-ability to step behind the algorithmic problems as a language. At first glance,
-one might think this achieves nothing but a better understanding how the
-interpreter works.  This alone would make it worth doing, but we have much
-larger goals.
-
-
-CPython vs. PyPy
--------------------
-
-Compared to the CPython implementation, Python takes the role of the C
-Code. We rewrite the CPython interpreter in Python itself.  We could
-also aim at writing a more flexible interpreter at C level but we
-want to use Python to give an alternative description of the interpreter.
-
-The clear advantage is that such a description is shorter and simpler to
-read, and many implementation details vanish. The drawback of this approach is
-that this interpreter will be unbearably slow as long as it is run on top
-of CPython.
-
-To get to a useful interpreter again, we need to translate our
-high-level description of Python to a lower level one.  One rather
-straight-forward way is to do a whole program analysis of the PyPy
-interpreter and create a C source, again. There are many other ways,
-but let's stick with this somewhat canonical approach.
-
-
-.. _`application-level`:
-.. _`interpreter-level`:
-
-Application-level and interpreter-level execution and objects
--------------------------------------------------------------
-
-Since Python is used for implementing all of our code base, there is a
-crucial distinction to be aware of: that between *interpreter-level* objects and 
-*application-level* objects.  The latter are the ones that you deal with
-when you write normal python programs.  Interpreter-level code, however,
-cannot invoke operations nor access attributes from application-level
-objects.  You will immediately recognize any interpreter level code in
-PyPy, because half the variable and object names start with a ``w_``, which
-indicates that they are `wrapped`_ application-level values. 
-
-Let's show the difference with a simple example.  To sum the contents of
-two variables ``a`` and ``b``, one would write the simple application-level
-``a+b`` -- in contrast, the equivalent interpreter-level code is
-``space.add(w_a, w_b)``, where ``space`` is an instance of an object space,
-and ``w_a`` and ``w_b`` are typical names for the wrapped versions of the
-two variables.
-
-It helps to remember how CPython deals with the same issue: interpreter
-level code, in CPython, is written in C and thus typical code for the
-addition is ``PyNumber_Add(p_a, p_b)`` where ``p_a`` and ``p_b`` are C
-variables of type ``PyObject*``. This is conceptually similar to how we write
-our interpreter-level code in Python.
-
-Moreover, in PyPy we have to make a sharp distinction between
-interpreter- and application-level *exceptions*: application exceptions
-are always contained inside an instance of ``OperationError``.  This
-makes it easy to distinguish failures (or bugs) in our interpreter-level code
-from failures appearing in a python application level program that we are
-interpreting.
-
-
-.. _`app-preferable`: 
-
-Application level is often preferable 
--------------------------------------
-
-Application-level code is substantially higher-level, and therefore
-correspondingly easier to write and debug.  For example, suppose we want
-to implement the ``update`` method of dict objects.  Programming at
-application level, we can write an obvious, simple implementation, one
-that looks like an **executable definition** of ``update``, for
-example::
-
-    def update(self, other):
-        for k in other.keys():
-            self[k] = other[k]
-
-If we had to code only at interpreter level, we would have to code
-something much lower-level and involved, say something like::
-
-    def update(space, w_self, w_other):
-        w_keys = space.call_method(w_other, 'keys')
-        w_iter = space.iter(w_keys)
-        while True:
-            try:
-                w_key = space.next(w_iter)
-            except OperationError, e:
-                if not e.match(space, space.w_StopIteration):
-                    raise       # re-raise other app-level exceptions
-                break
-            w_value = space.getitem(w_other, w_key)
-            space.setitem(w_self, w_key, w_value)
-
-This interpreter-level implementation looks much more similar to the C
-source code.  It is still more readable than its C counterpart because 
-it doesn't contain memory management details and can use Python's native 
-exception mechanism. 
-
-In any case, it should be obvious that the application-level implementation 
-is definitely more readable, more elegant and more maintainable than the
-interpreter-level one (and indeed, dict.update is really implemented at
-applevel in PyPy).
-
-In fact, in almost all parts of PyPy, you find application level code in
-the middle of interpreter-level code.  Apart from some bootstrapping
-problems (application level functions need a certain initialization
-level of the object space before they can be executed), application
-level code is usually preferable.  We have an abstraction (called the
-'Gateway') which allows the caller of a function to remain ignorant of
-whether a particular function is implemented at application or
-interpreter level. 
-
-our runtime interpreter is "restricted python"
-----------------------------------------------
-
-In order to make a C code generator feasible all code on interpreter level has
-to restrict itself to a subset of the Python language, and we adhere to some
-rules which make translation to lower level languages feasible. Code on
-application level can still use the full expressivity of Python.
-
-Unlike source-to-source translations (like e.g. Starkiller_ or more recently
-ShedSkin_) we start
-translation from live python code objects which constitute our Python
-interpreter.   When doing its work of interpreting bytecode our Python
-implementation must behave in a static way often referenced as
-"RPythonic".
-
-.. _Starkiller: http://www.python.org/pycon/dc2004/papers/1/paper.pdf
-.. _ShedSkin: http://shed-skin.blogspot.com/
-
-However, when the PyPy interpreter is started as a Python program, it
-can use all of the Python language until it reaches a certain point in
-time, from which on everything that is being executed must be static.
-That is, during initialization our program is free to use the
-full dynamism of Python, including dynamic code generation.
-
-An example can be found in the current implementation which is quite
-elegant: For the definition of all the opcodes of the Python
-interpreter, the module ``dis`` is imported and used to initialize our
-bytecode interpreter.  (See ``__initclass__`` in
-`pypy/interpreter/pyopcode.py`_).  This
-saves us from adding extra modules to PyPy. The import code is run at
-startup time, and we are allowed to use the CPython builtin import
-function.
-
-After the startup code is finished, all resulting objects, functions,
-code blocks etc. must adhere to certain runtime restrictions which we
-describe further below.  Here is some background for why this is so:
-during translation, a whole program analysis ("type inference") is
-performed, which makes use of the restrictions defined in RPython. This
-enables the code generator to emit efficient machine level replacements
-for pure integer objects, for instance.
-
-Restricted Python
-=================
-
-RPython Definition, not
------------------------
-
-The list and exact details of the "RPython" restrictions are a somewhat
-evolving topic.  In particular, we have no formal language definition
-as we find it more practical to discuss and evolve the set of
-restrictions while working on the whole program analysis.  If you
-have any questions about the restrictions below then please feel
-free to mail us at pypy-dev at codespeak net.
-
-.. _`wrapped object`: coding-guide.html#wrapping-rules
-
-Flow restrictions
--------------------------
-
-**variables**
-
-  variables should contain values of at most one type as described in
-  `Object restrictions`_ at each control flow point, that means for
-  example that joining control paths using the same variable to
-  contain both a string and an int must be avoided.  It is allowed to
-  mix None (basically with the role of a null pointer) with many other
-  types: `wrapped objects`, class instances, lists, dicts, strings, etc.
-  but *not* with int and floats.
-
-**constants**
-
-  all module globals are considered constants.  Their binding must not
-  be changed at run-time.  Moreover, global (i.e. prebuilt) lists and
-  dictionaries are supposed to be immutable: modifying e.g. a global
-  list will give inconsistent results.  However, global instances don't
-  have this restriction, so if you need mutable global state, store it
-  in the attributes of some prebuilt singleton instance.
-
-**control structures**
-
-  all allowed but yield, ``for`` loops restricted to builtin types
-
-**range**
-
-  ``range`` and ``xrange`` are identical. ``range`` does not necessarily create an array,
-  only if the result is modified. It is allowed everywhere and completely
-  implemented. The only visible difference to CPython is the inaccessibility
-  of the ``xrange`` fields start, stop and step.
-
-**definitions**
-
-  run-time definition of classes or functions is not allowed.
-
-**generators**
-
-  generators are not supported.
-
-**exceptions**
-
-+ fully supported
-+ see below `Exception rules`_ for restrictions on exceptions raised by built-in operations
-
-
-Object restrictions
--------------------------
-
-We are using
-
-**integer, float, boolean**
-
-  works.
-
-**strings**
-
-  a lot of, but not all string methods are supported.  Indexes can be
-  negative.  In case they are not, then you get slightly more efficient
-  code if the translator can prove that they are non-negative.  When
-  slicing a string it is necessary to prove that the slice start and
-  stop indexes are non-negative.
-
-**tuples**
-
-  no variable-length tuples; use them to store or return pairs or n-tuples of
-  values. Each combination of types for elements and length constitute a separate
-  and not mixable type.
-
-**lists**
-
-  lists are used as an allocated array.  Lists are over-allocated, so list.append()
-  is reasonably fast.  Negative or out-of-bound indexes are only allowed for the
-  most common operations, as follows:
-
-  - *indexing*:
-    positive and negative indexes are allowed. Indexes are checked when requested
-    by an IndexError exception clause.
-  
-  - *slicing*:
-    the slice start must be within bounds. The stop doesn't need to, but it must
-    not be smaller than the start.  All negative indexes are disallowed, except for
-    the [:-1] special case.  No step.
-
-  - *other operators*:
-    ``+``, ``+=``, ``in``, ``*``, ``*=``, ``==``, ``!=`` work as expected.
-
-  - *methods*:
-    append, index, insert, extend, reverse, pop.  The index used in pop() follows
-    the same rules as for *indexing* above.  The index used in insert() must be within
-    bounds and not negative.
-
-**dicts**
-
-  dicts with a unique key type only, provided it is hashable. 
-  String keys have been the only allowed key types for a while, but this was generalized. 
-  After some re-optimization,
-  the implementation could safely decide that all string dict keys should be interned.
-
-
-**list comprehensions**
-
-  may be used to create allocated, initialized arrays.
-  After list over-allocation was introduced, there is no longer any restriction.
-
-**functions**
-
-+ statically called functions may use defaults and a variable number of
-  arguments (which may be passed as a list instead of a tuple, so write code
-  that does not depend on it being a tuple).
-
-+ dynamic dispatch enforces the use of signatures that are equal for all
-  possible called functions, or at least "compatible enough".  This
-  concerns mainly method calls, when the method is overridden or in any
-  way given different definitions in different classes.  It also concerns
-  the less common case of explicitly manipulated function objects.
-  Describing the exact compatibility rules is rather involved (but if you
-  break them, you should get explicit errors from the rtyper and not
-  obscure crashes.)
-
-**builtin functions**
-
-  A number of builtin functions can be used.  The precise set can be
-  found in `pypy/annotation/builtin.py`_ (see ``def builtin_xxx()``).
-  Some builtin functions may be limited in what they support, though.
-
-  ``int, float, str, ord, chr``... are available as simple conversion
-  functions.  Note that ``int, float, str``... have a special meaning as
-  a type inside of isinstance only.
-
-**classes**
-
-+ methods and other class attributes do not change after startup
-+ single inheritance is fully supported
-+ simple mixins work too, but the mixed in class needs a ``_mixin_ = True``
-  class attribute
-
-+ classes are first-class objects too
-
-**objects**
-
-  in PyPy, wrapped objects are borrowed from the object space. Just like
-  in CPython, code that needs e.g. a dictionary can use a wrapped dict
-  and the object space operations on it.
-
-This layout makes the number of types to take care about quite limited.
-
-
-Integer Types
--------------------------
-
-While implementing the integer type, we stumbled over the problem that
-integers are quite in flux in CPython right now. Starting on Python 2.2,
-integers mutate into longs on overflow.  However, shifting to the left
-truncates up to 2.3 but extends to longs as well in 2.4.  By contrast, we need
-a way to perform wrap-around machine-sized arithmetic by default, while still
-being able to check for overflow when we need it explicitly.  Moreover, we need
-a consistent behavior before and after translation.
-
-We use normal integers for signed arithmetic.  It means that before
-translation we get longs in case of overflow, and after translation we get a
-silent wrap-around.  Whenever we need more control, we use the following
-helpers (which live the `pypy/rlib/rarithmetic.py`_):
-
-.. _`pypy/rlib/rarithmetic.py`: ../../pypy/rlib/rarithmetic.py
-
-
-**ovfcheck()**
-
-  This special function should only be used with a single arithmetic operation
-  as its argument, e.g. ``z = ovfcheck(x+y)``.  Its intended meaning is to
-  perform the given operation in overflow-checking mode.
-
-  At run-time, in Python, the ovfcheck() function itself checks the result
-  and raises OverflowError if it is a ``long``.  But the code generators use
-  ovfcheck() as a hint: they replace the whole ``ovfcheck(x+y)`` expression
-  with a single overflow-checking addition in C.
-
-**ovfcheck_lshift()**
-
-  ovfcheck_lshift(x, y) is a workaround for ovfcheck(x<<y), because the
-  latter doesn't quite work in Python prior to 2.4, where the expression
-  ``x<<y`` will never return a long if the input arguments are ints.  There is
-  a specific function ovfcheck_lshift() to use instead of some convoluted
-  expression like ``x*2**y`` so that code generators can still recognize it as
-  a single simple operation.
-
-**intmask()**
-
-  This function is used for wrap-around arithmetic.  It returns the lower bits
-  of its argument, masking away anything that doesn't fit in a C "signed long int".
-  Its purpose is, in Python, to convert from a Python ``long`` that resulted from a
-  previous operation back to a Python ``int``.  The code generators ignore
-  intmask() entirely, as they are doing wrap-around signed arithmetic all the time
-  by default anyway.  (We have no equivalent of the "int" versus "long int"
-  distinction of C at the moment and assume "long ints" everywhere.)
-
-**r_uint**
-
-  In a few cases (e.g. hash table manipulation), we need machine-sized unsigned
-  arithmetic.  For these cases there is the r_uint class, which is a pure
-  Python implementation of word-sized unsigned integers that silently wrap
-  around.  The purpose of this class (as opposed to helper functions as above)
-  is consistent typing: both Python and the annotator will propagate r_uint
-  instances in the program and interpret all the operations between them as
-  unsigned.  Instances of r_uint are special-cased by the code generators to
-  use the appropriate low-level type and operations.
-  Mixing of (signed) integers and r_uint in operations produces r_uint that
-  means unsigned results.  To convert back from r_uint to signed integers, use
-  intmask().
-
-
-Exception rules
----------------------
-
-Exceptions are by default not generated for simple cases.::
-
-    #!/usr/bin/python
-
-        lst = [1,2,3,4,5]
-        item = lst[i]    # this code is not checked for out-of-bound access
-
-        try:
-            item = lst[i]
-        except IndexError:
-            # complain
-
-Code with no exception handlers does not raise exceptions (after it has been
-translated, that is.  When you run it on top of CPython, it may raise
-exceptions, of course). By supplying an exception handler, you ask for error
-checking. Without, you assure the system that the operation cannot fail.
-This rule does not apply to *function calls*: any called function is
-assumed to be allowed to raise any exception.
-
-For example::
-
-    x = 5.1
-    x = x + 1.2       # not checked for float overflow
-    try:
-        x = x + 1.2
-    except OverflowError:
-        # float result too big
-
-But::
-
-    z = some_function(x, y)    # can raise any exception
-    try:
-        z = some_other_function(x, y)
-    except IndexError:
-        # only catches explicitly-raised IndexErrors in some_other_function()
-        # other exceptions can be raised, too, and will not be caught here.
-
-The ovfcheck() function described above follows the same rule: in case of
-overflow, it explicitly raises OverflowError, which can be caught anywhere.
-
-Exceptions explicitly raised or re-raised will always be generated.
-
-PyPy is debuggable on top of CPython
-------------------------------------
-
-PyPy has the advantage that it is runnable on standard
-CPython.  That means, we can run all of PyPy with all exception
-handling enabled, so we might catch cases where we failed to
-adhere to our implicit assertions.
-
-.. _`wrapping rules`:
-.. _`wrapped`:
-
-
-RPylint
--------
-
-Pylint_ is a static code checker for Python. Recent versions
-(>=0.13.0) can be run with the ``--rpython-mode`` command line option. This option
-enables the RPython checker which will check for some of the
-restrictions RPython adds on standard Python code (and uses a 
-more aggressive type inference than the one used by default by
-pylint). The full list of checks is available in the documentation of
-Pylint. 
-
-RPylint can be a nice tool to get some information about how much work
-will be needed to convert a piece of Python code to RPython, or to get
-started with RPython.  While this tool will not guarantee that the
-code it checks will translate successfully, it offers a few nice
-advantages over running a translation:
-
-* it is faster and therefore provides feedback faster than  ``translate.py``
-
-* it does not stop at the first problem it finds, so you can get more
-  feedback on the code in one run
-
-* the messages tend to be a bit less cryptic 
-
-* you can easily run it from emacs, vi, eclipse or visual studio.
-
-Note: if pylint is not prepackaged for your OS/distribution, or if
-only an older version is available, you will need to install from
-source. In that case, there are a couple of dependencies,
-logilab-common_ and astng_ that you will need to install too before
-you can use the tool. 
-
-.. _Pylint: http://www.logilab.org/projects/pylint
-.. _logilab-common: http://www.logilab.org/projects/common
-.. _astng: http://www.logilab.org/projects/astng
-
-
-
-Wrapping rules
-==============
-
-Wrapping
---------- 
-
-PyPy is made of Python source code at two levels: there is on the one hand
-*application-level code* that looks like normal Python code, and that
-implements some functionalities as one would expect from Python code (e.g. one
-can give a pure Python implementation of some built-in functions like
-``zip()``).  There is also *interpreter-level code* for the functionalities
-that must more directly manipulate interpreter data and objects (e.g. the main
-loop of the interpreter, and the various object spaces).
-
-Application-level code doesn't see object spaces explicitly: it runs using an
-object space to support the objects it manipulates, but this is implicit.
-There is no need for particular conventions for application-level code.  The
-sequel is only about interpreter-level code.  (Ideally, no application-level
-variable should be called ``space`` or ``w_xxx`` to avoid confusion.)
-
-The ``w_`` prefixes so lavishly used in the example above indicate,
-by PyPy coding convention, that we are dealing with *wrapped* (or *boxed*) objects,
-that is, interpreter-level objects which the object space constructs
-to implement corresponding application-level objects.  Each object
-space supplies ``wrap``, ``unwrap``, ``int_w``, ``interpclass_w``,
-etc. operations that move between the two levels for objects of simple
-built-in types; each object space also implements other Python types
-with suitable interpreter-level classes with some amount of internal
-structure.
-
-For example, an application-level Python ``list``
-is implemented by the `standard object space`_ as an
-instance of ``W_ListObject``, which has an instance attribute
-``wrappeditems`` (an interpreter-level list which contains the
-application-level list's items as wrapped objects).
-
-The rules are described in more details below.
-
-
-Naming conventions
-------------------
-
-* ``space``: the object space is only visible at
-  interpreter-level code, where it is by convention passed around by the name
-  ``space``.
-
-* ``w_xxx``: any object seen by application-level code is an
-  object explicitly managed by the object space.  From the
-  interpreter-level point of view, this is called a *wrapped*
-  object.  The ``w_`` prefix is used for any type of
-  application-level object.
-
-* ``xxx_w``: an interpreter-level container for wrapped
-  objects, for example a list or a dict containing wrapped
-  objects.  Not to be confused with a wrapped object that
-  would be a list or a dict: these are normal wrapped objects,
-  so they use the ``w_`` prefix.
-
-
-Operations on ``w_xxx``
------------------------
-
-The core bytecode interpreter considers wrapped objects as black boxes.
-It is not allowed to inspect them directly.  The allowed
-operations are all implemented on the object space: they are
-called ``space.xxx()``, where ``xxx`` is a standard operation
-name (``add``, ``getattr``, ``call``, ``eq``...). They are documented in the
-`object space document`_.
-
-A short warning: **don't do** ``w_x == w_y`` or ``w_x is w_y``!
-The rationale for this rule is that there is no reason that two
-wrappers are related in any way even if they contain what
-looks like the same object at application-level.  To check
-for equality, use ``space.is_true(space.eq(w_x, w_y))`` or
-even better the short-cut ``space.eq_w(w_x, w_y)`` returning
-directly an interpreter-level bool.  To check for identity,
-use ``space.is_true(space.is_(w_x, w_y))`` or better
-``space.is_w(w_x, w_y)``.
-
-.. _`object space document`: objspace.html#interface
-
-.. _`applevel-exceptions`: 
-
-Application-level exceptions
-----------------------------
-
-Interpreter-level code can use exceptions freely.  However,
-all application-level exceptions are represented as an
-``OperationError`` at interpreter-level.  In other words, all
-exceptions that are potentially visible at application-level
-are internally an ``OperationError``.  This is the case of all
-errors reported by the object space operations
-(``space.add()`` etc.).
-
-To raise an application-level exception::
-
-    raise OperationError(space.w_XxxError, space.wrap("message"))
-
-To catch a specific application-level exception::
-
-    try:
-        ...
-    except OperationError, e:
-        if not e.match(space, space.w_XxxError):
-            raise
-        ...
-
-This construct catches all application-level exceptions, so we
-have to match it against the particular ``w_XxxError`` we are
-interested in and re-raise other exceptions.  The exception
-instance ``e`` holds two attributes that you can inspect:
-``e.w_type`` and ``e.w_value``.  Do not use ``e.w_type`` to
-match an exception, as this will miss exceptions that are
-instances of subclasses.
-
-We are thinking about replacing ``OperationError`` with a
-family of common exception classes (e.g. ``AppKeyError``,
-``AppIndexError``...) so that we can more easily catch them.
-The generic ``AppError`` would stand for all other
-application-level classes.
-
-
-.. _`modules`:
-
-Modules in PyPy
-===============
-
-Modules visible from application programs are imported from
-interpreter or application level files.  PyPy reuses almost all python
-modules of CPython's standard library, currently from version 2.5.2.  We
-sometimes need to `modify modules`_ and - more often - regression tests
-because they rely on implementation details of CPython.
-
-If we don't just modify an original CPython module but need to rewrite
-it from scratch we put it into `lib_pypy/`_ as a pure application level
-module.
-
-When we need access to interpreter-level objects we put the module into
-`pypy/module`_.  Such modules use a `mixed module mechanism`_
-which makes it convenient to use both interpreter- and application-level parts
-for the implementation.  Note that there is no extra facility for
-pure-interpreter level modules, you just write a mixed module and leave the
-application-level part empty.
-
-Determining the location of a module implementation
----------------------------------------------------
-
-You can interactively find out where a module comes from, when running py.py.
-here are examples for the possible locations::
-
-    >>>> import sys
-    >>>> sys.__file__
-    '/home/hpk/pypy-dist/pypy/module/sys/*.py'
-
-    >>>> import operator
-    >>>> operator.__file__
-    '/home/hpk/pypy-dist/lib_pypy/operator.py'
-
-    >>>> import opcode
-    >>>> opcode.__file__
-    '/home/hpk/pypy-dist/lib-python/modified-2.5.2/opcode.py'
-
-    >>>> import os
-    faking <type 'posix.stat_result'>
-    faking <type 'posix.statvfs_result'>
-    >>>> os.__file__
-    '/home/hpk/pypy-dist/lib-python/2.5.2/os.py'
-    >>>>
-
-Module directories / Import order
----------------------------------
-
-Here is the order in which PyPy looks up Python modules:
-
-*pypy/modules*
-
-    mixed interpreter/app-level builtin modules, such as
-    the ``sys`` and ``__builtin__`` module.
-
-*contents of PYTHONPATH*
-
-    lookup application level modules in each of the ``:`` separated
-    list of directories, specified in the ``PYTHONPATH`` environment
-    variable.
-
-*lib_pypy/*
-
-    contains pure Python reimplementation of modules.
-
-*lib-python/modified-2.5.2/*
-
-    The files and tests that we have modified from the CPython library.
-
-*lib-python/2.5.2/*
-
-    The unmodified CPython library. **Never ever check anything in there**.
-
-.. _`modify modules`:
-
-Modifying a CPython library module or regression test
--------------------------------------------------------
-
-Although PyPy is very compatible with CPython we sometimes need
-to change modules contained in our copy of the standard library,
-often due to the fact that PyPy works with all new-style classes
-by default and CPython has a number of places where it relies
-on some classes being old-style.
-
-If you want to change a module or test contained in ``lib-python/2.5.2``
-then make sure that you copy the file to our ``lib-python/modified-2.5.2``
-directory first.  In subversion commandline terms this reads::
-
-    svn cp lib-python/2.5.2/somemodule.py lib-python/modified-2.5.2/
-
-and subsequently you edit and commit
-``lib-python/modified-2.5.2/somemodule.py``.  This copying operation is
-important because it keeps the original CPython tree clean and makes it
-obvious what we had to change.
-
-.. _`mixed module mechanism`:
-.. _`mixed modules`:
-
-Implementing a mixed interpreter/application level Module
----------------------------------------------------------
-
-If a module needs to access PyPy's interpreter level
-then it is implemented as a mixed module.
-
-Mixed modules are directories in `pypy/module`_ with an  `__init__.py`
-file containing specifications where each name in a module comes from.
-Only specified names will be exported to a Mixed Module's applevel
-namespace.
-
-Sometimes it is necessary to really write some functions in C (or
-whatever target language). See `rffi`_ and `external functions
-documentation`_ for details. The latter approach is cumbersome and
-being phased out and the former has currently quite a few rough edges.
-
-.. _`rffi`: rffi.html
-.. _`external functions documentation`: translation.html#extfunccalls
-
-application level definitions
-.............................
-
-Application level specifications are found in the `appleveldefs`
-dictionary found in ``__init__.py`` files of directories in ``pypy/module``.
-For example, in `pypy/module/__builtin__/__init__.py`_ you find the following
-entry specifying where ``__builtin__.locals`` comes from::
-
-     ...
-     'locals'        : 'app_inspect.locals',
-     ...
-
-The ``app_`` prefix indicates that the submodule ``app_inspect`` is
-interpreted at application level and the wrapped function value for ``locals``
-will be extracted accordingly.
-
-interpreter level definitions
-.............................
-
-Interpreter level specifications are found in the ``interpleveldefs``
-dictionary found in ``__init__.py`` files of directories in ``pypy/module``.
-For example, in `pypy/module/__builtin__/__init__.py`_ the following
-entry specifies where ``__builtin__.len`` comes from::
-
-     ...
-     'len'       : 'operation.len',
-     ...
-
-The ``operation`` submodule lives at interpreter level and ``len``
-is expected to be exposable to application level.  Here is
-the definition for ``operation.len()``::
-
-    def len(space, w_obj):
-        "len(object) -> integer\n\nReturn the number of items of a sequence or mapping."
-        return space.len(w_obj)
-
-Exposed interpreter level functions usually take a ``space`` argument
-and some wrapped values (see `wrapping rules`_) .
-
-You can also use a convenient shortcut in ``interpleveldefs`` dictionaries:
-namely an expression in parentheses to specify an interpreter level
-expression directly (instead of pulling it indirectly from a file)::
-
-    ...
-    'None'          : '(space.w_None)',
-    'False'         : '(space.w_False)',
-    ...
-
-The interpreter level expression has a ``space`` binding when
-it is executed.
-
-Adding an entry under pypy/module (e.g. mymodule) entails automatic
-creation of a new config option (such as --withmod-mymodule and
---withoutmod-mymodule (the latter being the default)) for py.py and
-translate.py.
-
-Testing modules in ``lib_pypy/``
---------------------------------
-
-You can go to the `lib_pypy/pypy_test/`_ directory and invoke the testing tool
-("py.test" or "python ../../pypy/test_all.py") to run tests against the
-lib_pypy hierarchy.  Note, that tests in `lib_pypy/pypy_test/`_ are allowed
-and encouraged to let their tests run at interpreter level although
-`lib_pypy/`_ modules eventually live at PyPy's application level.
-This allows us to quickly test our python-coded reimplementations
-against CPython.
-
-Testing modules in ``pypy/module``
-----------------------------------
-
-Simply change to ``pypy/module`` or to a subdirectory and `run the
-tests as usual`_.
-
-
-Testing modules in ``lib-python``
------------------------------------
-
-In order to let CPython's regression tests run against PyPy
-you can switch to the `lib-python/`_ directory and run
-the testing tool in order to start compliance tests.
-(XXX check windows compatibility for producing test reports).
-
-Naming conventions and directory layout
-===========================================
-
-Directory and File Naming
--------------------------
-
-- directories/modules/namespaces are always **lowercase**
-
-- never use plural names in directory and file names
-
-- ``__init__.py`` is usually empty except for
-  ``pypy/objspace/*`` and ``pypy/module/*/__init__.py``.
-
-- don't use more than 4 directory nesting levels
-
-- keep filenames concise and completion-friendly.
-
-Naming of python objects
-------------------------
-
-- class names are **CamelCase**
-
-- functions/methods are lowercase and ``_`` separated
-
-- objectspace classes are spelled ``XyzObjSpace``. e.g.
-
-  - StdObjSpace
-  - FlowObjSpace
-
-- at interpreter level and in ObjSpace all boxed values
-  have a leading ``w_`` to indicate "wrapped values".  This
-  includes w_self.  Don't use ``w_`` in application level
-  python only code.
-
-Committing & Branching to the repository
------------------------------------------------------
-
-- write good log messages because several people
-  are reading the diffs.
-
-- if you add (text/py) files to the repository then please run
-  pypy/tool/fixeol in that directory.  This will make sure
-  that the property 'svn:eol-style' is set to native which
-  allows checkin/checkout in native line-ending format.
-
-- branching (aka "svn copy") of source code should usually
-  happen at ``svn/pypy/trunk`` level in order to have a full
-  self-contained pypy checkout for each branch.   For branching
-  a ``try1`` branch you would for example do::
-
-    svn cp http://codespeak.net/svn/pypy/trunk \
-           http://codespeak.net/svn/pypy/branch/try1
-
-  This allows to checkout the ``try1`` branch and receive a
-  self-contained working-copy for the branch.   Note that
-  branching/copying is a cheap operation with subversion, as it
-  takes constant time irrespective of the size of the tree.
-
-- To learn more about how to use subversion read `this document`_.
-
-.. _`this document`: svn-help.html
-
-
-
-.. _`using development tracker`:
-
-Using the development bug/feature tracker
-=========================================
-
-We have a `development tracker`_, based on Richard Jones'
-`roundup`_ application.  You can file bugs,
-feature requests or see what's going on
-for the next milestone, both from an E-Mail and from a
-web interface.
-
-use your codespeak login or register
-------------------------------------
-
-If you already committed to the PyPy source code, chances
-are that you can simply use your codespeak login that
-you use for subversion or for shell access.
-
-If you are not a commiter then you can still `register with
-the tracker`_ easily.
-
-modifying Issues from svn commit messages
------------------------------------------
-
-If you are committing something related to
-an issue in the development tracker you
-can correlate your login message to a tracker
-item by following these rules:
-
-- put the content of ``issueN STATUS`` on a single
-  new line
-
-- `N` must be an existing issue number from the `development tracker`_.
-
-- STATUS is one of::
-
-    unread
-    chatting
-    in-progress
-    testing
-    duplicate
-    resolved
-
-.. _`register with the tracker`: https://codespeak.net/issue/pypy-dev/user?@template=register
-.. _`development tracker`: http://codespeak.net/issue/pypy-dev/
-.. _`roundup`: http://roundup.sf.net
-
-
-.. _`testing in PyPy`:
-.. _`test-design`: 
-
-Testing in PyPy
-===============
-
-Our tests are based on the new `py.test`_ tool which lets you write
-unittests without boilerplate.  All tests of modules
-in a directory usually reside in a subdirectory **test**.  There are
-basically two types of unit tests:
-
-- **Interpreter Level tests**. They run at the same level as PyPy's
-  interpreter.
-
-- **Application Level tests**. They run at application level which means
-  that they look like straight python code but they are interpreted by PyPy.
-
-Both types of tests need an `objectspace`_ they can run with (the interpreter
-dispatches operations on objects to an objectspace).  If you run a test you
-can usually give the '-o' switch to select an object space.  E.g. '-o thunk'
-will select the thunk object space. The default is the `Standard Object Space`_
-which aims to implement unmodified Python semantics.
-
-.. _`standard object space`: objspace.html#standard-object-space
-.. _`objectspace`: objspace.html
-.. _`py.test`: http://codespeak.net/py/current/doc/test.html
-
-Interpreter level tests
------------------------
-
-You can write test functions and methods like this::
-
-    def test_something(space):
-        # use space ...
-
-    class TestSomething:
-        def test_some(self):
-            # use 'self.space' here
-
-Note that the prefix `test` for test functions and `Test` for test
-classes is mandatory.  In both cases you can import Python modules at
-module global level and use plain 'assert' statements thanks to the
-usage of the `py.test`_ tool.
-
-Application Level tests
------------------------
-
-For testing the conformance and well-behavedness of PyPy it
-is often sufficient to write "normal" application-level
-Python code that doesn't need to be aware of any particular
-coding style or restrictions.  If we have a choice we often
-use application level tests which usually look like this::
-
-    def app_test_something():
-        # application level test code
-
-    class AppTestSomething:
-        def test_this(self):
-            # application level test code
-
-These application level test functions will run on top
-of PyPy, i.e. they have no access to interpreter details.
-You cannot use imported modules from global level because
-they are imported at interpreter-level while you test code
-runs at application level. If you need to use modules
-you have to import them within the test function.
-
-Another possibility to pass in data into the AppTest is to use
-the ``setup_class`` method of the AppTest. All wrapped objects that are
-attached to the class there and start with ``w_`` can be accessed
-via self (but without the ``w_``) in the actual test method. An example::
-
-    from pypy.objspace.std import StdObjSpace 
-
-    class AppTestErrno: 
-        def setup_class(cls): 
-            cls.space = StdObjSpace()
-            cls.w_d = cls.space.wrap({"a": 1, "b": 2})
-
-        def test_dict(self):
-            assert self.d["a"] == 1
-            assert self.d["b"] == 2
-
-.. _`run the tests as usual`:
-
-Command line tool test_all
---------------------------
-
-You can run almost all of PyPy's tests by invoking::
-
-  python test_all.py file_or_directory
-
-which is a synonym for the general `py.test`_ utility
-located in the ``pypy`` directory.  For switches to
-modify test execution pass the ``-h`` option.
-
-Test conventions
-----------------
-
-- adding features requires adding appropriate tests.  (It often even
-  makes sense to first write the tests so that you are sure that they
-  actually can fail.)
-
-- All over the pypy source code there are test/ directories
-  which contain unittests.  Such scripts can usually be executed
-  directly or are collectively run by pypy/test_all.py
-
-- each test directory needs a copy of pypy/tool/autopath.py which
-  upon import will make sure that sys.path contains the directory
-  where 'pypy' is in.
-
-.. _`change documentation and website`:
-
-Changing documentation and website
-==================================
-
-documentation/website files in your local checkout
----------------------------------------------------
-
-Most of the PyPy's documentation and website is kept in
-`pypy/documentation` and `pypy/documentation/website` respectively.
-You can simply edit or add '.txt' files which contain ReST-markuped
-files.  Here is a `ReST quickstart`_ but you can also just look
-at the existing documentation and see how things work.
-
-.. _`ReST quickstart`: http://docutils.sourceforge.net/docs/rst/quickref.html
-
-Automatically test documentation/website changes
-------------------------------------------------
-
-.. _`docutils home page`:
-.. _`docutils`: http://docutils.sourceforge.net/
-
-We automatically check referential integrity and ReST-conformance.  In order to
-run the tests you need docutils_ installed.  Then go to the local checkout
-of the documentation directory and run the tests::
-
-    cd .../pypy/documentation
-    python ../test_all.py
-
-If you see no failures chances are high that your modifications at least
-don't produce ReST-errors or wrong local references.  A side effect of running
-the tests is that you have `.html` files in the documentation directory
-which you can point your browser to!
-
-Additionally, if you also want to check for remote references inside
-the documentation issue::
-
-    python ../test_all.py --checkremote
-
-which will check that remote URLs are reachable.
-
-
-.. include:: _ref.txt

diff --git a/pypy/doc/config/objspace.usemodules._ssl.txt b/pypy/doc/config/objspace.usemodules._ssl.txt
deleted file mode 100644
--- a/pypy/doc/config/objspace.usemodules._ssl.txt
+++ /dev/null
@@ -1,1 +0,0 @@
-Use the '_ssl' module, which implements SSL socket operations.

diff --git a/pypy/doc/config/objspace.std.withrope.txt b/pypy/doc/config/objspace.std.withrope.txt
deleted file mode 100644
--- a/pypy/doc/config/objspace.std.withrope.txt
+++ /dev/null
@@ -1,7 +0,0 @@
-Enable ropes to be the default string implementation.
-
-See the section in `Standard Interpreter Optimizations`_ for more details.
-
-.. _`Standard Interpreter Optimizations`: ../interpreter-optimizations.html#ropes
-
-

diff --git a/pypy/rpython/lltypesystem/ll2ctypes.py b/pypy/rpython/lltypesystem/ll2ctypes.py
--- a/pypy/rpython/lltypesystem/ll2ctypes.py
+++ b/pypy/rpython/lltypesystem/ll2ctypes.py
@@ -1028,7 +1028,10 @@
             funcname, place))
 
     # get_ctypes_type() can raise NotImplementedError too
-    cfunc.argtypes = [get_ctypes_type(T) for T in FUNCTYPE.ARGS
+    from pypy.rpython.lltypesystem import rffi
+    cfunc.argtypes = [get_ctypes_type(T) if T is not rffi.VOIDP
+                                         else ctypes.c_void_p
+                      for T in FUNCTYPE.ARGS
                       if not T is lltype.Void]
     if FUNCTYPE.RESULT is lltype.Void:
         cfunc.restype = None

diff --git a/pypy/doc/config/translation.linkerflags.txt b/pypy/doc/config/translation.linkerflags.txt
deleted file mode 100644
--- a/pypy/doc/config/translation.linkerflags.txt
+++ /dev/null
@@ -1,1 +0,0 @@
-Experimental. Specify extra flags to pass to the linker.

diff --git a/py/_compat/dep_doctest.py b/py/_compat/dep_doctest.py
deleted file mode 100644
--- a/py/_compat/dep_doctest.py
+++ /dev/null
@@ -1,5 +0,0 @@
-import py
-
-py.log._apiwarn("1.1", "py.compat.doctest deprecated, use standard library version.", 
-stacklevel="apipkg")
-doctest = py.std.doctest

diff --git a/pypy/doc/config/objspace.usemodules._file.txt b/pypy/doc/config/objspace.usemodules._file.txt
deleted file mode 100644
--- a/pypy/doc/config/objspace.usemodules._file.txt
+++ /dev/null
@@ -1,4 +0,0 @@
-Use the '_file' module. It is an internal module that contains helper
-functionality for the builtin ``file`` type.
-
-.. internal

diff --git a/pypy/doc/_ref.txt b/pypy/doc/_ref.txt
deleted file mode 100644
--- a/pypy/doc/_ref.txt
+++ /dev/null
@@ -1,107 +0,0 @@
-.. _`demo/`: ../../demo
-.. _`demo/pickle_coroutine.py`: ../../demo/pickle_coroutine.py
-.. _`lib-python/`: ../../lib-python
-.. _`lib-python/2.5.2/dis.py`: ../../lib-python/2.5.2/dis.py
-.. _`annotation/`:
-.. _`pypy/annotation`: ../../pypy/annotation
-.. _`pypy/annotation/annrpython.py`: ../../pypy/annotation/annrpython.py
-.. _`annotation/binaryop.py`: ../../pypy/annotation/binaryop.py
-.. _`pypy/annotation/builtin.py`: ../../pypy/annotation/builtin.py
-.. _`pypy/annotation/model.py`: ../../pypy/annotation/model.py
-.. _`bin/`: ../../pypy/bin
-.. _`config/`: ../../pypy/config
-.. _`pypy/config/pypyoption.py`: ../../pypy/config/pypyoption.py
-.. _`doc/`: ../../pypy/doc
-.. _`doc/config/`: ../../pypy/doc/config
-.. _`doc/discussion/`: ../../pypy/doc/discussion
-.. _`interpreter/`:
-.. _`pypy/interpreter`: ../../pypy/interpreter
-.. _`pypy/interpreter/argument.py`: ../../pypy/interpreter/argument.py
-.. _`interpreter/astcompiler/`:
-.. _`pypy/interpreter/astcompiler`: ../../pypy/interpreter/astcompiler
-.. _`pypy/interpreter/executioncontext.py`: ../../pypy/interpreter/executioncontext.py
-.. _`pypy/interpreter/function.py`: ../../pypy/interpreter/function.py
-.. _`interpreter/gateway.py`:
-.. _`pypy/interpreter/gateway.py`: ../../pypy/interpreter/gateway.py
-.. _`pypy/interpreter/generator.py`: ../../pypy/interpreter/generator.py
-.. _`pypy/interpreter/mixedmodule.py`: ../../pypy/interpreter/mixedmodule.py
-.. _`pypy/interpreter/module.py`: ../../pypy/interpreter/module.py
-.. _`pypy/interpreter/nestedscope.py`: ../../pypy/interpreter/nestedscope.py
-.. _`pypy/interpreter/pyopcode.py`: ../../pypy/interpreter/pyopcode.py
-.. _`interpreter/pyparser/`:
-.. _`pypy/interpreter/pyparser`: ../../pypy/interpreter/pyparser
-.. _`pypy/interpreter/pyparser/pytokenizer.py`: ../../pypy/interpreter/pyparser/pytokenizer.py
-.. _`pypy/interpreter/pyparser/parser.py`: ../../pypy/interpreter/pyparser/parser.py
-.. _`pypy/interpreter/pyparser/pyparse.py`: ../../pypy/interpreter/pyparser/pyparse.py
-.. _`pypy/interpreter/pyparser/future.py`: ../../pypy/interpreter/pyparser/future.py
-.. _`pypy/interpreter/pyparser/metaparser.py`: ../../pypy/interpreter/pyparser/metaparser.py
-.. _`pypy/interpreter/astcompiler/astbuilder.py`: ../../pypy/interpreter/astcompiler/astbuilder.py
-.. _`pypy/interpreter/astcompiler/optimize.py`: ../../pypy/interpreter/astcompiler/optimize.py
-.. _`pypy/interpreter/astcompiler/codegen.py`: ../../pypy/interpreter/astcompiler/codegen.py
-.. _`pypy/interpreter/astcompiler/tools/asdl_py.py`: ../../pypy/interpreter/astcompiler/tools/asdl_py.py
-.. _`pypy/interpreter/astcompiler/tools/Python.asdl`: ../../pypy/interpreter/astcompiler/tools/Python.asdl
-.. _`pypy/interpreter/astcompiler/assemble.py`: ../../pypy/interpreter/astcompiler/assemble.py
-.. _`pypy/interpreter/astcompiler/symtable.py`: ../../pypy/interpreter/astcompiler/symtable.py
-.. _`pypy/interpreter/astcompiler/asthelpers.py`: ../../pypy/interpreter/astcompiler/asthelpers.py
-.. _`pypy/interpreter/astcompiler/ast.py`: ../../pypy/interpreter/astcompiler/ast.py
-.. _`pypy/interpreter/typedef.py`: ../../pypy/interpreter/typedef.py
-.. _`lib/`:
-.. _`lib_pypy/`: ../../lib_pypy
-.. _`lib/distributed/`: ../../lib_pypy/distributed
-.. _`lib_pypy/stackless.py`: ../../lib_pypy/stackless.py
-.. _`lib_pypy/pypy_test/`: ../../lib_pypy/pypy_test
-.. _`module/`:
-.. _`pypy/module`:
-.. _`pypy/module/`: ../../pypy/module
-.. _`pypy/module/__builtin__/__init__.py`: ../../pypy/module/__builtin__/__init__.py
-.. _`pypy/module/_stackless/test/test_clonable.py`: ../../pypy/module/_stackless/test/test_clonable.py
-.. _`pypy/module/_stackless/test/test_composable_coroutine.py`: ../../pypy/module/_stackless/test/test_composable_coroutine.py
-.. _`objspace/`:
-.. _`pypy/objspace`: ../../pypy/objspace
-.. _`objspace/dump.py`: ../../pypy/objspace/dump.py
-.. _`objspace/flow/`: ../../pypy/objspace/flow
-.. _`objspace/std/`:
-.. _`pypy/objspace/std`: ../../pypy/objspace/std
-.. _`objspace/taint.py`: ../../pypy/objspace/taint.py
-.. _`objspace/thunk.py`:
-.. _`pypy/objspace/thunk.py`: ../../pypy/objspace/thunk.py
-.. _`objspace/trace.py`:
-.. _`pypy/objspace/trace.py`: ../../pypy/objspace/trace.py
-.. _`pypy/rlib`:
-.. _`rlib/`: ../../pypy/rlib
-.. _`pypy/rlib/rarithmetic.py`: ../../pypy/rlib/rarithmetic.py
-.. _`pypy/rlib/test`: ../../pypy/rlib/test
-.. _`pypy/rpython`:
-.. _`pypy/rpython/`:
-.. _`rpython/`: ../../pypy/rpython
-.. _`rpython/lltypesystem/`: ../../pypy/rpython/lltypesystem
-.. _`pypy/rpython/lltypesystem/lltype.py`:
-.. _`rpython/lltypesystem/lltype.py`: ../../pypy/rpython/lltypesystem/lltype.py
-.. _`rpython/memory/`: ../../pypy/rpython/memory
-.. _`rpython/memory/gc/generation.py`: ../../pypy/rpython/memory/gc/generation.py
-.. _`rpython/memory/gc/hybrid.py`: ../../pypy/rpython/memory/gc/hybrid.py
-.. _`rpython/memory/gc/markcompact.py`: ../../pypy/rpython/memory/gc/markcompact.py
-.. _`rpython/memory/gc/marksweep.py`: ../../pypy/rpython/memory/gc/marksweep.py
-.. _`rpython/memory/gc/semispace.py`: ../../pypy/rpython/memory/gc/semispace.py
-.. _`rpython/ootypesystem/`: ../../pypy/rpython/ootypesystem
-.. _`rpython/ootypesystem/ootype.py`: ../../pypy/rpython/ootypesystem/ootype.py
-.. _`rpython/rint.py`: ../../pypy/rpython/rint.py
-.. _`rpython/rlist.py`: ../../pypy/rpython/rlist.py
-.. _`rpython/rmodel.py`: ../../pypy/rpython/rmodel.py
-.. _`pypy/rpython/rtyper.py`: ../../pypy/rpython/rtyper.py
-.. _`pypy/rpython/test/test_llinterp.py`: ../../pypy/rpython/test/test_llinterp.py
-.. _`pypy/test_all.py`: ../../pypy/test_all.py
-.. _`tool/`: ../../pypy/tool
-.. _`tool/algo/`: ../../pypy/tool/algo
-.. _`tool/pytest/`: ../../pypy/tool/pytest
-.. _`pypy/translator`:
-.. _`translator/`: ../../pypy/translator
-.. _`translator/backendopt/`: ../../pypy/translator/backendopt
-.. _`translator/c/`: ../../pypy/translator/c
-.. _`translator/cli/`: ../../pypy/translator/cli
-.. _`translator/goal/`: ../../pypy/translator/goal
-.. _`pypy/translator/goal/targetnopstandalone.py`: ../../pypy/translator/goal/targetnopstandalone.py
-.. _`translator/jvm/`: ../../pypy/translator/jvm
-.. _`translator/stackless/`: ../../pypy/translator/stackless
-.. _`translator/tool/`: ../../pypy/translator/tool
-.. _`translator/js/`: http://codespeak.net/svn/pypy/branch/oo-jit/pypy/translator/js/

diff --git a/pypy/doc/config/objspace.usemodules._ffi.txt b/pypy/doc/config/objspace.usemodules._ffi.txt
deleted file mode 100644
--- a/pypy/doc/config/objspace.usemodules._ffi.txt
+++ /dev/null
@@ -1,1 +0,0 @@
-Applevel interface to libffi.  It is more high level than _rawffi, and most importantly it is JIT friendly

diff --git a/py/bin/win32/py.svnwcrevert.cmd b/py/bin/win32/py.svnwcrevert.cmd
deleted file mode 100644
--- a/py/bin/win32/py.svnwcrevert.cmd
+++ /dev/null
@@ -1,2 +0,0 @@
- at echo off
-python "%~dp0\..\py.svnwcrevert" %*
\ No newline at end of file

diff --git a/pypy/doc/config/objspace.usemodules.itertools.txt b/pypy/doc/config/objspace.usemodules.itertools.txt
deleted file mode 100644
--- a/pypy/doc/config/objspace.usemodules.itertools.txt
+++ /dev/null
@@ -1,2 +0,0 @@
-Use the interp-level 'itertools' module.
-If not included, a slower app-level version of itertools is used.

diff --git a/pypy/doc/config/translation.backendopt.inline_heuristic.txt b/pypy/doc/config/translation.backendopt.inline_heuristic.txt
deleted file mode 100644
--- a/pypy/doc/config/translation.backendopt.inline_heuristic.txt
+++ /dev/null
@@ -1,4 +0,0 @@
-Internal option. Switch to a different weight heuristic for inlining.
-This is for basic inlining (:config:`translation.backendopt.inline`).
-
-.. internal

diff --git a/pypy/doc/config/translation.jit.txt b/pypy/doc/config/translation.jit.txt
deleted file mode 100644
--- a/pypy/doc/config/translation.jit.txt
+++ /dev/null
@@ -1,2 +0,0 @@
-Enable the JIT generator, for targets that have JIT support.
-Experimental so far.

diff --git a/py/_plugin/pytest_pastebin.py b/py/_plugin/pytest_pastebin.py
deleted file mode 100644
--- a/py/_plugin/pytest_pastebin.py
+++ /dev/null
@@ -1,83 +0,0 @@
-"""
-submit failure or test session information to a pastebin service. 
-
-Usage
-----------
-
-**Creating a URL for each test failure**::
-
-    py.test --pastebin=failed 
-
-This will submit test run information to a remote Paste service and
-provide a URL for each failure.  You may select tests as usual or add
-for example ``-x`` if you only want to send one particular failure. 
-
-**Creating a URL for a whole test session log**::
-
-    py.test --pastebin=all 
-
-Currently only pasting to the http://paste.pocoo.org service is implemented.  
-
-"""
-import py, sys
-
-class url:
-    base = "http://paste.pocoo.org"
-    xmlrpc = base + "/xmlrpc/"
-    show = base + "/show/"
-
-def pytest_addoption(parser):
-    group = parser.getgroup("terminal reporting")
-    group._addoption('--pastebin', metavar="mode",
-        action='store', dest="pastebin", default=None, 
-        type="choice", choices=['failed', 'all'], 
-        help="send failed|all info to Pocoo pastebin service.")
-
-def pytest_configure(__multicall__, config):
-    import tempfile
-    __multicall__.execute()
-    if config.option.pastebin == "all":
-        config._pastebinfile = tempfile.TemporaryFile('w+')
-        tr = config.pluginmanager.getplugin('terminalreporter')
-        oldwrite = tr._tw.write 
-        def tee_write(s, **kwargs):
-            oldwrite(s, **kwargs)
-            config._pastebinfile.write(str(s))
-        tr._tw.write = tee_write 
-
-def pytest_unconfigure(config): 
-    if hasattr(config, '_pastebinfile'):
-        config._pastebinfile.seek(0)
-        sessionlog = config._pastebinfile.read()
-        config._pastebinfile.close()
-        del config._pastebinfile
-        proxyid = getproxy().newPaste("python", sessionlog)
-        pastebinurl = "%s%s" % (url.show, proxyid)
-        sys.stderr.write("pastebin session-log: %s\n" % pastebinurl)
-        tr = config.pluginmanager.getplugin('terminalreporter')
-        del tr._tw.__dict__['write']
-        
-def getproxy():
-    return py.std.xmlrpclib.ServerProxy(url.xmlrpc).pastes
-
-def pytest_terminal_summary(terminalreporter):
-    if terminalreporter.config.option.pastebin != "failed":
-        return
-    tr = terminalreporter
-    if 'failed' in tr.stats:
-        terminalreporter.write_sep("=", "Sending information to Paste Service")
-        if tr.config.option.debug:
-            terminalreporter.write_line("xmlrpcurl: %s" %(url.xmlrpc,))
-        serverproxy = getproxy()
-        for rep in terminalreporter.stats.get('failed'):
-            try:
-                msg = rep.longrepr.reprtraceback.reprentries[-1].reprfileloc
-            except AttributeError:
-                msg = tr._getfailureheadline(rep)
-            tw = py.io.TerminalWriter(stringio=True)
-            rep.toterminal(tw)
-            s = tw.stringio.getvalue()
-            assert len(s)
-            proxyid = serverproxy.newPaste("python", s)
-            pastebinurl = "%s%s" % (url.show, proxyid)
-            tr.write_line("%s --> %s" %(msg, pastebinurl))

diff --git a/pypy/doc/config/objspace.name.txt b/pypy/doc/config/objspace.name.txt
deleted file mode 100644
--- a/pypy/doc/config/objspace.name.txt
+++ /dev/null
@@ -1,16 +0,0 @@
-Determine which `Object Space`_ to use. The `Standard Object Space`_ gives the
-normal Python semantics, the others are `Object Space Proxies`_ giving
-additional features (except the Flow Object Space which is not intended
-for normal usage):
-
-  * thunk_: The thunk object space adds lazy evaluation to PyPy.
-  * taint_: The taint object space adds soft security features.
-  * dump_:  Using this object space results in the dumping of all operations
-    to a log.
-
-.. _`Object Space`: ../objspace.html
-.. _`Object Space Proxies`: ../objspace-proxies.html
-.. _`Standard Object Space`: ../objspace.html#standard-object-space
-.. _thunk: ../objspace-proxies.html#thunk
-.. _taint: ../objspace-proxies.html#taint
-.. _dump: ../objspace-proxies.html#dump

diff --git a/pypy/doc/config/objspace.std.methodcachesizeexp.txt b/pypy/doc/config/objspace.std.methodcachesizeexp.txt
deleted file mode 100644
--- a/pypy/doc/config/objspace.std.methodcachesizeexp.txt
+++ /dev/null
@@ -1,1 +0,0 @@
-Set the cache size (number of entries) for :config:`objspace.std.withmethodcache`.

diff --git a/pypy/doc/config/objspace.opcodes.CALL_METHOD.txt b/pypy/doc/config/objspace.opcodes.CALL_METHOD.txt
deleted file mode 100644
--- a/pypy/doc/config/objspace.opcodes.CALL_METHOD.txt
+++ /dev/null
@@ -1,10 +0,0 @@
-Enable a pair of bytecodes that speed up method calls.
-See ``pypy.interpreter.callmethod`` for a description.
-
-The goal is to avoid creating the bound method object in the common
-case.  So far, this only works for calls with no keyword, no ``*arg``
-and no ``**arg`` but it would be easy to extend.
-
-For more information, see the section in `Standard Interpreter Optimizations`_.
-
-.. _`Standard Interpreter Optimizations`: ../interpreter-optimizations.html#lookup-method-call-method

diff --git a/pypy/doc/config/objspace.std.withprebuiltint.txt b/pypy/doc/config/objspace.std.withprebuiltint.txt
deleted file mode 100644
--- a/pypy/doc/config/objspace.std.withprebuiltint.txt
+++ /dev/null
@@ -1,5 +0,0 @@
-This option enables the caching of small integer objects (similar to what
-CPython does). The range of which integers are cached can be influenced with
-the :config:`objspace.std.prebuiltintfrom` and
-:config:`objspace.std.prebuiltintto` options.
-

diff --git a/pypy/doc/config/objspace.usemodules.marshal.txt b/pypy/doc/config/objspace.usemodules.marshal.txt
deleted file mode 100644
--- a/pypy/doc/config/objspace.usemodules.marshal.txt
+++ /dev/null
@@ -1,2 +0,0 @@
-Use the 'marshal' module. 
-This module is expected to be working and is included by default.

diff --git a/pypy/doc/config/objspace.std.withsmallint.txt b/pypy/doc/config/objspace.std.withsmallint.txt
deleted file mode 100644
--- a/pypy/doc/config/objspace.std.withsmallint.txt
+++ /dev/null
@@ -1,6 +0,0 @@
-Use "tagged pointers" to represent small enough integer values: Integers that
-fit into 31 bits (respective 63 bits on 64 bit machines) are not represented by
-boxing them in an instance of ``W_IntObject``. Instead they are represented as a
-pointer having the lowest bit set and the rest of the bits used to store the
-value of the integer. This gives a small speedup for integer operations as well
-as better memory behaviour.

diff --git a/pypy/doc/config/translation.backendopt.profile_based_inline_threshold.txt b/pypy/doc/config/translation.backendopt.profile_based_inline_threshold.txt
deleted file mode 100644
--- a/pypy/doc/config/translation.backendopt.profile_based_inline_threshold.txt
+++ /dev/null
@@ -1,2 +0,0 @@
-Weight threshold used to decide whether to inline flowgraphs.
-This is for profile-based inlining (:config:`translation.backendopt.profile_based_inline`).

diff --git a/pypy/doc/cleanup-todo.txt b/pypy/doc/cleanup-todo.txt
deleted file mode 100644
--- a/pypy/doc/cleanup-todo.txt
+++ /dev/null
@@ -1,30 +0,0 @@
-
-PyPy cleanup areas
-==================
-
-This is a todo list that lists various areas of PyPy that should be cleaned up
-(for whatever reason: less mess, less code duplication, etc).
-
-translation toolchain
----------------------
-
- - low level backends should share more code
- - all backends should have more consistent interfaces
- - geninterp is a hack
- - delegate finding type stuff like vtables etc to GC, cleaner interface for rtti,
-   simplify translator/c/gc.py
- - clean up the tangle of including headers in the C backend
- - make approach for loading modules more sane, mixedmodule capture
-   too many platform dependencies especially for pypy-cli
- - review pdbplus, especially the graph commands, also in the light of
-   https://codespeak.net/issue/pypy-dev/issue303 and the fact that
-   we can have more than one translator/annotator around (with the
-   timeshifter)
-
-interpreter
------------
-
- - review the things implemented at applevel whether they are performance-
-   critical
-
- - review CPython regression test suite, enable running tests, fix bugs

diff --git a/pypy/doc/config/objspace.usepycfiles.txt b/pypy/doc/config/objspace.usepycfiles.txt
deleted file mode 100644
--- a/pypy/doc/config/objspace.usepycfiles.txt
+++ /dev/null
@@ -1,4 +0,0 @@
-If this option is used, then PyPy imports and generates "pyc" files in the
-same way as CPython.  This is true by default and there is not much reason
-to turn it off nowadays.  If off, PyPy never produces "pyc" files and
-ignores any "pyc" file that might already be present.

diff --git a/pypy/doc/config/translation.backendopt.print_statistics.txt b/pypy/doc/config/translation.backendopt.print_statistics.txt
deleted file mode 100644
--- a/pypy/doc/config/translation.backendopt.print_statistics.txt
+++ /dev/null
@@ -1,2 +0,0 @@
-Debugging option. Print statistics about the forest of flowgraphs as they
-go through the various backend optimizations.
\ No newline at end of file

diff --git a/pypy/doc/config/translation.gcremovetypeptr.txt b/pypy/doc/config/translation.gcremovetypeptr.txt
deleted file mode 100644
--- a/pypy/doc/config/translation.gcremovetypeptr.txt
+++ /dev/null
@@ -1,1 +0,0 @@
-If set, save one word in every object.  Framework GC only.

diff --git a/pypy/doc/config/translation.gctransformer.txt b/pypy/doc/config/translation.gctransformer.txt
deleted file mode 100644
--- a/pypy/doc/config/translation.gctransformer.txt
+++ /dev/null
@@ -1,1 +0,0 @@
-internal option

diff --git a/pypy/doc/config/objspace.timing.txt b/pypy/doc/config/objspace.timing.txt
deleted file mode 100644
--- a/pypy/doc/config/objspace.timing.txt
+++ /dev/null
@@ -1,1 +0,0 @@
-timing of various parts of the interpreter (simple profiling)

diff --git a/pypy/module/pyexpat/interp_pyexpat.py b/pypy/module/pyexpat/interp_pyexpat.py
--- a/pypy/module/pyexpat/interp_pyexpat.py
+++ b/pypy/module/pyexpat/interp_pyexpat.py
@@ -1,7 +1,7 @@
 from pypy.interpreter.baseobjspace import Wrappable
 from pypy.interpreter.typedef import TypeDef, GetSetProperty
-from pypy.interpreter.gateway import ObjSpace, W_Root, NoneNotWrapped
-from pypy.interpreter.gateway import interp2app
+from pypy.interpreter.gateway import NoneNotWrapped
+from pypy.interpreter.gateway import interp2app, unwrap_spec
 from pypy.interpreter.error import OperationError
 from pypy.objspace.descroperation import object_setattr
 from pypy.rpython.lltypesystem import rffi, lltype
@@ -325,6 +325,10 @@
         space.wrap(XML_MINOR_VERSION),
         space.wrap(XML_MICRO_VERSION)])
 
+class Cache:
+    def __init__(self, space):
+        self.w_error = space.new_exception_class("pyexpat.ExpatError")
+
 class W_XMLParserType(Wrappable):
 
     def __init__(self, space, parser, w_intern):
@@ -357,6 +361,7 @@
             global_storage.free_nonmoving_id(
                 rffi.cast(lltype.Signed, self.itself))
 
+    @unwrap_spec(flag=int)
     def SetParamEntityParsing(self, space, flag):
         """SetParamEntityParsing(flag) -> success
 Controls parsing of parameter entities (including the external DTD
@@ -365,7 +370,6 @@
 XML_PARAM_ENTITY_PARSING_ALWAYS. Returns true if setting the flag
 was successful."""
         XML_SetParamEntityParsing(self.itself, flag)
-    SetParamEntityParsing.unwrap_spec = ['self', ObjSpace, int]
 
     def UseForeignDTD(self, space, w_flag=True):
         """UseForeignDTD([flag])
@@ -376,7 +380,6 @@
 'flag' defaults to True if not provided."""
         flag = space.is_true(w_flag)
         XML_UseForeignDTD(self.itself, flag)
-    UseForeignDTD.unwrap_spec = ['self', ObjSpace, W_Root]
 
     # Handlers management
 
@@ -499,6 +502,7 @@
         return True
 
 
+    @unwrap_spec(name=str)
     def setattr(self, space, name, w_value):
         if name == "namespace_prefixes":
             XML_SetReturnNSTriplet(self.itself, space.int_w(w_value))
@@ -513,15 +517,15 @@
         return space.call_function(
             object_setattr(space),
             space.wrap(self), space.wrap(name), w_value)
-    setattr.unwrap_spec = ['self', ObjSpace, str, W_Root]
 
     # Parse methods
 
+    @unwrap_spec(data=str, isfinal=bool)
     def Parse(self, space, data, isfinal=False):
         """Parse(data[, isfinal])
 Parse XML data.  `isfinal' should be true at end of input."""
 
-        res = XML_Parse(self.itself, data, len(data), bool(isfinal))
+        res = XML_Parse(self.itself, data, len(data), isfinal)
         if self._exc_info:
             e = self._exc_info
             self._exc_info = None
@@ -531,7 +535,6 @@
             raise exc
         self.flush_character_buffer(space)
         return space.wrap(res)
-    Parse.unwrap_spec = ['self', ObjSpace, str, int]
 
     def ParseFile(self, space, w_file):
         """ParseFile(file)
@@ -540,11 +543,10 @@
         w_data = space.call_method(w_file, 'read')
         data = space.str_w(w_data)
         return self.Parse(space, data, isfinal=True)
-    ParseFile.unwrap_spec = ['self', ObjSpace, W_Root]
 
+    @unwrap_spec(base=str)
     def SetBase(self, space, base):
         XML_SetBase(self.itself, base)
-    SetBase.unwrap_spec = ['self', ObjSpace, str]
 
     def ExternalEntityParserCreate(self, space, w_context, w_encoding=None):
         """ExternalEntityParserCreate(context[, encoding])
@@ -572,7 +574,6 @@
             parser.handlers[i] = self.handlers[i]
 
         return space.wrap(parser)
-    ExternalEntityParserCreate.unwrap_spec = ['self', ObjSpace, W_Root, W_Root]
 
     def flush_character_buffer(self, space):
         if not self.buffer_w:
@@ -593,8 +594,7 @@
         lineno = XML_GetCurrentLineNumber(self.itself)
         colno = XML_GetCurrentColumnNumber(self.itself)
         msg = "%s: line %d, column %d" % (err, lineno, colno)
-        w_module = space.getbuiltinmodule('pyexpat')
-        w_errorcls = space.getattr(w_module, space.wrap('error'))
+        w_errorcls = space.fromcache(Cache).w_error
         w_error = space.call_function(w_errorcls, space.wrap(msg))
         space.setattr(w_error, space.wrap("code"), space.wrap(code))
         space.setattr(w_error, space.wrap("offset"), space.wrap(colno))
@@ -603,21 +603,21 @@
         self.w_error = w_error
         return OperationError(w_errorcls, w_error)
 
-    def descr_ErrorCode(space, self):
+    def descr_ErrorCode(self, space):
         return space.wrap(XML_GetErrorCode(self.itself))
 
-    def descr_ErrorLineNumber(space, self):
+    def descr_ErrorLineNumber(self, space):
         return space.wrap(XML_GetErrorLineNumber(self.itself))
 
-    def descr_ErrorColumnNumber(space, self):
+    def descr_ErrorColumnNumber(self, space):
         return space.wrap(XML_GetErrorColumnNumber(self.itself))
 
-    def descr_ErrorByteIndex(space, self):
+    def descr_ErrorByteIndex(self, space):
         return space.wrap(XML_GetErrorByteIndex(self.itself))
 
-    def get_buffer_size(space, self):
+    def get_buffer_size(self, space):
         return space.wrap(self.buffer_size)
-    def set_buffer_size(space, self, w_value):
+    def set_buffer_size(self, space, w_value):
         value = space.getindex_w(w_value, space.w_TypeError)
         if value <= 0:
             raise OperationError(space.w_ValueError, space.wrap(
@@ -625,9 +625,9 @@
         self.flush_character_buffer(space)
         self.buffer_size = value
 
-    def get_buffer_text(space, self):
+    def get_buffer_text(self, space):
         return space.wrap(self.buffer_w is not None)
-    def set_buffer_text(space, self, w_value):
+    def set_buffer_text(self, space, w_value):
         if space.is_true(w_value):
             self.buffer_w = []
             self.buffer_used = 0
@@ -635,7 +635,7 @@
             self.flush_character_buffer(space)
             self.buffer_w = None
 
-    def get_intern(space, self):
+    def get_intern(self, space):
         if self.w_intern:
             return self.w_intern
         else:
@@ -676,9 +676,7 @@
     CurrentColumnNumber = GetSetProperty(W_XMLParserType.descr_ErrorColumnNumber, cls=W_XMLParserType),
     CurrentByteIndex = GetSetProperty(W_XMLParserType.descr_ErrorByteIndex, cls=W_XMLParserType),
 
-    **dict((name, interp2app(getattr(W_XMLParserType, name),
-                             unwrap_spec=getattr(W_XMLParserType,
-                                                 name).unwrap_spec))
+    **dict((name, interp2app(getattr(W_XMLParserType, name)))
            for name in XMLParser_methods)
     )
 
@@ -740,11 +738,10 @@
         parser.itself, UnknownEncodingHandlerData_callback,
         rffi.cast(rffi.VOIDP, parser.id))
     return space.wrap(parser)
-ParserCreate.unwrap_spec = [ObjSpace, W_Root, W_Root, W_Root]
 
+ at unwrap_spec(code=int)
 def ErrorString(space, code):
     """ErrorString(errno) -> string
 Returns string error for given number."""
     return space.wrap(rffi.charp2str(XML_ErrorString(code)))
-ErrorString.unwrap_spec = [ObjSpace, int]
 

diff --git a/pypy/doc/config/objspace.std.withtproxy.txt b/pypy/doc/config/objspace.std.withtproxy.txt
deleted file mode 100644
--- a/pypy/doc/config/objspace.std.withtproxy.txt
+++ /dev/null
@@ -1,3 +0,0 @@
-Enable `transparent proxies`_.
-
-.. _`transparent proxies`: ../objspace-proxies.html#tproxy

diff --git a/pypy/doc/config/translation.output.txt b/pypy/doc/config/translation.output.txt
deleted file mode 100644
--- a/pypy/doc/config/translation.output.txt
+++ /dev/null
@@ -1,1 +0,0 @@
-Specify file name that the produced executable gets.

diff --git a/py/_cmdline/pysvnwcrevert.py b/py/_cmdline/pysvnwcrevert.py
deleted file mode 100755
--- a/py/_cmdline/pysvnwcrevert.py
+++ /dev/null
@@ -1,55 +0,0 @@
-#! /usr/bin/env python
-"""\
-py.svnwcrevert [options] WCPATH
-
-Running this script and then 'svn up' puts the working copy WCPATH in a state
-as clean as a fresh check-out.
-
-WARNING: you'll lose all local changes, obviously!
-
-This script deletes all files that have been modified
-or that svn doesn't explicitly know about, including svn:ignored files
-(like .pyc files, hint hint).
-
-The goal of this script is to leave the working copy with some files and
-directories possibly missing, but - most importantly - in a state where
-the following 'svn up' won't just crash.
-"""
-
-import sys, py
-
-def kill(p, root):
-    print('<    %s' % (p.relto(root),))
-    p.remove(rec=1)
-
-def svnwcrevert(path, root=None, precious=[]):
-    if root is None:
-        root = path
-    wcpath = py.path.svnwc(path)
-    try:
-        st = wcpath.status()
-    except ValueError:   # typically, "bad char in wcpath"
-        kill(path, root)
-        return
-    for p in path.listdir():
-        if p.basename == '.svn' or p.basename in precious:
-            continue
-        wcp = py.path.svnwc(p)
-        if wcp not in st.unchanged and wcp not in st.external:
-            kill(p, root)
-        elif p.check(dir=1):
-            svnwcrevert(p, root)
-
-# XXX add a functional test
-
-parser = py.std.optparse.OptionParser(usage=__doc__)
-parser.add_option("-p", "--precious",
-                  action="append", dest="precious", default=[],
-                  help="preserve files with this name")
-
-def main():
-    opts, args = parser.parse_args()
-    if len(args) != 1:
-        parser.print_help()
-        sys.exit(2)
-    svnwcrevert(py.path.local(args[0]), precious=opts.precious)

diff --git a/pypy/rpython/memory/gc/minimark.py b/pypy/rpython/memory/gc/minimark.py
--- a/pypy/rpython/memory/gc/minimark.py
+++ b/pypy/rpython/memory/gc/minimark.py
@@ -1,7 +1,7 @@
 """ MiniMark GC.
 
 Environment variables can be used to fine-tune the following parameters:
-    
+
  PYPY_GC_NURSERY        The nursery size.  Defaults to half the size of
                         the L2 cache.  Try values like '1.2MB'.  Small values
                         (like 1 or 1KB) are useful for debugging.
@@ -108,12 +108,13 @@
 GCFLAG_HAS_CARDS    = first_gcflag << 5
 GCFLAG_CARDS_SET    = first_gcflag << 6     # <- at least one card bit is set
 
+TID_MASK            = (first_gcflag << 7) - 1
+
 
 FORWARDSTUB = lltype.GcStruct('forwarding_stub',
                               ('forw', llmemory.Address))
 FORWARDSTUBPTR = lltype.Ptr(FORWARDSTUB)
 
-
 # ____________________________________________________________
 
 class MiniMarkGC(MovingGCBase):
@@ -852,9 +853,13 @@
         that can never be set on a young object -- except if tid == -42.
         """
         assert self.is_in_nursery(obj)
-        result = (self.header(obj).tid & GCFLAG_FINALIZATION_ORDERING != 0)
+        tid = self.header(obj).tid
+        result = (tid & GCFLAG_FINALIZATION_ORDERING != 0)
         if result:
-            ll_assert(self.header(obj).tid == -42, "bogus header for young obj")
+            ll_assert(tid == -42, "bogus header for young obj")
+        else:
+            ll_assert(bool(tid), "bogus header (1)")
+            ll_assert(tid & ~TID_MASK == 0, "bogus header (2)")
         return result
 
     def get_forwarding_address(self, obj):

diff --git a/pypy/doc/buildtool.txt b/pypy/doc/buildtool.txt
deleted file mode 100644
--- a/pypy/doc/buildtool.txt
+++ /dev/null
@@ -1,249 +0,0 @@
-============
-PyPyBuilder
-============
-
-What is this?
-=============
-
-PyPyBuilder is an application that allows people to build PyPy instances on
-demand. If you have a nice idle machine connected to the Internet, and don't
-mind us 'borrowing' it every once in a while, you can start up the client
-script (in bin/client) and have the server send compile jobs to your machine.
-If someone requests a build of PyPy that is not already available on the PyPy
-website, and your machine is capable of making such a build, the server may ask
-your machine to create it. If enough people participate, with diverse enough
-machines, a 'build farm' is created.
-
-Quick usage instructions
-========================
-
-For the impatient, that just want to get started, some quick instructions.
-
-First you'll need to have a checkout of the 'buildtool' package, that can
-be found here::
-
-  https://codespeak.net/svn/pypy/build/buildtool
-
-To start a compilation, run (from the buildtool root directory)::
-
-  $ ./bin/startcompile.py [options] <email address>
-
-where the options can be found by using --help, and the email address will be
-used to send mail to once the compilation is finished.
-
-To start a build server, to participate in the build farm, do::
-
-  $ ./bin/buildserver.py
-
-That's it for the compilation script and build server, if you have your own
-project and want to set up your own meta server, you'll have to be a bit more
-patient and read the details below...
-
-Components
-==========
-
-The application consists of 3 main components: a meta server component, a
-client component that handles compilations (let's call this a 'build server')
-and a small client component to start compile jobs (which we'll call
-'requesting clients' for now).
-
-The server waits for build server to register, and for compile job
-requests. When participating clients register, they pass the server information
-about what compilations the system can handle (system info), and a set of
-options to use for compilation (compile info).
-
-When now a requesting client requests a compilation job, the server checks
-whether a suitable binary is already available based on the system and compile
-info, and if so returns that. If there isn't one, the server walks through a
-list of connected participating clients to see if one of them can handle the
-job, and if so dispatches the compilation. If there's no participating client
-to handle the job, it gets queued until there is.
-
-If a client crashes during compilation, the build is restarted, or error
-information is sent to the logs and requesting client, depending on the type of
-error. As long as no compilation error occurs (read: on disconnects, system
-errors, etc.) compilation will be retried until a build is available.
-
-Once a build is available, the server will send an email to all clients waiting
-for the build (it could be that more than one person asked for some build at
-the same time!).
-
-Configuration
-=============
-
-There are several aspects to configuration on this system. Of course, for the
-meta server, build server and startcompile components there is configuration
-for the host and port to connect to, and there is some additional configuration
-for things like which mailhost to use (only applies to the server), but also
-there is configuration data passed around to determine what client is picked,
-and what the client needs to compile exactly.
-
-Config file
------------
-
-The host/port configuration etc. can be found in the file 'config.py' in the
-build tool dir. There are several things that can be configured here, mostly
-related to what application to build, and where to build it. Please read the
-file carefully when setting up a new build network, or when participating for
-compilation, because certain items (e.g. the svnpath_to_url function, or the
-client_checkers) can make the system a lot less secure when not configured
-properly.
-
-Note that all client-related configuration is done from command-line switches,
-so the configuration file is supposed to be changed on a per-project basis:
-unless you have specific needs, use a test version of the build tool, or are
-working on another project than PyPy, you will not want to modify the it.
-
-System configuration
---------------------
-
-This information is used by the client and startcompile components. On the
-participating clients this information is retrieved by querying the system, on
-the requesting clients the system values are used by default, but may be
-overridden (so a requesting client running an x86 can still request PPC builds,
-for instance). The clients compare their own system config to that of a build
-request, and will (should) refuse a build if it can not be executed because
-of incompatibilities.
-
-Compilation configuration
--------------------------
-
-The third form of configuration is that of the to-be-built application itself,
-its compilation arguments. This configuration is only provided by the
-requesting clients, build servers can examine the information and refuse a
-compilation based on this configuration (just like with the system config, see
-'client_checkers' in 'config.py'). Compilation configuration can be controlled
-using command-line arguments (use 'bin/startcompile.py --help' for an
-overview).
-
-Build tool options
-------------------
-
-Yet another part of the configuration are the options that are used by the
-startcompile.py script itself: the user can specify what SVN path (relative to
-a certain base path) and what Subversion revision is desired.  The revision can
-either be specified exactly, or as a range of versions.
-
-Installation
-============
-
-Build Server
-------------
-
-Installing the system should not be required: just run './bin/buildserver' to
-start. Note that it depends on the `py lib`_ (as does the rest of PyPy).
-
-When starting a build server with PyPy's default configuration, it will connect
-to a meta server we have running in codespeak.net.
-
-Meta Server
------------
-
-Also for the server there's no real setup required, and again there's a 
-dependency on the `py lib`_. Starting it is done by running
-'./bin/metaserver'.
-
-Running a compile job
----------------------
-
-Again installation is not required, just run './bin/startcompile.py [options]
-<email>' (see --help for the options) to start. Again, you need to have the
-`py lib`_ installed.
-
-Normally the codespeak.net meta server will be used when this script is issued.
-
-.. _`py lib`: http://codespeak.net/py
-
-Using the build tool for other projects
-=======================================
-
-The code for the build tool is meant to be generic. Using it for other projects
-than PyPy (for which it was originally written) is relatively straight-forward:
-just change the configuration, and implement a build client script (probably
-highly resembling bin/buildserver.py).
-
-Note that there is a test project in 'tool/build/testproject' that can serve
-as an example.
-
-Prerequisites
---------------
-
-Your project can use the build tool if:
-
-  * it can be built from Python
-
-    Of course this is a rather vague requirement: theoretically _anything_ can
-    be built from Python; it's just a matter of integrating it into the tool
-    properly... A project that can entirely be built from Python code (like
-    PyPy) is easier to integrate than something that is built from the command
-    line, though (although implementing that won't be very hard either, see
-    the test project for instance).
-
-  * it is located in Subversion
-
-    The build tool makes very little hard-coded assumptions, but having code
-    in Subversion is one of them. There are several locations in the code where
-    SVN is assumed: the command line options (see `build tool options`_),
-    the server (which checks SVN urls for validity, and converts HEAD revision
-    requests to actual revision ids) and and build client (which checks out the
-    data) all make this assumption, changing to a different revision control
-    system is currently not easy and unsupported (but who knows what the future
-    will bring).
-
-  * it uses PyPy's config mechanism
-
-    PyPy has a very nice, generic configuration mechanism (essentially wrapper
-    OptionParser stuff) that makes dealing with fragmented configuration
-    and command-line options a lot easier. This mechanism is used by the build
-    tool: it assumes configuration is provided in this format. If your project
-    uses this configuration mechanism already, you can provide the root Config
-    object from config.compile_config; if not it should be fairly straight-
-    forward to wrap your existing configuration with the PyPy stuff.
-
-Basically that's it: if your project is stored in SVN, and you don't mind using
-Python a bit, it shouldn't be too hard to get things going (note that more
-documentation about this subject will follow in the future).
-
-Web Front-End
-=============
-
-To examine the status of the meta server, connected build servers and build
-requests, there is a web server available. This can be started using
-'./bin/webserver' and uses port 8080 by default (override in
-config.py).
-
-The web server presents a number of different pages:
-
-  * / and /metaserverstatus - meta server status
-
-    this displays a small list of information about the meta server, such
-    as the amount of connected build servers, the amount of builds available,
-    the amount of waiting clients, etc.
-
-  * /buildservers - connected build servers
-
-    this page contains a list of all connected build servers, system
-    information and what build they're currently working on (if any)
-
-  * /builds - a list of builds
-
-    here you'll find a list of all builds, both done and in-progress and
-    queued ones, with links to the details pages, the date they were
-    requested and their status
-
-  * /build/<id> - build details
-
-    the 'build' (virtual) directory contains pages of information for each
-    build - each of those pages displays status information, time requested,
-    time started and finished (if appropriate), links to the zip and logs,
-    and system and compile information
-
-There's a build tool status web server for the meta server on codespeak.net
-available at http://codespeak.net/pypy/buildstatus/.
-
-More info
-=========
-
-For more information, bug reports, patches, etc., please send an email to 
-guido at merlinux.de.
-

diff --git a/pypy/doc/config/objspace.usemodules.exceptions.txt b/pypy/doc/config/objspace.usemodules.exceptions.txt
deleted file mode 100644
--- a/pypy/doc/config/objspace.usemodules.exceptions.txt
+++ /dev/null
@@ -1,2 +0,0 @@
-Use the 'exceptions' module.
-This module is essential, included by default and should not be removed.

diff --git a/pypy/doc/config/objspace.std.mutable_builtintypes.txt b/pypy/doc/config/objspace.std.mutable_builtintypes.txt
deleted file mode 100644
--- a/pypy/doc/config/objspace.std.mutable_builtintypes.txt
+++ /dev/null
@@ -1,1 +0,0 @@
-Allow modification of builtin types.  Disabled by default.

diff --git a/pypy/doc/config/objspace.usemodules.math.txt b/pypy/doc/config/objspace.usemodules.math.txt
deleted file mode 100644
--- a/pypy/doc/config/objspace.usemodules.math.txt
+++ /dev/null
@@ -1,2 +0,0 @@
-Use the 'math' module. 
-This module is expected to be working and is included by default.

diff --git a/py/apipkg.py b/py/apipkg.py
deleted file mode 100644
--- a/py/apipkg.py
+++ /dev/null
@@ -1,95 +0,0 @@
-"""
-apipkg: control the exported namespace of a python package.
-
-see http://pypi.python.org/pypi/apipkg
-
-(c) holger krekel, 2009 - MIT license
-"""
-import sys
-from types import ModuleType
-
-__version__ = "1.0b6"
-
-def initpkg(pkgname, exportdefs):
-    """ initialize given package from the export definitions. """
-    mod = ApiModule(pkgname, exportdefs, implprefix=pkgname)
-    oldmod = sys.modules[pkgname]
-    mod.__file__ = getattr(oldmod, '__file__', None)
-    mod.__version__ = getattr(oldmod, '__version__', '0')
-    for name in ('__path__', '__loader__'):
-        if hasattr(oldmod, name):
-            setattr(mod, name, getattr(oldmod, name))
-    sys.modules[pkgname]  = mod
-
-def importobj(modpath, attrname):
-    module = __import__(modpath, None, None, ['__doc__'])
-    return getattr(module, attrname)
-
-class ApiModule(ModuleType):
-    def __init__(self, name, importspec, implprefix=None):
-        self.__name__ = name
-        self.__all__ = [x for x in importspec if x != '__onfirstaccess__']
-        self.__map__ = {}
-        self.__implprefix__ = implprefix or name
-        for name, importspec in importspec.items():
-            if isinstance(importspec, dict):
-                subname = '%s.%s'%(self.__name__, name)
-                apimod = ApiModule(subname, importspec, implprefix)
-                sys.modules[subname] = apimod
-                setattr(self, name, apimod)
-            else:
-                modpath, attrname = importspec.split(':')
-                if modpath[0] == '.':
-                    modpath = implprefix + modpath
-                if name == '__doc__':
-                    self.__doc__ = importobj(modpath, attrname)
-                else:
-                    self.__map__[name] = (modpath, attrname)
-
-    def __repr__(self):
-        l = []
-        if hasattr(self, '__version__'):
-            l.append("version=" + repr(self.__version__))
-        if hasattr(self, '__file__'):
-            l.append('from ' + repr(self.__file__))
-        if l:
-            return '<ApiModule %r %s>' % (self.__name__, " ".join(l))
-        return '<ApiModule %r>' % (self.__name__,)
-
-    def __makeattr(self, name):
-        """lazily compute value for name or raise AttributeError if unknown."""
-        target = None
-        if '__onfirstaccess__' in self.__map__:
-            target = self.__map__.pop('__onfirstaccess__')
-            importobj(*target)()
-        try:
-            modpath, attrname = self.__map__[name]
-        except KeyError:
-            if target is not None and name != '__onfirstaccess__':
-                # retry, onfirstaccess might have set attrs
-                return getattr(self, name)
-            raise AttributeError(name)
-        else:
-            result = importobj(modpath, attrname)
-            setattr(self, name, result)
-            try:
-                del self.__map__[name]
-            except KeyError:
-                pass # in a recursive-import situation a double-del can happen
-            return result
-
-    __getattr__ = __makeattr
-
-    def __dict__(self):
-        # force all the content of the module to be loaded when __dict__ is read
-        dictdescr = ModuleType.__dict__['__dict__']
-        dict = dictdescr.__get__(self)
-        if dict is not None:
-            hasattr(self, 'some')
-            for name in self.__all__:
-                try:
-                    self.__makeattr(name)
-                except AttributeError:
-                    pass
-        return dict
-    __dict__ = property(__dict__)

diff --git a/pypy/doc/config/objspace.std.sharesmallstr.txt b/pypy/doc/config/objspace.std.sharesmallstr.txt
deleted file mode 100644

diff --git a/pypy/doc/config/opt.txt b/pypy/doc/config/opt.txt
deleted file mode 100644
--- a/pypy/doc/config/opt.txt
+++ /dev/null
@@ -1,50 +0,0 @@
-The ``--opt`` or ``-O`` translation option
-==========================================
-
-This meta-option selects a default set of optimization
-settings to use during a translation.  Usage::
-
-    translate.py --opt=#
-    translate.py -O#
-
-where ``#`` is the desired optimization level.  The valid choices are:
-
-    =============  ========================================================
-      Level        Description
-    =============  ========================================================
-    `--opt=0`      all optimizations off; fastest translation `(*)`_
-    `--opt=1`      non-time-consuming optimizations on `(*)`_
-    `--opt=size`   minimize the size of the final executable `(*)`_
-    `--opt=mem`    minimize the run-time RAM consumption (in-progress)
-    `--opt=2`      all optimizations on; good run-time performance
-    `--opt=3`      same as `--opt=2`; remove asserts; gcc profiling `(**)`_
-    `--opt=jit`    includes the JIT and tweak other optimizations for it
-    =============  ========================================================
-
-.. _`(*)`:
-
-`(*)`: The levels `0, 1` and `size` use the `Boehm-Demers-Weiser
-garbage collector`_ (Debian package ``libgc-dev``).  The translation
-itself is faster and consumes less memory; the final executable is
-smaller but slower.  The other levels use one of our built-in `custom
-garbage collectors`_.
-
-.. _`(**)`:
-    
-`(**)`: The level `3` enables gcc profile-driven recompilation when
-translating PyPy.
-
-The exact set of optimizations enabled by each level depends
-on the backend.  Individual translation targets can also
-select their own options based on the level: when translating
-PyPy, the level `mem` enables the memory-saving object
-implementations in the object space; levels `2` and `3` enable
-the advanced object implementations that give an increase in
-performance; level `3` also enables gcc profile-driven
-recompilation.
-
-The default level is `2`.
-
-
-.. _`Boehm-Demers-Weiser garbage collector`: http://www.hpl.hp.com/personal/Hans_Boehm/gc/
-.. _`custom garbage collectors`: ../garbage_collection.html

diff --git a/py/bin/env.py b/py/bin/env.py
deleted file mode 100644
--- a/py/bin/env.py
+++ /dev/null
@@ -1,33 +0,0 @@
-#!/usr/bin/env python
-
-import sys, os, os.path
-
-progpath = sys.argv[0]
-packagedir = os.path.dirname(os.path.dirname(os.path.abspath(progpath)))
-packagename = os.path.basename(packagedir)
-bindir = os.path.join(packagedir, 'bin')
-if sys.platform == 'win32':
-    bindir = os.path.join(bindir, 'win32')
-rootdir = os.path.dirname(packagedir)
-
-def prepend_path(name, value):
-    sep = os.path.pathsep
-    curpath = os.environ.get(name, '')
-    newpath = [value] + [ x for x in curpath.split(sep) if x and x != value ]
-    return setenv(name, sep.join(newpath))
-
-def setenv(name, value):
-    shell = os.environ.get('SHELL', '')
-    comspec = os.environ.get('COMSPEC', '')
-    if shell.endswith('csh'):
-        cmd = 'setenv %s "%s"' % (name, value)
-    elif shell.endswith('sh'):
-        cmd = '%s="%s"; export %s' % (name, value, name)
-    elif comspec.endswith('cmd.exe'):
-        cmd = 'set %s=%s' % (name, value)
-    else:
-        assert False, 'Shell not supported.'
-    return cmd
-
-print(prepend_path('PATH', bindir))
-print(prepend_path('PYTHONPATH', rootdir))

diff --git a/pypy/doc/config/objspace.usemodules._locale.txt b/pypy/doc/config/objspace.usemodules._locale.txt
deleted file mode 100644
--- a/pypy/doc/config/objspace.usemodules._locale.txt
+++ /dev/null
@@ -1,3 +0,0 @@
-Use the '_locale' module.
-This module runs _locale written in RPython (instead of ctypes version).
-It's not really finished yet; it's enabled by default on Windows.

diff --git a/py/_test/funcargs.py b/py/_test/funcargs.py
deleted file mode 100644
--- a/py/_test/funcargs.py
+++ /dev/null
@@ -1,176 +0,0 @@
-import py
-
-def getfuncargnames(function):
-    argnames = py.std.inspect.getargs(py.code.getrawcode(function))[0]
-    startindex = py.std.inspect.ismethod(function) and 1 or 0
-    defaults = getattr(function, 'func_defaults', 
-                       getattr(function, '__defaults__', None)) or ()
-    numdefaults = len(defaults)
-    if numdefaults:
-        return argnames[startindex:-numdefaults]
-    return argnames[startindex:]
-    
-def fillfuncargs(function):
-    """ fill missing funcargs. """ 
-    request = FuncargRequest(pyfuncitem=function)
-    request._fillfuncargs()
-
-def getplugins(node, withpy=False): # might by any node
-    plugins = node.config._getmatchingplugins(node.fspath)
-    if withpy:
-        mod = node.getparent(py.test.collect.Module)
-        if mod is not None:
-            plugins.append(mod.obj)
-        inst = node.getparent(py.test.collect.Instance)
-        if inst is not None:
-            plugins.append(inst.obj)
-    return plugins
-
-_notexists = object()
-class CallSpec:
-    def __init__(self, funcargs, id, param):
-        self.funcargs = funcargs 
-        self.id = id
-        if param is not _notexists:
-            self.param = param 
-    def __repr__(self):
-        return "<CallSpec id=%r param=%r funcargs=%r>" %(
-            self.id, getattr(self, 'param', '?'), self.funcargs)
-
-class Metafunc:
-    def __init__(self, function, config=None, cls=None, module=None):
-        self.config = config
-        self.module = module 
-        self.function = function
-        self.funcargnames = getfuncargnames(function)
-        self.cls = cls
-        self.module = module
-        self._calls = []
-        self._ids = py.builtin.set()
-
-    def addcall(self, funcargs=None, id=_notexists, param=_notexists):
-        assert funcargs is None or isinstance(funcargs, dict)
-        if id is None:
-            raise ValueError("id=None not allowed") 
-        if id is _notexists:
-            id = len(self._calls)
-        id = str(id)
-        if id in self._ids:
-            raise ValueError("duplicate id %r" % id)
-        self._ids.add(id)
-        self._calls.append(CallSpec(funcargs, id, param))
-
-class FuncargRequest:
-    _argprefix = "pytest_funcarg__"
-    _argname = None
-
-    class LookupError(LookupError):
-        """ error on performing funcarg request. """ 
-
-    def __init__(self, pyfuncitem):
-        self._pyfuncitem = pyfuncitem
-        self.function = pyfuncitem.obj
-        self.module = pyfuncitem.getparent(py.test.collect.Module).obj
-        clscol = pyfuncitem.getparent(py.test.collect.Class)
-        self.cls = clscol and clscol.obj or None
-        self.instance = py.builtin._getimself(self.function)
-        self.config = pyfuncitem.config
-        self.fspath = pyfuncitem.fspath
-        if hasattr(pyfuncitem, '_requestparam'):
-            self.param = pyfuncitem._requestparam 
-        self._plugins = getplugins(pyfuncitem, withpy=True)
-        self._funcargs  = self._pyfuncitem.funcargs.copy()
-        self._name2factory = {}
-        self._currentarg = None
-
-    def _fillfuncargs(self):
-        argnames = getfuncargnames(self.function)
-        if argnames:
-            assert not getattr(self._pyfuncitem, '_args', None), (
-                "yielded functions cannot have funcargs")
-        for argname in argnames:
-            if argname not in self._pyfuncitem.funcargs:
-                self._pyfuncitem.funcargs[argname] = self.getfuncargvalue(argname)
-
-    def cached_setup(self, setup, teardown=None, scope="module", extrakey=None):
-        """ cache and return result of calling setup().  
-
-        The requested argument name, the scope and the ``extrakey`` 
-        determine the cache key.  The scope also determines when 
-        teardown(result) will be called.  valid scopes are: 
-        scope == 'function': when the single test function run finishes. 
-        scope == 'module': when tests in a different module are run
-        scope == 'session': when tests of the session have run. 
-        """
-        if not hasattr(self.config, '_setupcache'):
-            self.config._setupcache = {} # XXX weakref? 
-        cachekey = (self._currentarg, self._getscopeitem(scope), extrakey)
-        cache = self.config._setupcache
-        try:
-            val = cache[cachekey]
-        except KeyError:
-            val = setup()
-            cache[cachekey] = val 
-            if teardown is not None:
-                def finalizer():
-                    del cache[cachekey]
-                    teardown(val)
-                self._addfinalizer(finalizer, scope=scope)
-        return val 
-
-    def getfuncargvalue(self, argname):
-        try:
-            return self._funcargs[argname]
-        except KeyError:
-            pass
-        if argname not in self._name2factory:
-            self._name2factory[argname] = self.config.pluginmanager.listattr(
-                    plugins=self._plugins, 
-                    attrname=self._argprefix + str(argname)
-            )
-        #else: we are called recursively  
-        if not self._name2factory[argname]:
-            self._raiselookupfailed(argname)
-        funcargfactory = self._name2factory[argname].pop()
-        oldarg = self._currentarg
-        self._currentarg = argname 
-        try:
-            self._funcargs[argname] = res = funcargfactory(request=self)
-        finally:
-            self._currentarg = oldarg
-        return res
-
-    def _getscopeitem(self, scope):
-        if scope == "function":
-            return self._pyfuncitem
-        elif scope == "module":
-            return self._pyfuncitem.getparent(py.test.collect.Module)
-        elif scope == "session":
-            return None
-        raise ValueError("unknown finalization scope %r" %(scope,))
-
-    def _addfinalizer(self, finalizer, scope):
-        colitem = self._getscopeitem(scope)
-        self.config._setupstate.addfinalizer(
-            finalizer=finalizer, colitem=colitem)
-
-    def addfinalizer(self, finalizer):
-        """ call the given finalizer after test function finished execution. """ 
-        self._addfinalizer(finalizer, scope="function") 
-
-    def __repr__(self):
-        return "<FuncargRequest for %r>" %(self._pyfuncitem)
-
-    def _raiselookupfailed(self, argname):
-        available = []
-        for plugin in self._plugins:
-            for name in vars(plugin):
-                if name.startswith(self._argprefix):
-                    name = name[len(self._argprefix):]
-                    if name not in available:
-                        available.append(name) 
-        fspath, lineno, msg = self._pyfuncitem.reportinfo()
-        msg = "LookupError: no factory found for function argument %r" % (argname,)
-        msg += "\n available funcargs: %s" %(", ".join(available),)
-        msg += "\n use 'py.test --funcargs [testpath]' for help on them."
-        raise self.LookupError(msg)

diff --git a/pypy/doc/config/translation.log.txt b/pypy/doc/config/translation.log.txt
deleted file mode 100644
--- a/pypy/doc/config/translation.log.txt
+++ /dev/null
@@ -1,5 +0,0 @@
-Include debug prints in the translation.
-
-These must be enabled by setting the PYPYLOG environment variable.
-The exact set of features supported by PYPYLOG is described in
-pypy/translation/c/src/debug.h.

diff --git a/pypy/doc/config/objspace.translationmodules.txt b/pypy/doc/config/objspace.translationmodules.txt
deleted file mode 100644
--- a/pypy/doc/config/objspace.translationmodules.txt
+++ /dev/null
@@ -1,1 +0,0 @@
-This option enables all modules which are needed to translate PyPy using PyPy.

diff --git a/pypy/doc/config/objspace.usemodules.rbench.txt b/pypy/doc/config/objspace.usemodules.rbench.txt
deleted file mode 100644
--- a/pypy/doc/config/objspace.usemodules.rbench.txt
+++ /dev/null
@@ -1,4 +0,0 @@
-Use the built-in 'rbench' module.
-This module contains geninterpreted versions of pystone and richards,
-so it is useful to measure the interpretation overhead of the various
-pypy-\*.

diff --git a/pypy/doc/config/objspace.std.withstrslice.txt b/pypy/doc/config/objspace.std.withstrslice.txt
deleted file mode 100644
--- a/pypy/doc/config/objspace.std.withstrslice.txt
+++ /dev/null
@@ -1,7 +0,0 @@
-Enable "string slice" objects.
-
-See the page about `Standard Interpreter Optimizations`_ for more details.
-
-.. _`Standard Interpreter Optimizations`: ../interpreter-optimizations.html#string-slice-objects
-
-

diff --git a/pypy/annotation/model.py b/pypy/annotation/model.py
--- a/pypy/annotation/model.py
+++ b/pypy/annotation/model.py
@@ -34,7 +34,7 @@
 from pypy.tool.pairtype import pair, extendabletype
 from pypy.tool.tls import tlsobject
 from pypy.rlib.rarithmetic import r_uint, r_ulonglong, base_int
-from pypy.rlib.rarithmetic import r_singlefloat, r_longfloat, isnan
+from pypy.rlib.rarithmetic import r_singlefloat, r_longfloat
 import inspect, weakref
 
 DEBUG = False    # set to False to disable recording of debugging information
@@ -165,12 +165,12 @@
     def __eq__(self, other):
         if (type(self) is SomeFloat and type(other) is SomeFloat and
             self.is_constant() and other.is_constant()):
+            from pypy.rlib.rfloat import isnan, copysign
             # NaN unpleasantness.
             if isnan(self.const) and isnan(other.const):
                 return True
             # 0.0 vs -0.0 unpleasantness.
             if not self.const and not other.const:
-                from pypy.rlib.rarithmetic import copysign
                 return copysign(1., self.const) == copysign(1., other.const)
             #
         return super(SomeFloat, self).__eq__(other)

diff --git a/py/_compat/dep_textwrap.py b/py/_compat/dep_textwrap.py
deleted file mode 100644
--- a/py/_compat/dep_textwrap.py
+++ /dev/null
@@ -1,5 +0,0 @@
-import py
-
-py.log._apiwarn("1.1", "py.compat.textwrap deprecated, use standard library version.", 
-    stacklevel="apipkg")
-textwrap = py.std.textwrap

diff --git a/pypy/doc/config/objspace.std.withdictmeasurement.txt b/pypy/doc/config/objspace.std.withdictmeasurement.txt
deleted file mode 100644
--- a/pypy/doc/config/objspace.std.withdictmeasurement.txt
+++ /dev/null
@@ -1,3 +0,0 @@
-Internal option.
-
-.. internal

diff --git a/pypy/doc/config/objspace.usemodules.select.txt b/pypy/doc/config/objspace.usemodules.select.txt
deleted file mode 100644
--- a/pypy/doc/config/objspace.usemodules.select.txt
+++ /dev/null
@@ -1,2 +0,0 @@
-Use the 'select' module. 
-This module is expected to be fully working.

diff --git a/pypy/doc/config/objspace.std.getattributeshortcut.txt b/pypy/doc/config/objspace.std.getattributeshortcut.txt
deleted file mode 100644
--- a/pypy/doc/config/objspace.std.getattributeshortcut.txt
+++ /dev/null
@@ -1,1 +0,0 @@
-Performance only: track types that override __getattribute__.

diff --git a/py/_cmdline/pylookup.py b/py/_cmdline/pylookup.py
deleted file mode 100755
--- a/py/_cmdline/pylookup.py
+++ /dev/null
@@ -1,85 +0,0 @@
-#!/usr/bin/env python 
-
-"""\
-py.lookup [search_directory] SEARCH_STRING [options]
-
-Looks recursively at Python files for a SEARCH_STRING, starting from the
-present working directory. Prints the line, with the filename and line-number
-prepended."""
-
-import sys, os
-import py
-from py.io import ansi_print, get_terminal_width
-import re
-
-def rec(p):
-    return p.check(dotfile=0)
-
-parser = py.std.optparse.OptionParser(usage=__doc__)
-parser.add_option("-i", "--ignore-case", action="store_true", dest="ignorecase",
-                  help="ignore case distinctions")
-parser.add_option("-C", "--context", action="store", type="int", dest="context",
-            default=0, help="How many lines of output to show")
-
-terminal_width = get_terminal_width()
-
-def find_indexes(search_line, string):
-    indexes = []
-    before = 0
-    while 1:
-        i = search_line.find(string, before)
-        if i == -1:
-            break
-        indexes.append(i)
-        before = i + len(string)
-    return indexes
-
-def main():
-    (options, args) = parser.parse_args()
-    if len(args) == 2:
-        search_dir, string = args
-        search_dir = py.path.local(search_dir)
-    else:
-        search_dir = py.path.local()
-        string = args[0]
-    if options.ignorecase:
-        string = string.lower()
-    for x in search_dir.visit('*.py', rec):
-        # match filename directly
-        s = x.relto(search_dir)
-        if options.ignorecase:
-            s = s.lower()
-        if s.find(string) != -1:
-            sys.stdout.write("%s: filename matches %r" %(x, string) + "\n")
-
-        try:
-            s = x.read()
-        except py.error.ENOENT:
-            pass # whatever, probably broken link (ie emacs lock)
-        searchs = s
-        if options.ignorecase:
-            searchs = s.lower()
-        if s.find(string) != -1:
-            lines = s.splitlines()
-            if options.ignorecase:
-                searchlines = s.lower().splitlines()
-            else:
-                searchlines = lines
-            for i, (line, searchline) in enumerate(zip(lines, searchlines)): 
-                indexes = find_indexes(searchline, string)
-                if not indexes:
-                    continue
-                if not options.context:
-                    sys.stdout.write("%s:%d: " %(x.relto(search_dir), i+1))
-                    last_index = 0
-                    for index in indexes:
-                        sys.stdout.write(line[last_index: index])
-                        ansi_print(line[index: index+len(string)],
-                                   file=sys.stdout, esc=31, newline=False)
-                        last_index = index + len(string)
-                    sys.stdout.write(line[last_index:] + "\n")
-                else:
-                    context = (options.context)/2
-                    for count in range(max(0, i-context), min(len(lines) - 1, i+context+1)):
-                        print("%s:%d:  %s" %(x.relto(search_dir), count+1, lines[count].rstrip()))
-                    print("-" * terminal_width)

diff --git a/py/_plugin/pytest_pylint.py b/py/_plugin/pytest_pylint.py
deleted file mode 100644
--- a/py/_plugin/pytest_pylint.py
+++ /dev/null
@@ -1,36 +0,0 @@
-"""pylint plugin
-
-XXX: Currently in progress, NOT IN WORKING STATE.
-"""
-import py
-
-pylint = py.test.importorskip("pylint.lint") 
-
-def pytest_addoption(parser):
-    group = parser.getgroup('pylint options')
-    group.addoption('--pylint', action='store_true',
-                    default=False, dest='pylint',
-                    help='run pylint on python files.')
-
-def pytest_collect_file(path, parent):
-    if path.ext == ".py":
-        if parent.config.getvalue('pylint'):
-            return PylintItem(path, parent)
-
-#def pytest_terminal_summary(terminalreporter):
-#    print 'placeholder for pylint output'
-
-class PylintItem(py.test.collect.Item):
-    def runtest(self):
-        capture = py.io.StdCaptureFD()
-        try:
-            linter = pylint.lint.PyLinter()
-            linter.check(str(self.fspath))
-        finally:
-            out, err = capture.reset()
-        rating = out.strip().split('\n')[-1]
-        sys.stdout.write(">>>")
-        print(rating)
-        assert 0
-
-

diff --git a/py/_cmdline/pytest.py b/py/_cmdline/pytest.py
deleted file mode 100755
--- a/py/_cmdline/pytest.py
+++ /dev/null
@@ -1,5 +0,0 @@
-#!/usr/bin/env python 
-import py
-
-def main(args=None):
-    raise SystemExit(py.test.cmdline.main(args))

diff --git a/pypy/rpython/memory/gctransform/framework.py b/pypy/rpython/memory/gctransform/framework.py
--- a/pypy/rpython/memory/gctransform/framework.py
+++ b/pypy/rpython/memory/gctransform/framework.py
@@ -47,7 +47,7 @@
             return True
         return graphanalyze.GraphAnalyzer.analyze_external_call(self, op,
                                                                 seen)
-    def analyze_simple_operation(self, op):
+    def analyze_simple_operation(self, op, graphinfo):
         if op.opname in ('malloc', 'malloc_varsize'):
             flags = op.args[1].value
             return flags['flavor'] == 'gc' and not flags.get('nocollect', False)

diff --git a/py/_test/config.py b/py/_test/config.py
deleted file mode 100644
--- a/py/_test/config.py
+++ /dev/null
@@ -1,291 +0,0 @@
-import py, os
-from py._test.conftesthandle import Conftest
-from py._test.pluginmanager import PluginManager
-from py._test import parseopt
-from py._test.collect import RootCollector
-
-def ensuretemp(string, dir=1): 
-    """ (deprecated) return temporary directory path with
-        the given string as the trailing part.  It is usually 
-        better to use the 'tmpdir' function argument which will
-        take care to provide empty unique directories for each 
-        test call even if the test is called multiple times. 
-    """ 
-    #py.log._apiwarn(">1.1", "use tmpdir function argument")
-    return py.test.config.ensuretemp(string, dir=dir)
-  
-class CmdOptions(object):
-    """ holds cmdline options as attributes."""
-    def __init__(self, **kwargs):
-        self.__dict__.update(kwargs)
-    def __repr__(self):
-        return "<CmdOptions %r>" %(self.__dict__,)
-
-class Error(Exception):
-    """ Test Configuration Error. """
-
-class Config(object): 
-    """ access to config values, pluginmanager and plugin hooks.  """
-    Option = py.std.optparse.Option 
-    Error = Error
-    basetemp = None
-    _sessionclass = None
-
-    def __init__(self, topdir=None, option=None): 
-        self.option = option or CmdOptions()
-        self.topdir = topdir
-        self._parser = parseopt.Parser(
-            usage="usage: %prog [options] [file_or_dir] [file_or_dir] [...]",
-            processopt=self._processopt,
-        )
-        self.pluginmanager = PluginManager()
-        self._conftest = Conftest(onimport=self._onimportconftest)
-        self.hook = self.pluginmanager.hook
-
-    def _onimportconftest(self, conftestmodule):
-        self.trace("loaded conftestmodule %r" %(conftestmodule,))
-        self.pluginmanager.consider_conftest(conftestmodule)
-
-    def _getmatchingplugins(self, fspath):
-        allconftests = self._conftest._conftestpath2mod.values()
-        plugins = [x for x in self.pluginmanager.getplugins() 
-                        if x not in allconftests]
-        plugins += self._conftest.getconftestmodules(fspath)
-        return plugins
-
-    def trace(self, msg):
-        if getattr(self.option, 'traceconfig', None):
-            self.hook.pytest_trace(category="config", msg=msg)
-
-    def _processopt(self, opt):
-        if hasattr(opt, 'default') and opt.dest:
-            val = os.environ.get("PYTEST_OPTION_" + opt.dest.upper(), None)
-            if val is not None:
-                if opt.type == "int":
-                    val = int(val)
-                elif opt.type == "long":
-                    val = long(val)
-                elif opt.type == "float":
-                    val = float(val)
-                elif not opt.type and opt.action in ("store_true", "store_false"):
-                    val = eval(val)
-                opt.default = val 
-            else:
-                name = "option_" + opt.dest
-                try:
-                    opt.default = self._conftest.rget(name)
-                except (ValueError, KeyError):
-                    pass
-            if not hasattr(self.option, opt.dest):
-                setattr(self.option, opt.dest, opt.default)
-
-    def _preparse(self, args):
-        self.pluginmanager.consider_setuptools_entrypoints()
-        self.pluginmanager.consider_env()
-        self.pluginmanager.consider_preparse(args)
-        self._conftest.setinitial(args) 
-        self.pluginmanager.do_addoption(self._parser)
-
-    def parse(self, args): 
-        """ parse cmdline arguments into this config object. 
-            Note that this can only be called once per testing process. 
-        """ 
-        assert not hasattr(self, 'args'), (
-                "can only parse cmdline args at most once per Config object")
-        self._preparse(args)
-        self._parser.hints.extend(self.pluginmanager._hints)
-        args = self._parser.parse_setoption(args, self.option)
-        if not args:
-            args.append(py.std.os.getcwd())
-        self.topdir = gettopdir(args)
-        self._rootcol = RootCollector(config=self)
-        self._setargs(args)
-
-    def _setargs(self, args):
-        self.args = list(args)
-        self._argfspaths = [py.path.local(decodearg(x)[0]) for x in args]
-
-    # config objects are usually pickled across system
-    # barriers but they contain filesystem paths. 
-    # upon getstate/setstate we take care to do everything
-    # relative to "topdir". 
-    def __getstate__(self):
-        l = []
-        for path in self.args:
-            path = py.path.local(path)
-            l.append(path.relto(self.topdir)) 
-        return l, self.option.__dict__
-
-    def __setstate__(self, repr):
-        # we have to set py.test.config because loading 
-        # of conftest files may use it (deprecated) 
-        # mainly by py.test.config.addoptions() 
-        global config_per_process
-        py.test.config = config_per_process = self 
-        args, cmdlineopts = repr 
-        cmdlineopts = CmdOptions(**cmdlineopts)
-        # next line will register default plugins
-        self.__init__(topdir=py.path.local(), option=cmdlineopts)
-        self._rootcol = RootCollector(config=self)
-        args = [str(self.topdir.join(x)) for x in args]
-        self._preparse(args)
-        self._setargs(args)
-
-    def ensuretemp(self, string, dir=True):
-        return self.getbasetemp().ensure(string, dir=dir) 
-
-    def getbasetemp(self):
-        if self.basetemp is None:
-            basetemp = self.option.basetemp 
-            if basetemp:
-                basetemp = py.path.local(basetemp)
-                if not basetemp.check(dir=1):
-                    basetemp.mkdir()
-            else:
-                basetemp = py.path.local.make_numbered_dir(prefix='pytest-')
-            self.basetemp = basetemp
-        return self.basetemp 
-
-    def mktemp(self, basename, numbered=False):
-        basetemp = self.getbasetemp()
-        if not numbered:
-            return basetemp.mkdir(basename)
-        else:
-            return py.path.local.make_numbered_dir(prefix=basename,
-                keep=0, rootdir=basetemp, lock_timeout=None)
-
-    def getinitialnodes(self):
-        return [self.getnode(arg) for arg in self.args]
-
-    def getnode(self, arg):
-        parts = decodearg(arg)
-        path = py.path.local(parts.pop(0))
-        if not path.check():
-            raise self.Error("file not found: %s" %(path,))
-        topdir = self.topdir
-        if path != topdir and not path.relto(topdir):
-            raise self.Error("path %r is not relative to %r" %
-                (str(path), str(topdir)))
-        # assumption: pytest's fs-collector tree follows the filesystem tree
-        names = list(filter(None, path.relto(topdir).split(path.sep)))
-        names += parts
-        try:
-            return self._rootcol.getbynames(names)
-        except ValueError:
-            e = py.std.sys.exc_info()[1]
-            raise self.Error("can't collect: %s\n%s" % (arg, e.args[0]))
-
-    def _getcollectclass(self, name, path):
-        try:
-            cls = self._conftest.rget(name, path)
-        except KeyError:
-            return getattr(py.test.collect, name)
-        else:
-            py.log._apiwarn(">1.1", "%r was found in a conftest.py file, "
-                "use pytest_collect hooks instead." % (cls,))
-            return cls
-
-    def getconftest_pathlist(self, name, path=None):
-        """ return a matching value, which needs to be sequence
-            of filenames that will be returned as a list of Path
-            objects (they can be relative to the location 
-            where they were found).
-        """
-        try:
-            mod, relroots = self._conftest.rget_with_confmod(name, path)
-        except KeyError:
-            return None
-        modpath = py.path.local(mod.__file__).dirpath()
-        l = []
-        for relroot in relroots:
-            if not isinstance(relroot, py.path.local):
-                relroot = relroot.replace("/", py.path.local.sep)
-                relroot = modpath.join(relroot, abs=True)
-            l.append(relroot)
-        return l 
-             
-    def addoptions(self, groupname, *specs): 
-        """ add a named group of options to the current testing session. 
-            This function gets invoked during testing session initialization. 
-        """ 
-        py.log._apiwarn("1.0", "define pytest_addoptions(parser) to add options", stacklevel=2)
-        group = self._parser.getgroup(groupname)
-        for opt in specs:
-            group._addoption_instance(opt)
-        return self.option 
-
-    def addoption(self, *optnames, **attrs):
-        return self._parser.addoption(*optnames, **attrs)
-
-    def getvalueorskip(self, name, path=None): 
-        """ return getvalue() or call py.test.skip if no value exists. """
-        try:
-            val = self.getvalue(name, path)
-            if val is None:
-                raise KeyError(name)
-            return val
-        except KeyError:
-            py.test.skip("no %r value found" %(name,))
-
-    def getvalue(self, name, path=None): 
-        """ return 'name' value looked up from the 'options'
-            and then from the first conftest file found up 
-            the path (including the path itself). 
-            if path is None, lookup the value in the initial
-            conftest modules found during command line parsing. 
-        """
-        try:
-            return getattr(self.option, name)
-        except AttributeError:
-            return self._conftest.rget(name, path)
-
-    def setsessionclass(self, cls):
-        if self._sessionclass is not None:
-            raise ValueError("sessionclass already set to: %r" %(
-                self._sessionclass))
-        self._sessionclass = cls
-
-    def initsession(self):
-        """ return an initialized session object. """
-        cls = self._sessionclass 
-        if cls is None:
-            from py._test.session import Session
-            cls = Session
-        session = cls(self)
-        self.trace("instantiated session %r" % session)
-        return session
-
-#
-# helpers
-#
-
-def gettopdir(args): 
-    """ return the top directory for the given paths.
-        if the common base dir resides in a python package 
-        parent directory of the root package is returned. 
-    """
-    fsargs = [py.path.local(decodearg(arg)[0]) for arg in args]
-    p = fsargs and fsargs[0] or None
-    for x in fsargs[1:]:
-        p = p.common(x)
-    assert p, "cannot determine common basedir of %s" %(fsargs,)
-    pkgdir = p.pypkgpath()
-    if pkgdir is None:
-        if p.check(file=1):
-            p = p.dirpath()
-        return p
-    else:
-        return pkgdir.dirpath()
-
-def decodearg(arg):
-    arg = str(arg)
-    return arg.split("::")
-
-def onpytestaccess():
-    # it's enough to have our containing module loaded as 
-    # it initializes a per-process config instance
-    # which loads default plugins which add to py.test.*
-    pass 
-
-# a default per-process instance of py.test configuration 
-config_per_process = Config()

diff --git a/pypy/rpython/lltypesystem/test/test_rffi.py b/pypy/rpython/lltypesystem/test/test_rffi.py
--- a/pypy/rpython/lltypesystem/test/test_rffi.py
+++ b/pypy/rpython/lltypesystem/test/test_rffi.py
@@ -769,6 +769,9 @@
 def test_ptradd_interpret():
     interpret(test_ptradd, [])
 
+def test_voidptr():
+    assert repr(VOIDP) == "<* Array of void >"
+
 class TestCRffi(BaseTestRffi):
     def compile(self, func, args, **kwds):
         return compile_c(func, args, **kwds)

diff --git a/pypy/doc/config/translation.platform.txt b/pypy/doc/config/translation.platform.txt
deleted file mode 100644
--- a/pypy/doc/config/translation.platform.txt
+++ /dev/null
@@ -1,1 +0,0 @@
-select the target platform, in case of cross-compilation

diff --git a/pypy/doc/config/translation.backendopt.mallocs.txt b/pypy/doc/config/translation.backendopt.mallocs.txt
deleted file mode 100644
--- a/pypy/doc/config/translation.backendopt.mallocs.txt
+++ /dev/null
@@ -1,29 +0,0 @@
-This optimization enables "malloc removal", which "explodes"
-allocations of structures which do not escape from the function they
-are allocated in into one or more additional local variables.
-
-An example.  Consider this rather unlikely seeming code::
-
-    class C:
-        pass
-    def f(y):
-        c = C()
-        c.x = y
-        return c.x
-
-Malloc removal will spot that the ``C`` object can never leave ``f``
-and replace the above with code like this::
-
-    def f(y):
-        _c__x = y
-        return _c__x
-
-It is rare for code to be directly written in a way that allows this
-optimization to be useful, but inlining often results in opportunities
-for its use (and indeed, this is one of the main reasons PyPy does its
-own inlining rather than relying on the C compilers).
-
-For much more information about this and other optimizations you can
-read section 4.1 of the technical report on "Massive Parallelism and
-Translation Aspects" which you can find on the `Technical reports page
-<../index-report.html>`__.

diff --git a/pypy/doc/config/objspace.std.withmapdict.txt b/pypy/doc/config/objspace.std.withmapdict.txt
deleted file mode 100644
--- a/pypy/doc/config/objspace.std.withmapdict.txt
+++ /dev/null
@@ -1,5 +0,0 @@
-Enable the new version of "sharing dictionaries".
-
-See the section in `Standard Interpreter Optimizations`_ for more details.
-
-.. _`Standard Interpreter Optimizations`: ../interpreter-optimizations.html#sharing-dicts

diff --git a/pypy/doc/config/objspace.std.withstrjoin.txt b/pypy/doc/config/objspace.std.withstrjoin.txt
deleted file mode 100644
--- a/pypy/doc/config/objspace.std.withstrjoin.txt
+++ /dev/null
@@ -1,7 +0,0 @@
-Enable "string join" objects.
-
-See the page about `Standard Interpreter Optimizations`_ for more details.
-
-.. _`Standard Interpreter Optimizations`: ../interpreter-optimizations.html#string-join-objects
-
-

diff --git a/pypy/doc/config/objspace.usemodules.zlib.txt b/pypy/doc/config/objspace.usemodules.zlib.txt
deleted file mode 100644
--- a/pypy/doc/config/objspace.usemodules.zlib.txt
+++ /dev/null
@@ -1,2 +0,0 @@
-Use the 'zlib' module. 
-This module is expected to be working and is included by default.

diff --git a/pypy/doc/config/objspace.usemodules.posix.txt b/pypy/doc/config/objspace.usemodules.posix.txt
deleted file mode 100644
--- a/pypy/doc/config/objspace.usemodules.posix.txt
+++ /dev/null
@@ -1,3 +0,0 @@
-Use the essential 'posix' module.
-This module is essential, included by default and cannot be removed (even when
-specified explicitly, the option gets overridden later).

diff --git a/pypy/doc/config/objspace.usemodules.symbol.txt b/pypy/doc/config/objspace.usemodules.symbol.txt
deleted file mode 100644
--- a/pypy/doc/config/objspace.usemodules.symbol.txt
+++ /dev/null
@@ -1,2 +0,0 @@
-Use the 'symbol' module. 
-This module is expected to be working and is included by default.

diff --git a/pypy/doc/config/translation.backendopt.constfold.txt b/pypy/doc/config/translation.backendopt.constfold.txt
deleted file mode 100644
--- a/pypy/doc/config/translation.backendopt.constfold.txt
+++ /dev/null
@@ -1,1 +0,0 @@
-Do constant folding of operations and constant propagation on flowgraphs.

diff --git a/pypy/jit/metainterp/optimizeopt/rewrite.py b/pypy/jit/metainterp/optimizeopt/rewrite.py
--- a/pypy/jit/metainterp/optimizeopt/rewrite.py
+++ b/pypy/jit/metainterp/optimizeopt/rewrite.py
@@ -155,16 +155,24 @@
             self.emit_operation(op)
 
     def optimize_CALL_PURE(self, op):
+        arg_consts = []
         for i in range(op.numargs()):
             arg = op.getarg(i)
-            if self.get_constant_box(arg) is None:
+            const = self.get_constant_box(arg)
+            if const is None:
                 break
+            arg_consts.append(const)
         else:
-            # all constant arguments: constant-fold away
-            self.make_constant(op.result, op.getarg(0))
-            return
+            # all constant arguments: check if we already know the result
+            try:
+                result = self.optimizer.call_pure_results[arg_consts]
+            except KeyError:
+                pass
+            else:
+                self.make_constant(op.result, result)
+                return
         # replace CALL_PURE with just CALL
-        args = op.getarglist()[1:]
+        args = op.getarglist()
         self.emit_operation(ResOperation(rop.CALL, args, op.result,
                                          op.getdescr()))
 

diff --git a/py/_plugin/__init__.py b/py/_plugin/__init__.py
deleted file mode 100644
--- a/py/_plugin/__init__.py
+++ /dev/null
@@ -1,1 +0,0 @@
-#

diff --git a/py/_test/conftesthandle.py b/py/_test/conftesthandle.py
deleted file mode 100644
--- a/py/_test/conftesthandle.py
+++ /dev/null
@@ -1,113 +0,0 @@
-import py
-
-class Conftest(object):
-    """ the single place for accessing values and interacting 
-        towards conftest modules from py.test objects. 
-
-        (deprecated)
-        Note that triggering Conftest instances to import 
-        conftest.py files may result in added cmdline options. 
-    """ 
-    def __init__(self, onimport=None, confcutdir=None):
-        self._path2confmods = {}
-        self._onimport = onimport
-        self._conftestpath2mod = {}
-        self._confcutdir = confcutdir
-
-    def setinitial(self, args):
-        """ try to find a first anchor path for looking up global values
-            from conftests. This function is usually called _before_  
-            argument parsing.  conftest files may add command line options
-            and we thus have no completely safe way of determining
-            which parts of the arguments are actually related to options
-            and which are file system paths.  We just try here to get 
-            bootstrapped ... 
-        """
-        current = py.path.local()
-        opt = '--confcutdir'
-        for i in range(len(args)):
-            opt1 = str(args[i])
-            if opt1.startswith(opt):
-                if opt1 == opt:
-                    if len(args) > i:
-                        p = current.join(args[i+1], abs=True)
-                elif opt1.startswith(opt + "="):
-                    p = current.join(opt1[len(opt)+1:], abs=1)
-                self._confcutdir = p 
-                break
-        for arg in args + [current]:
-            anchor = current.join(arg, abs=1)
-            if anchor.check(): # we found some file object 
-                self._path2confmods[None] = self.getconftestmodules(anchor)
-                # let's also consider test* dirs 
-                if anchor.check(dir=1):
-                    for x in anchor.listdir(lambda x: x.check(dir=1, dotfile=0)):
-                        self.getconftestmodules(x)
-                break
-        else:
-            assert 0, "no root of filesystem?"
-
-    def getconftestmodules(self, path):
-        """ return a list of imported conftest modules for the given path.  """ 
-        try:
-            clist = self._path2confmods[path]
-        except KeyError:
-            if path is None:
-                raise ValueError("missing default confest.")
-            dp = path.dirpath()
-            if dp == path:
-                clist = []
-            else:
-                cutdir = self._confcutdir
-                clist = self.getconftestmodules(dp)
-                if cutdir and path != cutdir and not path.relto(cutdir):
-                    pass
-                else:
-                    conftestpath = path.join("conftest.py")
-                    if conftestpath.check(file=1):
-                        clist.append(self.importconftest(conftestpath))
-            self._path2confmods[path] = clist
-        # be defensive: avoid changes from caller side to
-        # affect us by always returning a copy of the actual list 
-        return clist[:]
-
-    def rget(self, name, path=None):
-        mod, value = self.rget_with_confmod(name, path)
-        return value
-
-    def rget_with_confmod(self, name, path=None):
-        modules = self.getconftestmodules(path)
-        modules.reverse()
-        for mod in modules:
-            try:
-                return mod, getattr(mod, name)
-            except AttributeError:
-                continue
-        raise KeyError(name)
-
-    def importconftest(self, conftestpath):
-        assert conftestpath.check(), conftestpath
-        try:
-            return self._conftestpath2mod[conftestpath]
-        except KeyError:
-            if not conftestpath.dirpath('__init__.py').check(file=1): 
-                # HACK: we don't want any "globally" imported conftest.py, 
-                #       prone to conflicts and subtle problems 
-                modname = str(conftestpath).replace('.', conftestpath.sep)
-                mod = conftestpath.pyimport(modname=modname)
-            else:
-                mod = conftestpath.pyimport()
-            self._conftestpath2mod[conftestpath] = mod
-            dirpath = conftestpath.dirpath()
-            if dirpath in self._path2confmods:
-                for path, mods in self._path2confmods.items():
-                    if path and path.relto(dirpath) or path == dirpath:
-                        assert mod not in mods
-                        mods.append(mod)
-            self._postimport(mod)
-            return mod
-
-    def _postimport(self, mod):
-        if self._onimport:
-            self._onimport(mod)
-        return mod

diff --git a/pypy/doc/config/translation.debug.txt b/pypy/doc/config/translation.debug.txt
deleted file mode 100644
--- a/pypy/doc/config/translation.debug.txt
+++ /dev/null
@@ -1,2 +0,0 @@
-Record extra debugging information during annotation. This leads to slightly
-less obscure error messages.

diff --git a/pypy/doc/config/objspace.usemodules.thread.txt b/pypy/doc/config/objspace.usemodules.thread.txt
deleted file mode 100644
--- a/pypy/doc/config/objspace.usemodules.thread.txt
+++ /dev/null
@@ -1,1 +0,0 @@
-Use the 'thread' module. 

diff --git a/pypy/config/pypyoption.py b/pypy/config/pypyoption.py
--- a/pypy/config/pypyoption.py
+++ b/pypy/config/pypyoption.py
@@ -32,7 +32,8 @@
      "crypt", "signal", "_rawffi", "termios", "zlib", "bz2",
      "struct", "_hashlib", "_md5", "_sha", "_minimal_curses", "cStringIO",
      "thread", "itertools", "pyexpat", "_ssl", "cpyext", "array",
-     "_bisect", "binascii", "_multiprocessing", '_warnings']
+     "_bisect", "binascii", "_multiprocessing", '_warnings',
+     "_collections"]
 ))
 
 translation_modules = default_modules.copy()
@@ -79,8 +80,7 @@
     "_rawffi": [("objspace.usemodules.struct", True)],
     "cpyext": [("translation.secondaryentrypoints", "cpyext"),
                ("translation.shared", sys.platform == "win32")],
-    "_ffi": [("translation.jit_ffi", True)],
-    }
+}
 
 module_import_dependencies = {
     # no _rawffi if importing pypy.rlib.clibffi raises ImportError
@@ -351,7 +351,7 @@
         config.objspace.std.suggest(builtinshortcut=True)
         config.objspace.std.suggest(optimized_list_getitem=True)
         config.objspace.std.suggest(getattributeshortcut=True)
-        config.objspace.std.suggest(newshortcut=True)        
+        config.objspace.std.suggest(newshortcut=True)
         if not IS_64_BITS:
             config.objspace.std.suggest(withsmalllong=True)
 

diff --git a/pypy/doc/config/translation.make_jobs.txt b/pypy/doc/config/translation.make_jobs.txt
deleted file mode 100644
--- a/pypy/doc/config/translation.make_jobs.txt
+++ /dev/null
@@ -1,1 +0,0 @@
-Specify number of make jobs for make command.

diff --git a/pypy/doc/config/objspace.usemodules.rctime.txt b/pypy/doc/config/objspace.usemodules.rctime.txt
deleted file mode 100644
--- a/pypy/doc/config/objspace.usemodules.rctime.txt
+++ /dev/null
@@ -1,7 +0,0 @@
-Use the 'rctime' module. 
-
-'rctime' is our `rffi`_ based implementation of the builtin 'time' module.
-It supersedes the less complete :config:`objspace.usemodules.time`,
-at least for C-like targets (the C and LLVM backends).
-
-.. _`rffi`: ../rffi.html

diff --git a/pypy/doc/config/objspace.usemodules._sre.txt b/pypy/doc/config/objspace.usemodules._sre.txt
deleted file mode 100644
--- a/pypy/doc/config/objspace.usemodules._sre.txt
+++ /dev/null
@@ -1,2 +0,0 @@
-Use the '_sre' module. 
-This module is expected to be working and is included by default.


More information about the Pypy-commit mailing list