[pypy-commit] pypy py3k: merge default (phew)

pjenvey noreply at buildbot.pypy.org
Tue Mar 26 02:09:26 CET 2013


Author: Philip Jenvey <pjenvey at underboss.org>
Branch: py3k
Changeset: r62779:c769fa58fbef
Date: 2013-03-25 18:08 -0700
http://bitbucket.org/pypy/pypy/changeset/c769fa58fbef/

Log:	merge default (phew)

diff too long, truncating to 2000 out of 23592 lines

diff --git a/lib-python/2/collections.py b/lib-python/2/collections.py
--- a/lib-python/2/collections.py
+++ b/lib-python/2/collections.py
@@ -12,6 +12,11 @@
 import heapq as _heapq
 from itertools import repeat as _repeat, chain as _chain, starmap as _starmap
 from itertools import imap as _imap
+try:
+    from __pypy__ import newdict
+except ImportError:
+    assert '__pypy__' not in _sys.builtin_module_names
+    newdict = lambda _ : {}
 
 try:
     from thread import get_ident as _get_ident
@@ -326,8 +331,11 @@
 
     # Execute the template string in a temporary namespace and
     # support tracing utilities by setting a value for frame.f_globals['__name__']
-    namespace = dict(__name__='namedtuple_%s' % typename,
-                     OrderedDict=OrderedDict, _property=property, _tuple=tuple)
+    namespace = newdict('module')
+    namespace['OrderedDict'] = OrderedDict
+    namespace['_property'] = property
+    namespace['_tuple'] = tuple
+    namespace['__name__'] = 'namedtuple_%s' % typename
     try:
         exec template in namespace
     except SyntaxError, e:
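
(Not part of the changeset: a minimal sketch of the guarded-import pattern the hunk above -- and the sre_parse.py hunk below -- now uses. It assumes only what the diff shows: on PyPy, ``__pypy__.newdict('module')`` returns a dict specialised for module-like namespaces; on any other interpreter the import fails and a plain dict is used.)

    import sys

    try:
        # PyPy only: ask for a dict specialised for module namespaces
        from __pypy__ import newdict
    except ImportError:
        # on any other interpreter the module really must be absent
        assert '__pypy__' not in sys.builtin_module_names
        newdict = lambda _: {}

    namespace = newdict('module')
    namespace['__name__'] = 'namedtuple_Point'   # hypothetical typename
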
diff --git a/lib-python/2/pickle.py b/lib-python/2/pickle.py
--- a/lib-python/2/pickle.py
+++ b/lib-python/2/pickle.py
@@ -34,8 +34,6 @@
 import struct
 import re
 
-from __pypy__.builders import StringBuilder
-
 __all__ = ["PickleError", "PicklingError", "UnpicklingError", "Pickler",
            "Unpickler", "dump", "dumps", "load", "loads"]
 
@@ -1411,24 +1409,11 @@
 except ImportError:
     from StringIO import StringIO
 
-
-class StringBuilderFile(object):
-    ''' pickle uses only file.write - provide this method, 
-    use StringBuilder for speed
-    '''
-    def __init__(self):
-        self.builder = StringBuilder()
-        self.write = self.builder.append
-
-    def getvalue(self):
-        return self.builder.build()
-
-
 def dump(obj, file, protocol=None):
     Pickler(file, protocol).dump(obj)
 
 def dumps(obj, protocol=None):
-    file = StringBuilderFile()
+    file = StringIO()
     Pickler(file, protocol).dump(obj)
     return file.getvalue()
 
diff --git a/lib-python/2/sre_parse.py b/lib-python/2/sre_parse.py
--- a/lib-python/2/sre_parse.py
+++ b/lib-python/2/sre_parse.py
@@ -19,8 +19,8 @@
 try:
     from __pypy__ import newdict
 except ImportError:
-    def newdict(tp):
-        return {}
+    assert '__pypy__' not in sys.builtin_module_names
+    newdict = lambda _ : {}
 
 SPECIAL_CHARS = ".\\[{()*+?^$|"
 REPEAT_CHARS = "*+?{"
diff --git a/lib_pypy/tputil.py b/lib_pypy/tputil.py
--- a/lib_pypy/tputil.py
+++ b/lib_pypy/tputil.py
@@ -1,69 +1,69 @@
 """
 
-application level support module for transparent proxies. 
+application level support module for transparent proxies.
 
 """
-from __pypy__ import tproxy 
+from __pypy__ import tproxy
 from types import MethodType
 
 _dummy = object()
 origtype = type
 
-def make_proxy(controller, type=_dummy, obj=_dummy): 
-    """ return a tranparent proxy controlled by the given 
-        'controller' callable.  The proxy will appear 
-        as a completely regular instance of the given 
-        type but all operations on it are send to the 
-        specified controller - which receives on 
-        ProxyOperation instance on each such call.  
-        A non-specified type will default to type(obj) 
-        if obj is specified. 
+def make_proxy(controller, type=_dummy, obj=_dummy):
+    """ return a tranparent proxy controlled by the given
+        'controller' callable.  The proxy will appear
+        as a completely regular instance of the given
+        type but all operations on it are send to the
+        specified controller - which receives on
+        ProxyOperation instance on each such call.
+        A non-specified type will default to type(obj)
+        if obj is specified.
     """
-    if type is _dummy: 
-        if obj is _dummy: 
-            raise TypeError("you must specify a type or an instance obj of it") 
-        type = origtype(obj) 
+    if type is _dummy:
+        if obj is _dummy:
+            raise TypeError("you must specify a type or an instance obj of it")
+        type = origtype(obj)
     def perform(opname, *args, **kwargs):
         operation = ProxyOperation(tp, obj, opname, args, kwargs)
-        return controller(operation) 
-    tp = tproxy(type, perform) 
-    return tp 
+        return controller(operation)
+    tp = tproxy(type, perform)
+    return tp
 
 class ProxyOperation(object):
     def __init__(self, proxyobj, obj, opname, args, kwargs):
         self.proxyobj = proxyobj
-        self.opname = opname 
+        self.opname = opname
         self.args = args
         self.kwargs = kwargs
-        if obj is not _dummy: 
-            self.obj = obj 
+        if obj is not _dummy:
+            self.obj = obj
 
     def delegate(self):
-        """ return result from delegating this operation to the 
-            underyling self.obj - which must exist and is usually 
-            provided through the initial make_proxy(..., obj=...) 
-            creation. 
-        """ 
+        """ return result from delegating this operation to the
+            underyling self.obj - which must exist and is usually
+            provided through the initial make_proxy(..., obj=...)
+            creation.
+        """
         try:
             obj = getattr(self, 'obj')
-        except AttributeError: 
+        except AttributeError:
             raise TypeError("proxy does not have an underlying 'obj', "
                             "cannot delegate")
-        objattr = getattr(obj, self.opname) 
-        res = objattr(*self.args, **self.kwargs) 
-        if self.opname == "__getattribute__": 
+        objattr = getattr(obj, self.opname)
+        res = objattr(*self.args, **self.kwargs)
+        if self.opname == "__getattribute__":
             if (isinstance(res, MethodType) and
                 res.__self__ is self.instance):
                 res = MethodType(res.__func__, self.proxyobj, res.__self__.__class__)
-        if res is self.obj: 
+        if res is self.obj:
             res = self.proxyobj
-        return res 
+        return res
 
     def __repr__(self):
         args = ", ".join([repr(x) for x in self.args])
-        args = "<0x%x>, " % id(self.proxyobj) + args 
+        args = "<0x%x>, " % id(self.proxyobj) + args
         if self.kwargs:
-            args += ", ".join(["%s=%r" % item 
+            args += ", ".join(["%s=%r" % item
                                   for item in self.kwargs.items()])
         return "<ProxyOperation %s.%s(%s)>" %(
                     type(self.proxyobj).__name__, self.opname, args)
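
(Not part of the changeset -- the tputil.py hunk is a whitespace cleanup -- but since this module is the whole app-level API for transparent proxies, a short usage sketch may help. It follows the make_proxy docstring and the example in PyPy's own documentation, and only runs on a PyPy built with the withtproxy option, which the pypyoption.py hunk below keeps enabled by default:)

    from tputil import make_proxy

    history = []
    def recorder(operation):
        history.append(operation.opname)   # e.g. '__getattribute__', '__len__'
        return operation.delegate()        # forward to the real list underneath

    plist = make_proxy(recorder, obj=[])
    plist.append(3)                        # the proxy behaves like a plain list
    assert len(plist) == 1 and len(history) >= 2
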
diff --git a/pypy/bin/checkmodule.py b/pypy/bin/checkmodule.py
--- a/pypy/bin/checkmodule.py
+++ b/pypy/bin/checkmodule.py
@@ -33,7 +33,7 @@
         modname = os.path.basename(modname)
     try:
         checkmodule(modname)
-    except Exception, e:
+    except Exception:
         import traceback, pdb
         traceback.print_exc()
         pdb.post_mortem(sys.exc_info()[2])
diff --git a/pypy/bin/dotviewer.py b/pypy/bin/dotviewer.py
--- a/pypy/bin/dotviewer.py
+++ b/pypy/bin/dotviewer.py
@@ -4,6 +4,7 @@
 Run with no arguments for help.
 """
 
+import os
 import sys
 sys.path.insert(0, os.path.realpath(os.path.join(os.path.dirname(__file__), '..', '..')))
 from dotviewer.dotviewer import main
diff --git a/pypy/bin/pyinteractive.py b/pypy/bin/pyinteractive.py
--- a/pypy/bin/pyinteractive.py
+++ b/pypy/bin/pyinteractive.py
@@ -6,14 +6,13 @@
 
 """
 
-import os, sys
+import os
+import sys
 import time
 
 sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', '..'))
 
-import pypy
 from pypy.tool import option
-from optparse import make_option
 from pypy.interpreter import main, interactive, error, gateway
 from rpython.config.config import OptionDescription, BoolOption, StrOption
 from rpython.config.config import Config, to_optparse
diff --git a/pypy/bin/reportstaticdata.py b/pypy/bin/reportstaticdata.py
--- a/pypy/bin/reportstaticdata.py
+++ b/pypy/bin/reportstaticdata.py
@@ -61,7 +61,6 @@
 
 
 def main():
-    import sys
     try:
         kwds = parse_options(sys.argv[1:])
     except AssertionError:
diff --git a/pypy/config/pypyoption.py b/pypy/config/pypyoption.py
--- a/pypy/config/pypyoption.py
+++ b/pypy/config/pypyoption.py
@@ -188,11 +188,6 @@
         BoolOption("withtproxy", "support transparent proxies",
                    default=True),
 
-        BoolOption("withsmallint", "use tagged integers",
-                   default=False,
-                   requires=[("objspace.std.withprebuiltint", False),
-                             ("translation.taggedpointers", True)]),
-
         BoolOption("withprebuiltint", "prebuild commonly used int objects",
                    default=False),
 
@@ -203,9 +198,7 @@
                   default=100, cmdline="--prebuiltintto"),
 
         BoolOption("withsmalllong", "use a version of 'long' in a C long long",
-                   default=False,
-                   requires=[("objspace.std.withsmallint", False)]),
-                             #  ^^^ because of missing delegate_xx2yy
+                   default=False),
 
         BoolOption("withstrbuf", "use strings optimized for addition (ver 2)",
                    default=False),
diff --git a/pypy/config/test/test_makerestdoc.py b/pypy/config/test/test_makerestdoc.py
--- a/pypy/config/test/test_makerestdoc.py
+++ b/pypy/config/test/test_makerestdoc.py
@@ -1,6 +1,7 @@
+import py
+
 from rpython.config.config import *
 from pypy.config.makerestdoc import make_cmdline_overview
-
 from pypy.tool.rest.rest import process as restcheck
 
 tempdir = py.test.ensuretemp('config')
@@ -19,7 +20,7 @@
 def generate_html(descr):
     config = Config(descr)
     txt = descr.make_rest_doc().text()
-    
+
     result = {"": txt}
     for path in config.getpaths(include_groups=True):
         subconf, step = config._cfgimpl_get_home_by_path(path)
diff --git a/pypy/doc/discussion/improve-rpython.rst b/pypy/doc/discussion/improve-rpython.rst
--- a/pypy/doc/discussion/improve-rpython.rst
+++ b/pypy/doc/discussion/improve-rpython.rst
@@ -8,7 +8,7 @@
   implement a pypy module. A typical rpython file is likely to contain many
   `import` statements::
 
-    from pypy.interpreter.baseobjspace import Wrappable
+    from pypy.interpreter.baseobjspace import W_Root
     from pypy.interpreter.gateway import ObjSpace, W_Root
     from pypy.interpreter.argument import Arguments
     from pypy.interpreter.typedef import TypeDef, GetSetProperty
@@ -19,7 +19,7 @@
 
 - A more direct declarative way to write Typedef::
 
-    class W_Socket(Wrappable):
+    class W_Socket(W_Root):
         _typedef_name_ = 'socket'
         _typedef_base_ = W_EventualBaseClass
 
diff --git a/pypy/doc/getting-started-dev.rst b/pypy/doc/getting-started-dev.rst
--- a/pypy/doc/getting-started-dev.rst
+++ b/pypy/doc/getting-started-dev.rst
@@ -107,8 +107,7 @@
 
 There is a small-to-medium demo showing the translator and the annotator::
 
-    cd demo
-    ../rpython/translator/goal/translate.py --view --annotate bpnn.py
+    python bin/rpython --view --annotate translator/goal/bpnn.py
 
 This causes ``bpnn.py`` to display itself as a call graph and class
 hierarchy.  Clicking on functions shows the flow graph of the particular
@@ -119,7 +118,7 @@
 To turn this example to C code (compiled to the executable ``bpnn-c``),
 type simply::
 
-    ../rpython/translator/goal/translate.py bpnn.py
+    python bin/rpython translator/goal/bpnn.py
 
 
 Translating Full Programs
@@ -129,8 +128,7 @@
 ``rpython/translator/goal``. Examples for this are a slightly changed version of
 Pystone::
 
-    cd rpython/translator/goal
-    python translate.py targetrpystonedalone
+    python bin/rpython translator/goal/targetrpystonedalone
 
 This will produce the executable "targetrpystonedalone-c".
 
@@ -138,6 +136,17 @@
 interpreter`_. There is also an FAQ about how to set up this process for `your
 own interpreters`_.
 
+There are several environment variables you can find useful while playing with RPython:
+
+``PYPY_USESSION_DIR``
+    RPython uses temporary session directories to store files that are generated during the
+    translation process (e.g., translated C files). ``PYPY_USESSION_DIR`` serves as a base directory for these session
+    dirs. The default value for this variable is the system's temporary dir.
+
+``PYPY_USESSION_KEEP``
+    By default RPython keeps only the last ``PYPY_USESSION_KEEP`` (defaults to 3) session dirs inside ``PYPY_USESSION_DIR``.
+    Increase this value if you want to preserve C files longer (useful when producing lots of lldebug builds).
+
 .. _`your own interpreters`: faq.html#how-do-i-compile-my-own-interpreters
 
 .. _`start reading sources`: 
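
(Not part of the changeset: a hedged sketch of driving the translator with the two environment variables documented above. The paths are hypothetical and the command simply mirrors the updated docs; it is assumed to be run from a checkout's rpython/ directory:)

    import os
    import subprocess

    # keep generated C sources somewhere persistent, and keep more sessions around
    os.environ['PYPY_USESSION_DIR'] = '/tmp/my-usessions'   # hypothetical directory
    os.environ['PYPY_USESSION_KEEP'] = '10'                  # the default is 3

    # same invocation the updated docs above show
    subprocess.check_call(['python', 'bin/rpython', 'translator/goal/bpnn.py'])
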
diff --git a/pypy/doc/interpreter.rst b/pypy/doc/interpreter.rst
--- a/pypy/doc/interpreter.rst
+++ b/pypy/doc/interpreter.rst
@@ -1,5 +1,5 @@
 ===================================
-Bytecode Interpreter 
+Bytecode Interpreter
 ===================================
 
 .. contents::
@@ -9,8 +9,8 @@
 Introduction and Overview
 ===============================
 
-This document describes the implementation of PyPy's 
-Bytecode Interpreter and related Virtual Machine functionalities. 
+This document describes the implementation of PyPy's
+Bytecode Interpreter and related Virtual Machine functionalities.
 
 PyPy's bytecode interpreter has a structure reminiscent of CPython's
 Virtual Machine: It processes code objects parsed and compiled from
@@ -32,14 +32,14 @@
 translated with the rest of PyPy.
 
 Code objects contain
-condensed information about their respective functions, class and 
+condensed information about their respective functions, class and
 module body source codes.  Interpreting such code objects means
 instantiating and initializing a `Frame class`_ and then
-calling its ``frame.eval()`` method.  This main entry point 
-initialize appropriate namespaces and then interprets each 
+calling its ``frame.eval()`` method.  This main entry point
+initialize appropriate namespaces and then interprets each
 bytecode instruction.  Python's standard library contains
-the `lib-python/2.7/dis.py`_ module which allows to inspection 
-of the virtual machine's bytecode instructions:: 
+the `lib-python/2.7/dis.py`_ module which allows to inspection
+of the virtual machine's bytecode instructions::
 
     >>> import dis
     >>> def f(x):
@@ -47,103 +47,103 @@
     >>> dis.dis(f)
     2         0 LOAD_FAST                0 (x)
               3 LOAD_CONST               1 (1)
-              6 BINARY_ADD          
-              7 RETURN_VALUE        
+              6 BINARY_ADD
+              7 RETURN_VALUE
 
 CPython and PyPy are stack-based virtual machines, i.e.
 they don't have registers but instead push object to and pull objects
 from a stack.  The bytecode interpreter is only responsible
 for implementing control flow and pushing and pulling black
-box objects to and from this value stack.  The bytecode interpreter 
+box objects to and from this value stack.  The bytecode interpreter
 does not know how to perform operations on those black box
 (`wrapped`_) objects for which it delegates to the `object
 space`_.  In order to implement a conditional branch in a program's
 execution, however, it needs to gain minimal knowledge about a
 wrapped object.  Thus, each object space has to offer a
 ``is_true(w_obj)`` operation which returns an
-interpreter-level boolean value.  
+interpreter-level boolean value.
 
 For the understanding of the interpreter's inner workings it
 is crucial to recognize the concepts of `interpreter-level and
 application-level`_ code.  In short, interpreter-level is executed
-directly on the machine and invoking application-level functions 
-leads to an bytecode interpretation indirection. However, 
+directly on the machine and invoking application-level functions
+leads to an bytecode interpretation indirection. However,
 special care must be taken regarding exceptions because
-application level exceptions are wrapped into ``OperationErrors`` 
-which are thus distinguished from plain interpreter-level exceptions. 
+application level exceptions are wrapped into ``OperationErrors``
+which are thus distinguished from plain interpreter-level exceptions.
 See `application level exceptions`_ for some more information
-on ``OperationErrors``. 
+on ``OperationErrors``.
 
 The interpreter implementation offers mechanisms to allow a
-caller to be unaware of whether a particular function invocation 
+caller to be unaware of whether a particular function invocation
 leads to bytecode interpretation or is executed directly at
 interpreter-level.  The two basic kinds of `Gateway classes`_
 expose either an interpreter-level function to
 application-level execution (``interp2app``) or allow
 transparent invocation of application-level helpers
-(``app2interp``) at interpreter-level. 
+(``app2interp``) at interpreter-level.
 
-Another task of the bytecode interpreter is to care for exposing its 
-basic code, frame, module and function objects to application-level 
-code.  Such runtime introspection and modification abilities are 
-implemented via `interpreter descriptors`_ (also see Raymond Hettingers 
-`how-to guide for descriptors`_ in Python, PyPy uses this model extensively). 
+Another task of the bytecode interpreter is to care for exposing its
+basic code, frame, module and function objects to application-level
+code.  Such runtime introspection and modification abilities are
+implemented via `interpreter descriptors`_ (also see Raymond Hettingers
+`how-to guide for descriptors`_ in Python, PyPy uses this model extensively).
 
-A significant complexity lies in `function argument parsing`_.  Python as a 
-language offers flexible ways of providing and receiving arguments 
-for a particular function invocation.  Not only does it take special care 
+A significant complexity lies in `function argument parsing`_.  Python as a
+language offers flexible ways of providing and receiving arguments
+for a particular function invocation.  Not only does it take special care
 to get this right, it also presents difficulties for the `annotation
 pass`_ which performs a whole-program analysis on the
 bytecode interpreter, argument parsing and gatewaying code
 in order to infer the types of all values flowing across function
-calls. 
+calls.
 
 It is for this reason that PyPy resorts to generate
 specialized frame classes and functions at `initialization
-time`_ in order to let the annotator only see rather static 
-program flows with homogeneous name-value assignments on 
-function invocations. 
+time`_ in order to let the annotator only see rather static
+program flows with homogeneous name-value assignments on
+function invocations.
 
 .. _`how-to guide for descriptors`: http://users.rcn.com/python/download/Descriptor.htm
 .. _`annotation pass`: translation.html#the-annotation-pass
 .. _`initialization time`: translation.html#initialization-time
-.. _`interpreter-level and application-level`: coding-guide.html#interpreter-level 
+.. _`interpreter-level and application-level`: coding-guide.html#interpreter-level
 .. _`wrapped`: coding-guide.html#wrapping-rules
 .. _`object space`: objspace.html
 .. _`application level exceptions`: coding-guide.html#applevel-exceptions
 .. _`here`: coding-guide.html#modules
 
 
-Bytecode Interpreter Implementation Classes  
+Bytecode Interpreter Implementation Classes
 ================================================
 
-.. _`Frame class`: 
-.. _`Frame`: 
+.. _`Frame class`:
+.. _`Frame`:
 
 Frame classes
 -----------------
 
-The concept of Frames is pervasive in executing programs and 
+The concept of Frames is pervasive in executing programs and
 on virtual machines in particular. They are sometimes called
 *execution frame* because they hold crucial information
 regarding the execution of a Code_ object, which in turn is
 often directly related to a Python `Function`_.  Frame
-instances hold the following state: 
+instances hold the following state:
 
-- the local scope holding name-value bindings, usually implemented 
+- the local scope holding name-value bindings, usually implemented
   via a "fast scope" which is an array of wrapped objects
 
 - a blockstack containing (nested) information regarding the
-  control flow of a function (such as ``while`` and ``try`` constructs) 
+  control flow of a function (such as ``while`` and ``try`` constructs)
 
 - a value stack where bytecode interpretation pulls object
   from and puts results on.
 
 - a reference to the *globals* dictionary, containing
-  module-level name-value bindings 
+  module-level name-value bindings
 
-- debugging information from which a current line-number and 
-  file location can be constructed for tracebacks 
+- debugging information from which a current line-number and
+  file location can be constructed for tracebacks
 
 Moreover the Frame class itself has a number of methods which implement
 the actual bytecodes found in a code object.  The methods of the ``PyFrame``
@@ -156,104 +156,104 @@
 - nested scope support is added to the ``PyFrame`` class in
   `pypy/interpreter/nestedscope.py`_.
 
-.. _Code: 
+.. _Code:
 
-Code Class 
------------- 
+Code Class
+------------
 
-PyPy's code objects contain the same information found in CPython's code objects. 
-They differ from Function_ objects in that they are only immutable representations 
+PyPy's code objects contain the same information found in CPython's code objects.
+They differ from Function_ objects in that they are only immutable representations
 of source code and don't contain execution state or references to the execution
-environment found in `Frames`.  Frames and Functions have references 
+environment found in `Frames`.  Frames and Functions have references
 to a code object. Here is a list of Code attributes:
 
-* ``co_flags`` flags if this code object has nested scopes/generators 
+* ``co_flags`` flags if this code object has nested scopes/generators
 * ``co_stacksize`` the maximum depth the stack can reach while executing the code
-* ``co_code`` the actual bytecode string 
- 
-* ``co_argcount`` number of arguments this code object expects 
+* ``co_code`` the actual bytecode string
+
+* ``co_argcount`` number of arguments this code object expects
 * ``co_varnames`` a tuple of all argument names pass to this code object
-* ``co_nlocals`` number of local variables 
+* ``co_nlocals`` number of local variables
 * ``co_names`` a tuple of all names used in the code object
-* ``co_consts`` a tuple of prebuilt constant objects ("literals") used in the code object 
-* ``co_cellvars`` a tuple of Cells containing values for access from nested scopes 
-* ``co_freevars`` a tuple of Cell names from "above" scopes 
- 
-* ``co_filename`` source file this code object was compiled from 
-* ``co_firstlineno`` the first linenumber of the code object in its source file 
-* ``co_name`` name of the code object (often the function name) 
-* ``co_lnotab`` a helper table to compute the line-numbers corresponding to bytecodes 
+* ``co_consts`` a tuple of prebuilt constant objects ("literals") used in the code object
+* ``co_cellvars`` a tuple of Cells containing values for access from nested scopes
+* ``co_freevars`` a tuple of Cell names from "above" scopes
+
+* ``co_filename`` source file this code object was compiled from
+* ``co_firstlineno`` the first linenumber of the code object in its source file
+* ``co_name`` name of the code object (often the function name)
+* ``co_lnotab`` a helper table to compute the line-numbers corresponding to bytecodes
 
 In PyPy, code objects also have the responsibility of creating their Frame_ objects
 via the `'create_frame()`` method.  With proper parser and compiler support this would
 allow to create custom Frame objects extending the execution of functions
 in various ways.  The several Frame_ classes already utilize this flexibility
-in order to implement Generators and Nested Scopes. 
+in order to implement Generators and Nested Scopes.
 
-.. _Function: 
+.. _Function:
 
 Function and Method classes
 ----------------------------
 
-The PyPy ``Function`` class (in `pypy/interpreter/function.py`_) 
-represents a Python function.  A ``Function`` carries the following 
-main attributes: 
+The PyPy ``Function`` class (in `pypy/interpreter/function.py`_)
+represents a Python function.  A ``Function`` carries the following
+main attributes:
 
-* ``func_doc`` the docstring (or None) 
-* ``func_name`` the name of the function 
-* ``func_code`` the Code_ object representing the function source code 
+* ``func_doc`` the docstring (or None)
+* ``func_name`` the name of the function
+* ``func_code`` the Code_ object representing the function source code
 * ``func_defaults`` default values for the function (built at function definition time)
-* ``func_dict`` dictionary for additional (user-defined) function attributes 
-* ``func_globals`` reference to the globals dictionary 
-* ``func_closure`` a tuple of Cell references  
+* ``func_dict`` dictionary for additional (user-defined) function attributes
+* ``func_globals`` reference to the globals dictionary
+* ``func_closure`` a tuple of Cell references
 
 ``Functions`` classes also provide a ``__get__`` descriptor which creates a Method
 object holding a binding to an instance or a class.  Finally, ``Functions``
-and ``Methods`` both offer a ``call_args()`` method which executes 
-the function given an `Arguments`_ class instance. 
+and ``Methods`` both offer a ``call_args()`` method which executes
+the function given an `Arguments`_ class instance.
 
-.. _Arguments: 
-.. _`function argument parsing`: 
+.. _Arguments:
+.. _`function argument parsing`:
 
-Arguments Class 
--------------------- 
+Arguments Class
+--------------------
 
 The Argument class (in `pypy/interpreter/argument.py`_) is
-responsible for parsing arguments passed to functions.  
+responsible for parsing arguments passed to functions.
 Python has rather complex argument-passing concepts:
 
-- positional arguments 
+- positional arguments
 
-- keyword arguments specified by name 
+- keyword arguments specified by name
 
 - default values for positional arguments, defined at function
-  definition time 
+  definition time
 
 - "star args" allowing a function to accept remaining
-  positional arguments 
+  positional arguments
 
-- "star keyword args" allow a function to accept additional 
-  arbitrary name-value bindings 
+- "star keyword args" allow a function to accept additional
+  arbitrary name-value bindings
 
-Moreover, a Function_ object can get bound to a class or instance 
+Moreover, a Function_ object can get bound to a class or instance
 in which case the first argument to the underlying function becomes
-the bound object.  The ``Arguments`` provides means to allow all 
-this argument parsing and also cares for error reporting. 
+the bound object.  The ``Arguments`` provides means to allow all
+this argument parsing and also cares for error reporting.
 
 
-.. _`Module`: 
+.. _`Module`:
 
-Module Class 
-------------------- 
+Module Class
+-------------------
 
-A ``Module`` instance represents execution state usually constructed 
+A ``Module`` instance represents execution state usually constructed
 from executing the module's source file.  In addition to such a module's
-global ``__dict__`` dictionary it has the following application level 
-attributes: 
+global ``__dict__`` dictionary it has the following application level
+attributes:
 
 * ``__doc__`` the docstring of the module
-* ``__file__`` the source filename from which this module was instantiated 
-* ``__path__`` state used for relative imports 
+* ``__file__`` the source filename from which this module was instantiated
+* ``__path__`` state used for relative imports
 
 Apart from the basic Module used for importing
 application-level files there is a more refined
@@ -262,80 +262,80 @@
 level and at interpreter level.  See the ``__builtin__``
 module's `pypy/module/__builtin__/__init__.py`_ file for an
 example and the higher level `chapter on Modules in the coding
-guide`_. 
+guide`_.
 
 .. _`__builtin__ module`: https://bitbucket.org/pypy/pypy/src/tip/pypy/module/__builtin__/
-.. _`chapter on Modules in the coding guide`: coding-guide.html#modules 
+.. _`chapter on Modules in the coding guide`: coding-guide.html#modules
 
-.. _`Gateway classes`: 
+.. _`Gateway classes`:
 
-Gateway classes 
----------------------- 
+Gateway classes
+----------------------
 
 A unique PyPy property is the ability to easily cross the barrier
 between interpreted and machine-level code (often referred to as
-the difference between `interpreter-level and application-level`_). 
-Be aware that the according code (in `pypy/interpreter/gateway.py`_) 
+the difference between `interpreter-level and application-level`_).
+Be aware that the according code (in `pypy/interpreter/gateway.py`_)
 for crossing the barrier in both directions is somewhat
 involved, mostly due to the fact that the type-inferring
 annotator needs to keep track of the types of objects flowing
-across those barriers. 
+across those barriers.
 
 .. _typedefs:
 
 Making interpreter-level functions available at application-level
 +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
 
-In order to make an interpreter-level function available at 
-application level, one invokes ``pypy.interpreter.gateway.interp2app(func)``. 
-Such a function usually takes a ``space`` argument and any number 
+In order to make an interpreter-level function available at
+application level, one invokes ``pypy.interpreter.gateway.interp2app(func)``.
+Such a function usually takes a ``space`` argument and any number
 of positional arguments. Additionally, such functions can define
 an ``unwrap_spec`` telling the ``interp2app`` logic how
 application-level provided arguments should be unwrapped
-before the actual interpreter-level function is invoked. 
-For example, `interpreter descriptors`_ such as the ``Module.__new__`` 
-method for allocating and constructing a Module instance are 
-defined with such code:: 
+before the actual interpreter-level function is invoked.
+For example, `interpreter descriptors`_ such as the ``Module.__new__``
+method for allocating and constructing a Module instance are
+defined with such code::
 
     Module.typedef = TypeDef("module",
         __new__ = interp2app(Module.descr_module__new__.im_func,
                              unwrap_spec=[ObjSpace, W_Root, Arguments]),
         __init__ = interp2app(Module.descr_module__init__),
                         # module dictionaries are readonly attributes
-        __dict__ = GetSetProperty(descr_get_dict, cls=Module), 
-        __doc__ = 'module(name[, doc])\n\nCreate a module object...' 
+        __dict__ = GetSetProperty(descr_get_dict, cls=Module),
+        __doc__ = 'module(name[, doc])\n\nCreate a module object...'
         )
 
-The actual ``Module.descr_module__new__`` interpreter-level method 
-referenced from the ``__new__`` keyword argument above is defined 
-like this:: 
+The actual ``Module.descr_module__new__`` interpreter-level method
+referenced from the ``__new__`` keyword argument above is defined
+like this::
 
     def descr_module__new__(space, w_subtype, __args__):
         module = space.allocate_instance(Module, w_subtype)
         Module.__init__(module, space, None)
         return space.wrap(module)
 
-Summarizing, the ``interp2app`` mechanism takes care to route 
-an application level access or call to an internal interpreter-level 
+Summarizing, the ``interp2app`` mechanism takes care to route
+an application level access or call to an internal interpreter-level
 object appropriately to the descriptor, providing enough precision
-and hints to keep the type-inferring annotator happy. 
+and hints to keep the type-inferring annotator happy.
 
 
-Calling into application level code from interpreter-level 
+Calling into application level code from interpreter-level
 +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
 
-Application level code is `often preferable`_. Therefore, 
-we often like to invoke application level code from interpreter-level. 
+Application level code is `often preferable`_. Therefore,
+we often like to invoke application level code from interpreter-level.
 This is done via the Gateway's ``app2interp`` mechanism
-which we usually invoke at definition time in a module. 
-It generates a hook which looks like an interpreter-level 
-function accepting a space and an arbitrary number of arguments. 
-When calling a function at interpreter-level the caller side 
+which we usually invoke at definition time in a module.
+It generates a hook which looks like an interpreter-level
+function accepting a space and an arbitrary number of arguments.
+When calling a function at interpreter-level the caller side
 does usually not need to be aware if its invoked function
 is run through the PyPy interpreter or if it will directly
-execute on the machine (after translation). 
+execute on the machine (after translation).
 
-Here is an example showing how we implement the Metaclass 
+Here is an example showing how we implement the Metaclass
 finding algorithm of the Python language in PyPy::
 
     app = gateway.applevel(r'''
@@ -359,9 +359,9 @@
 
     find_metaclass  = app.interphook('find_metaclass')
 
-The ``find_metaclass`` interpreter-level hook is invoked 
+The ``find_metaclass`` interpreter-level hook is invoked
 with five arguments from the ``BUILD_CLASS`` opcode implementation
-in `pypy/interpreter/pyopcode.py`_:: 
+in `pypy/interpreter/pyopcode.py`_::
 
     def BUILD_CLASS(f):
         w_methodsdict = f.valuestack.pop()
@@ -374,32 +374,32 @@
                                            w_bases, w_methodsdict)
         f.valuestack.push(w_newclass)
 
-Note that at a later point we can rewrite the ``find_metaclass`` 
-implementation at interpreter-level and we would not have 
-to modify the calling side at all. 
+Note that at a later point we can rewrite the ``find_metaclass``
+implementation at interpreter-level and we would not have
+to modify the calling side at all.
 
 .. _`often preferable`: coding-guide.html#app-preferable
-.. _`interpreter descriptors`: 
+.. _`interpreter descriptors`:
 
-Introspection and Descriptors 
+Introspection and Descriptors
 ------------------------------
 
-Python traditionally has a very far-reaching introspection model 
+Python traditionally has a very far-reaching introspection model
 for bytecode interpreter related objects. In PyPy and in CPython read
-and write accesses to such objects are routed to descriptors. 
+and write accesses to such objects are routed to descriptors.
 Of course, in CPython those are implemented in ``C`` while in
-PyPy they are implemented in interpreter-level Python code. 
+PyPy they are implemented in interpreter-level Python code.
 
 All instances of a Function_, Code_, Frame_ or Module_ classes
-are also ``Wrappable`` instances which means they can be represented 
+are also ``W_Root`` instances which means they can be represented
 at application level.  These days, a PyPy object space needs to
 work with a basic descriptor lookup when it encounters
 accesses to an interpreter-level object:  an object space asks
-a wrapped object for its type via a ``getclass`` method and then 
-calls the type's ``lookup(name)`` function in order to receive a descriptor 
+a wrapped object for its type via a ``getclass`` method and then
+calls the type's ``lookup(name)`` function in order to receive a descriptor
 function.  Most of PyPy's internal object descriptors are defined at the
-end of `pypy/interpreter/typedef.py`_.  You can use these definitions 
-as a reference for the exact attributes of interpreter classes visible 
-at application level. 
+end of `pypy/interpreter/typedef.py`_.  You can use these definitions
+as a reference for the exact attributes of interpreter classes visible
+at application level.
 
 .. include:: _ref.txt
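
(Not part of the changeset: the closing paragraph above describes the getclass/lookup dance without spelling it out. A self-contained toy model, using the names from the prose rather than real PyPy classes:)

    # the "space" asks an object for its type, then asks the type for a descriptor
    class ToyType(object):
        def __init__(self, name, namespace):
            self.name = name
            self.namespace = namespace
        def lookup(self, name):
            return self.namespace.get(name)      # real types walk their mro here

    class ToyObject(object):
        def __init__(self, toytype):
            self.toytype = toytype
        def getclass(self, space):
            return self.toytype

    def find_descriptor(space, w_obj, name):
        w_type = w_obj.getclass(space)           # step 1 from the prose
        return w_type.lookup(name)               # step 2 from the prose

    toy_int = ToyType('int', {'__abs__': abs})
    assert find_descriptor(None, ToyObject(toy_int), '__abs__') is abs
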
diff --git a/pypy/doc/jit/pyjitpl5.rst b/pypy/doc/jit/pyjitpl5.rst
--- a/pypy/doc/jit/pyjitpl5.rst
+++ b/pypy/doc/jit/pyjitpl5.rst
@@ -13,7 +13,7 @@
 implemented.  It's helpful to have an understanding of how the `RPython translation
 toolchain`_ works before digging into the sources.
 
-Almost all JIT specific code is found in pypy/jit subdirectories.  Translation
+Almost all JIT specific code is found in rpython/jit subdirectories.  Translation
 time code is in the codewriter directory.  The metainterp directory holds
 platform independent code including the the tracer and the optimizer.  Code in
 the backend directory is responsible for generating machine code.
@@ -175,7 +175,7 @@
 
 * `Tracing the Meta-Level: PyPy's Tracing JIT Compiler`__
 
-.. __: http://codespeak.net/svn/pypy/extradoc/talk/icooolps2009/bolz-tracing-jit-final.pdf
+.. __: https://bitbucket.org/pypy/extradoc/src/tip/talk/icooolps2009/bolz-tracing-jit-final.pdf
 
 as well as the `blog posts with the JIT tag.`__
 
diff --git a/pypy/doc/objspace.rst b/pypy/doc/objspace.rst
--- a/pypy/doc/objspace.rst
+++ b/pypy/doc/objspace.rst
@@ -175,9 +175,9 @@
 ``wrap(x):``
   Returns a wrapped object that is a reference to the interpreter-level object
   x. This can be used either on simple immutable objects (integers,
-  strings...) to create a new wrapped object, or on instances of ``Wrappable``
+  strings...) to create a new wrapped object, or on instances of ``W_Root``
   to obtain an application-level-visible reference to them.  For example,
-  most classes of the bytecode interpreter subclass ``Wrappable`` and can
+  most classes of the bytecode interpreter subclass ``W_Root`` and can
   be directly exposed to app-level in this way - functions, frames, code
   objects, etc.
 
diff --git a/pypy/doc/test/test_whatsnew.py b/pypy/doc/test/test_whatsnew.py
--- a/pypy/doc/test/test_whatsnew.py
+++ b/pypy/doc/test/test_whatsnew.py
@@ -1,6 +1,6 @@
 import py
 import pypy
-from commands import getoutput
+from commands import getoutput, getstatusoutput
 ROOT = py.path.local(pypy.__file__).dirpath().dirpath()
 
 
@@ -20,6 +20,9 @@
     return startrev, branches
 
 def get_merged_branches(path, startrev, endrev):
+    if getstatusoutput('hg root')[0]:
+        py.test.skip('no Mercurial repo')
+
     # X = take all the merges which are descendants of startrev and are on default
     # revset = all the parents of X which are not on default
     # ===>
diff --git a/pypy/doc/whatsnew-head.rst b/pypy/doc/whatsnew-head.rst
--- a/pypy/doc/whatsnew-head.rst
+++ b/pypy/doc/whatsnew-head.rst
@@ -20,16 +20,22 @@
 
 .. branch: numpypy-longdouble
 Long double support for numpypy
+
 .. branch: numpypy-disable-longdouble
 Since r_longdouble support is missing, disable all longdouble and derivative
 dtypes using ENABLED_LONG_DOUBLE = False
+
 .. branch: numpypy-real-as-view
 Convert real, imag from ufuncs to views. This involves the beginning of
 view() functionality
+
 .. branch: indexing-by-array
 Adds indexing by scalar, adds int conversion from scalar and single element array,
 fixes compress, indexing by an array with a smaller shape and the indexed object.
 
+.. branch: str-dtype-improvement
+Allow concatenation of str and numeric arrays
+
 .. branch: signatures
 Improved RPython typing
 
@@ -48,6 +54,7 @@
 .. branch: fix-version-tool
 .. branch: popen2-removal
 .. branch: pickle-dumps
+.. branch: scalar_get_set
 
 .. branch: release-2.0-beta1
 
@@ -99,3 +106,7 @@
 
 .. branch: pycon2013-doc-fixes
 Documentation fixes after going through the docs at PyCon 2013 sprint.
+
+.. branch: extregistry-refactor
+
+.. branch: remove-list-smm
diff --git a/pypy/goal/targetpypystandalone.py b/pypy/goal/targetpypystandalone.py
--- a/pypy/goal/targetpypystandalone.py
+++ b/pypy/goal/targetpypystandalone.py
@@ -5,7 +5,7 @@
 from pypy.interpreter import gateway
 from pypy.interpreter.error import OperationError
 from pypy.tool.ann_override import PyPyAnnotatorPolicy
-from rpython.config.config import Config, to_optparse, make_dict, SUPPRESS_USAGE
+from rpython.config.config import to_optparse, make_dict, SUPPRESS_USAGE
 from rpython.config.config import ConflictConfigError
 from pypy.tool.option import make_objspace
 from pypy.conftest import pypydir
diff --git a/pypy/interpreter/app_main.py b/pypy/interpreter/app_main.py
--- a/pypy/interpreter/app_main.py
+++ b/pypy/interpreter/app_main.py
@@ -664,7 +664,6 @@
 
     # start a prompt if requested
     if inspect_requested():
-        inteactive = False
         try:
             from _pypy_interact import interactive_console
             success = run_toplevel(interactive_console, mainmodule, quiet)
diff --git a/pypy/interpreter/astcompiler/ast.py b/pypy/interpreter/astcompiler/ast.py
--- a/pypy/interpreter/astcompiler/ast.py
+++ b/pypy/interpreter/astcompiler/ast.py
@@ -1,5 +1,5 @@
 # Generated by tools/asdl_py.py
-from pypy.interpreter.baseobjspace import Wrappable
+from pypy.interpreter.baseobjspace import W_Root
 from pypy.interpreter import typedef
 from pypy.interpreter.gateway import interp2app
 from pypy.interpreter.error import OperationError, operationerrfmt
@@ -16,7 +16,7 @@
     return w_obj
 
 
-class AST(Wrappable):
+class AST(W_Root):
 
     w_dict = None
 
@@ -82,7 +82,7 @@
     pass
 
 
-class _FieldsWrapper(Wrappable):
+class _FieldsWrapper(W_Root):
     "Hack around the fact we can't store tuples on a TypeDef."
 
     def __init__(self, fields):
diff --git a/pypy/interpreter/astcompiler/test/test_astbuilder.py b/pypy/interpreter/astcompiler/test/test_astbuilder.py
--- a/pypy/interpreter/astcompiler/test/test_astbuilder.py
+++ b/pypy/interpreter/astcompiler/test/test_astbuilder.py
@@ -1204,7 +1204,7 @@
         assert space.eq_w(get_num("-0"), space.wrap(0))
         assert space.eq_w(get_num("-0xAAAAAA"), space.wrap(-0xAAAAAAL))
         n = get_num(str(-sys.maxint - 1))
-        assert space.is_true(space.isinstance(n, space.w_int))
+        assert space.isinstance_w(n, space.w_int)
         for num in ("0o53", "0O53", "0o0000053", "0O00053"):
             assert space.eq_w(get_num(num), space.wrap(053))
         for num in ("0b00101", "0B00101", "0b101", "0B101"):
diff --git a/pypy/interpreter/astcompiler/tools/asdl_py.py b/pypy/interpreter/astcompiler/tools/asdl_py.py
--- a/pypy/interpreter/astcompiler/tools/asdl_py.py
+++ b/pypy/interpreter/astcompiler/tools/asdl_py.py
@@ -549,7 +549,7 @@
 
 
 HEAD = """# Generated by tools/asdl_py.py
-from pypy.interpreter.baseobjspace import Wrappable
+from pypy.interpreter.baseobjspace import W_Root
 from pypy.interpreter import typedef
 from pypy.interpreter.gateway import interp2app
 from pypy.interpreter.error import OperationError, operationerrfmt
@@ -566,7 +566,7 @@
     return w_obj
 
 
-class AST(Wrappable):
+class AST(W_Root):
 
     w_dict = None
 
@@ -632,7 +632,7 @@
     pass
 
 
-class _FieldsWrapper(Wrappable):
+class _FieldsWrapper(W_Root):
     "Hack around the fact we can\'t store tuples on a TypeDef."
 
     def __init__(self, fields):
diff --git a/pypy/interpreter/baseobjspace.py b/pypy/interpreter/baseobjspace.py
--- a/pypy/interpreter/baseobjspace.py
+++ b/pypy/interpreter/baseobjspace.py
@@ -1,27 +1,30 @@
 import sys
 
+from rpython.rlib.cache import Cache
+from rpython.tool.uid import HUGEVAL_BYTES
+from rpython.rlib import jit, types
+from rpython.rlib.debug import make_sure_not_resized
+from rpython.rlib.objectmodel import (we_are_translated, newlist_hint,
+     compute_unique_id)
+from rpython.rlib.signature import signature
+from rpython.rlib.rarithmetic import r_uint
+
 from pypy.interpreter.executioncontext import (ExecutionContext, ActionFlag,
     UserDelAction, FrameTraceAction)
 from pypy.interpreter.error import (OperationError, operationerrfmt,
     new_exception_class, typed_unwrap_error_msg)
 from pypy.interpreter.argument import Arguments
 from pypy.interpreter.miscutils import ThreadLocals
-from rpython.rlib.cache import Cache
-from rpython.tool.uid import HUGEVAL_BYTES
-from rpython.rlib import jit
-from rpython.rlib.debug import make_sure_not_resized
-from rpython.rlib.objectmodel import we_are_translated, newlist_hint,\
-     compute_unique_id
-from rpython.rlib.rarithmetic import r_uint
 
 
-__all__ = ['ObjSpace', 'OperationError', 'Wrappable', 'W_Root']
+__all__ = ['ObjSpace', 'OperationError', 'W_Root']
 
 UINT_MAX_32_BITS = r_uint(4294967295)
 
-unpackiterable_driver = jit.JitDriver(name = 'unpackiterable',
-                                      greens = ['tp'],
-                                      reds = ['items', 'w_iterator'])
+unpackiterable_driver = jit.JitDriver(name='unpackiterable',
+                                      greens=['tp'],
+                                      reds=['items', 'w_iterator'])
+
 
 class W_Root(object):
     """This is the abstract root class of all wrapped objects that live
@@ -212,6 +215,10 @@
         raise OperationError(space.w_TypeError,
                              typed_unwrap_error_msg(space, "integer", self))
 
+    def float_w(self, space):
+        raise OperationError(space.w_TypeError,
+                             typed_unwrap_error_msg(space, "float", self))
+
     def uint_w(self, space):
         raise OperationError(space.w_TypeError,
                              typed_unwrap_error_msg(space, "integer", self))
@@ -220,16 +227,26 @@
         raise OperationError(space.w_TypeError,
                              typed_unwrap_error_msg(space, "integer", self))
 
+    def int(self, space):
+        w_impl = space.lookup(self, '__int__')
+        if w_impl is None:
+            typename = space.type(self).getname(space)
+            raise operationerrfmt(space.w_TypeError,
+                  "unsupported operand type for int(): '%s'",
+                                  typename)
+        w_result = space.get_and_call_function(w_impl, self)
 
-class Wrappable(W_Root):
-    """A subclass of Wrappable is an internal, interpreter-level class
-    that can nevertheless be exposed at application-level by space.wrap()."""
-    __slots__ = ()
-    _settled_ = True
+        if (space.isinstance_w(w_result, space.w_int) or
+            space.isinstance_w(w_result, space.w_long)):
+            return w_result
+        typename = space.type(w_result).getname(space)
+        msg = "__int__ returned non-int (type '%s')"
+        raise operationerrfmt(space.w_TypeError, msg, typename)
 
     def __spacebind__(self, space):
         return self
 
+
 class W_InterpIterable(W_Root):
     def __init__(self, space, w_iterable):
         self.w_iter = space.iter(w_iterable)
@@ -264,18 +281,13 @@
     def __init__(self, space):
         Cache.__init__(self)
         self.space = space
+
     def _build(self, key):
-        val = self.space.enter_cache_building_mode()
-        try:
-            return self.build(key)
-        finally:
-            self.space.leave_cache_building_mode(val)
+        return self.build(key)
+
     def _ready(self, result):
-        val = self.space.enter_cache_building_mode()
-        try:
-            return self.ready(result)
-        finally:
-            self.space.leave_cache_building_mode(val)
+        return self.ready(result)
+
     def ready(self, result):
         pass
 
@@ -340,10 +352,8 @@
                 if e.match(self, self.w_KeyError):
                     continue
                 raise
-            modname = self.str_w(w_modname)
-            mod = self.interpclass_w(w_mod)
-            if isinstance(mod, Module) and not mod.startup_called:
-                mod.init(self)
+            if isinstance(w_mod, Module) and not w_mod.startup_called:
+                w_mod.init(self)
 
     def finish(self):
         self.wait_for_thread_shutdown()
@@ -352,9 +362,8 @@
         self.sys.flush_std_files(self)
         from pypy.interpreter.module import Module
         for w_mod in self.builtin_modules.values():
-            mod = self.interpclass_w(w_mod)
-            if isinstance(mod, Module) and mod.startup_called:
-                mod.shutdown(self)
+            if isinstance(w_mod, Module) and w_mod.startup_called:
+                w_mod.shutdown(self)
 
     def wait_for_thread_shutdown(self):
         """Wait until threading._shutdown() completes, provided the threading
@@ -426,14 +435,12 @@
 
             # And initialize it
             from pypy.interpreter.module import Module
-            mod = self.interpclass_w(w_mod)
-            if isinstance(mod, Module):
-                mod.init(self)
+            if isinstance(w_mod, Module):
+                w_mod.init(self)
             return w_mod
 
     def get_builtinmodule_to_install(self):
         """NOT_RPYTHON"""
-        from pypy.tool.lib_pypy import LIB_PYPY
         try:
             return self._builtinmodule_list
         except AttributeError:
@@ -580,11 +587,6 @@
         """NOT_RPYTHON: Abstract method that should put some minimal
         content into the w_builtins."""
 
-    def enter_cache_building_mode(self):
-        "hook for the flow object space"
-    def leave_cache_building_mode(self, val):
-        "hook for the flow object space"
-
     @jit.loop_invariant
     def getexecutioncontext(self):
         "Return what we consider to be the active execution context."
@@ -713,6 +715,7 @@
                 raise
             return None
 
+    @signature(types.any(), types.bool(), returns=types.instance(W_Root))
     def newbool(self, b):
         if b:
             return self.w_True
@@ -736,52 +739,28 @@
         w_s = self.interned_strings[s] = self.wrap(s)
         return w_s
 
-    def interpclass_w(self, w_obj):
-        """
-         If w_obj is a wrapped internal interpreter class instance unwrap to it,
-         otherwise return None.  (Can be overridden in specific spaces; you
-     should generally use the helper space.interp_w() instead.)
-        """
-        if isinstance(w_obj, Wrappable):
-            return w_obj
-        return None
-
     def descr_self_interp_w(self, RequiredClass, w_obj):
-        obj = self.interpclass_w(w_obj)
-        if not isinstance(obj, RequiredClass):
+        if not isinstance(w_obj, RequiredClass):
             raise DescrMismatch()
-        return obj
+        return w_obj
     descr_self_interp_w._annspecialcase_ = 'specialize:arg(1)'
 
     def interp_w(self, RequiredClass, w_obj, can_be_None=False):
         """
         Unwrap w_obj, checking that it is an instance of the required internal
-        interpreter class (a subclass of Wrappable).
+        interpreter class.
         """
         assert RequiredClass is not None
         if can_be_None and self.is_none(w_obj):
             return None
-        obj = self.interpclass_w(w_obj)
-        if not isinstance(obj, RequiredClass):   # or obj is None
+        if not isinstance(w_obj, RequiredClass):   # or obj is None
             msg = "'%s' object expected, got '%s' instead"
             raise operationerrfmt(self.w_TypeError, msg,
                 wrappable_class_name(RequiredClass),
                 w_obj.getclass(self).getname(self))
-        return obj
+        return w_obj
     interp_w._annspecialcase_ = 'specialize:arg(1)'
 
-    def _check_constant_interp_w_or_w_None(self, RequiredClass, w_obj):
-        """
-        This method should NOT be called unless you are really sure about
-        it. It is used inside the implementation of end_finally() in
-        pyopcode.py, and it's there so that it can be overridden by the
-        FlowObjSpace.
-        """
-        if self.is_w(w_obj, self.w_None):
-            return True
-        obj = self.interpclass_w(w_obj)
-        return isinstance(obj, RequiredClass)
-
     def unpackiterable(self, w_iterable, expected_length=-1):
         """Unpack an iterable into a real (interpreter-level) list.
 
@@ -945,7 +924,7 @@
         """Checks if the given exception type matches 'w_check_class'."""
         if self.is_w(w_exc_type, w_check_class):
             return True   # fast path
-        if self.is_true(self.isinstance(w_check_class, self.w_tuple)):
+        if self.isinstance_w(w_check_class, self.w_tuple):
             for w_t in self.fixedview(w_check_class):
                 if self.exception_match(w_exc_type, w_t):
                     return True
@@ -1041,9 +1020,6 @@
     def issequence_w(self, w_obj):
         return (self.findattr(w_obj, self.wrap("__getitem__")) is not None)
 
-    def isinstance_w(self, w_obj, w_type):
-        return self.is_true(self.isinstance(w_obj, w_type))
-
     # The code below only works
     # for the simple case (new-style instance).
     # These methods are patched with the full logic by the builtins
@@ -1055,11 +1031,11 @@
 
     def abstract_isinstance_w(self, w_obj, w_cls):
         # Equivalent to 'isinstance(obj, cls)'.
-        return self.is_true(self.isinstance(w_obj, w_cls))
+        return self.isinstance_w(w_obj, w_cls)
 
     def abstract_isclass_w(self, w_obj):
         # Equivalent to 'isinstance(obj, type)'.
-        return self.is_true(self.isinstance(w_obj, self.w_type))
+        return self.isinstance_w(w_obj, self.w_type)
 
     def abstract_getclass(self, w_obj):
         # Equivalent to 'obj.__class__'.
@@ -1096,7 +1072,7 @@
             expression = compiler.compile(expression, '?', 'eval', 0,
                                          hidden_applevel=hidden_applevel)
         else:
-            raise TypeError, 'space.eval(): expected a string, code or PyCode object'
+            raise TypeError('space.eval(): expected a string, code or PyCode object')
         return expression.exec_code(self, w_globals, w_locals)
 
     def exec_(self, statement, w_globals, w_locals, hidden_applevel=False,
@@ -1110,7 +1086,7 @@
             statement = compiler.compile(statement, filename, 'exec', 0,
                                          hidden_applevel=hidden_applevel)
         if not isinstance(statement, PyCode):
-            raise TypeError, 'space.exec_(): expected a string, code or PyCode object'
+            raise TypeError('space.exec_(): expected a string, code or PyCode object')
         w_key = self.wrap('__builtins__')
         if not self.is_true(self.contains(w_globals, w_key)):
             self.setitem(w_globals, w_key, self.wrap(self.builtin))
@@ -1166,7 +1142,7 @@
              -> (index, 0, 0) or
                 (start, stop, step)
         """
-        if self.is_true(self.isinstance(w_index_or_slice, self.w_slice)):
+        if self.isinstance_w(w_index_or_slice, self.w_slice):
             from pypy.objspace.std.sliceobject import W_SliceObject
             assert isinstance(w_index_or_slice, W_SliceObject)
             start, stop, step = w_index_or_slice.indices3(self, seqlength)
@@ -1186,7 +1162,7 @@
              -> (index, 0, 0, 1) or
                 (start, stop, step, slice_length)
         """
-        if self.is_true(self.isinstance(w_index_or_slice, self.w_slice)):
+        if self.isinstance_w(w_index_or_slice, self.w_slice):
             from pypy.objspace.std.sliceobject import W_SliceObject
             assert isinstance(w_index_or_slice, W_SliceObject)
             start, stop, step, length = w_index_or_slice.indices4(self,
@@ -1355,15 +1331,21 @@
     def int_w(self, w_obj):
         return w_obj.int_w(self)
 
+    def int(self, w_obj):
+        return w_obj.int(self)
+
     def uint_w(self, w_obj):
         return w_obj.uint_w(self)
 
     def bigint_w(self, w_obj):
         return w_obj.bigint_w(self)
 
+    def float_w(self, w_obj):
+        return w_obj.float_w(self)
+
     def realstr_w(self, w_obj):
         # Like str_w, but only works if w_obj is really of type 'str'.
-        if not self.is_true(self.isinstance(w_obj, self.w_str)):
+        if not self.isinstance_w(w_obj, self.w_str):
             raise OperationError(self.w_TypeError,
                                  self.wrap('argument must be a string'))
         return self.str_w(w_obj)
@@ -1383,7 +1365,7 @@
     def realunicode_w(self, w_obj):
         # Like unicode_w, but only works if w_obj is really of type
         # 'unicode'.
-        if not self.is_true(self.isinstance(w_obj, self.w_unicode)):
+        if not self.isinstance_w(w_obj, self.w_unicode):
             raise OperationError(self.w_TypeError,
                                  self.wrap('argument must be a unicode'))
         return self.unicode_w(w_obj)
@@ -1413,7 +1395,7 @@
 
     # This is all interface for gateway.py.
     def gateway_int_w(self, w_obj):
-        if self.is_true(self.isinstance(w_obj, self.w_float)):
+        if self.isinstance_w(w_obj, self.w_float):
             raise OperationError(self.w_TypeError,
                             self.wrap("integer argument expected, got float"))
         return self.int_w(self.int(w_obj))
@@ -1422,19 +1404,19 @@
         return self.float_w(self.float(w_obj))
 
     def gateway_r_longlong_w(self, w_obj):
-        if self.is_true(self.isinstance(w_obj, self.w_float)):
+        if self.isinstance_w(w_obj, self.w_float):
             raise OperationError(self.w_TypeError,
                             self.wrap("integer argument expected, got float"))
         return self.r_longlong_w(self.int(w_obj))
 
     def gateway_r_uint_w(self, w_obj):
-        if self.is_true(self.isinstance(w_obj, self.w_float)):
+        if self.isinstance_w(w_obj, self.w_float):
             raise OperationError(self.w_TypeError,
                             self.wrap("integer argument expected, got float"))
         return self.uint_w(self.int(w_obj))
 
     def gateway_r_ulonglong_w(self, w_obj):
-        if self.is_true(self.isinstance(w_obj, self.w_float)):
+        if self.isinstance_w(w_obj, self.w_float):
             raise OperationError(self.w_TypeError,
                             self.wrap("integer argument expected, got float"))
         return self.r_ulonglong_w(self.int(w_obj))
@@ -1549,23 +1531,28 @@
         space.exec_(str(source), w_glob, w_glob)
         return space.getitem(w_glob, space.wrap('anonymous'))
 
+
 class DummyLock(object):
     def acquire(self, flag):
         return True
+
     def release(self):
         pass
+
     def _freeze_(self):
         return True
+
     def __enter__(self):
         pass
+
     def __exit__(self, *args):
         pass
 
 dummy_lock = DummyLock()
 
-## Table describing the regular part of the interface of object spaces,
-## namely all methods which only take w_ arguments and return a w_ result
-## (if any).  Note: keep in sync with rpython.flowspace.operation.Table.
+# Table describing the regular part of the interface of object spaces,
+# namely all methods which only take w_ arguments and return a w_ result
+# (if any).
 
 ObjSpace.MethodTable = [
 # method name # symbol # number of arguments # special method name(s)
@@ -1589,7 +1576,7 @@
     ('pos',             'pos',       1, ['__pos__']),
     ('neg',             'neg',       1, ['__neg__']),
     ('nonzero',         'truth',     1, ['__bool__']),
-    ('abs' ,            'abs',       1, ['__abs__']),
+    ('abs',             'abs',       1, ['__abs__']),
     ('ord',             'ord',       1, []),
     ('invert',          '~',         1, ['__invert__']),
     ('add',             '+',         2, ['__add__', '__radd__']),
@@ -1637,12 +1624,12 @@
     ('delete',          'delete',    2, ['__delete__']),
     ('userdel',         'del',       1, ['__del__']),
     ('buffer',          'buffer',    1, ['__buffer__']),   # see buffer.py
-    ]
+]
 
 ObjSpace.BuiltinModuleTable = [
     'builtins',
     'sys',
-    ]
+]
 
 ObjSpace.ConstantTable = [
     'None',
@@ -1650,7 +1637,7 @@
     'True',
     'Ellipsis',
     'NotImplemented',
-    ]
+]
 
 ObjSpace.ExceptionTable = [
     'ArithmeticError',
@@ -1692,7 +1679,7 @@
     'ZeroDivisionError',
     'RuntimeWarning',
     'PendingDeprecationWarning',
-    ]
+]
 
 if sys.platform.startswith("win"):
     ObjSpace.ExceptionTable += ['WindowsError']
@@ -1705,7 +1692,6 @@
 #                       float_w(w_floatval) -> floatval
 #             uint_w(w_ival or w_long_ival) -> r_uint_val (unsigned int value)
 #             bigint_w(w_ival or w_long_ival) -> rbigint
-#interpclass_w(w_interpclass_inst or w_obj) -> interpclass_inst|w_obj
 #                               unwrap(w_x) -> x
 #                              is_true(w_x) -> True or False
 #                  newtuple([w_1, w_2,...]) -> w_tuple
@@ -1722,7 +1708,6 @@
     'uint_w',
     'bigint_w',
     'unicode_w',
-    'interpclass_w',
     'unwrap',
     'is_true',
     'is_w',
@@ -1732,4 +1717,4 @@
     'newslice',
     'call_args',
     'marshal_w',
-    ]
+]
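
The recurring change in the baseobjspace.py hunks above is mechanical: the two-step spelling space.is_true(space.isinstance(w_obj, w_cls)) becomes the single helper space.isinstance_w(w_obj, w_cls), which yields an unwrapped boolean directly (the same hunks also convert old-style "raise TypeError, msg" statements to the call form). A minimal sketch of the equivalence, using a hypothetical ToySpace stand-in rather than the real ObjSpace:

    class ToySpace(object):
        """Toy stand-in for the interpreter object space (illustrative only)."""

        def isinstance(self, w_obj, w_cls):
            # returns a "wrapped" truth value; a plain bool suffices here
            return isinstance(w_obj, w_cls)

        def is_true(self, w_x):
            # unwraps a wrapped value to an interpreter-level bool
            return bool(w_x)

        def isinstance_w(self, w_obj, w_cls):
            # the shortcut used throughout the diff: wrapped isinstance,
            # immediately unwrapped to a plain bool
            return self.is_true(self.isinstance(w_obj, w_cls))

    space = ToySpace()
    # old and new spellings agree
    assert space.is_true(space.isinstance(3, int)) == space.isinstance_w(3, int)
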
diff --git a/pypy/interpreter/buffer.py b/pypy/interpreter/buffer.py
--- a/pypy/interpreter/buffer.py
+++ b/pypy/interpreter/buffer.py
@@ -15,7 +15,7 @@
 # free the typecheck that __buffer__() really returned a wrapped Buffer.
 
 import operator
-from pypy.interpreter.baseobjspace import Wrappable
+from pypy.interpreter.baseobjspace import W_Root
 from pypy.interpreter.typedef import TypeDef
 from pypy.interpreter.gateway import interp2app, unwrap_spec
 from pypy.interpreter.error import OperationError
@@ -23,7 +23,7 @@
 from rpython.rlib.rstring import StringBuilder
 
 
-class Buffer(Wrappable):
+class Buffer(W_Root):
     """Abstract base class for memory views."""
 
     __slots__ = ()     # no extra slot here
@@ -93,12 +93,11 @@
 
     def _make_descr__cmp(name):
         def descr__cmp(self, space, w_other):
-            other = space.interpclass_w(w_other)
-            if not isinstance(other, Buffer):
+            if not isinstance(w_other, Buffer):
                 return space.w_NotImplemented
             # xxx not the most efficient implementation
             str1 = self.as_str()
-            str2 = other.as_str()
+            str2 = w_other.as_str()
             return space.wrap(getattr(operator, name)(str1, str2))
         descr__cmp.func_name = name
         return descr__cmp
@@ -145,6 +144,7 @@
         for i in range(len(string)):
             self.setitem(start + i, string[i])
 
+
 @unwrap_spec(offset=int, size=int)
 def descr_buffer__new__(space, w_subtype, w_object, offset=0, size=-1):
     # w_subtype can only be exactly 'buffer' for now
@@ -209,7 +209,7 @@
     __mul__ = interp2app(Buffer.descr_mul),
     __rmul__ = interp2app(Buffer.descr_mul),
     __repr__ = interp2app(Buffer.descr_repr),
-    )
+)
 Buffer.typedef.acceptable_as_base_class = False
 
 # ____________________________________________________________
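
The buffer.py hunk keeps the same comparison-factory idiom but drops the space.interpclass_w() indirection: once Buffer derives from W_Root rather than Wrappable, the wrapped argument can be isinstance-checked directly. A rough illustration of that factory pattern with plain strings standing in for buffers (hypothetical names, not the PyPy source):

    import operator

    class ToyBuffer(object):
        """Hypothetical stand-in for a wrapped Buffer object."""
        def __init__(self, data):
            self.data = data

        def as_str(self):
            return self.data

    def _make_cmp(name):
        # build one rich-comparison method per operator name ('eq', 'lt', ...)
        def cmp_impl(self, other):
            if not isinstance(other, ToyBuffer):
                return NotImplemented   # mirrors returning space.w_NotImplemented
            return getattr(operator, name)(self.as_str(), other.as_str())
        cmp_impl.__name__ = name
        return cmp_impl

    for _name in ('eq', 'ne', 'lt', 'le', 'gt', 'ge'):
        setattr(ToyBuffer, '__%s__' % _name, _make_cmp(_name))

    assert ToyBuffer('abc') == ToyBuffer('abc')
    assert ToyBuffer('abc') < ToyBuffer('abd')
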
diff --git a/pypy/interpreter/callbench/bltn04.py b/pypy/interpreter/callbench/bltn04.py
deleted file mode 100644
--- a/pypy/interpreter/callbench/bltn04.py
+++ /dev/null
@@ -1,40 +0,0 @@
-from sup import run
-
-def w(N, start):
-    c = chr
-
-    start()
-    i = 0
-    while i < N:
-        c(65)
-        c(65)
-        c(65)
-        c(65)
-        c(65)
-        c(65)
-        c(65)        
-        c(65)
-        c(65)
-        c(65)
-        c(65)
-        c(65)
-        c(65)
-        c(65)
-        c(65)
-        c(65)
-        c(65)
-        c(65)
-        c(65)        
-        c(65)
-        c(65)
-        c(65)
-        c(65)
-        c(65)
-        c(65)
-        c(65)
-        c(65)
-        c(65)
-        c(65)        
-        i+=1
-
-run(w, 1000)
diff --git a/pypy/interpreter/callbench/bltn_instantiate.py b/pypy/interpreter/callbench/bltn_instantiate.py
deleted file mode 100644
--- a/pypy/interpreter/callbench/bltn_instantiate.py
+++ /dev/null
@@ -1,22 +0,0 @@
-from sup import run
-
-def w(N, start):
-    o = object
-    start()
-    i = 0
-    while i < N:
-        o()
-        o()
-        o()
-        o()
-        o()
-        o()
-        o()
-        o()
-        o()
-        o()
-        o()
-        o()        
-        i+=1
-    
-run(w, 1000)
diff --git a/pypy/interpreter/callbench/bltna1.py b/pypy/interpreter/callbench/bltna1.py
deleted file mode 100644
--- a/pypy/interpreter/callbench/bltna1.py
+++ /dev/null
@@ -1,28 +0,0 @@
-from sup import run
-
-def w(N, start):
-    l = []
-    start()
-    i = 0
-    while i < N:
-        l.__init__()
-        l.__init__()
-        l.__init__()
-        l.__init__()
-        l.__init__()
-        l.__init__()
-        l.__init__()
-        l.__init__()
-        l.__init__()
-        l.__init__()
-        l.__init__()
-        l.__init__()
-        l.__init__()
-        l.__init__()
-        l.__init__()
-        l.__init__()
-        l.__init__()
-        l.__init__()
-        i+=1
-    
-run(w, 1000)
diff --git a/pypy/interpreter/callbench/bltna2.py b/pypy/interpreter/callbench/bltna2.py
deleted file mode 100644
--- a/pypy/interpreter/callbench/bltna2.py
+++ /dev/null
@@ -1,29 +0,0 @@
-from sup import run
-
-def w(N, start):
-    l = []
-    start()
-    i = 0
-    z = l.__init__
-    while i < N:
-        z()
-        z()
-        z()
-        z()
-        z()
-        z()
-        z()
-        z()
-        z()
-        z()
-        z()
-        z()
-        z()
-        z()
-        z()
-        z()
-        z()
-        z()
-        i+=1
-    
-run(w, 1000)
diff --git a/pypy/interpreter/callbench/bm14.py b/pypy/interpreter/callbench/bm14.py
deleted file mode 100644
--- a/pypy/interpreter/callbench/bm14.py
+++ /dev/null
@@ -1,51 +0,0 @@
-from sup import run
-
-def w(N, start):
-    class A(object):
-        def f0(self):
-            pass
-        def f1(self, a):
-            pass
-        def f2(self, a, b):
-            pass
-        def f3(self, a, b, c):
-            pass
-        def f4(self, a, b, c, d):
-            pass
-
-    a = A()
-    f0 = a.f0
-    f1 = a.f1
-    f2 = a.f2
-    f3 = a.f3
-    f4 = a.f4
-
-    start()
-    i = 0
-    while i < N:
-        f0()
-        f0()
-        f0()
-        f0()
-        f1(1)
-        f1(1)
-        f1(1)
-        f1(1)
-        f2(1, 2)
-        f2(1, 2)
-        f2(1, 2)
-        f3(1, 2, 3)
-        f3(1, 2, 3)
-        f4(1, 2, 3, 4)
-
-        f0()
-        f0()
-        f0()
-        f1(1)
-        f1(1)
-        f1(1)
-        f2(1, 2)
-        
-        i+=1
-
-run(w, 1000)
diff --git a/pypy/interpreter/callbench/bmabvararg.py b/pypy/interpreter/callbench/bmabvararg.py
deleted file mode 100644
--- a/pypy/interpreter/callbench/bmabvararg.py
+++ /dev/null
@@ -1,29 +0,0 @@
-from sup import run
-
-def w(N, start):
-    class A(object):
-        def f(self, a, b, *args):
-            pass
-
-    a = A()
-    f = a.f
-    z = (3, 4, 5)
-
-    start()
-    i = 0
-    while i < N:
-        f(1, 2, *z)
-        f(1, 2, *z)
-        f(1, 2, *z)
-        f(1, 2, *z)
-        f(1, 2, *z)
-        f(1, 2, *z)
-        f(1, 2, *z)
-        f(1, 2, *z)
-        f(1, 2, *z)
-        f(1, 2, *z)
-        f(1, 2, *z)
-        f(1, 2, *z)
-        i+=1
-
-run(w, 1000)
diff --git a/pypy/interpreter/callbench/bmfilter.py b/pypy/interpreter/callbench/bmfilter.py
deleted file mode 100644
--- a/pypy/interpreter/callbench/bmfilter.py
+++ /dev/null
@@ -1,20 +0,0 @@
-from sup import run
-
-def w(N, start):
-    x = range(50)
-    class A(object):
-        def f1(self, a):
-            return False
-
-    x = range(50)
-    a = A()
-    f1 = a.f1
-    flt = filter
-
-    start()
-    i = 0
-    while i < N:
-        flt(f1, x)
-        i+=1
-
-run(w, 200)
diff --git a/pypy/interpreter/callbench/bmmore.py b/pypy/interpreter/callbench/bmmore.py
deleted file mode 100644
--- a/pypy/interpreter/callbench/bmmore.py
+++ /dev/null
@@ -1,30 +0,0 @@
-from sup import run
-
-def w(N, start):
-    class A(object):
-        def f4(self, a, b, c, d):
-            pass
-        def f5(self, a, b, c, d, e):
-            pass
-    a = A()
-    f4 = a.f4
-    f5 = a.f5
-
-    start()
-    i = 0
-    while i < N:
-        f4(1, 2, 3, 4)
-        f4(1, 2, 3, 4)
-        f4(1, 2, 3, 4)    
-        f5(1, 2, 3, 4, 5)
-        f5(1, 2, 3, 4, 5)
-        f5(1, 2, 3, 4, 5)
-        f4(1, 2, 3, 4)
-        f4(1, 2, 3, 4)
-        f4(1, 2, 3, 4)    
-        f5(1, 2, 3, 4, 5)
-        f5(1, 2, 3, 4, 5)
-        f5(1, 2, 3, 4, 5)        
-        i+=1
-    
-run(w, 1000)
diff --git a/pypy/interpreter/callbench/compare.py b/pypy/interpreter/callbench/compare.py
deleted file mode 100644
--- a/pypy/interpreter/callbench/compare.py
+++ /dev/null
@@ -1,22 +0,0 @@
-# compare.py <results-file> <reference-results-file>
-
-import sys
-
-def main(cur, ref):
-    cur = open(cur, 'rU')
-    ref = open(ref, 'rU')
-    try:
-        while True:
-            cur_line = cur.next()
-            ref_line = ref.next()
-            cur_name, cur_t = cur_line.split()
-            ref_name, ref_t = ref_line.split()
-            assert cur_name == ref_name
-            cur_t = float(cur_t)
-            ref_t = float(ref_t)            
-            print "%-16s %.06g (x%.02f)" % (cur_name, cur_t, cur_t/ref_t)
-    except StopIteration:
-        pass
-
-if __name__ == '__main__':
-    main(sys.argv[1], sys.argv[2])
diff --git a/pypy/interpreter/callbench/f04.py b/pypy/interpreter/callbench/f04.py
deleted file mode 100644
--- a/pypy/interpreter/callbench/f04.py
+++ /dev/null
@@ -1,39 +0,0 @@
-from sup import run
-
-def w(N, start):
-    def f0():
-        pass
-    def f1(a):
-        pass
-    def f2(a, b):
-        pass
-    def f3(a, b, c):
-        pass
-    def f4(a, b, c, d):
-        pass
-    def f5(a, b, c, d, e):
-        pass
-
-    start()
-    i = 0
-    while i < N:
-        f0()
-        f0()
-        f0()
-        f1(1)
-        f1(1)
-        f2(1, 2)
-        f3(1, 2, 3)
-        f4(1, 2, 3, 4)
-        f5(1, 2, 3, 4, 5)
-        f0()
-        f0()
-        f0()
-        f1(1)
-        f1(1)
-        f2(1, 2)
-        f3(1, 2, 3)
-        f4(1, 2, 3, 4)        
-        i+=1
-
-run(w, 1000)
diff --git a/pypy/interpreter/callbench/fabvararg.py b/pypy/interpreter/callbench/fabvararg.py
deleted file mode 100644
--- a/pypy/interpreter/callbench/fabvararg.py
+++ /dev/null
@@ -1,26 +0,0 @@
-from sup import run
-
-def w(N, start):
-    def f(a, b, *args):
-        pass
-
-    z = (3, 4, 5)
-    start()
-
-    i = 0
-    while i < N:
-        f(1, 2, *z)
-        f(1, 2, *z)    
-        f(1, 2, *z)
-        f(1, 2, *z)
-        f(1, 2, *z)
-        f(1, 2, *z)
-        f(1, 2, *z)
-        f(1, 2, *z)    
-        f(1, 2, *z)
-        f(1, 2, *z)
-        f(1, 2, *z)
-        f(1, 2, *z)        
-        i+=1
-
-run(w, 1000)
diff --git a/pypy/interpreter/callbench/ffilter.py b/pypy/interpreter/callbench/ffilter.py
deleted file mode 100644
--- a/pypy/interpreter/callbench/ffilter.py
+++ /dev/null
@@ -1,14 +0,0 @@
-from sup import run
-
-def w(N, start):
-    def f1(a):
-        return False
-    x = range(50)
-
-    start()
-    i = 0
-    while i < N:
-        filter(f1, x)
-        i+=1
-    
-run(w, 200)
diff --git a/pypy/interpreter/callbench/ffunccall.py b/pypy/interpreter/callbench/ffunccall.py
deleted file mode 100644
--- a/pypy/interpreter/callbench/ffunccall.py
+++ /dev/null
@@ -1,36 +0,0 @@
-from sup import run
-
-def w(N, start):
-    class A(object):
-        def foo(self, x):
-            pass
-
-        __add__ = foo
-

