[pypy-svn] r69194 - in pypy/trunk: . dotviewer lib-python py py/bin py/bin/win32 py/impl py/impl/cmdline py/impl/code py/impl/compat py/impl/io py/impl/log py/impl/path py/impl/path/gateway py/impl/process py/impl/test py/impl/test/dist py/impl/test/looponfail py/plugin pypy pypy/annotation/test pypy/bin pypy/config pypy/config/test pypy/doc pypy/doc/config pypy/doc/statistic pypy/doc/tool pypy/jit pypy/jit/backend pypy/jit/backend/test pypy/jit/backend/x86 pypy/jit/tl pypy/jit/tl/spli pypy/jit/tl/test pypy/jit/tl/tla pypy/jit/tool pypy/lang/gameboy/debug pypy/lang/gameboy/profiling pypy/lang/gameboy/profiling/evaluation pypy/lang/gameboy/test pypy/lang/gameboy/tool pypy/lang/js pypy/lang/js/test pypy/lang/js/test/ecma pypy/lang/prolog/interpreter pypy/lang/scheme pypy/lang/scheme/test pypy/lang/smalltalk/test pypy/lang/smalltalk/tool pypy/lib/app_test/ctypes_tests pypy/lib/distributed pypy/lib/test2 pypy/module/__builtin__/test pypy/module/_codecs/test pypy/module/_file/test pypy/module/_sre/test pypy/module/bz2/test pypy/module/pypyjit/test pypy/module/sys/test pypy/objspace/std/test pypy/rlib/parsing pypy/rlib/parsing/test pypy/rlib/rsdl pypy/rlib/rsdl/test pypy/rlib/test pypy/rpython/microbench pypy/rpython/module/test pypy/rpython/test pypy/tool pypy/tool/algo/test pypy/tool/bench pypy/tool/pytest pypy/tool/pytest/test pypy/tool/rest pypy/tool/test pypy/translator pypy/translator/benchmark pypy/translator/c pypy/translator/c/test pypy/translator/cli pypy/translator/cli/test pypy/translator/goal pypy/translator/goal/test2 pypy/translator/jvm pypy/translator/microbench/pybench pypy/translator/platform pypy/translator/platform/test pypy/translator/sandbox pypy/translator/sandbox/test pypy/translator/test pypy/translator/tool

hpk at codespeak.net
Wed Nov 11 18:54:56 CET 2009


Author: hpk
Date: Wed Nov 11 18:54:49 2009
New Revision: 69194

Added:
   pypy/trunk/py/
   pypy/trunk/py/__init__.py
   pypy/trunk/py/apipkg.py
   pypy/trunk/py/bin/
   pypy/trunk/py/bin/_findpy.py
   pypy/trunk/py/bin/env.cmd
   pypy/trunk/py/bin/env.py
   pypy/trunk/py/bin/py.cleanup
   pypy/trunk/py/bin/py.convert_unittest
   pypy/trunk/py/bin/py.countloc
   pypy/trunk/py/bin/py.lookup
   pypy/trunk/py/bin/py.svnwcrevert
   pypy/trunk/py/bin/py.test
   pypy/trunk/py/bin/py.which
   pypy/trunk/py/bin/win32/
   pypy/trunk/py/bin/win32/py.cleanup.cmd
   pypy/trunk/py/bin/win32/py.convert_unittest.cmd
   pypy/trunk/py/bin/win32/py.countloc.cmd
   pypy/trunk/py/bin/win32/py.lookup.cmd
   pypy/trunk/py/bin/win32/py.svnwcrevert.cmd
   pypy/trunk/py/bin/win32/py.test.cmd
   pypy/trunk/py/bin/win32/py.which.cmd
   pypy/trunk/py/impl/
   pypy/trunk/py/impl/__init__.py
   pypy/trunk/py/impl/_com.py
   pypy/trunk/py/impl/_metainfo.py
   pypy/trunk/py/impl/builtin.py
   pypy/trunk/py/impl/cmdline/
   pypy/trunk/py/impl/cmdline/__init__.py
   pypy/trunk/py/impl/cmdline/pycleanup.py
   pypy/trunk/py/impl/cmdline/pyconvert_unittest.py
   pypy/trunk/py/impl/cmdline/pycountloc.py
   pypy/trunk/py/impl/cmdline/pylookup.py
   pypy/trunk/py/impl/cmdline/pysvnwcrevert.py
   pypy/trunk/py/impl/cmdline/pytest.py
   pypy/trunk/py/impl/cmdline/pywhich.py
   pypy/trunk/py/impl/code/
   pypy/trunk/py/impl/code/__init__.py
   pypy/trunk/py/impl/code/_assertionnew.py
   pypy/trunk/py/impl/code/_assertionold.py
   pypy/trunk/py/impl/code/assertion.py
   pypy/trunk/py/impl/code/code.py
   pypy/trunk/py/impl/code/oldmagic.py
   pypy/trunk/py/impl/code/oldmagic2.py
   pypy/trunk/py/impl/code/source.py
   pypy/trunk/py/impl/compat/
   pypy/trunk/py/impl/compat/__init__.py
   pypy/trunk/py/impl/compat/dep_doctest.py
   pypy/trunk/py/impl/compat/dep_optparse.py
   pypy/trunk/py/impl/compat/dep_subprocess.py
   pypy/trunk/py/impl/compat/dep_textwrap.py
   pypy/trunk/py/impl/error.py
   pypy/trunk/py/impl/io/
   pypy/trunk/py/impl/io/__init__.py
   pypy/trunk/py/impl/io/capture.py
   pypy/trunk/py/impl/io/terminalwriter.py
   pypy/trunk/py/impl/log/
   pypy/trunk/py/impl/log/__init__.py
   pypy/trunk/py/impl/log/log.py
   pypy/trunk/py/impl/log/warning.py
   pypy/trunk/py/impl/path/
   pypy/trunk/py/impl/path/__init__.py
   pypy/trunk/py/impl/path/cacheutil.py
   pypy/trunk/py/impl/path/common.py
   pypy/trunk/py/impl/path/gateway/
   pypy/trunk/py/impl/path/gateway/__init__.py
   pypy/trunk/py/impl/path/gateway/channeltest.py
   pypy/trunk/py/impl/path/gateway/channeltest2.py
   pypy/trunk/py/impl/path/gateway/remotepath.py
   pypy/trunk/py/impl/path/local.py
   pypy/trunk/py/impl/path/local.py.orig
   pypy/trunk/py/impl/path/svnurl.py
   pypy/trunk/py/impl/path/svnwc.py
   pypy/trunk/py/impl/process/
   pypy/trunk/py/impl/process/__init__.py
   pypy/trunk/py/impl/process/cmdexec.py
   pypy/trunk/py/impl/process/forkedfunc.py
   pypy/trunk/py/impl/process/killproc.py
   pypy/trunk/py/impl/std.py
   pypy/trunk/py/impl/test/
   pypy/trunk/py/impl/test/__init__.py
   pypy/trunk/py/impl/test/cmdline.py
   pypy/trunk/py/impl/test/collect.py
   pypy/trunk/py/impl/test/compat.py
   pypy/trunk/py/impl/test/config.py
   pypy/trunk/py/impl/test/conftesthandle.py
   pypy/trunk/py/impl/test/defaultconftest.py
   pypy/trunk/py/impl/test/dist/
   pypy/trunk/py/impl/test/dist/__init__.py
   pypy/trunk/py/impl/test/dist/dsession.py
   pypy/trunk/py/impl/test/dist/gwmanage.py
   pypy/trunk/py/impl/test/dist/mypickle.py
   pypy/trunk/py/impl/test/dist/nodemanage.py
   pypy/trunk/py/impl/test/dist/txnode.py
   pypy/trunk/py/impl/test/funcargs.py
   pypy/trunk/py/impl/test/looponfail/
   pypy/trunk/py/impl/test/looponfail/__init__.py
   pypy/trunk/py/impl/test/looponfail/remote.py
   pypy/trunk/py/impl/test/looponfail/util.py
   pypy/trunk/py/impl/test/outcome.py
   pypy/trunk/py/impl/test/parseopt.py
   pypy/trunk/py/impl/test/pluginmanager.py
   pypy/trunk/py/impl/test/pycollect.py
   pypy/trunk/py/impl/test/session.py
   pypy/trunk/py/impl/xmlgen.py
   pypy/trunk/py/plugin/
   pypy/trunk/py/plugin/__init__.py
   pypy/trunk/py/plugin/hookspec.py
   pypy/trunk/py/plugin/pytest__pytest.py
   pypy/trunk/py/plugin/pytest_assertion.py
   pypy/trunk/py/plugin/pytest_capture.py
   pypy/trunk/py/plugin/pytest_default.py
   pypy/trunk/py/plugin/pytest_doctest.py
   pypy/trunk/py/plugin/pytest_figleaf.py
   pypy/trunk/py/plugin/pytest_helpconfig.py
   pypy/trunk/py/plugin/pytest_hooklog.py
   pypy/trunk/py/plugin/pytest_mark.py
   pypy/trunk/py/plugin/pytest_monkeypatch.py
   pypy/trunk/py/plugin/pytest_nose.py
   pypy/trunk/py/plugin/pytest_pastebin.py
   pypy/trunk/py/plugin/pytest_pdb.py
   pypy/trunk/py/plugin/pytest_pylint.py
   pypy/trunk/py/plugin/pytest_pytester.py
   pypy/trunk/py/plugin/pytest_recwarn.py
   pypy/trunk/py/plugin/pytest_restdoc.py
   pypy/trunk/py/plugin/pytest_resultlog.py
   pypy/trunk/py/plugin/pytest_runner.py
   pypy/trunk/py/plugin/pytest_skipping.py
   pypy/trunk/py/plugin/pytest_terminal.py
   pypy/trunk/py/plugin/pytest_tmpdir.py
   pypy/trunk/py/plugin/pytest_unittest.py
   pypy/trunk/pypy/tool/difftime.py
   pypy/trunk/pypy/tool/rest/
   pypy/trunk/pypy/tool/rest/__init__.py
   pypy/trunk/pypy/tool/rest/convert.py
   pypy/trunk/pypy/tool/rest/directive.py
   pypy/trunk/pypy/tool/rest/rest.py
   pypy/trunk/pypy/tool/rest/rst.py
Removed:
   pypy/trunk/pytest_resultlog.py
Modified:
   pypy/trunk/   (props changed)
   pypy/trunk/dotviewer/conftest.py
   pypy/trunk/lib-python/conftest.py
   pypy/trunk/pypy/annotation/test/autopath.py
   pypy/trunk/pypy/bin/autopath.py
   pypy/trunk/pypy/bin/py.py
   pypy/trunk/pypy/config/autopath.py
   pypy/trunk/pypy/config/config.py
   pypy/trunk/pypy/config/makerestdoc.py
   pypy/trunk/pypy/config/pypyoption.py
   pypy/trunk/pypy/config/test/test_makerestdoc.py
   pypy/trunk/pypy/config/test/test_pypyoption.py
   pypy/trunk/pypy/conftest.py
   pypy/trunk/pypy/doc/config/autopath.py
   pypy/trunk/pypy/doc/config/generate.py
   pypy/trunk/pypy/doc/config/makemodules.py
   pypy/trunk/pypy/doc/confrest.py
   pypy/trunk/pypy/doc/confrest_oldpy.py
   pypy/trunk/pypy/doc/conftest.py
   pypy/trunk/pypy/doc/statistic/confrest.py
   pypy/trunk/pypy/doc/test_redirections.py
   pypy/trunk/pypy/doc/tool/makeref.py
   pypy/trunk/pypy/doc/tool/mydot.py
   pypy/trunk/pypy/jit/backend/autopath.py
   pypy/trunk/pypy/jit/backend/test/conftest.py
   pypy/trunk/pypy/jit/backend/x86/autopath.py
   pypy/trunk/pypy/jit/conftest.py
   pypy/trunk/pypy/jit/tl/autopath.py
   pypy/trunk/pypy/jit/tl/conftest.py
   pypy/trunk/pypy/jit/tl/spli/autopath.py
   pypy/trunk/pypy/jit/tl/targettlc.py
   pypy/trunk/pypy/jit/tl/targettlr.py
   pypy/trunk/pypy/jit/tl/test/test_pypyjit.py
   pypy/trunk/pypy/jit/tl/tla/targettla.py
   pypy/trunk/pypy/jit/tl/tla/tla_assembler.py
   pypy/trunk/pypy/jit/tool/autopath.py
   pypy/trunk/pypy/lang/gameboy/debug/gameboy_debug_entry_point.py
   pypy/trunk/pypy/lang/gameboy/profiling/evaluation/gameboy_evaluation_target.py
   pypy/trunk/pypy/lang/gameboy/profiling/gameboyTest.py
   pypy/trunk/pypy/lang/gameboy/test/test_cartridge.py
   pypy/trunk/pypy/lang/gameboy/test/test_rom.py
   pypy/trunk/pypy/lang/gameboy/tool/autopath.py
   pypy/trunk/pypy/lang/js/autopath.py
   pypy/trunk/pypy/lang/js/jsparser.py
   pypy/trunk/pypy/lang/js/test/ecma/conftest.py
   pypy/trunk/pypy/lang/js/test/test_interactive.py
   pypy/trunk/pypy/lang/js/test/test_parser.py
   pypy/trunk/pypy/lang/prolog/interpreter/autopath.py
   pypy/trunk/pypy/lang/prolog/interpreter/conftest.py
   pypy/trunk/pypy/lang/prolog/interpreter/interactive.py
   pypy/trunk/pypy/lang/prolog/interpreter/parsing.py
   pypy/trunk/pypy/lang/scheme/autopath.py
   pypy/trunk/pypy/lang/scheme/execution.py
   pypy/trunk/pypy/lang/scheme/test/test_interactive.py
   pypy/trunk/pypy/lang/smalltalk/test/test_miniimage.py
   pypy/trunk/pypy/lang/smalltalk/tool/analyseimage.py
   pypy/trunk/pypy/lang/smalltalk/tool/autopath.py
   pypy/trunk/pypy/lib/app_test/ctypes_tests/conftest.py
   pypy/trunk/pypy/lib/distributed/socklayer.py
   pypy/trunk/pypy/lib/test2/autopath.py
   pypy/trunk/pypy/module/__builtin__/test/autopath.py
   pypy/trunk/pypy/module/__builtin__/test/test_import.py
   pypy/trunk/pypy/module/_codecs/test/autopath.py
   pypy/trunk/pypy/module/_file/test/test_file_extra.py
   pypy/trunk/pypy/module/_sre/test/autopath.py
   pypy/trunk/pypy/module/_sre/test/test_app_sre.py
   pypy/trunk/pypy/module/bz2/test/test_bz2_compdecomp.py
   pypy/trunk/pypy/module/bz2/test/test_bz2_file.py
   pypy/trunk/pypy/module/pypyjit/test/conftest.py
   pypy/trunk/pypy/module/sys/test/autopath.py
   pypy/trunk/pypy/objspace/std/test/test_complexobject.py
   pypy/trunk/pypy/rlib/parsing/ebnfparse.py
   pypy/trunk/pypy/rlib/parsing/makepackrat.py
   pypy/trunk/pypy/rlib/parsing/regexparse.py
   pypy/trunk/pypy/rlib/parsing/test/autopath.py
   pypy/trunk/pypy/rlib/parsing/test/test_pythonlexer.py
   pypy/trunk/pypy/rlib/parsing/test/test_pythonparse.py
   pypy/trunk/pypy/rlib/rsdl/eci.py
   pypy/trunk/pypy/rlib/rsdl/test/autopath.py
   pypy/trunk/pypy/rlib/test/test_listsort.py
   pypy/trunk/pypy/rpython/microbench/autopath.py
   pypy/trunk/pypy/rpython/module/test/test_ll_os_path.py
   pypy/trunk/pypy/rpython/test/test_rbuiltin.py
   pypy/trunk/pypy/test_all.py
   pypy/trunk/pypy/tool/algo/test/autopath.py
   pypy/trunk/pypy/tool/ansi_mandelbrot.py
   pypy/trunk/pypy/tool/ansi_print.py
   pypy/trunk/pypy/tool/autopath.py
   pypy/trunk/pypy/tool/bench/pypyresult.py
   pypy/trunk/pypy/tool/genstatistic.py
   pypy/trunk/pypy/tool/option.py
   pypy/trunk/pypy/tool/pytest/appsupport.py
   pypy/trunk/pypy/tool/pytest/autopath.py
   pypy/trunk/pypy/tool/pytest/genreportdata.py
   pypy/trunk/pypy/tool/pytest/htmlreport.py
   pypy/trunk/pypy/tool/pytest/test/test_new_count.py
   pypy/trunk/pypy/tool/statistic_over_time.py
   pypy/trunk/pypy/tool/test/autopath.py
   pypy/trunk/pypy/tool/test/test_conftest1.py
   pypy/trunk/pypy/tool/test/test_pytestsupport.py
   pypy/trunk/pypy/tool/udir.py
   pypy/trunk/pypy/translator/autopath.py
   pypy/trunk/pypy/translator/benchmark/autopath.py
   pypy/trunk/pypy/translator/benchmark/benchmarks.py
   pypy/trunk/pypy/translator/benchmark/jitbench.py
   pypy/trunk/pypy/translator/c/autopath.py
   pypy/trunk/pypy/translator/c/test/autopath.py
   pypy/trunk/pypy/translator/c/test/test_extfunc.py
   pypy/trunk/pypy/translator/cli/conftest.py
   pypy/trunk/pypy/translator/cli/test/autopath.py
   pypy/trunk/pypy/translator/driver.py
   pypy/trunk/pypy/translator/goal/autopath.py
   pypy/trunk/pypy/translator/goal/targetgbfullprofiling.py
   pypy/trunk/pypy/translator/goal/targetgbimplementation.py
   pypy/trunk/pypy/translator/goal/targetgbrom4.py
   pypy/trunk/pypy/translator/goal/targetpreimportedpypy.py
   pypy/trunk/pypy/translator/goal/targetpypystandalone.py
   pypy/trunk/pypy/translator/goal/test2/autopath.py
   pypy/trunk/pypy/translator/goal/translate.py
   pypy/trunk/pypy/translator/interactive.py
   pypy/trunk/pypy/translator/jvm/conftest.py
   pypy/trunk/pypy/translator/jvm/genjvm.py
   pypy/trunk/pypy/translator/microbench/pybench/autopath.py
   pypy/trunk/pypy/translator/platform/__init__.py
   pypy/trunk/pypy/translator/platform/test/test_darwin.py
   pypy/trunk/pypy/translator/platform/test/test_maemo.py
   pypy/trunk/pypy/translator/sandbox/autopath.py
   pypy/trunk/pypy/translator/sandbox/test/autopath.py
   pypy/trunk/pypy/translator/test/autopath.py
   pypy/trunk/pypy/translator/test/test_driver.py
   pypy/trunk/pypy/translator/tool/autopath.py
Log:
merging py11 branch that integrates the py-1.1.0 release verbatim and
a py/bin/ directory containing the command line scripts. 



Modified: pypy/trunk/dotviewer/conftest.py
==============================================================================
--- pypy/trunk/dotviewer/conftest.py	(original)
+++ pypy/trunk/dotviewer/conftest.py	Wed Nov 11 18:54:49 2009
@@ -1,7 +1,7 @@
 import py
 
 def pytest_addoption(parser):
-    group = parser.addgroup("dotviever")
+    group = parser.getgroup("dotviever")
     group.addoption('--pygame', action="store_true", 
         dest="pygame", default=False, 
         help="allow interactive tests using Pygame")

Modified: pypy/trunk/lib-python/conftest.py
==============================================================================
--- pypy/trunk/lib-python/conftest.py	(original)
+++ pypy/trunk/lib-python/conftest.py	Wed Nov 11 18:54:49 2009
@@ -28,7 +28,7 @@
 #
 
 def pytest_addoption(parser):
-    group = parser.addgroup("complicance testing options") 
+    group = parser.getgroup("complicance testing options") 
     group.addoption('-T', '--timeout', action="store", type="string", 
        default="1000", dest="timeout", 
        help="fail a test module after the given timeout. "

Added: pypy/trunk/py/__init__.py
==============================================================================
--- (empty file)
+++ pypy/trunk/py/__init__.py	Wed Nov 11 18:54:49 2009
@@ -0,0 +1,184 @@
+# -*- coding: utf-8 -*-
+"""
+py.test and pylib: rapid testing and development utils
+
+this module uses apipkg.py for lazy-loading sub modules
+and classes.  The initpkg-dictionary  below specifies
+name->value mappings where value can be another namespace
+dictionary or an import path.  
+
+(c) Holger Krekel and others, 2009
+"""
+version = "1.1.0"
+
+__version__ = version = version or "1.1.x"
+import py.apipkg
+
+py.apipkg.initpkg(__name__, dict(
+    # access to all standard lib modules
+    std = '.impl.std:std',
+    # access to all posix errno's as classes
+    error = '.impl.error:error',
+
+    _impldir = '.impl._metainfo:impldir',
+    _dir = '.impl._metainfo:pydir',
+    _pydirs = '.impl._metainfo:pydirs',
+    version = 'py:__version__', # backward compatibility
+
+    _com = {
+        'Registry': '.impl._com:Registry',
+        'MultiCall':  '.impl._com:MultiCall',
+        'comregistry': '.impl._com:comregistry',
+        'HookRelay': '.impl._com:HookRelay',
+    },
+    cmdline = {
+        'pytest':     '.impl.cmdline.pytest:main',
+        'pylookup':   '.impl.cmdline.pylookup:main',
+        'pycountloc': '.impl.cmdline.pycountlog:main',
+        'pytest':     '.impl.test.cmdline:main',
+        'pylookup':   '.impl.cmdline.pylookup:main',
+        'pycountloc': '.impl.cmdline.pycountloc:main',
+        'pycleanup':  '.impl.cmdline.pycleanup:main',
+        'pywhich'        : '.impl.cmdline.pywhich:main',
+        'pysvnwcrevert'  : '.impl.cmdline.pysvnwcrevert:main',
+        'pyconvert_unittest'  : '.impl.cmdline.pyconvert_unittest:main',
+    },
+
+    test = {
+        # helpers for use from test functions or collectors
+        '__doc__'           : '.impl.test:__doc__',
+        '_PluginManager'    : '.impl.test.pluginmanager:PluginManager',
+        'raises'            : '.impl.test.outcome:raises',
+        'skip'              : '.impl.test.outcome:skip',
+        'importorskip'      : '.impl.test.outcome:importorskip',
+        'fail'              : '.impl.test.outcome:fail',
+        'exit'              : '.impl.test.outcome:exit',
+        # configuration/initialization related test api
+        'config'            : '.impl.test.config:config_per_process',
+        'ensuretemp'        : '.impl.test.config:ensuretemp',
+        'collect': {
+            'Collector' : '.impl.test.collect:Collector',
+            'Directory' : '.impl.test.collect:Directory',
+            'File'      : '.impl.test.collect:File',
+            'Item'      : '.impl.test.collect:Item',
+            'Module'    : '.impl.test.pycollect:Module',
+            'Class'     : '.impl.test.pycollect:Class',
+            'Instance'  : '.impl.test.pycollect:Instance',
+            'Generator' : '.impl.test.pycollect:Generator',
+            'Function'  : '.impl.test.pycollect:Function',
+            '_fillfuncargs' : '.impl.test.funcargs:fillfuncargs',
+        },
+    },
+
+    # hook into the top-level standard library
+    process = {
+        '__doc__'        : '.impl.process:__doc__',
+        'cmdexec'        : '.impl.process.cmdexec:cmdexec',
+        'kill'           : '.impl.process.killproc:kill',
+        'ForkedFunc'     : '.impl.process.forkedfunc:ForkedFunc',
+    },
+
+    path = {
+        '__doc__'        : '.impl.path:__doc__',
+        'svnwc'          : '.impl.path.svnwc:SvnWCCommandPath',
+        'svnurl'         : '.impl.path.svnurl:SvnCommandPath',
+        'local'          : '.impl.path.local:LocalPath',
+        'SvnAuth'        : '.impl.path.svnwc:SvnAuth',
+    },
+
+    # some nice slightly magic APIs
+    magic = {
+        'invoke'           : '.impl.code.oldmagic:invoke',
+        'revoke'           : '.impl.code.oldmagic:revoke',
+        'patch'            : '.impl.code.oldmagic:patch',
+        'revert'           : '.impl.code.oldmagic:revert',
+        'autopath'         : '.impl.path.local:autopath',
+        'AssertionError'   : '.impl.code.oldmagic2:AssertionError',
+    },
+
+    # python inspection/code-generation API
+    code = {
+        '__doc__'           : '.impl.code:__doc__',
+        'compile'           : '.impl.code.source:compile_',
+        'Source'            : '.impl.code.source:Source',
+        'Code'              : '.impl.code.code:Code',
+        'Frame'             : '.impl.code.code:Frame',
+        'ExceptionInfo'     : '.impl.code.code:ExceptionInfo',
+        'Traceback'         : '.impl.code.code:Traceback',
+        'getfslineno'       : '.impl.code.source:getfslineno',
+        'getrawcode'        : '.impl.code.code:getrawcode',
+        'patch_builtins'    : '.impl.code.code:patch_builtins',
+        'unpatch_builtins'  : '.impl.code.code:unpatch_builtins',
+        '_AssertionError'   : '.impl.code.assertion:AssertionError',
+    },
+
+    # backports and additions of builtins
+    builtin = {
+        '__doc__'        : '.impl.builtin:__doc__',
+        'enumerate'      : '.impl.builtin:enumerate',
+        'reversed'       : '.impl.builtin:reversed',
+        'sorted'         : '.impl.builtin:sorted',
+        'set'            : '.impl.builtin:set',
+        'frozenset'      : '.impl.builtin:frozenset',
+        'BaseException'  : '.impl.builtin:BaseException',
+        'GeneratorExit'  : '.impl.builtin:GeneratorExit',
+        'print_'         : '.impl.builtin:print_',
+        '_reraise'       : '.impl.builtin:_reraise',
+        '_tryimport'     : '.impl.builtin:_tryimport',
+        'exec_'          : '.impl.builtin:exec_',
+        '_basestring'    : '.impl.builtin:_basestring',
+        '_totext'        : '.impl.builtin:_totext',
+        '_isbytes'       : '.impl.builtin:_isbytes',
+        '_istext'        : '.impl.builtin:_istext',
+        '_getimself'     : '.impl.builtin:_getimself',
+        '_getfuncdict'   : '.impl.builtin:_getfuncdict',
+        'builtins'       : '.impl.builtin:builtins',
+        'execfile'       : '.impl.builtin:execfile',
+        'callable'       : '.impl.builtin:callable',
+    },
+
+    # input-output helping
+    io = {
+        '__doc__'             : '.impl.io:__doc__',
+        'dupfile'             : '.impl.io.capture:dupfile',
+        'TextIO'              : '.impl.io.capture:TextIO',
+        'BytesIO'             : '.impl.io.capture:BytesIO',
+        'FDCapture'           : '.impl.io.capture:FDCapture',
+        'StdCapture'          : '.impl.io.capture:StdCapture',
+        'StdCaptureFD'        : '.impl.io.capture:StdCaptureFD',
+        'TerminalWriter'      : '.impl.io.terminalwriter:TerminalWriter',
+    },
+
+    # small and mean xml/html generation
+    xml = {
+        '__doc__'            : '.impl.xmlgen:__doc__',
+        'html'               : '.impl.xmlgen:html',
+        'Tag'                : '.impl.xmlgen:Tag',
+        'raw'                : '.impl.xmlgen:raw',
+        'Namespace'          : '.impl.xmlgen:Namespace',
+        'escape'             : '.impl.xmlgen:escape',
+    },
+
+    log = {
+        # logging API ('producers' and 'consumers' connected via keywords)
+        '__doc__'            : '.impl.log:__doc__',
+        '_apiwarn'           : '.impl.log.warning:_apiwarn',
+        'Producer'           : '.impl.log.log:Producer',
+        'setconsumer'        : '.impl.log.log:setconsumer',
+        '_setstate'          : '.impl.log.log:setstate',
+        '_getstate'          : '.impl.log.log:getstate',
+        'Path'               : '.impl.log.log:Path',
+        'STDOUT'             : '.impl.log.log:STDOUT',
+        'STDERR'             : '.impl.log.log:STDERR',
+        'Syslog'             : '.impl.log.log:Syslog',
+    },
+
+    # compatibility modules (deprecated)
+    compat = {
+        '__doc__'         : '.impl.compat:__doc__',
+        'doctest'         : '.impl.compat.dep_doctest:doctest',
+        'optparse'        : '.impl.compat.dep_optparse:optparse',
+        'textwrap'        : '.impl.compat.dep_textwrap:textwrap',
+        'subprocess'      : '.impl.compat.dep_subprocess:subprocess',
+    },
+))
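
A minimal sketch of how the lazy namespace defined above behaves in practice, assuming the merged py/ directory is on sys.path; each attribute access triggers the import named in the initpkg() mapping the first time it is used and the result is cached on the module:

    import py

    # first access imports py.impl.path.local and returns the LocalPath class
    p = py.path.local('.')
    # first access imports py.impl.code.source and returns the Source class
    src = py.code.Source("x = 1\n")
    # backported print function from py.impl.builtin
    py.builtin.print_("checked out at", p)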

Added: pypy/trunk/py/apipkg.py
==============================================================================
--- (empty file)
+++ pypy/trunk/py/apipkg.py	Wed Nov 11 18:54:49 2009
@@ -0,0 +1,69 @@
+"""
+apipkg: control the exported namespace of a python package.
+
+see http://pypi.python.org/pypi/apipkg
+
+(c) holger krekel, 2009 - MIT license
+"""
+import sys
+from types import ModuleType
+
+__version__ = "1.0b2"
+
+def initpkg(pkgname, exportdefs):
+    """ initialize given package from the export definitions. """
+    mod = ApiModule(pkgname, exportdefs, implprefix=pkgname)
+    oldmod = sys.modules[pkgname]
+    mod.__file__ = getattr(oldmod, '__file__', None)
+    mod.__version__ = getattr(oldmod, '__version__', None)
+    mod.__path__ = getattr(oldmod, '__path__', None)
+    sys.modules[pkgname]  = mod
+
+def importobj(modpath, attrname):
+    module = __import__(modpath, None, None, ['__doc__'])
+    return getattr(module, attrname)
+
+class ApiModule(ModuleType):
+    def __init__(self, name, importspec, implprefix=None):
+        self.__name__ = name
+        self.__all__ = list(importspec)
+        self.__map__ = {}
+        self.__implprefix__ = implprefix or name
+        for name, importspec in importspec.items():
+            if isinstance(importspec, dict):
+                subname = '%s.%s'%(self.__name__, name)
+                apimod = ApiModule(subname, importspec, implprefix)
+                sys.modules[subname] = apimod
+                setattr(self, name, apimod)
+            else:
+                modpath, attrname = importspec.split(':')
+                if modpath[0] == '.':
+                    modpath = implprefix + modpath
+                if name == '__doc__':
+                    self.__doc__ = importobj(modpath, attrname)
+                else:
+                    self.__map__[name] = (modpath, attrname)
+
+    def __repr__(self):
+        return '<ApiModule %r>' % (self.__name__,)
+
+    def __getattr__(self, name):
+        try:
+            modpath, attrname = self.__map__[name]
+        except KeyError:
+            raise AttributeError(name)
+        else:
+            result = importobj(modpath, attrname)
+            setattr(self, name, result)
+            del self.__map__[name]
+            return result
+
+    def __dict__(self):
+        # force all the content of the module to be loaded when __dict__ is read
+        dictdescr = ModuleType.__dict__['__dict__']
+        dict = dictdescr.__get__(self)
+        if dict is not None:
+            for name in self.__all__:
+                hasattr(self, name)  # force attribute load, ignore errors
+        return dict
+    __dict__ = property(__dict__)
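
For reference, apipkg.initpkg() can be used the same way from any package's __init__.py (with apipkg.py copied next to it); the package and module names below are purely illustrative:

    # mypkg/__init__.py -- hypothetical package wired up like py above
    import apipkg

    apipkg.initpkg(__name__, dict(
        # 'mypkg.Helpers' is only imported from mypkg._impl.helpers on first use
        Helpers='._impl.helpers:Helpers',
        util={
            'tempdir': '._impl.util:tempdir',
        },
    ))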

Added: pypy/trunk/py/bin/_findpy.py
==============================================================================
--- (empty file)
+++ pypy/trunk/py/bin/_findpy.py	Wed Nov 11 18:54:49 2009
@@ -0,0 +1,37 @@
+#!/usr/bin/env python 
+
+#
+# find and import a version of 'py'
+#
+import sys
+import os
+from os.path import dirname as opd, exists, join, basename, abspath
+
+def searchpy(current):
+    while 1:
+        last = current
+        initpy = join(current, '__init__.py')
+        if not exists(initpy):
+            pydir = join(current, 'py')
+            # recognize py-package and ensure it is importable
+            if exists(pydir) and exists(join(pydir, '__init__.py')):
+                #for p in sys.path:
+                #    if p == current:
+                #        return True
+                if current != sys.path[0]:  # if we are already first, then ok
+                    sys.stderr.write("inserting into sys.path: %s\n" % current)
+                    sys.path.insert(0, current)
+                return True
+        current = opd(current)
+        if last == current:
+            return False
+
+if not searchpy(abspath(os.curdir)):
+    if not searchpy(opd(abspath(sys.argv[0]))):
+        if not searchpy(opd(__file__)):
+            pass # let's hope it is just on sys.path 
+
+import py
+
+if __name__ == '__main__': 
+    print ("py lib is at %s" % py.__file__)

Added: pypy/trunk/py/bin/env.cmd
==============================================================================
--- (empty file)
+++ pypy/trunk/py/bin/env.cmd	Wed Nov 11 18:54:49 2009
@@ -0,0 +1,2 @@
+@echo off
+for /F "usebackq delims=" %%i in (`python "%~dp0\env.py"`) do %%i

Added: pypy/trunk/py/bin/env.py
==============================================================================
--- (empty file)
+++ pypy/trunk/py/bin/env.py	Wed Nov 11 18:54:49 2009
@@ -0,0 +1,33 @@
+#!/usr/bin/env python
+
+import sys, os, os.path
+
+progpath = sys.argv[0]
+packagedir = os.path.dirname(os.path.dirname(os.path.abspath(progpath)))
+packagename = os.path.basename(packagedir)
+bindir = os.path.join(packagedir, 'bin')
+if sys.platform == 'win32':
+    bindir = os.path.join(bindir, 'win32')
+rootdir = os.path.dirname(packagedir)
+
+def prepend_path(name, value):
+    sep = os.path.pathsep
+    curpath = os.environ.get(name, '')
+    newpath = [value] + [ x for x in curpath.split(sep) if x and x != value ]
+    return setenv(name, sep.join(newpath))
+
+def setenv(name, value):
+    shell = os.environ.get('SHELL', '')
+    comspec = os.environ.get('COMSPEC', '')
+    if shell.endswith('csh'):
+        cmd = 'setenv %s "%s"' % (name, value)
+    elif shell.endswith('sh'):
+        cmd = '%s="%s"; export %s' % (name, value, name)
+    elif comspec.endswith('cmd.exe'):
+        cmd = 'set %s=%s' % (name, value)
+    else:
+        assert False, 'Shell not supported.'
+    return cmd
+
+print(prepend_path('PATH', bindir))
+print(prepend_path('PYTHONPATH', rootdir))

Added: pypy/trunk/py/bin/py.cleanup
==============================================================================
--- (empty file)
+++ pypy/trunk/py/bin/py.cleanup	Wed Nov 11 18:54:49 2009
@@ -0,0 +1,3 @@
+#!/usr/bin/env python
+from _findpy import py
+py.cmdline.pycleanup()
\ No newline at end of file

Added: pypy/trunk/py/bin/py.convert_unittest
==============================================================================
--- (empty file)
+++ pypy/trunk/py/bin/py.convert_unittest	Wed Nov 11 18:54:49 2009
@@ -0,0 +1,3 @@
+#!/usr/bin/env python
+from _findpy import py
+py.cmdline.pyconvert_unittest()
\ No newline at end of file

Added: pypy/trunk/py/bin/py.countloc
==============================================================================
--- (empty file)
+++ pypy/trunk/py/bin/py.countloc	Wed Nov 11 18:54:49 2009
@@ -0,0 +1,3 @@
+#!/usr/bin/env python
+from _findpy import py
+py.cmdline.pycountloc()
\ No newline at end of file

Added: pypy/trunk/py/bin/py.lookup
==============================================================================
--- (empty file)
+++ pypy/trunk/py/bin/py.lookup	Wed Nov 11 18:54:49 2009
@@ -0,0 +1,3 @@
+#!/usr/bin/env python
+from _findpy import py
+py.cmdline.pylookup()
\ No newline at end of file

Added: pypy/trunk/py/bin/py.svnwcrevert
==============================================================================
--- (empty file)
+++ pypy/trunk/py/bin/py.svnwcrevert	Wed Nov 11 18:54:49 2009
@@ -0,0 +1,3 @@
+#!/usr/bin/env python
+from _findpy import py
+py.cmdline.pysvnwcrevert()
\ No newline at end of file

Added: pypy/trunk/py/bin/py.test
==============================================================================
--- (empty file)
+++ pypy/trunk/py/bin/py.test	Wed Nov 11 18:54:49 2009
@@ -0,0 +1,3 @@
+#!/usr/bin/env python
+from _findpy import py
+py.cmdline.pytest()
\ No newline at end of file

Added: pypy/trunk/py/bin/py.which
==============================================================================
--- (empty file)
+++ pypy/trunk/py/bin/py.which	Wed Nov 11 18:54:49 2009
@@ -0,0 +1,3 @@
+#!/usr/bin/env python
+from _findpy import py
+py.cmdline.pywhich()
\ No newline at end of file

Added: pypy/trunk/py/bin/win32/py.cleanup.cmd
==============================================================================
--- (empty file)
+++ pypy/trunk/py/bin/win32/py.cleanup.cmd	Wed Nov 11 18:54:49 2009
@@ -0,0 +1,2 @@
+@echo off
+python "%~dp0\..\py.cleanup" %*
\ No newline at end of file

Added: pypy/trunk/py/bin/win32/py.convert_unittest.cmd
==============================================================================
--- (empty file)
+++ pypy/trunk/py/bin/win32/py.convert_unittest.cmd	Wed Nov 11 18:54:49 2009
@@ -0,0 +1,2 @@
+@echo off
+python "%~dp0\..\py.convert_unittest" %*
\ No newline at end of file

Added: pypy/trunk/py/bin/win32/py.countloc.cmd
==============================================================================
--- (empty file)
+++ pypy/trunk/py/bin/win32/py.countloc.cmd	Wed Nov 11 18:54:49 2009
@@ -0,0 +1,2 @@
+@echo off
+python "%~dp0\..\py.countloc" %*
\ No newline at end of file

Added: pypy/trunk/py/bin/win32/py.lookup.cmd
==============================================================================
--- (empty file)
+++ pypy/trunk/py/bin/win32/py.lookup.cmd	Wed Nov 11 18:54:49 2009
@@ -0,0 +1,2 @@
+@echo off
+python "%~dp0\..\py.lookup" %*
\ No newline at end of file

Added: pypy/trunk/py/bin/win32/py.svnwcrevert.cmd
==============================================================================
--- (empty file)
+++ pypy/trunk/py/bin/win32/py.svnwcrevert.cmd	Wed Nov 11 18:54:49 2009
@@ -0,0 +1,2 @@
+@echo off
+python "%~dp0\..\py.svnwcrevert" %*
\ No newline at end of file

Added: pypy/trunk/py/bin/win32/py.test.cmd
==============================================================================
--- (empty file)
+++ pypy/trunk/py/bin/win32/py.test.cmd	Wed Nov 11 18:54:49 2009
@@ -0,0 +1,2 @@
+@echo off
+python "%~dp0\..\py.test" %*
\ No newline at end of file

Added: pypy/trunk/py/bin/win32/py.which.cmd
==============================================================================
--- (empty file)
+++ pypy/trunk/py/bin/win32/py.which.cmd	Wed Nov 11 18:54:49 2009
@@ -0,0 +1,2 @@
+@echo off
+python "%~dp0\..\py.which" %*
\ No newline at end of file

Added: pypy/trunk/py/impl/__init__.py
==============================================================================

Added: pypy/trunk/py/impl/_com.py
==============================================================================
--- (empty file)
+++ pypy/trunk/py/impl/_com.py	Wed Nov 11 18:54:49 2009
@@ -0,0 +1,125 @@
+"""
+py lib plugins and plugin call management
+"""
+
+import py
+import inspect
+
+__all__ = ['Registry', 'MultiCall', 'comregistry', 'HookRelay']
+
+class MultiCall:
+    """ execute a call into multiple python functions/methods.  """
+
+    def __init__(self, methods, kwargs, firstresult=False):
+        self.methods = methods[:]
+        self.kwargs = kwargs.copy()
+        self.kwargs['__multicall__'] = self
+        self.results = []
+        self.firstresult = firstresult
+
+    def __repr__(self):
+        status = "%d results, %d meths" % (len(self.results), len(self.methods))
+        return "<MultiCall %s, kwargs=%r>" %(status, self.kwargs)
+
+    def execute(self):
+        while self.methods:
+            method = self.methods.pop()
+            kwargs = self.getkwargs(method)
+            res = method(**kwargs)
+            if res is not None:
+                self.results.append(res) 
+                if self.firstresult:
+                    return res
+        if not self.firstresult:
+            return self.results 
+
+    def getkwargs(self, method):
+        kwargs = {}
+        for argname in varnames(method):
+            try:
+                kwargs[argname] = self.kwargs[argname]
+            except KeyError:
+                pass # might be optional param
+        return kwargs 
+
+def varnames(func):
+    ismethod = inspect.ismethod(func)
+    rawcode = py.code.getrawcode(func)
+    try:
+        return rawcode.co_varnames[ismethod:]
+    except AttributeError:
+        return ()
+
+class Registry:
+    """
+        Manage Plugins: register/unregister call calls to plugins. 
+    """
+    def __init__(self, plugins=None):
+        if plugins is None:
+            plugins = []
+        self._plugins = plugins
+
+    def register(self, plugin):
+        assert not isinstance(plugin, str)
+        assert not plugin in self._plugins
+        self._plugins.append(plugin)
+
+    def unregister(self, plugin):
+        self._plugins.remove(plugin)
+
+    def isregistered(self, plugin):
+        return plugin in self._plugins 
+
+    def __iter__(self):
+        return iter(self._plugins)
+
+    def listattr(self, attrname, plugins=None, extra=(), reverse=False):
+        l = []
+        if plugins is None:
+            plugins = self._plugins
+        candidates = list(plugins) + list(extra)
+        for plugin in candidates:
+            try:
+                l.append(getattr(plugin, attrname))
+            except AttributeError:
+                continue 
+        if reverse:
+            l.reverse()
+        return l
+
+class HookRelay: 
+    def __init__(self, hookspecs, registry):
+        self._hookspecs = hookspecs
+        self._registry = registry
+        for name, method in vars(hookspecs).items():
+            if name[:1] != "_":
+                setattr(self, name, self._makecall(name))
+
+    def _makecall(self, name, extralookup=None):
+        hookspecmethod = getattr(self._hookspecs, name)
+        firstresult = getattr(hookspecmethod, 'firstresult', False)
+        return HookCaller(self, name, firstresult=firstresult,
+            extralookup=extralookup)
+
+    def _getmethods(self, name, extralookup=()):
+        return self._registry.listattr(name, extra=extralookup)
+
+    def _performcall(self, name, multicall):
+        return multicall.execute()
+        
+class HookCaller:
+    def __init__(self, hookrelay, name, firstresult, extralookup=None):
+        self.hookrelay = hookrelay 
+        self.name = name 
+        self.firstresult = firstresult 
+        self.extralookup = extralookup and [extralookup] or ()
+
+    def __repr__(self):
+        return "<HookCaller %r>" %(self.name,)
+
+    def __call__(self, **kwargs):
+        methods = self.hookrelay._getmethods(self.name, self.extralookup)
+        mc = MultiCall(methods, kwargs, firstresult=self.firstresult)
+        return self.hookrelay._performcall(self.name, mc)
+   
+comregistry = Registry([])
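
A small sketch of how the Registry/HookRelay/MultiCall trio above fits together; HookSpecs, PluginA, PluginB and myhook are made-up names used only for illustration:

    import py

    class HookSpecs:
        # hook "specification": HookRelay only inspects the method names here
        def myhook(self, arg):
            """called on every registered plugin that implements myhook"""

    class PluginA:
        def myhook(self, arg):
            return arg + 1

    class PluginB:
        def myhook(self, arg):
            return arg * 10

    registry = py._com.Registry()
    registry.register(PluginA())
    registry.register(PluginB())

    hook = py._com.HookRelay(HookSpecs, registry)
    # MultiCall collects the non-None results of all registered implementations;
    # the last-registered plugin is called first, so this prints [30, 4]
    print(hook.myhook(arg=3))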

Added: pypy/trunk/py/impl/_metainfo.py
==============================================================================
--- (empty file)
+++ pypy/trunk/py/impl/_metainfo.py	Wed Nov 11 18:54:49 2009
@@ -0,0 +1,9 @@
+
+import py
+
+pydir = py.path.local(py.__file__).dirpath()
+impldir = pydir.join("impl")
+
+# list of all directories beloging to py
+assert impldir.relto(pydir)
+pydirs = [pydir]

Added: pypy/trunk/py/impl/builtin.py
==============================================================================
--- (empty file)
+++ pypy/trunk/py/impl/builtin.py	Wed Nov 11 18:54:49 2009
@@ -0,0 +1,203 @@
+import sys
+
+try:
+    reversed = reversed
+except NameError:
+    def reversed(sequence):
+        """reversed(sequence) -> reverse iterator over values of the sequence
+
+        Return a reverse iterator
+        """
+        if hasattr(sequence, '__reversed__'):
+            return sequence.__reversed__()
+        if not hasattr(sequence, '__getitem__'):
+            raise TypeError("argument to reversed() must be a sequence")
+        return reversed_iterator(sequence)
+
+    class reversed_iterator(object):
+
+        def __init__(self, seq):
+            self.seq = seq
+            self.remaining = len(seq)
+
+        def __iter__(self):
+            return self
+
+        def next(self):
+            i = self.remaining
+            if i > 0:
+                i -= 1
+                item = self.seq[i]
+                self.remaining = i
+                return item
+            raise StopIteration
+
+        def __length_hint__(self):
+            return self.remaining
+
+try:
+    sorted = sorted
+except NameError:
+    builtin_cmp = cmp # need to use cmp as keyword arg
+
+    def sorted(iterable, cmp=None, key=None, reverse=0):
+        use_cmp = None
+        if key is not None:
+            if cmp is None:
+                def use_cmp(x, y):
+                    return builtin_cmp(x[0], y[0])
+            else:
+                def use_cmp(x, y):
+                    return cmp(x[0], y[0])
+            l = [(key(element), element) for element in iterable]
+        else:
+            if cmp is not None:
+                use_cmp = cmp
+            l = list(iterable)
+        if use_cmp is not None:
+            l.sort(use_cmp)
+        else:
+            l.sort()
+        if reverse:
+            l.reverse()
+        if key is not None:
+            return [element for (_, element) in l]
+        return l
+
+try:
+    set, frozenset = set, frozenset
+except NameError:
+    from sets import set, frozenset 
+
+# pass through
+enumerate = enumerate 
+
+try:
+    BaseException = BaseException
+except NameError:
+    BaseException = Exception
+
+try:
+    GeneratorExit = GeneratorExit
+except NameError:
+    class GeneratorExit(Exception):
+        """ This exception is never raised, it is there to make it possible to
+        write code compatible with CPython 2.5 even in lower CPython
+        versions."""
+        pass
+    GeneratorExit.__module__ = 'exceptions'
+
+if sys.version_info >= (3, 0):
+    exec ("print_ = print ; exec_=exec")
+    import builtins
+
+    # some backward compatibility helpers 
+    _basestring = str 
+    def _totext(obj, encoding):
+        if isinstance(obj, bytes):
+            obj = obj.decode(encoding)
+        elif not isinstance(obj, str):
+            obj = str(obj)
+        return obj
+
+    def _isbytes(x): 
+        return isinstance(x, bytes)
+    def _istext(x): 
+        return isinstance(x, str)
+
+    def _getimself(function):
+        return getattr(function, '__self__', None)
+
+    def _getfuncdict(function):
+        return getattr(function, "__dict__", None)
+
+    def execfile(fn, globs=None, locs=None):
+        if globs is None:
+            back = sys._getframe(1)
+            globs = back.f_globals
+            locs = back.f_locals
+            del back
+        elif locs is None:
+            locs = globs
+        fp = open(fn, "rb")
+        try:
+            source = fp.read()
+        finally:
+            fp.close()
+        co = compile(source, fn, "exec", dont_inherit=True)
+        exec_(co, globs, locs)
+
+    def callable(obj):
+        return hasattr(obj, "__call__")
+
+else:
+    import __builtin__ as builtins
+    _totext = unicode 
+    _basestring = basestring
+    execfile = execfile
+    callable = callable
+    def _isbytes(x): 
+        return isinstance(x, str)
+    def _istext(x): 
+        return isinstance(x, unicode)
+
+    def _getimself(function):
+        return getattr(function, 'im_self', None)
+
+    def _getfuncdict(function):
+        return getattr(function, "__dict__", None)
+
+    def print_(*args, **kwargs):
+        """ minimal backport of py3k print statement. """ 
+        sep = ' '
+        if 'sep' in kwargs:
+            sep = kwargs.pop('sep')
+        end = '\n'
+        if 'end' in kwargs:
+            end = kwargs.pop('end')
+        file = 'file' in kwargs and kwargs.pop('file') or sys.stdout
+        if kwargs:
+            args = ", ".join([str(x) for x in kwargs])
+            raise TypeError("invalid keyword arguments: %s" % args)
+        at_start = True
+        for x in args:
+            if not at_start:
+                file.write(sep)
+            file.write(str(x))
+            at_start = False
+        file.write(end)
+
+    def exec_(obj, globals=None, locals=None):
+        """ minimal backport of py3k exec statement. """ 
+        if globals is None: 
+            frame = sys._getframe(1)
+            globals = frame.f_globals 
+            if locals is None:
+                locals = frame.f_locals
+        elif locals is None:
+            locals = globals
+        exec2(obj, globals, locals) 
+
+if sys.version_info >= (3,0):
+    exec ("""
+def _reraise(cls, val, tb):
+    assert hasattr(val, '__traceback__')
+    raise val
+""")
+else:
+    exec ("""
+def _reraise(cls, val, tb):
+    raise cls, val, tb
+def exec2(obj, globals, locals):
+    exec obj in globals, locals 
+""")
+
+def _tryimport(*names):
+    """ return the first successfully imported module. """ 
+    assert names
+    for name in names:
+        try:
+            return __import__(name, None, None, '__doc__')
+        except ImportError:
+            excinfo = sys.exc_info()
+    _reraise(*excinfo)
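
To illustrate the compatibility layer above, a few of the helpers as they are typically used; which definitions are backports and which are pass-throughs depends on the interpreter version:

    import py

    py.builtin.print_("answer:", 42)                  # print function on 2.x and 3.x
    ordered = py.builtin.sorted([3, 1, 2])            # sorted() even on Python 2.3
    text = py.builtin._totext("pypy", "utf-8")        # text type, decoding if needed
    strio = py.builtin._tryimport("cStringIO", "io")  # first importable module wins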

Added: pypy/trunk/py/impl/cmdline/__init__.py
==============================================================================
--- (empty file)
+++ pypy/trunk/py/impl/cmdline/__init__.py	Wed Nov 11 18:54:49 2009
@@ -0,0 +1 @@
+#

Added: pypy/trunk/py/impl/cmdline/pycleanup.py
==============================================================================
--- (empty file)
+++ pypy/trunk/py/impl/cmdline/pycleanup.py	Wed Nov 11 18:54:49 2009
@@ -0,0 +1,47 @@
+#!/usr/bin/env python 
+
+"""\
+py.cleanup [PATH]
+
+Delete pyc file recursively, starting from PATH (which defaults to the current
+working directory). Don't follow links and don't recurse into directories with
+a ".".
+"""
+import py
+
+def main():
+    parser = py.std.optparse.OptionParser(usage=__doc__)
+    parser.add_option("-e", "--remove", dest="ext", default=".pyc", action="store",
+        help="remove files with the given comma-separated list of extensions"
+    )
+    parser.add_option("-n", "--dryrun", dest="dryrun", default=False, 
+        action="store_true", 
+        help="display would-be-removed filenames"
+    )
+    parser.add_option("-d", action="store_true", dest="removedir",
+                      help="remove empty directories")
+    (options, args) = parser.parse_args()
+    if not args:
+        args = ["."]
+    ext = options.ext.split(",")
+    def shouldremove(p):
+        return p.ext in ext
+        
+    for arg in args:
+        path = py.path.local(arg)
+        py.builtin.print_("cleaning path", path, "of extensions", ext)
+        for x in path.visit(shouldremove, lambda x: x.check(dotfile=0, link=0)):
+            remove(x, options)
+    if options.removedir:
+        for x in path.visit(lambda x: x.check(dir=1), 
+                            lambda x: x.check(dotfile=0, link=0)):
+            if not x.listdir():
+                remove(x, options)
+
+def remove(path, options):
+    if options.dryrun:
+        py.builtin.print_("would remove", path)
+    else:
+        py.builtin.print_("removing", path)
+        path.remove()
+                
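
The py/bin/py.cleanup script forwards to this main(); the same dry run can be driven programmatically, though the argument list here is only an illustration:

    import sys
    from py.impl.cmdline.pycleanup import main

    # equivalent to running "py.cleanup -n ." from the shell (dry run, nothing deleted)
    sys.argv[1:] = ["-n", "."]
    main()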

Added: pypy/trunk/py/impl/cmdline/pyconvert_unittest.py
==============================================================================
--- (empty file)
+++ pypy/trunk/py/impl/cmdline/pyconvert_unittest.py	Wed Nov 11 18:54:49 2009
@@ -0,0 +1,249 @@
+import re
+import sys
+import parser
+
+d={}
+#  d is the dictionary of unittest changes, keyed to the old name
+#  used by unittest.
+#  d[old][0] is the new replacement function.
+#  d[old][1] is the operator you will substitute, or '' if there is none.
+#  d[old][2] is the possible number of arguments to the unittest
+#  function.
+
+# Old Unittest Name             new name         operator  # of args
+d['assertRaises']           = ('raises',               '', ['Any'])
+d['fail']                   = ('raise AssertionError', '', [0,1])
+d['assert_']                = ('assert',               '', [1,2])
+d['failIf']                 = ('assert not',           '', [1,2])
+d['assertEqual']            = ('assert',            ' ==', [2,3])
+d['failIfEqual']            = ('assert not',        ' ==', [2,3])
+d['assertIn']               = ('assert',            ' in', [2,3])
+d['assertNotIn']            = ('assert',            ' not in', [2,3])
+d['assertNotEqual']         = ('assert',            ' !=', [2,3])
+d['failUnlessEqual']        = ('assert',            ' ==', [2,3])
+d['assertAlmostEqual']      = ('assert round',      ' ==', [2,3,4])
+d['failIfAlmostEqual']      = ('assert not round',  ' ==', [2,3,4])
+d['assertNotAlmostEqual']   = ('assert round',      ' !=', [2,3,4])
+d['failUnlessAlmostEquals'] = ('assert round',      ' ==', [2,3,4])
+
+#  the list of synonyms
+d['failUnlessRaises']      = d['assertRaises']
+d['failUnless']            = d['assert_']
+d['assertEquals']          = d['assertEqual']
+d['assertNotEquals']       = d['assertNotEqual']
+d['assertAlmostEquals']    = d['assertAlmostEqual']
+d['assertNotAlmostEquals'] = d['assertNotAlmostEqual']
+
+# set up the regular expressions we will need
+leading_spaces = re.compile(r'^(\s*)') # this never fails
+
+pat = ''
+for k in d.keys():  # this complicated pattern to match all unittests
+    pat += '|' + r'^(\s*)' + 'self.' + k + r'\(' # \tself.whatever(
+
+old_names = re.compile(pat[1:])
+linesep='\n'        # nobody will really try to convert files not read
+                    # in text mode, will they?
+
+
+def blocksplitter(fp):
+    '''split a file into blocks that are headed by functions to rename'''
+
+    blocklist = []
+    blockstring = ''
+
+    for line in fp:
+        interesting = old_names.match(line)
+        if interesting :
+            if blockstring:
+                blocklist.append(blockstring)
+                blockstring = line # reset the block
+        else:
+            blockstring += line
+            
+    blocklist.append(blockstring)
+    return blocklist
+
+def rewrite_utest(block):
+    '''rewrite every block to use the new utest functions'''
+
+    '''returns the rewritten unittest, unless it ran into problems,
+       in which case it just returns the block unchanged.
+    '''
+    utest = old_names.match(block)
+
+    if not utest:
+        return block
+
+    old = utest.group(0).lstrip()[5:-1] # the name we want to replace
+    new = d[old][0] # the name of the replacement function
+    op  = d[old][1] # the operator you will use , or '' if there is none.
+    possible_args = d[old][2]  # a list of the number of arguments the
+                               # unittest function could possibly take.
+                
+    if possible_args == ['Any']: # just rename assertRaises & friends
+        return re.sub('self.'+old, new, block)
+
+    message_pos = possible_args[-1]
+    # the remaining unittests can have an optional message to print
+    # when they fail.  It is always the last argument to the function.
+
+    try:
+        indent, argl, trailer = decompose_unittest(old, block)
+
+    except SyntaxError: # but we couldn't parse it!
+        return block
+    
+    argnum = len(argl)
+    if argnum not in possible_args:
+        # sanity check - this one isn't real either
+        return block
+
+    elif argnum == message_pos:
+        message = argl[-1]
+        argl = argl[:-1]
+    else:
+        message = None
+
+    if argnum is 0 or (argnum is 1 and argnum is message_pos): #unittest fail()
+        string = ''
+        if message:
+            message = ' ' + message
+
+    elif message_pos is 4:  # assertAlmostEqual & friends
+        try:
+            pos = argl[2].lstrip()
+        except IndexError:
+            pos = '7' # default if none is specified
+        string = '(%s -%s, %s)%s 0' % (argl[0], argl[1], pos, op )
+
+    else: # assert_, assertEquals and all the rest
+        string = ' ' + op.join(argl)
+
+    if message:
+        string = string + ',' + message
+
+    return indent + new + string + trailer
+
+def decompose_unittest(old, block):
+    '''decompose the block into its component parts'''
+
+    ''' returns indent, arglist, trailer 
+        indent -- the indentation
+        arglist -- the arguments to the unittest function
+        trailer -- any extra junk after the closing paren, such as #commment
+    '''
+ 
+    indent = re.match(r'(\s*)', block).group()
+    pat = re.search('self.' + old + r'\(', block)
+
+    args, trailer = get_expr(block[pat.end():], ')')
+    arglist = break_args(args, [])
+
+    if arglist == ['']: # there weren't any
+        return indent, [], trailer
+
+    for i in range(len(arglist)):
+        try:
+            parser.expr(arglist[i].lstrip('\t '))
+        except SyntaxError:
+            if i == 0:
+                arglist[i] = '(' + arglist[i] + ')'
+            else:
+                arglist[i] = ' (' + arglist[i] + ')'
+
+    return indent, arglist, trailer
+
+def break_args(args, arglist):
+    '''recursively break a string into a list of arguments'''
+    try:
+        first, rest = get_expr(args, ',')
+        if not rest:
+            return arglist + [first]
+        else:
+            return [first] + break_args(rest, arglist)
+    except SyntaxError:
+        return arglist + [args]
+
+def get_expr(s, char):
+    '''split a string into an expression, and the rest of the string'''
+
+    pos=[]
+    for i in range(len(s)):
+        if s[i] == char:
+            pos.append(i)
+    if pos == []:
+        raise SyntaxError # we didn't find the expected char.  Ick.
+     
+    for p in pos:
+        # make the python parser do the hard work of deciding which comma
+        # splits the string into two expressions
+        try:
+            parser.expr('(' + s[:p] + ')')
+            return s[:p], s[p+1:]
+        except SyntaxError: # It's not an expression yet
+            pass
+    raise SyntaxError       # We never found anything that worked.
+
+
+def main():
+    import sys
+    import py
+
+    usage = "usage: %prog [-s [filename ...] | [-i | -c filename ...]]"
+    optparser = py.std.optparse.OptionParser(usage)
+
+    def select_output (option, opt, value, optparser, **kw):
+        if hasattr(optparser, 'output'):
+            optparser.error(
+                'Cannot combine -s -i and -c options. Use one only.')
+        else:
+            optparser.output = kw['output']
+
+    optparser.add_option("-s", "--stdout", action="callback",
+                         callback=select_output,
+                         callback_kwargs={'output':'stdout'},
+                         help="send your output to stdout")
+
+    optparser.add_option("-i", "--inplace", action="callback",
+                         callback=select_output,
+                         callback_kwargs={'output':'inplace'},
+                         help="overwrite files in place")
+
+    optparser.add_option("-c", "--copy", action="callback",
+                         callback=select_output,
+                         callback_kwargs={'output':'copy'},
+                         help="copy files ... fn.py --> fn_cp.py")
+
+    options, args = optparser.parse_args()
+
+    output = getattr(optparser, 'output', 'stdout')
+
+    if output in ['inplace', 'copy'] and not args:
+        optparser.error(
+                '-i and -c option  require at least one filename')
+
+    if not args:
+        s = ''
+        for block in blocksplitter(sys.stdin):
+            s += rewrite_utest(block)
+        sys.stdout.write(s)
+
+    else:
+        for infilename in args: # no error checking to see if we can open, etc.
+            infile = file(infilename)
+            s = ''
+            for block in blocksplitter(infile):
+                s += rewrite_utest(block)
+            if output == 'inplace':
+                outfile = file(infilename, 'w+')
+            elif output == 'copy': # yes, just go clobber any existing .cp
+                outfile = file (infilename[:-3]+ '_cp.py', 'w+')
+            else:
+                outfile = sys.stdout
+
+            outfile.write(s)
+
+    
+if __name__ == '__main__':
+    main()
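
A concrete example of the rewriting table above (it relies on the stdlib parser module available in the Python versions of the time; the input lines are made up):

    from py.impl.cmdline.pyconvert_unittest import rewrite_utest

    print(rewrite_utest("        self.assertEqual(x, y)\n"))
    # -> "        assert x == y\n"
    print(rewrite_utest("        self.failIf(flag)\n"))
    # -> "        assert not flag\n"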

Added: pypy/trunk/py/impl/cmdline/pycountloc.py
==============================================================================
--- (empty file)
+++ pypy/trunk/py/impl/cmdline/pycountloc.py	Wed Nov 11 18:54:49 2009
@@ -0,0 +1,94 @@
+#!/usr/bin/env python
+
+# hands on script to compute the non-empty Lines of Code 
+# for tests and non-test code 
+
+"""\
+py.countloc [PATHS]
+
+Count (non-empty) lines of python code and number of python files recursively
+starting from a list of paths given on the command line (starting from the
+current working directory). Distinguish between test files and normal ones and
+report them separately.
+"""
+import py
+
+def main():
+    parser = py.std.optparse.OptionParser(usage=__doc__)
+    (options, args) = parser.parse_args()
+    countloc(args)
+   
+def nodot(p):
+    return p.check(dotfile=0)
+
+class FileCounter(object):  
+    def __init__(self):
+        self.file2numlines = {}
+        self.numlines = 0
+        self.numfiles = 0
+
+    def addrecursive(self, directory, fil="*.py", rec=nodot):
+        for x in directory.visit(fil, rec): 
+            self.addfile(x)
+
+    def addfile(self, fn, emptylines=False):
+        if emptylines:
+            s = len(p.readlines())
+        else:
+            s = 0
+            for i in fn.readlines():
+                if i.strip():
+                    s += 1
+        self.file2numlines[fn] = s 
+        self.numfiles += 1
+        self.numlines += s
+
+    def getnumlines(self, fil): 
+        numlines = 0
+        for path, value in self.file2numlines.items():
+            if fil(path): 
+                numlines += value
+        return numlines 
+
+    def getnumfiles(self, fil): 
+        numfiles = 0
+        for path in self.file2numlines:
+            if fil(path): 
+                numfiles += 1
+        return numfiles
+
+def get_loccount(locations=None):
+    if locations is None:
+        localtions = [py.path.local()]
+    counter = FileCounter()
+    for loc in locations: 
+        counter.addrecursive(loc, '*.py', rec=nodot)
+
+    def istestfile(p):
+        return p.check(fnmatch='test_*.py')
+    isnottestfile = lambda x: not istestfile(x)
+
+    numfiles = counter.getnumfiles(isnottestfile) 
+    numlines = counter.getnumlines(isnottestfile) 
+    numtestfiles = counter.getnumfiles(istestfile)
+    numtestlines = counter.getnumlines(istestfile)
+   
+    return counter, numfiles, numlines, numtestfiles, numtestlines
+
+def countloc(paths=None):
+    if not paths:
+        paths = ['.']
+    locations = [py.path.local(x) for x in paths]
+    (counter, numfiles, numlines, numtestfiles,
+     numtestlines) = get_loccount(locations)
+
+    items = counter.file2numlines.items()
+    items.sort(lambda x,y: cmp(x[1], y[1]))
+    for x, y in items:
+        print("%3d %30s" % (y,x))
+    
+    print("%30s %3d" %("number of testfiles", numtestfiles))
+    print("%30s %3d" %("number of non-empty testlines", numtestlines))
+    print("%30s %3d" %("number of files", numfiles))
+    print("%30s %3d" %("number of non-empty lines", numlines))
+

Added: pypy/trunk/py/impl/cmdline/pylookup.py
==============================================================================
--- (empty file)
+++ pypy/trunk/py/impl/cmdline/pylookup.py	Wed Nov 11 18:54:49 2009
@@ -0,0 +1,83 @@
+#!/usr/bin/env python 
+
+"""\
+py.lookup [search_directory] SEARCH_STRING [options]
+
+Looks recursively through Python files for SEARCH_STRING, starting from the
+present working directory. Prints each matching line, with the filename and
+line number prepended."""
+
+import sys, os
+import py
+from py.impl.io.terminalwriter import ansi_print, terminal_width
+import re
+
+def rec(p):
+    return p.check(dotfile=0)
+
+parser = py.std.optparse.OptionParser(usage=__doc__)
+parser.add_option("-i", "--ignore-case", action="store_true", dest="ignorecase",
+                  help="ignore case distinctions")
+parser.add_option("-C", "--context", action="store", type="int", dest="context",
+            default=0, help="How many lines of output to show")
+
+def find_indexes(search_line, string):
+    indexes = []
+    before = 0
+    while 1:
+        i = search_line.find(string, before)
+        if i == -1:
+            break
+        indexes.append(i)
+        before = i + len(string)
+    return indexes
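+
+# For example (a quick sketch): find_indexes returns the start offset of every
+# non-overlapping occurrence of `string` in `search_line`:
+#
+#     find_indexes("abcabc", "bc")  ->  [1, 4]
+#     find_indexes("aaaa", "aa")    ->  [0, 2]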
+
+def main():
+    (options, args) = parser.parse_args()
+    if len(args) == 2:
+        search_dir, string = args
+        search_dir = py.path.local(search_dir)
+    else:
+        search_dir = py.path.local()
+        string = args[0]
+    if options.ignorecase:
+        string = string.lower()
+    for x in search_dir.visit('*.py', rec):
+        # match filename directly
+        s = x.relto(search_dir)
+        if options.ignorecase:
+            s = s.lower()
+        if s.find(string) != -1:
+            sys.stdout.write("%s: filename matches %r" %(x, string) + "\n")
+
+        try:
+            s = x.read()
+        except py.error.ENOENT:
+            pass # whatever, probably broken link (ie emacs lock)
+        searchs = s
+        if options.ignorecase:
+            searchs = s.lower()
+        if searchs.find(string) != -1:
+            lines = s.splitlines()
+            if options.ignorecase:
+                searchlines = s.lower().splitlines()
+            else:
+                searchlines = lines
+            for i, (line, searchline) in enumerate(zip(lines, searchlines)): 
+                indexes = find_indexes(searchline, string)
+                if not indexes:
+                    continue
+                if not options.context:
+                    sys.stdout.write("%s:%d: " %(x.relto(search_dir), i+1))
+                    last_index = 0
+                    for index in indexes:
+                        sys.stdout.write(line[last_index: index])
+                        ansi_print(line[index: index+len(string)],
+                                   file=sys.stdout, esc=31, newline=False)
+                        last_index = index + len(string)
+                    sys.stdout.write(line[last_index:] + "\n")
+                else:
+                    context = options.context // 2
+                    for count in range(max(0, i-context), min(len(lines) - 1, i+context+1)):
+                        print("%s:%d:  %s" %(x.relto(search_dir), count+1, lines[count].rstrip()))
+                    print("-" * terminal_width)

Added: pypy/trunk/py/impl/cmdline/pysvnwcrevert.py
==============================================================================
--- (empty file)
+++ pypy/trunk/py/impl/cmdline/pysvnwcrevert.py	Wed Nov 11 18:54:49 2009
@@ -0,0 +1,55 @@
+#! /usr/bin/env python
+"""\
+py.svnwcrevert [options] WCPATH
+
+Running this script and then 'svn up' puts the working copy WCPATH in a state
+as clean as a fresh check-out.
+
+WARNING: you'll lose all local changes, obviously!
+
+This script deletes all files that have been modified
+or that svn doesn't explicitly know about, including svn:ignored files
+(like .pyc files, hint hint).
+
+The goal of this script is to leave the working copy with some files and
+directories possibly missing, but - most importantly - in a state where
+the following 'svn up' won't just crash.
+"""
+
+import sys, py
+
+def kill(p, root):
+    print('<    %s' % (p.relto(root),))
+    p.remove(rec=1)
+
+def svnwcrevert(path, root=None, precious=[]):
+    if root is None:
+        root = path
+    wcpath = py.path.svnwc(path)
+    try:
+        st = wcpath.status()
+    except ValueError:   # typically, "bad char in wcpath"
+        kill(path, root)
+        return
+    for p in path.listdir():
+        if p.basename == '.svn' or p.basename in precious:
+            continue
+        wcp = py.path.svnwc(p)
+        if wcp not in st.unchanged and wcp not in st.external:
+            kill(p, root)
+        elif p.check(dir=1):
+            svnwcrevert(p, root)
+
+# XXX add a functional test
+
+parser = py.std.optparse.OptionParser(usage=__doc__)
+parser.add_option("-p", "--precious",
+                  action="append", dest="precious", default=[],
+                  help="preserve files with this name")
+
+def main():
+    opts, args = parser.parse_args()
+    if len(args) != 1:
+        parser.print_help()
+        sys.exit(2)
+    svnwcrevert(py.path.local(args[0]), precious=opts.precious)

Added: pypy/trunk/py/impl/cmdline/pytest.py
==============================================================================
--- (empty file)
+++ pypy/trunk/py/impl/cmdline/pytest.py	Wed Nov 11 18:54:49 2009
@@ -0,0 +1,5 @@
+#!/usr/bin/env python 
+import py
+
+def main():
+    py.test.cmdline.main() 

Added: pypy/trunk/py/impl/cmdline/pywhich.py
==============================================================================
--- (empty file)
+++ pypy/trunk/py/impl/cmdline/pywhich.py	Wed Nov 11 18:54:49 2009
@@ -0,0 +1,23 @@
+#!/usr/bin/env python 
+
+"""\
+py.which [name]
+
+print the location of the given python module or package name 
+"""
+
+import sys
+
+def main():
+    name = sys.argv[1]
+    try:
+        mod = __import__(name)
+    except ImportError:
+        sys.stderr.write("could not import: " +  name + "\n")
+    else:
+        try:
+            location = mod.__file__ 
+        except AttributeError:
+            sys.stderr.write("module (has no __file__): " + str(mod))
+        else:
+            print(location)

Added: pypy/trunk/py/impl/code/__init__.py
==============================================================================
--- (empty file)
+++ pypy/trunk/py/impl/code/__init__.py	Wed Nov 11 18:54:49 2009
@@ -0,0 +1 @@
+""" python inspection/code generation API """

Added: pypy/trunk/py/impl/code/_assertionnew.py
==============================================================================
--- (empty file)
+++ pypy/trunk/py/impl/code/_assertionnew.py	Wed Nov 11 18:54:49 2009
@@ -0,0 +1,337 @@
+"""
+Like _assertion.py but using builtin AST.  It should replace _assertion.py
+eventually.
+"""
+
+import sys
+import ast
+
+import py
+from py.impl.code.assertion import _format_explanation, BuiltinAssertionError
+
+
+if sys.platform.startswith("java") and sys.version_info < (2, 5, 2):
+    # See http://bugs.jython.org/issue1497
+    _exprs = ("BoolOp", "BinOp", "UnaryOp", "Lambda", "IfExp", "Dict",
+              "ListComp", "GeneratorExp", "Yield", "Compare", "Call",
+              "Repr", "Num", "Str", "Attribute", "Subscript", "Name",
+              "List", "Tuple")
+    _stmts = ("FunctionDef", "ClassDef", "Return", "Delete", "Assign",
+              "AugAssign", "Print", "For", "While", "If", "With", "Raise",
+              "TryExcept", "TryFinally", "Assert", "Import", "ImportFrom",
+              "Exec", "Global", "Expr", "Pass", "Break", "Continue")
+    _expr_nodes = set(getattr(ast, name) for name in _exprs)
+    _stmt_nodes = set(getattr(ast, name) for name in _stmts)
+    def _is_ast_expr(node):
+        return node.__class__ in _expr_nodes
+    def _is_ast_stmt(node):
+        return node.__class__ in _stmt_nodes
+else:
+    def _is_ast_expr(node):
+        return isinstance(node, ast.expr)
+    def _is_ast_stmt(node):
+        return isinstance(node, ast.stmt)
+
+
+class Failure(Exception):
+    """Error found while interpreting AST."""
+
+    def __init__(self, explanation=""):
+        self.cause = sys.exc_info()
+        self.explanation = explanation
+
+
+def interpret(source, frame, should_fail=False):
+    mod = ast.parse(source)
+    visitor = DebugInterpreter(frame)
+    try:
+        visitor.visit(mod)
+    except Failure:
+        failure = sys.exc_info()[1]
+        return getfailure(failure)
+    if should_fail:
+        return ("(assertion failed, but when it was re-run for "
+                "printing intermediate values, it did not fail.  Suggestions: "
+                "compute assert expression before the assert or use --nomagic)")
+
+def run(offending_line, frame=None):
+    if frame is None:
+        frame = py.code.Frame(sys._getframe(1))
+    return interpret(offending_line, frame)
+
+def getfailure(failure):
+    explanation = _format_explanation(failure.explanation)
+    value = failure.cause[1]
+    if str(value):
+        lines = explanation.splitlines()
+        if not lines:
+            lines.append("")
+        lines[0] += " << %s" % (value,)
+        explanation = "\n".join(lines)
+    text = "%s: %s" % (failure.cause[0].__name__, explanation)
+    if text.startswith("AssertionError: assert "):
+        text = text[16:]
+    return text
+
+
+operator_map = {
+    ast.BitOr : "|",
+    ast.BitXor : "^",
+    ast.BitAnd : "&",
+    ast.LShift : "<<",
+    ast.RShift : ">>",
+    ast.Add : "+",
+    ast.Sub : "-",
+    ast.Mult : "*",
+    ast.Div : "/",
+    ast.FloorDiv : "//",
+    ast.Mod : "%",
+    ast.Eq : "==",
+    ast.NotEq : "!=",
+    ast.Lt : "<",
+    ast.LtE : "<=",
+    ast.Gt : ">",
+    ast.GtE : ">=",
+    ast.Is : "is",
+    ast.IsNot : "is not",
+    ast.In : "in",
+    ast.NotIn : "not in"
+}
+
+unary_map = {
+    ast.Not : "not %s",
+    ast.Invert : "~%s",
+    ast.USub : "-%s",
+    ast.UAdd : "+%s"
+}
+
+
+class DebugInterpreter(ast.NodeVisitor):
+    """Interpret AST nodes to gleam useful debugging information."""
+
+    def __init__(self, frame):
+        self.frame = frame
+
+    def generic_visit(self, node):
+        # Fallback when we don't have a special implementation.
+        if _is_ast_expr(node):
+            mod = ast.Expression(node)
+            co = self._compile(mod)
+            try:
+                result = self.frame.eval(co)
+            except Exception:
+                raise Failure()
+            explanation = self.frame.repr(result)
+            return explanation, result
+        elif _is_ast_stmt(node):
+            mod = ast.Module([node])
+            co = self._compile(mod, "exec")
+            try:
+                self.frame.exec_(co)
+            except Exception:
+                raise Failure()
+            return None, None
+        else:
+            raise AssertionError("can't handle %s" %(node,))
+
+    def _compile(self, source, mode="eval"):
+        return compile(source, "<assertion interpretation>", mode)
+
+    def visit_Expr(self, expr):
+        return self.visit(expr.value)
+
+    def visit_Module(self, mod):
+        for stmt in mod.body:
+            self.visit(stmt)
+
+    def visit_Name(self, name):
+        explanation, result = self.generic_visit(name)
+        # See if the name is local.
+        source = "%r in locals() is not globals()" % (name.id,)
+        co = self._compile(source)
+        try:
+            local = self.frame.eval(co)
+        except Exception:
+            # have to assume it isn't
+            local = False
+        if not local:
+            return name.id, result
+        return explanation, result
+
+    def visit_Compare(self, comp):
+        left = comp.left
+        left_explanation, left_result = self.visit(left)
+        got_result = False
+        for op, next_op in zip(comp.ops, comp.comparators):
+            if got_result and not result:
+                break
+            next_explanation, next_result = self.visit(next_op)
+            op_symbol = operator_map[op.__class__]
+            explanation = "%s %s %s" % (left_explanation, op_symbol,
+                                        next_explanation)
+            source = "__exprinfo_left %s __exprinfo_right" % (op_symbol,)
+            co = self._compile(source)
+            try:
+                result = self.frame.eval(co, __exprinfo_left=left_result,
+                                         __exprinfo_right=next_result)
+            except Exception:
+                raise Failure(explanation)
+            else:
+                got_result = True
+            left_explanation, left_result = next_explanation, next_result
+        return explanation, result
+
+    def visit_BoolOp(self, boolop):
+        is_or = isinstance(boolop.op, ast.Or)
+        explanations = []
+        for operand in boolop.values:
+            explanation, result = self.visit(operand)
+            explanations.append(explanation)
+            if result == is_or:
+                break
+        name = is_or and " or " or " and "
+        explanation = "(" + name.join(explanations) + ")"
+        return explanation, result
+
+    def visit_UnaryOp(self, unary):
+        pattern = unary_map[unary.op.__class__]
+        operand_explanation, operand_result = self.visit(unary.operand)
+        explanation = pattern % (operand_explanation,)
+        co = self._compile(pattern % ("__exprinfo_expr",))
+        try:
+            result = self.frame.eval(co, __exprinfo_expr=operand_result)
+        except Exception:
+            raise Failure(explanation)
+        return explanation, result
+
+    def visit_BinOp(self, binop):
+        left_explanation, left_result = self.visit(binop.left)
+        right_explanation, right_result = self.visit(binop.right)
+        symbol = operator_map[binop.op.__class__]
+        explanation = "(%s %s %s)" % (left_explanation, symbol,
+                                      right_explanation)
+        source = "__exprinfo_left %s __exprinfo_right" % (symbol,)
+        co = self._compile(source)
+        try:
+            result = self.frame.eval(co, __exprinfo_left=left_result,
+                                     __exprinfo_right=right_result)
+        except Exception:
+            raise Failure(explanation)
+        return explanation, result
+
+    def visit_Call(self, call):
+        func_explanation, func = self.visit(call.func)
+        arg_explanations = []
+        ns = {"__exprinfo_func" : func}
+        arguments = []
+        for arg in call.args:
+            arg_explanation, arg_result = self.visit(arg)
+            arg_name = "__exprinfo_%s" % (len(ns),)
+            ns[arg_name] = arg_result
+            arguments.append(arg_name)
+            arg_explanations.append(arg_explanation)
+        for keyword in call.keywords:
+            arg_explanation, arg_result = self.visit(keyword.value)
+            arg_name = "__exprinfo_%s" % (len(ns),)
+            ns[arg_name] = arg_result
+            keyword_source = "%s=%%s" % (keyword.id)
+            arguments.append(keyword_source % (arg_name,))
+            arg_explanations.append(keyword_source % (arg_explanation,))
+        if call.starargs:
+            arg_explanation, arg_result = self.visit(call.starargs)
+            arg_name = "__exprinfo_star"
+            ns[arg_name] = arg_result
+            arguments.append("*%s" % (arg_name,))
+            arg_explanations.append("*%s" % (arg_explanation,))
+        if call.kwargs:
+            arg_explanation, arg_result = self.visit(call.kwargs)
+            arg_name = "__exprinfo_kwds"
+            ns[arg_name] = arg_result
+            arguments.append("**%s" % (arg_name,))
+            arg_explanations.append("**%s" % (arg_explanation,))
+        args_explained = ", ".join(arg_explanations)
+        explanation = "%s(%s)" % (func_explanation, args_explained)
+        args = ", ".join(arguments)
+        source = "__exprinfo_func(%s)" % (args,)
+        co = self._compile(source)
+        try:
+            result = self.frame.eval(co, **ns)
+        except Exception:
+            raise Failure(explanation)
+        # Only show the result explanation if this is not a call to a builtin
+        # and the result is not a bool.
+        if not isinstance(call.func, ast.Name) or \
+                not self._is_builtin_name(call.func):
+            source = "isinstance(__exprinfo_value, bool)"
+            co = self._compile(source)
+            try:
+                is_bool = self.frame.eval(co, __exprinfo_value=result)
+            except Exception:
+                is_bool = False
+            if not is_bool:
+                pattern = "%s\n{%s = %s\n}"
+                rep = self.frame.repr(result)
+                explanation = pattern % (rep, rep, explanation)
+        return explanation, result
+
+    def _is_builtin_name(self, name):
+        pattern = "%r not in globals() and %r not in locals()"
+        source = pattern % (name.id, name.id)
+        co = self._compile(source)
+        try:
+            return self.frame.eval(co)
+        except Exception:
+            return False
+
+    def visit_Attribute(self, attr):
+        if not isinstance(attr.ctx, ast.Load):
+            return self.generic_visit(attr)
+        source_explanation, source_result = self.visit(attr.value)
+        explanation = "%s.%s" % (source_explanation, attr.attr)
+        source = "__exprinfo_expr.%s" % (attr.attr,)
+        co = self._compile(source)
+        try:
+            result = self.frame.eval(co, __exprinfo_expr=source_result)
+        except Exception:
+            raise Failure(explanation)
+        # Check if the attr is from an instance.
+        source = "%r in getattr(__exprinfo_expr, '__dict__', {})"
+        source = source % (attr.attr,)
+        co = self._compile(source)
+        try:
+            from_instance = self.frame.eval(co, __exprinfo_expr=source_result)
+        except Exception:
+            from_instance = True
+        if from_instance:
+            rep = self.frame.repr(result)
+            pattern = "%s\n{%s = %s\n}"
+            explanation = pattern % (rep, rep, explanation)
+        return explanation, result
+
+    def visit_Assert(self, assrt):
+        test_explanation, test_result = self.visit(assrt.test)
+        if test_explanation.startswith("False\n{False =") and \
+                test_explanation.endswith("\n"):
+            test_explanation = test_explanation[15:-2]
+        explanation = "assert %s" % (test_explanation,)
+        if not test_result:
+            try:
+                raise BuiltinAssertionError
+            except Exception:
+                raise Failure(explanation)
+        return explanation, test_result
+
+    def visit_Assign(self, assign):
+        value_explanation, value_result = self.visit(assign.value)
+        explanation = "... = %s" % (value_explanation,)
+        name = ast.Name("__exprinfo_expr", ast.Load(), assign.value.lineno,
+                        assign.value.col_offset)
+        new_assign = ast.Assign(assign.targets, name, assign.lineno,
+                                assign.col_offset)
+        mod = ast.Module([new_assign])
+        co = self._compile(mod, "exec")
+        try:
+            self.frame.exec_(co, __exprinfo_expr=value_result)
+        except Exception:
+            raise Failure(explanation)
+        return explanation, value_result

Added: pypy/trunk/py/impl/code/_assertionold.py
==============================================================================
--- (empty file)
+++ pypy/trunk/py/impl/code/_assertionold.py	Wed Nov 11 18:54:49 2009
@@ -0,0 +1,558 @@
+import py
+import sys, inspect
+from compiler import parse, ast, pycodegen
+from py.impl.code.assertion import BuiltinAssertionError, _format_explanation
+
+passthroughex = (KeyboardInterrupt, SystemExit, MemoryError)
+
+class Failure:
+    def __init__(self, node):
+        self.exc, self.value, self.tb = sys.exc_info()
+        self.node = node
+
+class View(object):
+    """View base class.
+
+    If C is a subclass of View, then C(x) creates a proxy object around
+    the object x.  The actual class of the proxy is not C in general,
+    but a *subclass* of C determined by the rules below.  To avoid confusion
+    we call view class the class of the proxy (a subclass of C, so of View)
+    and object class the class of x.
+
+    Attributes and methods not found in the proxy are automatically read on x.
+    Other operations like setting attributes are performed on the proxy, as
+    determined by its view class.  The object x is available from the proxy
+    as its __obj__ attribute.
+
+    The view class selection is determined by the __view__ tuples and the
+    optional __viewkey__ method.  By default, the selected view class is the
+    most specific subclass of C whose __view__ mentions the class of x.
+    If no such subclass is found, the search proceeds with the parent
+    object classes.  For example, C(True) will first look for a subclass
+    of C with __view__ = (..., bool, ...) and only if it doesn't find any
+    look for one with __view__ = (..., int, ...), and then ..., object,...
+    If everything fails the class C itself is considered to be the default.
+
+    Alternatively, the view class selection can be driven by another aspect
+    of the object x, instead of the class of x, by overriding __viewkey__.
+    See last example at the end of this module.
+    """
+
+    _viewcache = {}
+    __view__ = ()
+
+    def __new__(rootclass, obj, *args, **kwds):
+        self = object.__new__(rootclass)
+        self.__obj__ = obj
+        self.__rootclass__ = rootclass
+        key = self.__viewkey__()
+        try:
+            self.__class__ = self._viewcache[key]
+        except KeyError:
+            self.__class__ = self._selectsubclass(key)
+        return self
+
+    def __getattr__(self, attr):
+        # attributes not found in the normal hierarchy rooted on View
+        # are looked up in the object's real class
+        return getattr(self.__obj__, attr)
+
+    def __viewkey__(self):
+        return self.__obj__.__class__
+
+    def __matchkey__(self, key, subclasses):
+        if inspect.isclass(key):
+            keys = inspect.getmro(key)
+        else:
+            keys = [key]
+        for key in keys:
+            result = [C for C in subclasses if key in C.__view__]
+            if result:
+                return result
+        return []
+
+    def _selectsubclass(self, key):
+        subclasses = list(enumsubclasses(self.__rootclass__))
+        for C in subclasses:
+            if not isinstance(C.__view__, tuple):
+                C.__view__ = (C.__view__,)
+        choices = self.__matchkey__(key, subclasses)
+        if not choices:
+            return self.__rootclass__
+        elif len(choices) == 1:
+            return choices[0]
+        else:
+            # combine the multiple choices
+            return type('?', tuple(choices), {})
+
+    def __repr__(self):
+        return '%s(%r)' % (self.__rootclass__.__name__, self.__obj__)
+
+
+def enumsubclasses(cls):
+    for subcls in cls.__subclasses__():
+        for subsubclass in enumsubclasses(subcls):
+            yield subsubclass
+    yield cls
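+# (enumsubclasses yields every subclass depth-first and the class itself last)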
+
+
+class Interpretable(View):
+    """A parse tree node with a few extra methods."""
+    explanation = None
+
+    def is_builtin(self, frame):
+        return False
+
+    def eval(self, frame):
+        # fall-back for unknown expression nodes
+        try:
+            expr = ast.Expression(self.__obj__)
+            expr.filename = '<eval>'
+            self.__obj__.filename = '<eval>'
+            co = pycodegen.ExpressionCodeGenerator(expr).getCode()
+            result = frame.eval(co)
+        except passthroughex:
+            raise
+        except:
+            raise Failure(self)
+        self.result = result
+        self.explanation = self.explanation or frame.repr(self.result)
+
+    def run(self, frame):
+        # fall-back for unknown statement nodes
+        try:
+            expr = ast.Module(None, ast.Stmt([self.__obj__]))
+            expr.filename = '<run>'
+            co = pycodegen.ModuleCodeGenerator(expr).getCode()
+            frame.exec_(co)
+        except passthroughex:
+            raise
+        except:
+            raise Failure(self)
+
+    def nice_explanation(self):
+        return _format_explanation(self.explanation)
+
+
+class Name(Interpretable):
+    __view__ = ast.Name
+
+    def is_local(self, frame):
+        co = compile('%r in locals() is not globals()' % self.name, '?', 'eval')
+        try:
+            return frame.is_true(frame.eval(co))
+        except passthroughex:
+            raise
+        except:
+            return False
+
+    def is_global(self, frame):
+        co = compile('%r in globals()' % self.name, '?', 'eval')
+        try:
+            return frame.is_true(frame.eval(co))
+        except passthroughex:
+            raise
+        except:
+            return False
+
+    def is_builtin(self, frame):
+        co = compile('%r not in locals() and %r not in globals()' % (
+            self.name, self.name), '?', 'eval')
+        try:
+            return frame.is_true(frame.eval(co))
+        except passthroughex:
+            raise
+        except:
+            return False
+
+    def eval(self, frame):
+        super(Name, self).eval(frame)
+        if not self.is_local(frame):
+            self.explanation = self.name
+
+class Compare(Interpretable):
+    __view__ = ast.Compare
+
+    def eval(self, frame):
+        expr = Interpretable(self.expr)
+        expr.eval(frame)
+        for operation, expr2 in self.ops:
+            if hasattr(self, 'result'):
+                # shortcutting in chained expressions
+                if not frame.is_true(self.result):
+                    break
+            expr2 = Interpretable(expr2)
+            expr2.eval(frame)
+            self.explanation = "%s %s %s" % (
+                expr.explanation, operation, expr2.explanation)
+            co = compile("__exprinfo_left %s __exprinfo_right" % operation,
+                         '?', 'eval')
+            try:
+                self.result = frame.eval(co, __exprinfo_left=expr.result,
+                                             __exprinfo_right=expr2.result)
+            except passthroughex:
+                raise
+            except:
+                raise Failure(self)
+            expr = expr2
+
+class And(Interpretable):
+    __view__ = ast.And
+
+    def eval(self, frame):
+        explanations = []
+        for expr in self.nodes:
+            expr = Interpretable(expr)
+            expr.eval(frame)
+            explanations.append(expr.explanation)
+            self.result = expr.result
+            if not frame.is_true(expr.result):
+                break
+        self.explanation = '(' + ' and '.join(explanations) + ')'
+
+class Or(Interpretable):
+    __view__ = ast.Or
+
+    def eval(self, frame):
+        explanations = []
+        for expr in self.nodes:
+            expr = Interpretable(expr)
+            expr.eval(frame)
+            explanations.append(expr.explanation)
+            self.result = expr.result
+            if frame.is_true(expr.result):
+                break
+        self.explanation = '(' + ' or '.join(explanations) + ')'
+
+
+# == Unary operations ==
+keepalive = []
+for astclass, astpattern in {
+    ast.Not    : 'not __exprinfo_expr',
+    ast.Invert : '(~__exprinfo_expr)',
+    }.items():
+
+    class UnaryArith(Interpretable):
+        __view__ = astclass
+
+        def eval(self, frame, astpattern=astpattern,
+                              co=compile(astpattern, '?', 'eval')):
+            expr = Interpretable(self.expr)
+            expr.eval(frame)
+            self.explanation = astpattern.replace('__exprinfo_expr',
+                                                  expr.explanation)
+            try:
+                self.result = frame.eval(co, __exprinfo_expr=expr.result)
+            except passthroughex:
+                raise
+            except:
+                raise Failure(self)
+
+    keepalive.append(UnaryArith)
+
+# == Binary operations ==
+for astclass, astpattern in {
+    ast.Add    : '(__exprinfo_left + __exprinfo_right)',
+    ast.Sub    : '(__exprinfo_left - __exprinfo_right)',
+    ast.Mul    : '(__exprinfo_left * __exprinfo_right)',
+    ast.Div    : '(__exprinfo_left / __exprinfo_right)',
+    ast.Mod    : '(__exprinfo_left % __exprinfo_right)',
+    ast.Power  : '(__exprinfo_left ** __exprinfo_right)',
+    }.items():
+
+    class BinaryArith(Interpretable):
+        __view__ = astclass
+
+        def eval(self, frame, astpattern=astpattern,
+                              co=compile(astpattern, '?', 'eval')):
+            left = Interpretable(self.left)
+            left.eval(frame)
+            right = Interpretable(self.right)
+            right.eval(frame)
+            self.explanation = (astpattern
+                                .replace('__exprinfo_left',  left .explanation)
+                                .replace('__exprinfo_right', right.explanation))
+            try:
+                self.result = frame.eval(co, __exprinfo_left=left.result,
+                                             __exprinfo_right=right.result)
+            except passthroughex:
+                raise
+            except:
+                raise Failure(self)
+
+    keepalive.append(BinaryArith)
+
+
+class CallFunc(Interpretable):
+    __view__ = ast.CallFunc
+
+    def is_bool(self, frame):
+        co = compile('isinstance(__exprinfo_value, bool)', '?', 'eval')
+        try:
+            return frame.is_true(frame.eval(co, __exprinfo_value=self.result))
+        except passthroughex:
+            raise
+        except:
+            return False
+
+    def eval(self, frame):
+        node = Interpretable(self.node)
+        node.eval(frame)
+        explanations = []
+        vars = {'__exprinfo_fn': node.result}
+        source = '__exprinfo_fn('
+        for a in self.args:
+            if isinstance(a, ast.Keyword):
+                keyword = a.name
+                a = a.expr
+            else:
+                keyword = None
+            a = Interpretable(a)
+            a.eval(frame)
+            argname = '__exprinfo_%d' % len(vars)
+            vars[argname] = a.result
+            if keyword is None:
+                source += argname + ','
+                explanations.append(a.explanation)
+            else:
+                source += '%s=%s,' % (keyword, argname)
+                explanations.append('%s=%s' % (keyword, a.explanation))
+        if self.star_args:
+            star_args = Interpretable(self.star_args)
+            star_args.eval(frame)
+            argname = '__exprinfo_star'
+            vars[argname] = star_args.result
+            source += '*' + argname + ','
+            explanations.append('*' + star_args.explanation)
+        if self.dstar_args:
+            dstar_args = Interpretable(self.dstar_args)
+            dstar_args.eval(frame)
+            argname = '__exprinfo_kwds'
+            vars[argname] = dstar_args.result
+            source += '**' + argname + ','
+            explanations.append('**' + dstar_args.explanation)
+        self.explanation = "%s(%s)" % (
+            node.explanation, ', '.join(explanations))
+        if source.endswith(','):
+            source = source[:-1]
+        source += ')'
+        co = compile(source, '?', 'eval')
+        try:
+            self.result = frame.eval(co, **vars)
+        except passthroughex:
+            raise
+        except:
+            raise Failure(self)
+        if not node.is_builtin(frame) or not self.is_bool(frame):
+            r = frame.repr(self.result)
+            self.explanation = '%s\n{%s = %s\n}' % (r, r, self.explanation)
+
+class Getattr(Interpretable):
+    __view__ = ast.Getattr
+
+    def eval(self, frame):
+        expr = Interpretable(self.expr)
+        expr.eval(frame)
+        co = compile('__exprinfo_expr.%s' % self.attrname, '?', 'eval')
+        try:
+            self.result = frame.eval(co, __exprinfo_expr=expr.result)
+        except passthroughex:
+            raise
+        except:
+            raise Failure(self)
+        self.explanation = '%s.%s' % (expr.explanation, self.attrname)
+        # if the attribute comes from the instance, its value is interesting
+        co = compile('hasattr(__exprinfo_expr, "__dict__") and '
+                     '%r in __exprinfo_expr.__dict__' % self.attrname,
+                     '?', 'eval')
+        try:
+            from_instance = frame.is_true(
+                frame.eval(co, __exprinfo_expr=expr.result))
+        except passthroughex:
+            raise
+        except:
+            from_instance = True
+        if from_instance:
+            r = frame.repr(self.result)
+            self.explanation = '%s\n{%s = %s\n}' % (r, r, self.explanation)
+
+# == Re-interpretation of full statements ==
+
+class Assert(Interpretable):
+    __view__ = ast.Assert
+
+    def run(self, frame):
+        test = Interpretable(self.test)
+        test.eval(frame)
+        # simplify 'assert False where False = ...'
+        if (test.explanation.startswith('False\n{False = ') and
+            test.explanation.endswith('\n}')):
+            test.explanation = test.explanation[15:-2]
+        # print the result as  'assert <explanation>'
+        self.result = test.result
+        self.explanation = 'assert ' + test.explanation
+        if not frame.is_true(test.result):
+            try:
+                raise BuiltinAssertionError
+            except passthroughex:
+                raise
+            except:
+                raise Failure(self)
+
+class Assign(Interpretable):
+    __view__ = ast.Assign
+
+    def run(self, frame):
+        expr = Interpretable(self.expr)
+        expr.eval(frame)
+        self.result = expr.result
+        self.explanation = '... = ' + expr.explanation
+        # fall-back-run the rest of the assignment
+        ass = ast.Assign(self.nodes, ast.Name('__exprinfo_expr'))
+        mod = ast.Module(None, ast.Stmt([ass]))
+        mod.filename = '<run>'
+        co = pycodegen.ModuleCodeGenerator(mod).getCode()
+        try:
+            frame.exec_(co, __exprinfo_expr=expr.result)
+        except passthroughex:
+            raise
+        except:
+            raise Failure(self)
+
+class Discard(Interpretable):
+    __view__ = ast.Discard
+
+    def run(self, frame):
+        expr = Interpretable(self.expr)
+        expr.eval(frame)
+        self.result = expr.result
+        self.explanation = expr.explanation
+
+class Stmt(Interpretable):
+    __view__ = ast.Stmt
+
+    def run(self, frame):
+        for stmt in self.nodes:
+            stmt = Interpretable(stmt)
+            stmt.run(frame)
+
+
+def report_failure(e):
+    explanation = e.node.nice_explanation()
+    if explanation:
+        explanation = ", in: " + explanation
+    else:
+        explanation = ""
+    sys.stdout.write("%s: %s%s\n" % (e.exc.__name__, e.value, explanation))
+
+def check(s, frame=None):
+    if frame is None:
+        import sys
+        frame = sys._getframe(1)
+        frame = py.code.Frame(frame)
+    expr = parse(s, 'eval')
+    assert isinstance(expr, ast.Expression)
+    node = Interpretable(expr.node)
+    try:
+        node.eval(frame)
+    except passthroughex:
+        raise
+    except Failure:
+        e = sys.exc_info()[1]
+        report_failure(e)
+    else:
+        if not frame.is_true(node.result):
+            sys.stderr.write("assertion failed: %s\n" % node.nice_explanation())
+
+
+###########################################################
+# API / Entry points
+# #########################################################
+
+def interpret(source, frame, should_fail=False):
+    module = Interpretable(parse(source, 'exec').node)
+    #print "got module", module
+    if isinstance(frame, py.std.types.FrameType):
+        frame = py.code.Frame(frame)
+    try:
+        module.run(frame)
+    except Failure:
+        e = sys.exc_info()[1]
+        return getfailure(e)
+    except passthroughex:
+        raise
+    except:
+        import traceback
+        traceback.print_exc()
+    if should_fail:
+        return ("(assertion failed, but when it was re-run for "
+                "printing intermediate values, it did not fail.  Suggestions: "
+                "compute assert expression before the assert or use --nomagic)")
+    else:
+        return None
+
+def getmsg(excinfo):
+    if isinstance(excinfo, tuple):
+        excinfo = py.code.ExceptionInfo(excinfo)
+    #frame, line = gettbline(tb)
+    #frame = py.code.Frame(frame)
+    #return interpret(line, frame)
+
+    tb = excinfo.traceback[-1] 
+    source = str(tb.statement).strip()
+    x = interpret(source, tb.frame, should_fail=True)
+    if not isinstance(x, str):
+        raise TypeError("interpret returned non-string %r" % (x,))
+    return x
+
+def getfailure(e):
+    explanation = e.node.nice_explanation()
+    if str(e.value):
+        lines = explanation.split('\n')
+        lines[0] += "  << %s" % (e.value,)
+        explanation = '\n'.join(lines)
+    text = "%s: %s" % (e.exc.__name__, explanation)
+    if text.startswith('AssertionError: assert '):
+        text = text[16:]
+    return text
+
+def run(s, frame=None):
+    if frame is None:
+        import sys
+        frame = sys._getframe(1)
+        frame = py.code.Frame(frame)
+    module = Interpretable(parse(s, 'exec').node)
+    try:
+        module.run(frame)
+    except Failure:
+        e = sys.exc_info()[1]
+        report_failure(e)
+
+
+if __name__ == '__main__':
+    # example:
+    def f():
+        return 5
+    def g():
+        return 3
+    def h(x):
+        return 'never'
+    check("f() * g() == 5")
+    check("not f()")
+    check("not (f() and g() or 0)")
+    check("f() == g()")
+    i = 4
+    check("i == f()")
+    check("len(f()) == 0")
+    check("isinstance(2+3+4, float)")
+
+    run("x = i")
+    check("x == 5")
+
+    run("assert not f(), 'oops'")
+    run("a, b, c = 1, 2")
+    run("a, b, c = f()")
+
+    check("max([f(),g()]) == 4")
+    check("'hello'[g()] == 'h'")
+    run("'guk%d' % h(f())")

Added: pypy/trunk/py/impl/code/assertion.py
==============================================================================
--- (empty file)
+++ pypy/trunk/py/impl/code/assertion.py	Wed Nov 11 18:54:49 2009
@@ -0,0 +1,75 @@
+import sys
+import py
+
+BuiltinAssertionError = py.builtin.builtins.AssertionError
+
+
+def _format_explanation(explanation):
+    # uck!  See CallFunc for where \n{ and \n} escape sequences are used
+    raw_lines = (explanation or '').split('\n')
+    # escape newlines not followed by { and }
+    lines = [raw_lines[0]]
+    for l in raw_lines[1:]:
+        if l.startswith('{') or l.startswith('}'):
+            lines.append(l)
+        else:
+            lines[-1] += '\\n' + l
+
+    result = lines[:1]
+    stack = [0]
+    stackcnt = [0]
+    for line in lines[1:]:
+        if line.startswith('{'):
+            if stackcnt[-1]:
+                s = 'and   '
+            else:
+                s = 'where '
+            stack.append(len(result))
+            stackcnt[-1] += 1
+            stackcnt.append(0)
+            result.append(' +' + '  '*(len(stack)-1) + s + line[1:])
+        else:
+            assert line.startswith('}')
+            stack.pop()
+            stackcnt.pop()
+            result[stack[-1]] += line[1:]
+    assert len(stack) == 1
+    return '\n'.join(result)
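+
+# Rough illustration of the "\n{ ... \n}" nesting handled above (the input
+# string is made up):
+#
+#     _format_explanation("False\n{False = f()\n}")
+#     ->  "False\n +  where False = f()"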
+
+
+if sys.version_info >= (2, 6) or (sys.platform.startswith("java")):
+    from py.impl.code._assertionnew import interpret
+else:
+    from py.impl.code._assertionold import interpret
+
+
+class AssertionError(BuiltinAssertionError):
+
+    def __init__(self, *args):
+        BuiltinAssertionError.__init__(self, *args)
+        if args:
+            try:
+                self.msg = str(args[0])
+            except (KeyboardInterrupt, SystemExit):
+                raise
+            except:
+                self.msg = "<[broken __repr__] %s at %0xd>" %(
+                    args[0].__class__, id(args[0]))
+        else:
+            f = py.code.Frame(sys._getframe(1))
+            try:
+                source = f.statement
+                source = str(source.deindent()).strip()
+            except py.error.ENOENT:
+                source = None
+                # this can also occur during reinterpretation, when the
+                # co_filename is set to "<run>".
+            if source:
+                self.msg = interpret(source, f, should_fail=True)
+                if not self.args:
+                    self.args = (self.msg,)
+            else:
+                self.msg = None
+
+if sys.version_info > (3, 0):
+    AssertionError.__module__ = "builtins"

Added: pypy/trunk/py/impl/code/code.py
==============================================================================
--- (empty file)
+++ pypy/trunk/py/impl/code/code.py	Wed Nov 11 18:54:49 2009
@@ -0,0 +1,767 @@
+import py
+import sys
+
+builtin_repr = repr
+
+repr = py.builtin._tryimport('repr', 'reprlib')
+
+class Code(object):
+    """ wrapper around Python code objects """
+    def __init__(self, rawcode):
+        rawcode = py.code.getrawcode(rawcode)
+        self.raw = rawcode 
+        try:
+            self.filename = rawcode.co_filename
+            self.firstlineno = rawcode.co_firstlineno - 1
+            self.name = rawcode.co_name
+        except AttributeError: 
+            raise TypeError("not a code object: %r" %(rawcode,))
+        
+    def __eq__(self, other): 
+        return self.raw == other.raw
+
+    def __ne__(self, other):
+        return not self == other
+
+    def new(self, rec=False, **kwargs):
+        """ return a new code object with modified attributes.
+            if rec(ursive) is true then also dive into code
+            objects contained in co_consts.
+        """
+        if sys.platform.startswith("java"):
+            # XXX jython does not support the below co_filename hack
+            return self.raw 
+        names = [x for x in dir(self.raw) if x[:3] == 'co_']
+        for name in kwargs: 
+            if name not in names: 
+                raise TypeError("unknown code attribute: %r" %(name, ))
+        if rec and hasattr(self.raw, 'co_consts'):  # jython 
+            newconstlist = []
+            co = self.raw
+            cotype = type(co)
+            for c in co.co_consts:
+                if isinstance(c, cotype):
+                    c = self.__class__(c).new(rec=True, **kwargs) 
+                newconstlist.append(c)
+            return self.new(rec=False, co_consts=tuple(newconstlist), **kwargs) 
+        for name in names:
+            if name not in kwargs:
+                kwargs[name] = getattr(self.raw, name)
+        arglist = [
+                 kwargs['co_argcount'],
+                 kwargs['co_nlocals'],
+                 kwargs.get('co_stacksize', 0), # jython
+                 kwargs.get('co_flags', 0), # jython
+                 kwargs.get('co_code', ''), # jython
+                 kwargs.get('co_consts', ()), # jython
+                 kwargs.get('co_names', []), # 
+                 kwargs['co_varnames'],
+                 kwargs['co_filename'],
+                 kwargs['co_name'],
+                 kwargs['co_firstlineno'],
+                 kwargs.get('co_lnotab', ''), #jython
+                 kwargs.get('co_freevars', None), #jython
+                 kwargs.get('co_cellvars', None), # jython
+        ]
+        if sys.version_info >= (3,0):
+            arglist.insert(1, kwargs['co_kwonlyargcount'])
+            return self.raw.__class__(*arglist)
+        else:
+            return py.std.new.code(*arglist)
+
+    def path(self):
+        """ return a py.path.local object pointing to the source code """
+        fn = self.raw.co_filename 
+        try:
+            return fn.__path__
+        except AttributeError:
+            p = py.path.local(self.raw.co_filename)
+            if not p.check(file=1):
+                # XXX maybe try harder like the weird logic 
+                # in the standard lib [linecache.updatecache] does? 
+                p = self.raw.co_filename
+            return p
+                
+    path = property(path, None, None, "path of this code object")
+
+    def fullsource(self):
+        """ return a py.code.Source object for the full source file of the code
+        """
+        from py.impl.code import source
+        full, _ = source.findsource(self.raw)
+        return full
+    fullsource = property(fullsource, None, None,
+                          "full source containing this code object")
+    
+    def source(self):
+        """ return a py.code.Source object for the code object's source only
+        """
+        # return source only for that part of code
+        return py.code.Source(self.raw)
+
+    def getargs(self):
+        """ return a tuple with the argument names for the code object
+        """
+        # handy shortcut for getting args
+        raw = self.raw
+        return raw.co_varnames[:raw.co_argcount]
+
+class Frame(object):
+    """Wrapper around a Python frame holding f_locals and f_globals
+    in which expressions can be evaluated."""
+
+    def __init__(self, frame):
+        self.code = py.code.Code(frame.f_code)
+        self.lineno = frame.f_lineno - 1
+        self.f_globals = frame.f_globals
+        self.f_locals = frame.f_locals
+        self.raw = frame
+
+    def statement(self):
+        if self.code.fullsource is None:
+            return py.code.Source("")
+        return self.code.fullsource.getstatement(self.lineno)
+    statement = property(statement, None, None,
+                         "statement this frame is at")
+
+    def eval(self, code, **vars):
+        """ evaluate 'code' in the frame
+
+            'vars' are optional additional local variables
+
+            returns the result of the evaluation
+        """
+        f_locals = self.f_locals.copy() 
+        f_locals.update(vars)
+        return eval(code, self.f_globals, f_locals)
+
+    def exec_(self, code, **vars):
+        """ exec 'code' in the frame
+
+            'vars' are optional additional local variables
+        """
+        f_locals = self.f_locals.copy() 
+        f_locals.update(vars)
+        py.builtin.exec_(code, self.f_globals, f_locals )
+
+    def repr(self, object):
+        """ return a 'safe' (non-recursive, one-line) string repr for 'object'
+        """
+        return safe_repr(object)
+
+    def is_true(self, object):
+        return object
+
+    def getargs(self):
+        """ return a list of tuples (name, value) for all arguments
+        """
+        retval = []
+        for arg in self.code.getargs():
+            try:
+                retval.append((arg, self.f_locals[arg]))
+            except KeyError:
+                pass     # this can occur when using Psyco
+        return retval
+
+class TracebackEntry(object):
+    """ a single entry in a traceback """
+    
+    exprinfo = None 
+
+    def __init__(self, rawentry):
+        self._rawentry = rawentry
+        self.frame = py.code.Frame(rawentry.tb_frame)
+        # Ugh. 2.4 and 2.5 differ here when encountering
+        # multi-line statements. Not sure about the solution, but
+        # should be portable
+        self.lineno = rawentry.tb_lineno - 1
+        self.relline = self.lineno - self.frame.code.firstlineno
+
+    def __repr__(self):
+        return "<TracebackEntry %s:%d>" %(self.frame.code.path, self.lineno+1)
+
+    def statement(self):
+        """ return a py.code.Source object for the current statement """
+        source = self.frame.code.fullsource
+        return source.getstatement(self.lineno)
+    statement = property(statement, None, None,
+                         "statement of this traceback entry.")
+
+    def path(self):
+        return self.frame.code.path
+    path = property(path, None, None, "path to the full source code")
+
+    def getlocals(self):
+        return self.frame.f_locals
+    locals = property(getlocals, None, None, "locals of underlying frame")
+
+    def reinterpret(self):
+        """Reinterpret the failing statement and returns a detailed information
+           about what operations are performed."""
+        if self.exprinfo is None:
+            from py.impl.code import assertion 
+            source = str(self.statement).strip()
+            x = assertion.interpret(source, self.frame, should_fail=True)
+            if not isinstance(x, str):
+                raise TypeError("interpret returned non-string %r" % (x,))
+            self.exprinfo = x 
+        return self.exprinfo
+
+    def getfirstlinesource(self):
+        return self.frame.code.firstlineno
+
+    def getsource(self): 
+        """ return failing source code. """
+        source = self.frame.code.fullsource
+        if source is None:
+            return None
+        start = self.getfirstlinesource()
+        end = self.lineno
+        try:
+            _, end = source.getstatementrange(end) 
+        except IndexError: 
+            end = self.lineno + 1 
+        # heuristic to stop displaying source on e.g.
+        #   if something:  # assume this causes a NameError
+        #      # _this_ line and the one
+        #      # below we don't want from entry.getsource()
+        for i in range(self.lineno, end): 
+            if source[i].rstrip().endswith(':'): 
+                end = i + 1
+                break 
+        return source[start:end]
+    source = property(getsource)
+
+    def ishidden(self):
+        """ return True if the current frame has a var __tracebackhide__ 
+            resolving to True
+            
+            mostly for internal use
+        """
+        try: 
+            return self.frame.eval("__tracebackhide__") 
+        except (SystemExit, KeyboardInterrupt): 
+            raise
+        except:
+            return False 
+
+    def __str__(self): 
+        try: 
+            fn = str(self.path) 
+        except py.error.Error: 
+            fn = '???'
+        name = self.frame.code.name 
+        try: 
+            line = str(self.statement).lstrip()
+        except KeyboardInterrupt:
+            raise
+        except:
+            line = "???"
+        return "  File %r:%d in %s\n  %s\n" %(fn, self.lineno+1, name, line) 
+
+    def name(self):
+        return self.frame.code.raw.co_name
+    name = property(name, None, None, "co_name of underlying code")
+
+class Traceback(list):
+    """ Traceback objects encapsulate and offer higher level 
+        access to Traceback entries.  
+    """
+    Entry = TracebackEntry 
+    def __init__(self, tb):
+        """ initialize from given python traceback object. """
+        if hasattr(tb, 'tb_next'):
+            def f(cur): 
+                while cur is not None: 
+                    yield self.Entry(cur)
+                    cur = cur.tb_next 
+            list.__init__(self, f(tb)) 
+        else:
+            list.__init__(self, tb)
+
+    def cut(self, path=None, lineno=None, firstlineno=None, excludepath=None):
+        """ return a Traceback instance wrapping part of this Traceback
+
+            by providing any combination of path, lineno and firstlineno, the
+            first frame to start the to-be-returned traceback is determined
+
+            this allows cutting the first part of a Traceback instance e.g.
+            for formatting reasons (removing some uninteresting bits that deal
+            with handling of the exception/traceback)
+        """
+        for x in self:
+            code = x.frame.code
+            codepath = code.path
+            if ((path is None or codepath == path) and
+                (excludepath is None or (hasattr(codepath, 'relto') and
+                 not codepath.relto(excludepath))) and 
+                (lineno is None or x.lineno == lineno) and
+                (firstlineno is None or x.frame.code.firstlineno == firstlineno)):
+                return Traceback(x._rawentry)
+        return self
+
+    def __getitem__(self, key):
+        val = super(Traceback, self).__getitem__(key)
+        if isinstance(key, type(slice(0))):
+            val = self.__class__(val)
+        return val
+
+    def filter(self, fn=lambda x: not x.ishidden()):
+        """ return a Traceback instance with certain items removed
+
+            fn is a function that gets a single argument, a TracebackItem
+            instance, and should return True when the item should be added
+            to the Traceback, False when not
+
+            by default this removes all the TracebackItems which are hidden
+            (see ishidden() above)
+        """
+        return Traceback(filter(fn, self))
+
+    def getcrashentry(self):
+        """ return last non-hidden traceback entry that lead
+        to the exception of a traceback. 
+        """
+        tb = self.filter()
+        if not tb:
+            tb = self
+        return tb[-1]
+
+    def recursionindex(self):
+        """ return the index of the frame/TracebackItem where recursion
+            originates if appropriate, None if no recursion occurred
+        """
+        cache = {}
+        for i, entry in enumerate(self):
+            key = entry.frame.code.path, entry.lineno 
+            #print "checking for recursion at", key
+            l = cache.setdefault(key, [])
+            if l: 
+                f = entry.frame
+                loc = f.f_locals
+                for otherloc in l: 
+                    if f.is_true(f.eval(co_equal, 
+                        __recursioncache_locals_1=loc,
+                        __recursioncache_locals_2=otherloc)):
+                        return i 
+            l.append(entry.frame.f_locals)
+        return None
+
+co_equal = compile('__recursioncache_locals_1 == __recursioncache_locals_2',
+                   '?', 'eval')
+
+class ExceptionInfo(object):
+    """ wraps sys.exc_info() objects and offers
+        help for navigating the traceback.
+    """
+    _striptext = '' 
+    def __init__(self, tup=None, exprinfo=None):
+        # NB. all attributes are private!  Subclasses or other
+        #     ExceptionInfo-like classes may have different attributes.
+        if tup is None:
+            tup = sys.exc_info()
+            if exprinfo is None and isinstance(tup[1], py.code._AssertionError):
+                exprinfo = getattr(tup[1], 'msg', None)
+                if exprinfo is None:
+                    exprinfo = str(tup[1])
+                if exprinfo and exprinfo.startswith('assert '):
+                    self._striptext = 'AssertionError: '
+        self._excinfo = tup
+        self.type, self.value, tb = self._excinfo
+        self.typename = self.type.__name__
+        self.traceback = py.code.Traceback(tb) 
+
+    def __repr__(self):
+        return "<ExceptionInfo %s tblen=%d>" % (self.typename, len(self.traceback))
+
+    def exconly(self, tryshort=False): 
+        """ return the exception as a string
+        
+            when 'tryshort' resolves to True, and the exception is a
+            py.code._AssertionError, only the actual exception part of
+            the exception representation is returned (so 'AssertionError: ' is
+            removed from the beginning)
+        """
+        lines = py.std.traceback.format_exception_only(self.type, self.value)
+        text = ''.join(lines)
+        text = text.rstrip()
+        if tryshort: 
+            if text.startswith(self._striptext): 
+                text = text[len(self._striptext):]
+        return text
+
+    def errisinstance(self, exc): 
+        """ return True if the exception is an instance of exc """
+        return isinstance(self.value, exc) 
+
+    def _getreprcrash(self):
+        exconly = self.exconly(tryshort=True)
+        entry = self.traceback.getcrashentry()
+        path, lineno = entry.path, entry.lineno
+        reprcrash = ReprFileLocation(path, lineno+1, exconly)
+        return reprcrash
+
+    def getrepr(self, showlocals=False, style="long", 
+            abspath=False, tbfilter=True, funcargs=False):
+        """ return str()able representation of this exception info.
+            showlocals: show locals per traceback entry 
+            style: long|short|no traceback style 
+            tbfilter: hide entries (where __tracebackhide__ is true)
+        """
+        fmt = FormattedExcinfo(showlocals=showlocals, style=style, 
+            abspath=abspath, tbfilter=tbfilter, funcargs=funcargs)
+        return fmt.repr_excinfo(self)
+
+    def __str__(self):
+        entry = self.traceback[-1]
+        loc = ReprFileLocation(entry.path, entry.lineno + 1, self.exconly())
+        return str(loc)
+
+class FormattedExcinfo(object):
+    """ presenting information about failing Functions and Generators. """ 
+    # for traceback entries 
+    flow_marker = ">"    
+    fail_marker = "E"
+    
+    def __init__(self, showlocals=False, style="long", abspath=True, tbfilter=True, funcargs=False):
+        self.showlocals = showlocals
+        self.style = style
+        self.tbfilter = tbfilter
+        self.funcargs = funcargs
+        self.abspath = abspath 
+
+    def _getindent(self, source):
+        # figure out indent for given source 
+        try:
+            s = str(source.getstatement(len(source)-1))
+        except KeyboardInterrupt: 
+            raise 
+        except:
+            try:
+                s = str(source[-1])
+            except KeyboardInterrupt:
+                raise
+            except:
+                return 0
+        return 4 + (len(s) - len(s.lstrip()))
+
+    def _getentrysource(self, entry):
+        source = entry.getsource()
+        if source is not None:
+            source = source.deindent()
+        return source
+    
+    def _saferepr(self, obj):
+        return safe_repr(obj)
+
+    def repr_args(self, entry):
+        if self.funcargs:
+            args = []
+            for argname, argvalue in entry.frame.getargs():
+                args.append((argname, self._saferepr(argvalue)))
+            return ReprFuncArgs(args)
+
+    def get_source(self, source, line_index=-1, excinfo=None):
+        """ return formatted and marked up source lines. """
+        lines = []
+        if source is None:
+            source = py.code.Source("???")
+            line_index = 0 
+        if line_index < 0:
+            line_index += len(source)
+        for i in range(len(source)):
+            if i == line_index:
+                prefix = self.flow_marker + "   "
+            else:
+                prefix = "    "
+            line = prefix + source[i]
+            lines.append(line)
+        if excinfo is not None:
+            indent = self._getindent(source)
+            lines.extend(self.get_exconly(excinfo, indent=indent, markall=True))
+        return lines
+
+    def get_exconly(self, excinfo, indent=4, markall=False):
+        lines = []
+        indent = " " * indent 
+        # get the real exception information out 
+        exlines = excinfo.exconly(tryshort=True).split('\n')
+        failindent = self.fail_marker + indent[1:]
+        for line in exlines:
+            lines.append(failindent + line)
+            if not markall:
+                failindent = indent 
+        return lines
+
+    def repr_locals(self, locals):
+        if self.showlocals: 
+            lines = []
+            keys = list(locals)
+            keys.sort()
+            for name in keys:
+                value = locals[name]
+                if name == '__builtins__': 
+                    lines.append("__builtins__ = <builtins>")
+                else:
+                    # This formatting could all be handled by the
+                    # _repr() function, which is only repr.Repr in
+                    # disguise, so is very configurable.
+                    str_repr = self._saferepr(value)
+                    #if len(str_repr) < 70 or not isinstance(value,
+                    #                            (list, tuple, dict)):
+                    lines.append("%-10s = %s" %(name, str_repr))
+                    #else:
+                    #    self._line("%-10s =\\" % (name,))
+                    #    # XXX
+                    #    py.std.pprint.pprint(value, stream=self.excinfowriter)
+            return ReprLocals(lines)
+
+    def repr_traceback_entry(self, entry, excinfo=None):
+        # excinfo is not None if this is the last tb entry 
+        source = self._getentrysource(entry)
+        if source is None:
+            source = py.code.Source("???")
+            line_index = 0
+        else:
+            line_index = entry.lineno - entry.getfirstlinesource()
+
+        lines = []
+        if self.style == "long":
+            reprargs = self.repr_args(entry) 
+            lines.extend(self.get_source(source, line_index, excinfo))
+            message = excinfo and excinfo.typename or ""
+            path = self._makepath(entry.path)
+            filelocrepr = ReprFileLocation(path, entry.lineno+1, message)
+            localsrepr =  self.repr_locals(entry.locals)
+            return ReprEntry(lines, reprargs, localsrepr, filelocrepr)
+        else: 
+            if self.style == "short":
+                line = source[line_index].lstrip()
+                lines.append('  File "%s", line %d, in %s' % (
+                    entry.path.basename, entry.lineno+1, entry.name))
+                lines.append("    " + line) 
+            if excinfo: 
+                lines.extend(self.get_exconly(excinfo, indent=4))
+            return ReprEntry(lines, None, None, None)
+
+    def _makepath(self, path):
+        if not self.abspath:
+            np = py.path.local().bestrelpath(path)
+            if len(np) < len(str(path)):
+                path = np
+        return path
+
+    def repr_traceback(self, excinfo): 
+        traceback = excinfo.traceback 
+        if self.tbfilter:
+            traceback = traceback.filter()
+        recursionindex = None
+        if excinfo.errisinstance(RuntimeError):
+            recursionindex = traceback.recursionindex()
+        last = traceback[-1]
+        entries = []
+        extraline = None
+        for index, entry in enumerate(traceback): 
+            einfo = (last == entry) and excinfo or None
+            reprentry = self.repr_traceback_entry(entry, einfo)
+            entries.append(reprentry)
+            if index == recursionindex:
+                extraline = "!!! Recursion detected (same locals & position)"
+                break
+        return ReprTraceback(entries, extraline, style=self.style)
+
+    def repr_excinfo(self, excinfo):
+        reprtraceback = self.repr_traceback(excinfo)
+        reprcrash = excinfo._getreprcrash()
+        return ReprExceptionInfo(reprtraceback, reprcrash)
+
+class TerminalRepr:
+    def __str__(self):
+        tw = py.io.TerminalWriter(stringio=True)
+        self.toterminal(tw)
+        return tw.stringio.getvalue().strip()
+
+    def __repr__(self):
+        return "<%s instance at %0x>" %(self.__class__, id(self))
+
+class ReprExceptionInfo(TerminalRepr):
+    def __init__(self, reprtraceback, reprcrash):
+        self.reprtraceback = reprtraceback
+        self.reprcrash = reprcrash 
+        self.sections = []
+
+    def addsection(self, name, content, sep="-"):
+        self.sections.append((name, content, sep))
+
+    def toterminal(self, tw):
+        self.reprtraceback.toterminal(tw)
+        for name, content, sep in self.sections:
+            tw.sep(sep, name)
+            tw.line(content)
+    
+class ReprTraceback(TerminalRepr):
+    entrysep = "_ "
+
+    def __init__(self, reprentries, extraline, style):
+        self.reprentries = reprentries
+        self.extraline = extraline
+        self.style = style
+
+    def toterminal(self, tw):
+        sepok = False 
+        for entry in self.reprentries:
+            if self.style == "long":
+                if sepok:
+                    tw.sep(self.entrysep)
+                tw.line("")
+            sepok = True
+            entry.toterminal(tw)
+        if self.extraline:
+            tw.line(self.extraline)
+
+class ReprEntry(TerminalRepr):
+    localssep = "_ "
+
+    def __init__(self, lines, reprfuncargs, reprlocals, filelocrepr):
+        self.lines = lines
+        self.reprfuncargs = reprfuncargs
+        self.reprlocals = reprlocals 
+        self.reprfileloc = filelocrepr
+
+    def toterminal(self, tw):
+        if self.reprfuncargs:
+            self.reprfuncargs.toterminal(tw)
+        for line in self.lines:
+            red = line.startswith("E   ") 
+            tw.line(line, bold=True, red=red)
+        if self.reprlocals:
+            #tw.sep(self.localssep, "Locals")
+            tw.line("")
+            self.reprlocals.toterminal(tw)
+        if self.reprfileloc:
+            tw.line("")
+            self.reprfileloc.toterminal(tw)
+
+    def __str__(self):
+        return "%s\n%s\n%s" % ("\n".join(self.lines), 
+                               self.reprlocals, 
+                               self.reprfileloc)
+
+class ReprFileLocation(TerminalRepr):
+    def __init__(self, path, lineno, message):
+        self.path = str(path)
+        self.lineno = lineno
+        self.message = message
+
+    def toterminal(self, tw):
+        # filename and lineno output for each entry,
+        # using an output format that most editors understand
+        msg = self.message 
+        i = msg.find("\n")
+        if i != -1:
+            msg = msg[:i] 
+        tw.line("%s:%s: %s" %(self.path, self.lineno, msg))
+
+class ReprLocals(TerminalRepr):
+    def __init__(self, lines):
+        self.lines = lines 
+
+    def toterminal(self, tw):
+        for line in self.lines:
+            tw.line(line)
+
+class ReprFuncArgs(TerminalRepr):
+    def __init__(self, args):
+        self.args = args
+
+    def toterminal(self, tw):
+        if self.args:
+            linesofar = ""
+            for name, value in self.args:
+                ns = "%s = %s" %(name, value)
+                if len(ns) + len(linesofar) + 2 > tw.fullwidth:
+                    if linesofar:
+                        tw.line(linesofar)
+                    linesofar =  ns 
+                else:
+                    if linesofar:
+                        linesofar += ", " + ns
+                    else:
+                        linesofar = ns
+            if linesofar:
+                tw.line(linesofar)
+            tw.line("")
+
+
+
+class SafeRepr(repr.Repr):
+    """ subclass of repr.Repr that limits the resulting size of repr() 
+        and includes information on exceptions raised during the call. 
+    """ 
+    def __init__(self, *args, **kwargs):
+        repr.Repr.__init__(self, *args, **kwargs)
+        self.maxstring = 240   # 3 * 80 chars
+        self.maxother = 160    # 2 * 80 chars
+
+    def repr(self, x):
+        return self._callhelper(repr.Repr.repr, self, x)
+
+    def repr_instance(self, x, level):
+        return self._callhelper(builtin_repr, x)
+        
+    def _callhelper(self, call, x, *args):
+        try:
+            # Try the vanilla repr and make sure that the result is a string
+            s = call(x, *args)
+        except (KeyboardInterrupt, MemoryError, SystemExit):
+            raise
+        except:
+            cls, e, tb = sys.exc_info()
+            try:
+                exc_name = cls.__name__
+            except:
+                exc_name = 'unknown'
+            try:
+                exc_info = str(e)
+            except:
+                exc_info = 'unknown'
+            return '<[%s("%s") raised in repr()] %s object at 0x%x>' % (
+                exc_name, exc_info, x.__class__.__name__, id(x))
+        else:
+            if len(s) > self.maxstring:
+                i = max(0, (self.maxstring-3)//2)
+                j = max(0, self.maxstring-3-i)
+                s = s[:i] + '...' + s[len(s)-j:]
+            return s
+
+safe_repr = SafeRepr().repr
+
+oldbuiltins = {}
+
+def patch_builtins(assertion=True, compile=True):
+    """ put compile and AssertionError builtins to Python's builtins. """
+    if assertion:
+        from py.impl.code import assertion
+        l = oldbuiltins.setdefault('AssertionError', [])
+        l.append(py.builtin.builtins.AssertionError)
+        py.builtin.builtins.AssertionError = assertion.AssertionError
+    if compile: 
+        l = oldbuiltins.setdefault('compile', [])
+        l.append(py.builtin.builtins.compile)
+        py.builtin.builtins.compile = py.code.compile
+
+def unpatch_builtins(assertion=True, compile=True):
+    """ remove compile and AssertionError builtins from Python builtins. """
+    if assertion:
+        py.builtin.builtins.AssertionError = oldbuiltins['AssertionError'].pop()
+    if compile: 
+        py.builtin.builtins.compile = oldbuiltins['compile'].pop()
+
+def getrawcode(obj):
+    """ return code object for given function. """ 
+    obj = getattr(obj, 'im_func', obj)
+    obj = getattr(obj, 'func_code', obj)
+    obj = getattr(obj, 'f_code', obj)
+    obj = getattr(obj, '__code__', obj)
+    return obj
+    

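For reference, a minimal usage sketch (not part of this commit) of the ExceptionInfo/Traceback helpers added above; it assumes the py package from this checkout is importable and that ExceptionInfo() is instantiated inside an except block:

    import py

    def boom():
        raise ValueError("demo failure")

    try:
        boom()
    except ValueError:
        excinfo = py.code.ExceptionInfo()           # wraps sys.exc_info()
        print(excinfo.typename)                     # ValueError
        print(excinfo.exconly())                    # ValueError: demo failure
        entry = excinfo.traceback.getcrashentry()   # last non-hidden entry
        print("%s:%s" % (entry.path, entry.lineno + 1))
        print(str(excinfo.getrepr(style="short")))  # formatted traceback text
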
Added: pypy/trunk/py/impl/code/oldmagic.py
==============================================================================
--- (empty file)
+++ pypy/trunk/py/impl/code/oldmagic.py	Wed Nov 11 18:54:49 2009
@@ -0,0 +1,62 @@
+""" deprecated module for turning on/off some features. """ 
+
+import py 
+
+from py.builtin import builtins as cpy_builtin
+
+def invoke(assertion=False, compile=False):
+    """ (deprecated) invoke magic, currently you can specify:
+
+        assertion  patches the builtin AssertionError so that it tries
+                   to give more meaningful AssertionErrors, constructing
+                   a useful error message by means of a mini-interpreter.
+    """
+    py.log._apiwarn("1.1", 
+        "py.magic.invoke() is deprecated, use py.code.patch_builtins()",
+        stacklevel=2, 
+    )
+    py.code.patch_builtins(assertion=assertion, compile=compile)
+
+def revoke(assertion=False, compile=False):
+    """ (deprecated) revoke previously invoked magic (see invoke())."""
+    py.log._apiwarn("1.1", 
+        "py.magic.revoke() is deprecated, use py.code.unpatch_builtins()",
+        stacklevel=2, 
+    )
+    py.code.unpatch_builtins(assertion=assertion, compile=compile)
+
+patched = {}
+
+def patch(namespace, name, value):
+    """ (deprecated) rebind the 'name' on the 'namespace'  to the 'value',
+        possibly and remember the original value. Multiple
+        invocations to the same namespace/name pair will
+        remember a list of old values.
+    """
+    py.log._apiwarn("1.1", 
+        "py.magic.patch() is deprecated, in tests use monkeypatch funcarg.", 
+        stacklevel=2, 
+    )
+    nref = (namespace, name)
+    orig = getattr(namespace, name)
+    patched.setdefault(nref, []).append(orig)
+    setattr(namespace, name, value)
+    return orig
+
+def revert(namespace, name):
+    """ (deprecated) revert to the orginal value the last patch modified.
+        Raise ValueError if no such original value exists.
+    """
+    py.log._apiwarn("1.1", 
+        "py.magic.revert() is deprecated, in tests use monkeypatch funcarg.",
+        stacklevel=2, 
+    )
+    nref = (namespace, name)
+    if nref not in patched or not patched[nref]:
+        raise ValueError("No original value stored for %s.%s" % nref)
+    current = getattr(namespace, name)
+    orig = patched[nref].pop()
+    setattr(namespace, name, orig)
+    return current
+

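A minimal sketch (not part of this commit) of the deprecated patch()/revert() helpers above; 'math' and 'sqrt' are only stand-ins for whatever namespace/attribute would be monkeypatched, and the module is imported directly from the layout added here:

    import math
    from py.impl.code import oldmagic   # deprecated, emits py.log._apiwarn warnings

    orig = oldmagic.patch(math, 'sqrt', lambda x: 42)   # remembers the original
    assert math.sqrt(9) == 42
    oldmagic.revert(math, 'sqrt')                       # restores the original
    assert math.sqrt(9) == 3.0
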
Added: pypy/trunk/py/impl/code/oldmagic2.py
==============================================================================
--- (empty file)
+++ pypy/trunk/py/impl/code/oldmagic2.py	Wed Nov 11 18:54:49 2009
@@ -0,0 +1,6 @@
+
+import py
+
+py.log._apiwarn("1.1", "py.magic.AssertionError is deprecated, use py.code._AssertionError", stacklevel=2)
+
+from py.code import _AssertionError as AssertionError

Added: pypy/trunk/py/impl/code/source.py
==============================================================================
--- (empty file)
+++ pypy/trunk/py/impl/code/source.py	Wed Nov 11 18:54:49 2009
@@ -0,0 +1,347 @@
+from __future__ import generators
+import sys
+import inspect, tokenize
+import py
+cpy_compile = compile 
+
+try:
+    import _ast
+    from _ast import PyCF_ONLY_AST as _AST_FLAG
+except ImportError:
+    _AST_FLAG = 0
+    _ast = None
+
+
+class Source(object):
+    """ a immutable object holding a source code fragment,
+        possibly deindenting it.
+    """
+    def __init__(self, *parts, **kwargs):
+        self.lines = lines = []
+        de = kwargs.get('deindent', True)
+        rstrip = kwargs.get('rstrip', True) 
+        for part in parts:
+            if not part: 
+                partlines = []
+            if isinstance(part, Source):
+                partlines = part.lines
+            elif isinstance(part, py.builtin._basestring):
+                partlines = part.split('\n')
+                if rstrip:
+                    while partlines: 
+                        if partlines[-1].strip(): 
+                            break
+                        partlines.pop()
+            else:
+                partlines = getsource(part, deindent=de).lines
+            if de:
+                partlines = deindent(partlines)
+            lines.extend(partlines)
+
+    def __eq__(self, other): 
+        try:
+            return self.lines == other.lines 
+        except AttributeError: 
+            if isinstance(other, str): 
+                return str(self) == other 
+            return False 
+
+    def __getitem__(self, key):
+        if isinstance(key, int):
+            return self.lines[key]
+        else:
+            if key.step not in (None, 1):
+                raise IndexError("cannot slice a Source with a step")
+            return self.__getslice__(key.start, key.stop)
+
+    def __len__(self): 
+        return len(self.lines) 
+
+    def __getslice__(self, start, end):
+        newsource = Source()
+        newsource.lines = self.lines[start:end]
+        return newsource
+
+    def strip(self):
+        """ return new source object with trailing
+            and leading blank lines removed.
+        """
+        start, end = 0, len(self)
+        while start < end and not self.lines[start].strip():
+            start += 1
+        while end > start and not self.lines[end-1].strip():
+            end -= 1
+        source = Source()
+        source.lines[:] = self.lines[start:end]
+        return source
+
+    def putaround(self, before='', after='', indent=' ' * 4): 
+        """ return a copy of the source object with 
+            'before' and 'after' wrapped around it. 
+        """
+        before = Source(before)
+        after = Source(after)
+        newsource = Source()
+        lines = [ (indent + line) for line in self.lines]
+        newsource.lines = before.lines + lines +  after.lines
+        return newsource
+
+    def indent(self, indent=' ' * 4): 
+        """ return a copy of the source object with 
+            all lines indented by the given indent-string. 
+        """
+        newsource = Source()
+        newsource.lines = [(indent+line) for line in self.lines]
+        return newsource
+
+    def getstatement(self, lineno):
+        """ return Source statement which contains the
+            given linenumber (counted from 0).
+        """
+        start, end = self.getstatementrange(lineno)
+        return self[start:end]
+
+    def getstatementrange(self, lineno):
+        """ return (start, end) tuple which spans the minimal 
+            statement region which containing the given lineno.
+        """
+        # XXX there must be a better way than these heuristics ...
+        # XXX there may even be better heuristics :-)
+        if not (0 <= lineno < len(self)):
+            raise IndexError("lineno out of range")
+
+        # 1. find the start of the statement
+        from codeop import compile_command
+        for start in range(lineno, -1, -1):
+            trylines = self.lines[start:lineno+1]
+            # quick hack to indent the source and get it as a string in one go
+            trylines.insert(0, 'def xxx():')
+            trysource = '\n '.join(trylines)
+            #              ^ space here
+            try:
+                compile_command(trysource)
+            except (SyntaxError, OverflowError, ValueError):
+                pass
+            else:
+                break   # got a valid or incomplete statement
+
+        # 2. find the end of the statement
+        for end in range(lineno+1, len(self)+1):
+            trysource = self[start:end]
+            if trysource.isparseable():
+                break
+
+        return start, end
+
+    def getblockend(self, lineno):
+        # XXX
+        lines = [x + '\n' for x in self.lines[lineno:]]
+        blocklines = inspect.getblock(lines)
+        #print blocklines
+        return lineno + len(blocklines) - 1
+
+    def deindent(self, offset=None):
+        """ return a new source object deindented by offset.
+            If offset is None then guess an indentation offset from
+            the first non-blank line.  Subsequent lines which have a
+            lower indentation offset will be copied verbatim as
+            they are assumed to be part of multiline strings.
+        """
+        # XXX maybe use the tokenizer to properly handle multiline
+        #     strings etc.pp?
+        newsource = Source()
+        newsource.lines[:] = deindent(self.lines, offset)
+        return newsource
+
+    def isparseable(self, deindent=True):
+        """ return True if source is parseable, heuristically
+            deindenting it by default. 
+        """
+        try:
+            import parser
+        except ImportError:
+            syntax_checker = lambda x: compile(x, 'asd', 'exec')
+        else:
+            syntax_checker = parser.suite
+    
+        if deindent:
+            source = str(self.deindent())
+        else:
+            source = str(self)
+        try:
+            #compile(source+'\n', "x", "exec")
+            syntax_checker(source+'\n')
+        except SyntaxError:
+            return False
+        else:
+            return True
+
+    def __str__(self):
+        return "\n".join(self.lines)
+
+    def compile(self, filename=None, mode='exec', 
+                flag=generators.compiler_flag, 
+                dont_inherit=0, _genframe=None):
+        """ return compiled code object. if filename is None
+            invent an artificial filename which displays
+            the source/line position of the caller frame.
+        """
+        if not filename or py.path.local(filename).check(file=0): 
+            if _genframe is None:
+                _genframe = sys._getframe(1) # the caller
+            fn,lineno = _genframe.f_code.co_filename, _genframe.f_lineno
+            if not filename:
+                filename = '<codegen %s:%d>' % (fn, lineno)
+            else:
+                filename = '<codegen %r %s:%d>' % (filename, fn, lineno)
+        source = "\n".join(self.lines) + '\n'
+        try:
+            co = cpy_compile(source, filename, mode, flag)
+        except SyntaxError:
+            ex = sys.exc_info()[1]
+            # re-represent syntax errors from parsing python strings
+            msglines = self.lines[:ex.lineno]
+            if ex.offset:
+                msglines.append(" "*ex.offset + '^')
+            msglines.append("syntax error probably generated here: %s" % filename)
+            newex = SyntaxError('\n'.join(msglines))
+            newex.offset = ex.offset
+            newex.lineno = ex.lineno
+            newex.text = ex.text
+            raise newex
+        else:
+            if flag & _AST_FLAG:
+                return co
+            co_filename = MyStr(filename)
+            co_filename.__source__ = self
+            return py.code.Code(co).new(rec=1, co_filename=co_filename) 
+            #return newcode_withfilename(co, co_filename)
+
+#
+# public API shortcut functions
+#
+
+def compile_(source, filename=None, mode='exec', flags=
+            generators.compiler_flag, dont_inherit=0):
+    """ compile the given source to a raw code object,
+        which points back to the source code through
+        "co_filename.__source__".  All code objects
+        contained in the code object will recursively
+        also have this special subclass-of-string
+        filename.
+    """
+    if _ast is not None and isinstance(source, _ast.AST):
+        # XXX should Source support having AST?
+        return cpy_compile(source, filename, mode, flags, dont_inherit)
+    _genframe = sys._getframe(1) # the caller
+    s = Source(source)
+    co = s.compile(filename, mode, flags, _genframe=_genframe)
+    return co
+
+
+def getfslineno(obj):
+    try:
+        code = py.code.Code(obj)
+    except TypeError:
+        # fall back to locating the source file via the inspect module
+        fn = (py.std.inspect.getsourcefile(obj) or
+              py.std.inspect.getfile(obj))
+        fspath = fn and py.path.local(fn) or None
+        if fspath:
+            try:
+                _, lineno = findsource(obj)
+            except IOError:
+                lineno = None
+        else:
+            lineno = None
+    else:
+        fspath = code.path
+        lineno = code.firstlineno 
+    return fspath, lineno
+
+#
+# helper functions
+#
+class MyStr(str):
+    """ custom string which allows to add attributes. """
+
+def findsource(obj):
+    obj = py.code.getrawcode(obj)
+    try:
+        fullsource = obj.co_filename.__source__
+    except AttributeError:
+        try:
+            sourcelines, lineno = py.std.inspect.findsource(obj)
+        except (KeyboardInterrupt, SystemExit):
+            raise
+        except:
+            return None, None
+        source = Source()
+        source.lines = [line.rstrip() for line in sourcelines]
+        return source, lineno
+    else:
+        lineno = obj.co_firstlineno - 1        
+        return fullsource, lineno
+
+
+def getsource(obj, **kwargs):
+    obj = py.code.getrawcode(obj)
+    try:
+        fullsource = obj.co_filename.__source__
+    except AttributeError:
+        try:
+            strsrc = inspect.getsource(obj)
+        except IndentationError:
+            strsrc = "\"Buggy python version consider upgrading, cannot get source\""
+        assert isinstance(strsrc, str)
+        return Source(strsrc, **kwargs)
+    else:
+        lineno = obj.co_firstlineno - 1
+        end = fullsource.getblockend(lineno)
+        return Source(fullsource[lineno:end+1], deindent=True)
+
+
+def deindent(lines, offset=None):
+    if offset is None:
+        for line in lines:
+            line = line.expandtabs()
+            s = line.lstrip()
+            if s:
+                offset = len(line)-len(s)
+                break
+        else:
+            offset = 0
+    if offset == 0:
+        return list(lines)
+    newlines = []
+    def readline_generator(lines):
+        for line in lines:
+            yield line + '\n'
+        while True:
+            yield ''
+        
+    r = readline_generator(lines)
+    try: 
+        readline = r.next
+    except AttributeError:
+        readline = r.__next__
+
+    try:
+        for _, _, (sline, _), (eline, _), _ in tokenize.generate_tokens(readline):
+            if sline > len(lines):
+                break # End of input reached
+            if sline > len(newlines):
+                line = lines[sline - 1].expandtabs()
+                if line.lstrip() and line[:offset].isspace():
+                    line = line[offset:] # Deindent
+                newlines.append(line)
+
+            for i in range(sline, eline):
+                # Don't deindent continuing lines of
+                # multiline tokens (i.e. multiline strings)
+                newlines.append(lines[i])
+    except (IndentationError, tokenize.TokenError):
+        pass
+    # Add any lines we didn't see. E.g. if an exception was raised.
+    newlines.extend(lines[len(newlines):])
+    return newlines

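A minimal sketch (not part of this commit) of the Source API added above:

    import py

    src = py.code.Source("""
        x = 21
        y = x * 2
    """).strip()                   # drop leading/trailing blank lines
    print(str(src))                # deindented source: "x = 21\ny = x * 2"
    print(src.getstatement(1))     # statement containing line 1: "y = x * 2"
    co = src.compile()             # co_filename points back at this call site
    ns = {}
    exec(co, ns)
    print(ns["y"])                 # 42
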
Added: pypy/trunk/py/impl/compat/__init__.py
==============================================================================
--- (empty file)
+++ pypy/trunk/py/impl/compat/__init__.py	Wed Nov 11 18:54:49 2009
@@ -0,0 +1,2 @@
+""" compatibility modules (taken from 2.4.4) """
+

Added: pypy/trunk/py/impl/compat/dep_doctest.py
==============================================================================
--- (empty file)
+++ pypy/trunk/py/impl/compat/dep_doctest.py	Wed Nov 11 18:54:49 2009
@@ -0,0 +1,4 @@
+import py
+
+py.log._apiwarn("1.1", "py.compat.doctest deprecated, use standard library version.", stacklevel="initpkg")
+doctest = py.std.doctest

Added: pypy/trunk/py/impl/compat/dep_optparse.py
==============================================================================
--- (empty file)
+++ pypy/trunk/py/impl/compat/dep_optparse.py	Wed Nov 11 18:54:49 2009
@@ -0,0 +1,4 @@
+import py
+py.log._apiwarn("1.1", "py.compat.optparse deprecated, use standard library version.", stacklevel="initpkg")
+
+optparse = py.std.optparse 

Added: pypy/trunk/py/impl/compat/dep_subprocess.py
==============================================================================
--- (empty file)
+++ pypy/trunk/py/impl/compat/dep_subprocess.py	Wed Nov 11 18:54:49 2009
@@ -0,0 +1,4 @@
+
+import py
+py.log._apiwarn("1.1", "py.compat.subprocess deprecated, use standard library version.", stacklevel="initpkg")
+subprocess = py.std.subprocess

Added: pypy/trunk/py/impl/compat/dep_textwrap.py
==============================================================================
--- (empty file)
+++ pypy/trunk/py/impl/compat/dep_textwrap.py	Wed Nov 11 18:54:49 2009
@@ -0,0 +1,4 @@
+import py
+
+py.log._apiwarn("1.1", "py.compat.textwrap deprecated, use standard library version.", stacklevel="initpkg")
+textwrap = py.std.textwrap

Added: pypy/trunk/py/impl/error.py
==============================================================================
--- (empty file)
+++ pypy/trunk/py/impl/error.py	Wed Nov 11 18:54:49 2009
@@ -0,0 +1,83 @@
+"""
+create errno-specific classes for IO or os calls. 
+
+"""
+import sys, os, errno
+
+class Error(EnvironmentError):
+    def __repr__(self):
+        return "%s.%s %r: %s " %(self.__class__.__module__,
+                               self.__class__.__name__,
+                               self.__class__.__doc__,
+                               " ".join(map(str, self.args)),
+                               #repr(self.args)
+                                )
+
+    def __str__(self):
+        s = "[%s]: %s" %(self.__class__.__doc__,
+                          " ".join(map(str, self.args)),
+                          )
+        return s
+
+_winerrnomap = {
+    2: errno.ENOENT, 
+    3: errno.ENOENT, 
+    17: errno.EEXIST,
+    22: errno.ENOTDIR,
+    267: errno.ENOTDIR,
+    5: errno.EACCES,  # anything better?
+}
+
+class ErrorMaker(object):
+    """ lazily provides Exception classes for each possible POSIX errno 
+        (as defined by the 'errno' module).  All such classes
+        subclass EnvironmentError.
+    """
+    Error = Error
+    _errno2class = {}
+
+    def __getattr__(self, name):
+        eno = getattr(errno, name)
+        cls = self._geterrnoclass(eno)
+        setattr(self, name, cls)
+        return cls
+
+    def _geterrnoclass(self, eno):
+        try:
+            return self._errno2class[eno]
+        except KeyError:
+            clsname = errno.errorcode.get(eno, "UnknownErrno%d" %(eno,))
+            errorcls = type(Error)(clsname, (Error,),
+                    {'__module__':'py.error',
+                     '__doc__': os.strerror(eno)})
+            self._errno2class[eno] = errorcls
+            return errorcls
+
+    def checked_call(self, func, *args):
+        """ call a function and raise an errno-exception if applicable. """
+        __tracebackhide__ = True
+        try:
+            return func(*args)
+        except self.Error:
+            raise
+        except EnvironmentError:
+            cls, value, tb = sys.exc_info()
+            if not hasattr(value, 'errno'):
+                raise
+            __tracebackhide__ = False
+            errno = value.errno 
+            try:
+                if not isinstance(value, WindowsError): 
+                    raise NameError
+            except NameError: 
+                # we are not on Windows, or we got a proper OSError
+                cls = self._geterrnoclass(errno)
+            else: 
+                try: 
+                    cls = self._geterrnoclass(_winerrnomap[errno]) 
+                except KeyError:    
+                    raise value 
+            raise cls("%s%r" % (func.__name__, args))
+            __tracebackhide__ = True
+
+error = ErrorMaker()

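A minimal sketch (not part of this commit) of the errno-specific exception classes added above; '/no/such/file' is just a placeholder path:

    import os, sys
    import py

    try:
        py.error.checked_call(os.stat, "/no/such/file")
    except py.error.ENOENT:
        exc = sys.exc_info()[1]
        print(exc)    # e.g. [No such file or directory]: stat('/no/such/file',)
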
Added: pypy/trunk/py/impl/io/__init__.py
==============================================================================
--- (empty file)
+++ pypy/trunk/py/impl/io/__init__.py	Wed Nov 11 18:54:49 2009
@@ -0,0 +1 @@
+""" input/output helping """

Added: pypy/trunk/py/impl/io/capture.py
==============================================================================
--- (empty file)
+++ pypy/trunk/py/impl/io/capture.py	Wed Nov 11 18:54:49 2009
@@ -0,0 +1,344 @@
+import os
+import sys
+import py
+import tempfile
+
+try: 
+    from io import StringIO
+except ImportError: 
+    from StringIO import StringIO
+
+if sys.version_info < (3,0):
+    class TextIO(StringIO):
+        def write(self, data):
+            if not isinstance(data, unicode):
+                data = unicode(data, getattr(self, '_encoding', 'UTF-8'))
+            StringIO.write(self, data)
+else:
+    TextIO = StringIO
+
+try:
+    from io import BytesIO
+except ImportError:
+    class BytesIO(StringIO):
+        def write(self, data):
+            if isinstance(data, unicode):
+                raise TypeError("not a byte value: %r" %(data,))
+            StringIO.write(self, data)
+
+class FDCapture: 
+    """ Capture IO to/from a given os-level filedescriptor. """
+    
+    def __init__(self, targetfd, tmpfile=None): 
+        """ save targetfd descriptor, and open a new 
+            temporary file there.  If no tmpfile is 
+            specified a tempfile.Tempfile() will be opened
+            in text mode. 
+        """
+        self.targetfd = targetfd
+        if tmpfile is None: 
+            f = tempfile.TemporaryFile('wb+')
+            tmpfile = dupfile(f, encoding="UTF-8") 
+            f.close()
+        self.tmpfile = tmpfile 
+        self._savefd = os.dup(targetfd)
+        os.dup2(self.tmpfile.fileno(), targetfd) 
+        self._patched = []
+
+    def setasfile(self, name, module=sys): 
+        """ patch <module>.<name> to self.tmpfile
+        """
+        key = (module, name)
+        self._patched.append((key, getattr(module, name)))
+        setattr(module, name, self.tmpfile) 
+
+    def unsetfiles(self): 
+        """ unpatch all patched items
+        """
+        while self._patched: 
+            (module, name), value = self._patched.pop()
+            setattr(module, name, value) 
+
+    def done(self): 
+        """ unpatch and clean up, returns the self.tmpfile (file object)
+        """
+        os.dup2(self._savefd, self.targetfd) 
+        self.unsetfiles() 
+        os.close(self._savefd) 
+        self.tmpfile.seek(0)
+        return self.tmpfile 
+
+    def writeorg(self, data):
+        """ write a string to the original file descriptor
+        """
+        tempfp = tempfile.TemporaryFile()
+        try:
+            os.dup2(self._savefd, tempfp.fileno())
+            tempfp.write(data)
+        finally:
+            tempfp.close()
+
+
+def dupfile(f, mode=None, buffering=0, raising=False, encoding=None): 
+    """ return a new open file object that's a duplicate of f
+
+        mode is duplicated if not given, 'buffering' controls 
+        buffer size (defaulting to no buffering) and 'raising'
+        defines whether an exception is raised when an incompatible
+        file object is passed in (if raising is False, the file
+        object itself will be returned)
+    """
+    try: 
+        fd = f.fileno() 
+    except AttributeError: 
+        if raising: 
+            raise 
+        return f
+    newfd = os.dup(fd) 
+    mode = mode and mode or f.mode
+    if sys.version_info >= (3,0):
+        if encoding is not None:
+            mode = mode.replace("b", "")
+            buffering = True
+        return os.fdopen(newfd, mode, buffering, encoding, closefd=False)
+    else:
+        f = os.fdopen(newfd, mode, buffering) 
+        if encoding is not None:
+            return EncodedFile(f, encoding)
+        return f
+
+class EncodedFile(object):
+    def __init__(self, _stream, encoding):
+        self._stream = _stream
+        self.encoding = encoding
+
+    def write(self, obj):
+        if isinstance(obj, unicode):
+            obj = obj.encode(self.encoding)
+        elif isinstance(obj, str):
+            pass
+        else:
+            obj = str(obj)
+        self._stream.write(obj)
+
+    def writelines(self, linelist):
+        data = ''.join(linelist)
+        self.write(data)
+
+    def __getattr__(self, name):
+        return getattr(self._stream, name)
+
+class Capture(object):
+    def call(cls, func, *args, **kwargs): 
+        """ return a (res, out, err) tuple where
+            out and err represent the output/error output
+            during function execution. 
+            call the given function with args/kwargs
+            and capture output/error during its execution. 
+        """ 
+        so = cls()
+        try: 
+            res = func(*args, **kwargs)
+        finally: 
+            out, err = so.reset()
+        return res, out, err 
+    call = classmethod(call) 
+
+    def reset(self):
+        """ reset sys.stdout/stderr and return captured output as strings. """
+        if hasattr(self, '_suspended'):
+            outfile = self._kwargs['out']
+            errfile = self._kwargs['err']
+            del self._kwargs
+        else:
+            outfile, errfile = self.done() 
+        out, err = "", ""
+        if outfile:
+            out = outfile.read()
+            outfile.close()
+        if errfile and errfile != outfile:
+            err = errfile.read()
+            errfile.close()
+        return out, err
+
+    def suspend(self):
+        """ return current snapshot captures, memorize tempfiles. """
+        assert not hasattr(self, '_suspended')
+        self._suspended = True
+        outerr = self.readouterr()
+        outfile, errfile = self.done()
+        self._kwargs['out'] = outfile
+        self._kwargs['err'] = errfile
+        return outerr
+
+    def resume(self):
+        """ resume capturing with original temp files. """
+        assert self._suspended
+        self._initialize(**self._kwargs)
+        del self._suspended
+
+
+class StdCaptureFD(Capture): 
+    """ This class allows to capture writes to FD1 and FD2 
+        and may connect a NULL file to FD0 (and prevent
+        reads from sys.stdin)
+    """
+    def __init__(self, out=True, err=True, 
+                 mixed=False, in_=True, patchsys=True): 
+        self._kwargs = locals().copy()
+        del self._kwargs['self']
+        self._initialize(**self._kwargs)
+
+    def _initialize(self, out=True, err=True, 
+                    mixed=False, in_=True, patchsys=True): 
+        if in_:
+            self._oldin = (sys.stdin, os.dup(0))
+            sys.stdin  = DontReadFromInput()
+            fd = os.open(devnullpath, os.O_RDONLY)
+            os.dup2(fd, 0)
+            os.close(fd)
+        if out: 
+            tmpfile = None
+            if hasattr(out, 'write'):
+                tmpfile = out
+            self.out = py.io.FDCapture(1, tmpfile=tmpfile)
+            if patchsys: 
+                self.out.setasfile('stdout')
+        if err: 
+            if mixed and out:
+                tmpfile = self.out.tmpfile 
+            elif hasattr(err, 'write'):
+                tmpfile = err
+            else:
+                tmpfile = None
+            self.err = py.io.FDCapture(2, tmpfile=tmpfile) 
+            if patchsys: 
+                self.err.setasfile('stderr')
+
+    def done(self):
+        """ return (outfile, errfile) and stop capturing. """
+        if hasattr(self, 'out'): 
+            outfile = self.out.done() 
+        else:
+            outfile = None
+        if hasattr(self, 'err'): 
+            errfile = self.err.done() 
+        else:
+            errfile = None 
+        if hasattr(self, '_oldin'):
+            oldsys, oldfd = self._oldin 
+            os.dup2(oldfd, 0)
+            os.close(oldfd)
+            sys.stdin = oldsys 
+        return outfile, errfile 
+
+    def readouterr(self):
+        """ return snapshot value of stdout/stderr capturings. """
+        l = []
+        for name in ('out', 'err'):
+            res = ""
+            if hasattr(self, name):
+                f = getattr(self, name).tmpfile
+                f.seek(0)
+                res = f.read()
+                f.truncate(0)
+                f.seek(0)
+            l.append(res)
+        return l 
+
+class StdCapture(Capture):
+    """ This class allows to capture writes to sys.stdout|stderr "in-memory"
+        and will raise errors on tries to read from sys.stdin. It only
+        modifies sys.stdout|stderr|stdin attributes and does not 
+        touch underlying File Descriptors (use StdCaptureFD for that). 
+    """
+    def __init__(self, out=True, err=True, in_=True, mixed=False):
+        self._kwargs = locals().copy()
+        del self._kwargs['self']
+        self._initialize(**self._kwargs)
+
+    def _initialize(self, out, err, in_, mixed):
+        self._out = out
+        self._err = err 
+        self._in = in_
+        if out: 
+            self._oldout = sys.stdout
+            if not hasattr(out, 'write'):
+                out = TextIO()
+            sys.stdout = self.out = out
+        if err: 
+            self._olderr = sys.stderr
+            if out and mixed: 
+                err = self.out 
+            elif not hasattr(err, 'write'):
+                err = TextIO()
+            sys.stderr = self.err = err
+        if in_:
+            self._oldin  = sys.stdin
+            sys.stdin  = self.newin  = DontReadFromInput()
+
+    def done(self): 
+        """ return (outfile, errfile) and stop capturing. """
+        o,e = sys.stdout, sys.stderr
+        if self._out: 
+            try:
+                sys.stdout = self._oldout 
+            except AttributeError:
+                raise IOError("stdout capturing already reset")
+            del self._oldout
+            outfile = self.out
+            outfile.seek(0)
+        else:
+            outfile = None
+        if self._err: 
+            try:
+                sys.stderr = self._olderr 
+            except AttributeError:
+                raise IOError("stderr capturing already reset")
+            del self._olderr 
+            errfile = self.err 
+            errfile.seek(0)
+        else:
+            errfile = None
+        if self._in:
+            sys.stdin = self._oldin 
+        return outfile, errfile
+
+    def readouterr(self):
+        """ return snapshot value of stdout/stderr capturings. """
+        out = err = ""
+        if self._out:
+            out = sys.stdout.getvalue()
+            sys.stdout.truncate(0)
+        if self._err:
+            err = sys.stderr.getvalue()
+            sys.stderr.truncate(0)
+        return out, err 
+
+class DontReadFromInput:
+    """Temporary stub class.  Ideally when stdin is accessed, the
+    capturing should be turned off, with possibly all data captured
+    so far sent to the screen.  This should be configurable, though,
+    because in automated test runs it is better to crash than
+    hang indefinitely.
+    """
+    def read(self, *args):
+        raise IOError("reading from stdin while output is captured")
+    readline = read
+    readlines = read
+    __iter__ = read
+   
+    def fileno(self):
+        raise ValueError("redirected Stdin is pseudofile, has no fileno()") 
+    def isatty(self):
+        return False
+
+try:
+    devnullpath = os.devnull
+except AttributeError:
+    if os.name == 'nt':
+        devnullpath = 'NUL'
+    else:
+        devnullpath = '/dev/null'
+
+

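A minimal sketch (not part of this commit) of the in-memory capturing API added above; StdCaptureFD is used the same way but additionally redirects the underlying file descriptors:

    import py

    cap = py.io.StdCapture()      # swap sys.stdout/stderr for in-memory files
    print("hello")
    out, err = cap.reset()        # restore sys.stdout/stderr, return captured text
    assert out == "hello\n"

    # or capture around a single call:
    res, out, err = py.io.StdCapture.call(divmod, 7, 3)
    assert res == (2, 1) and out == ""
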
Added: pypy/trunk/py/impl/io/terminalwriter.py
==============================================================================
--- (empty file)
+++ pypy/trunk/py/impl/io/terminalwriter.py	Wed Nov 11 18:54:49 2009
@@ -0,0 +1,264 @@
+"""
+
+Helper functions for writing to terminals and files. 
+
+"""
+
+
+import sys, os
+import py
+
+def _getdimensions():
+    import termios,fcntl,struct
+    call = fcntl.ioctl(0,termios.TIOCGWINSZ,"\000"*8)
+    height,width = struct.unpack( "hhhh", call ) [:2]
+    return height, width 
+
+if sys.platform == 'win32':
+    # ctypes access to the Windows console
+
+    STD_OUTPUT_HANDLE = -11
+    STD_ERROR_HANDLE  = -12
+    FOREGROUND_BLUE      = 0x0001 # text color contains blue.
+    FOREGROUND_GREEN     = 0x0002 # text color contains green.
+    FOREGROUND_RED       = 0x0004 # text color contains red.
+    FOREGROUND_WHITE     = 0x0007
+    FOREGROUND_INTENSITY = 0x0008 # text color is intensified.
+    BACKGROUND_BLUE      = 0x0010 # background color contains blue.
+    BACKGROUND_GREEN     = 0x0020 # background color contains green.
+    BACKGROUND_RED       = 0x0040 # background color contains red.
+    BACKGROUND_WHITE     = 0x0070
+    BACKGROUND_INTENSITY = 0x0080 # background color is intensified.
+
+    def GetStdHandle(kind):
+        import ctypes
+        return ctypes.windll.kernel32.GetStdHandle(kind)
+
+    def SetConsoleTextAttribute(handle, attr):
+        import ctypes
+        ctypes.windll.kernel32.SetConsoleTextAttribute(
+            handle, attr)
+
+    def _getdimensions():
+        import ctypes
+        from ctypes import wintypes
+
+        SHORT = ctypes.c_short
+        class COORD(ctypes.Structure):
+            _fields_ = [('X', SHORT),
+                        ('Y', SHORT)]
+        class SMALL_RECT(ctypes.Structure):
+            _fields_ = [('Left', SHORT),
+                        ('Top', SHORT),
+                        ('Right', SHORT),
+                        ('Bottom', SHORT)]
+        class CONSOLE_SCREEN_BUFFER_INFO(ctypes.Structure):
+            _fields_ = [('dwSize', COORD),
+                        ('dwCursorPosition', COORD),
+                        ('wAttributes', wintypes.WORD),
+                        ('srWindow', SMALL_RECT),
+                        ('dwMaximumWindowSize', COORD)]
+        STD_OUTPUT_HANDLE = -11
+        handle = GetStdHandle(STD_OUTPUT_HANDLE)
+        info = CONSOLE_SCREEN_BUFFER_INFO()
+        ctypes.windll.kernel32.GetConsoleScreenBufferInfo(
+            handle, ctypes.byref(info))
+        # Subtract one from the width, otherwise the cursor wraps
+        # and the ending \n causes an empty line to display.
+        return info.dwSize.Y, info.dwSize.X - 1
+
+def get_terminal_width():
+    try:
+        height, width = _getdimensions()
+    except (SystemExit, KeyboardInterrupt):
+        raise
+    except:
+        # FALLBACK
+        width = int(os.environ.get('COLUMNS', 80))-1
+    # XXX the windows getdimensions may be bogus, let's sanitize a bit
+    width = max(width, 40) # we always need at least 40 chars
+    return width
+
+terminal_width = get_terminal_width()
+
+# XXX unify with _escaped func below
+def ansi_print(text, esc, file=None, newline=True, flush=False):
+    if file is None:
+        file = sys.stderr
+    text = text.rstrip()
+    if esc and not isinstance(esc, tuple):
+        esc = (esc,)
+    if esc and sys.platform != "win32" and file.isatty():
+        text = (''.join(['\x1b[%sm' % cod for cod in esc])  +  
+                text +
+                '\x1b[0m')     # ANSI color code "reset"
+    if newline:
+        text += '\n'
+
+    if esc and sys.platform == "win32" and file.isatty():
+        if 1 in esc:
+            bold = True
+            esc = tuple([x for x in esc if x != 1])
+        else:
+            bold = False
+        esctable = {()   : FOREGROUND_WHITE,                 # normal
+                    (31,): FOREGROUND_RED,                   # red
+                    (32,): FOREGROUND_GREEN,                 # green
+                    (33,): FOREGROUND_GREEN|FOREGROUND_RED,  # yellow
+                    (34,): FOREGROUND_BLUE,                  # blue
+                    (35,): FOREGROUND_BLUE|FOREGROUND_RED,   # purple
+                    (36,): FOREGROUND_BLUE|FOREGROUND_GREEN, # cyan
+                    (37,): FOREGROUND_WHITE,                 # white
+                    (39,): FOREGROUND_WHITE,                 # reset
+                    }
+        attr = esctable.get(esc, FOREGROUND_WHITE)
+        if bold:
+            attr |= FOREGROUND_INTENSITY
+        STD_OUTPUT_HANDLE = -11
+        STD_ERROR_HANDLE = -12
+        if file is sys.stderr:
+            handle = GetStdHandle(STD_ERROR_HANDLE)
+        else:
+            handle = GetStdHandle(STD_OUTPUT_HANDLE)
+        SetConsoleTextAttribute(handle, attr)
+        file.write(text)
+        SetConsoleTextAttribute(handle, FOREGROUND_WHITE)
+    else:
+        file.write(text)
+
+    if flush:
+        file.flush()
+
+def should_do_markup(file):
+    return hasattr(file, 'isatty') and file.isatty() \
+           and os.environ.get('TERM') != 'dumb'
+
+class TerminalWriter(object):
+    _esctable = dict(black=30, red=31, green=32, yellow=33, 
+                     blue=34, purple=35, cyan=36, white=37,
+                     Black=40, Red=41, Green=42, Yellow=43, 
+                     Blue=44, Purple=45, Cyan=46, White=47,
+                     bold=1, light=2, blink=5, invert=7)
+
+    # XXX deprecate stringio argument
+    def __init__(self, file=None, stringio=False, encoding=None):
+        self.encoding = encoding 
+
+        if file is None:
+            if stringio:
+                self.stringio = file = py.io.TextIO()
+            else:
+                file = py.std.sys.stdout 
+        elif hasattr(file, '__call__'):
+            file = WriteFile(file, encoding=encoding)
+        self._file = file
+        self.fullwidth = get_terminal_width()
+        self.hasmarkup = should_do_markup(file)
+
+    def _escaped(self, text, esc):
+        if esc and self.hasmarkup:
+            text = (''.join(['\x1b[%sm' % cod for cod in esc])  +  
+                text +'\x1b[0m')
+        return text
+
+    def markup(self, text, **kw):
+        esc = []
+        for name in kw:
+            if name not in self._esctable:
+                raise ValueError("unknown markup: %r" %(name,))
+            if kw[name]:
+                esc.append(self._esctable[name])
+        return self._escaped(text, tuple(esc))
+
+    def sep(self, sepchar, title=None, fullwidth=None, **kw):
+        if fullwidth is None:
+            fullwidth = self.fullwidth
+        # the goal is to have the line be as long as possible
+        # under the condition that len(line) <= fullwidth
+        if title is not None:
+            # we want 2 + 2*len(fill) + len(title) <= fullwidth
+            # i.e.    2 + 2*len(sepchar)*N + len(title) <= fullwidth
+            #         2*len(sepchar)*N <= fullwidth - len(title) - 2
+            #         N <= (fullwidth - len(title) - 2) // (2*len(sepchar))
+            N = (fullwidth - len(title) - 2) // (2*len(sepchar))
+            fill = sepchar * N
+            line = "%s %s %s" % (fill, title, fill)
+        else:
+            # we want len(sepchar)*N <= fullwidth
+            # i.e.    N <= fullwidth // len(sepchar)
+            line = sepchar * (fullwidth // len(sepchar))
+        # in some situations there is room for an extra sepchar at the right,
+        # in particular if we consider that with a sepchar like "_ " the
+        # trailing space is not important at the end of the line
+        if len(line) + len(sepchar.rstrip()) <= fullwidth:
+            line += sepchar.rstrip()
+
+        self.line(line, **kw)
+
+    def write(self, s, **kw):
+        if s:
+            s = self._getbytestring(s)
+            if self.hasmarkup and kw:
+                s = self.markup(s, **kw)
+            self._file.write(s)
+            self._file.flush()
+
+    def _getbytestring(self, s):
+        # XXX review this and the whole logic
+        if self.encoding and sys.version_info < (3,0) and isinstance(s, unicode):
+            return s.encode(self.encoding)
+        elif not isinstance(s, str):
+            return str(s)
+        return s
+
+    def line(self, s='', **kw):
+        self.write(s, **kw)
+        self.write('\n')
+
+class Win32ConsoleWriter(TerminalWriter):
+    def write(self, s, **kw):
+        if s:
+            s = self._getbytestring(s)
+            if self.hasmarkup:
+                handle = GetStdHandle(STD_OUTPUT_HANDLE)
+
+            if self.hasmarkup and kw:
+                attr = 0
+                if kw.pop('bold', False):
+                    attr |= FOREGROUND_INTENSITY
+
+                if kw.pop('red', False):
+                    attr |= FOREGROUND_RED
+                elif kw.pop('blue', False):
+                    attr |= FOREGROUND_BLUE
+                elif kw.pop('green', False):
+                    attr |= FOREGROUND_GREEN
+                else:
+                    attr |= FOREGROUND_WHITE
+
+                SetConsoleTextAttribute(handle, attr)
+            self._file.write(s)
+            self._file.flush()
+            if self.hasmarkup:
+                SetConsoleTextAttribute(handle, FOREGROUND_WHITE)
+
+    def line(self, s="", **kw):
+        self.write(s+"\n", **kw)
+
+if sys.platform == 'win32':
+    TerminalWriter = Win32ConsoleWriter
+
+class WriteFile(object): 
+    def __init__(self, writemethod, encoding=None): 
+        self.encoding = encoding 
+        self._writemethod = writemethod 
+
+    def write(self, data):
+        if self.encoding:
+            data = data.encode(self.encoding)
+        self._writemethod(data)
+
+    def flush(self): 
+        return 
+
+

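A minimal sketch (not part of this commit) of the TerminalWriter API added above; markup keywords are only applied when the target looks like a capable terminal:

    import py

    tw = py.io.TerminalWriter()          # defaults to sys.stdout
    tw.sep("=", "test session starts")   # full-width separator line
    tw.line("collected 3 items", bold=True)
    tw.line("all green", green=True)

    # write into an in-memory buffer instead of a real stream:
    tw = py.io.TerminalWriter(stringio=True)
    tw.line("captured")
    text = tw.stringio.getvalue()
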
Added: pypy/trunk/py/impl/log/__init__.py
==============================================================================
--- (empty file)
+++ pypy/trunk/py/impl/log/__init__.py	Wed Nov 11 18:54:49 2009
@@ -0,0 +1,2 @@
+""" logging API ('producers' and 'consumers' connected via keywords) """
+

Added: pypy/trunk/py/impl/log/log.py
==============================================================================
--- (empty file)
+++ pypy/trunk/py/impl/log/log.py	Wed Nov 11 18:54:49 2009
@@ -0,0 +1,184 @@
+"""
+basic logging functionality based on a producer/consumer scheme. 
+
+XXX implement this API: (maybe put it into slogger.py?)
+
+        log = Logger(
+                    info=py.log.STDOUT, 
+                    debug=py.log.STDOUT, 
+                    command=None)
+        log.info("hello", "world")
+        log.command("hello", "world")
+
+        log = Logger(info=Logger(something=...), 
+                     debug=py.log.STDOUT, 
+                     command=None)
+"""
+import py, sys
+
+class Message(object): 
+    def __init__(self, keywords, args): 
+        self.keywords = keywords 
+        self.args = args 
+
+    def content(self): 
+        return " ".join(map(str, self.args))
+
+    def prefix(self): 
+        return "[%s] " % (":".join(self.keywords))
+
+    def __str__(self): 
+        return self.prefix() + self.content() 
+
+
+class Producer(object):
+    """ (deprecated) Log producer API which sends messages to be logged
+        to a 'consumer' object, which then prints them to stdout,
+        stderr, files, etc. Used extensively by PyPy-1.1.
+    """
+    
+    Message = Message  # to allow later customization 
+    keywords2consumer = {}
+
+    def __init__(self, keywords, keywordmapper=None, **kw): 
+        if hasattr(keywords, 'split'):
+            keywords = tuple(keywords.split())
+        self._keywords = keywords
+        if keywordmapper is None:
+            keywordmapper = default_keywordmapper
+        self._keywordmapper = keywordmapper
+
+    def __repr__(self):
+        return "<py.log.Producer %s>" % ":".join(self._keywords) 
+
+    def __getattr__(self, name):
+        if '_' in name: 
+            raise AttributeError(name)
+        producer = self.__class__(self._keywords + (name,))
+        setattr(self, name, producer)
+        return producer 
+    
+    def __call__(self, *args):
+        """ write a message to the appropriate consumer(s) """
+        func = self._keywordmapper.getconsumer(self._keywords)
+        if func is not None: 
+            func(self.Message(self._keywords, args))
+
+class KeywordMapper: 
+    def __init__(self):
+        self.keywords2consumer = {}
+
+    def getstate(self):
+        return self.keywords2consumer.copy()
+    def setstate(self, state):
+        self.keywords2consumer.clear()
+        self.keywords2consumer.update(state)
+
+    def getconsumer(self, keywords):
+        """ return a consumer matching the given keywords. 
+        
+            tries to find the most suitable consumer by walking the keyword
+            tuple from the most specific prefix to the least specific one;
+            the first matching consumer is returned (falling back to the
+            'default' consumer).
+        """
+        for i in range(len(keywords), 0, -1): 
+            try: 
+                return self.keywords2consumer[keywords[:i]]
+            except KeyError: 
+                continue
+        return self.keywords2consumer.get('default', default_consumer)
+
+    def setconsumer(self, keywords, consumer): 
+        """ set a consumer for a set of keywords. """ 
+        # normalize to tuples 
+        if isinstance(keywords, str): 
+            keywords = tuple(filter(None, keywords.split()))
+        elif hasattr(keywords, '_keywords'): 
+            keywords = keywords._keywords 
+        elif not isinstance(keywords, tuple): 
+            raise TypeError("key %r is not a string or tuple" % (keywords,))
+        if consumer is not None and not py.builtin.callable(consumer): 
+            if not hasattr(consumer, 'write'): 
+                raise TypeError(
+                    "%r should be None, callable or file-like" % (consumer,))
+            consumer = File(consumer)
+        self.keywords2consumer[keywords] = consumer 
+
+def default_consumer(msg): 
+    """ the default consumer, prints the message to stdout (using 'print') """
+    sys.stderr.write(str(msg)+"\n")
+
+default_keywordmapper = KeywordMapper()
+
+def setconsumer(keywords, consumer):
+    default_keywordmapper.setconsumer(keywords, consumer)
+
+def setstate(state):
+    default_keywordmapper.setstate(state)
+def getstate():
+    return default_keywordmapper.getstate()
+
+#
+# Consumers
+#
+
+class File(object): 
+    """ log consumer wrapping a file(-like) object """
+    def __init__(self, f): 
+        assert hasattr(f, 'write')
+        #assert isinstance(f, file) or not hasattr(f, 'open') 
+        self._file = f 
+
+    def __call__(self, msg): 
+        """ write a message to the log """
+        self._file.write(str(msg) + "\n")
+
+class Path(object): 
+    """ log consumer that opens and writes to a Path """
+    def __init__(self, filename, append=False, 
+                 delayed_create=False, buffering=False):
+        self._append = append
+        self._filename = str(filename)
+        self._buffering = buffering
+        if not delayed_create:
+            self._openfile()
+
+    def _openfile(self):
+        mode = self._append and 'a' or 'w'
+        f = open(self._filename, mode)
+        self._file = f
+
+    def __call__(self, msg):
+        """ write a message to the log """
+        if not hasattr(self, "_file"):
+            self._openfile()
+        self._file.write(str(msg) + "\n")
+        if not self._buffering:
+            self._file.flush()
+
+def STDOUT(msg): 
+    """ consumer that writes to sys.stdout """
+    sys.stdout.write(str(msg)+"\n")
+
+def STDERR(msg): 
+    """ consumer that writes to sys.stderr """
+    sys.stderr.write(str(msg)+"\n")
+
+class Syslog:
+    """ consumer that writes to the syslog daemon """
+
+    def __init__(self, priority = None):
+        if priority is None:
+            priority = self.LOG_INFO
+        self.priority = priority
+
+    def __call__(self, msg):
+        """ write a message to the log """
+        py.std.syslog.syslog(self.priority, str(msg))
+
+for _prio in "EMERG ALERT CRIT ERR WARNING NOTICE INFO DEBUG".split():
+    _prio = "LOG_" + _prio
+    try:
+        setattr(Syslog, _prio, getattr(py.std.syslog, _prio))
+    except AttributeError:
+        pass

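A minimal usage sketch for the producer/consumer logging module above, assuming the py package exposes it as py.log (Producer, setconsumer, STDOUT):

    import py

    log = py.log.Producer("mypkg")                # keyword chain starts at 'mypkg'
    py.log.setconsumer("mypkg", py.log.STDOUT)    # route 'mypkg ...' messages to stdout
    py.log.setconsumer("mypkg debug", None)       # silence the 'mypkg debug' sub-producer

    log("starting up")             # -> [mypkg] starting up
    log.debug("not shown")         # consumer is None, message is dropped
    log.io("copying", 3, "files")  # no specific consumer, falls back to 'mypkg'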
Added: pypy/trunk/py/impl/log/warning.py
==============================================================================
--- (empty file)
+++ pypy/trunk/py/impl/log/warning.py	Wed Nov 11 18:54:49 2009
@@ -0,0 +1,72 @@
+import py, sys
+
+class Warning(DeprecationWarning):
+    def __init__(self, msg, path, lineno):
+        self.msg = msg
+        self.path = path
+        self.lineno = lineno 
+    def __repr__(self):
+        return "%s:%d: %s" %(self.path, self.lineno+1, self.msg)
+    def __str__(self):
+        return self.msg 
+
+def _apiwarn(startversion, msg, stacklevel=2, function=None):
+    # below is mostly COPIED from python2.4/warnings.py's def warn()
+    # Get context information
+    if stacklevel == "initpkg":
+        frame = sys._getframe(stacklevel == "initpkg" and 1 or stacklevel)
+        level = 2
+        while frame:
+            co = frame.f_code
+            if co.co_name == "__getattr__" and co.co_filename.find("initpkg") !=-1:
+                stacklevel = level 
+                break
+            level += 1
+            frame = frame.f_back
+        else:
+            stacklevel = 1
+    msg = "%s (since version %s)" %(msg, startversion)
+    warn(msg, stacklevel=stacklevel+1, function=function)
+
+def warn(msg, stacklevel=1, function=None):
+    if function is not None:
+        filename = py.std.inspect.getfile(function)
+        lineno = py.code.getrawcode(function).co_firstlineno
+    else:
+        try:
+            caller = sys._getframe(stacklevel)
+        except ValueError:
+            globals = sys.__dict__
+            lineno = 1
+        else:
+            globals = caller.f_globals
+            lineno = caller.f_lineno
+        if '__name__' in globals:
+            module = globals['__name__']
+        else:
+            module = "<string>"
+        filename = globals.get('__file__')
+    if filename:
+        fnl = filename.lower()
+        if fnl.endswith(".pyc") or fnl.endswith(".pyo"):
+            filename = filename[:-1]
+        elif fnl.endswith("$py.class"):
+            filename = filename.replace('$py.class', '.py')
+    else:
+        if module == "__main__":
+            try:
+                filename = sys.argv[0]
+            except AttributeError:
+                # embedded interpreters don't have sys.argv, see bug #839151
+                filename = '__main__'
+        if not filename:
+            filename = module
+    path = py.path.local(filename)
+    warning = Warning(msg, path, lineno)
+    py.std.warnings.warn_explicit(warning, category=Warning, 
+        filename=str(warning.path), 
+        lineno=warning.lineno,
+        registry=py.std.warnings.__dict__.setdefault(
+            "__warningsregistry__", {})
+    )
+

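A small sketch of the warn() helper above; the direct import path is an assumption made for illustration (the usual public entry point is py.log._apiwarn):

    from py.impl.log.warning import warn

    def old_api():
        # stacklevel=2 attributes the warning to the caller of old_api()
        warn("old_api() is deprecated", stacklevel=2)

    old_api()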
Added: pypy/trunk/py/impl/path/__init__.py
==============================================================================
--- (empty file)
+++ pypy/trunk/py/impl/path/__init__.py	Wed Nov 11 18:54:49 2009
@@ -0,0 +1 @@
+""" unified file system api """

Added: pypy/trunk/py/impl/path/cacheutil.py
==============================================================================
--- (empty file)
+++ pypy/trunk/py/impl/path/cacheutil.py	Wed Nov 11 18:54:49 2009
@@ -0,0 +1,111 @@
+"""
+This module contains multithread-safe cache implementations.
+
+All Caches have 
+
+    getorbuild(key, builder) 
+    delentry(key) 
+
+methods and allow configuration when instantiating the cache class. 
+"""
+from time import time as gettime
+
+class BasicCache(object):
+    def __init__(self, maxentries=128):
+        self.maxentries = maxentries
+        self.prunenum = int(maxentries - maxentries/8)
+        self._dict = {}
+
+    def _getentry(self, key):
+        return self._dict[key]
+
+    def _putentry(self, key, entry):
+        self._prunelowestweight()
+        self._dict[key] = entry 
+
+    def delentry(self, key, raising=False):
+        try:
+            del self._dict[key]
+        except KeyError:
+            if raising:
+                raise
+
+    def getorbuild(self, key, builder):
+        try:
+            entry = self._getentry(key)
+        except KeyError:
+            entry = self._build(key, builder)
+            self._putentry(key, entry)
+        return entry.value
+
+    def _prunelowestweight(self):
+        """ prune out entries with lowest weight. """
+        numentries = len(self._dict)
+        if numentries >= self.maxentries:
+            # evict according to entry's weight
+            items = [(entry.weight, key) 
+                        for key, entry in self._dict.items()]
+            items.sort()
+            index = numentries - self.prunenum
+            if index > 0:
+                for weight, key in items[:index]:
+                    # in MT situations the element might be gone
+                    self.delentry(key, raising=False) 
+
+class BuildcostAccessCache(BasicCache):
+    """ A BuildTime/Access-counting cache implementation.
+        the weight of a value is computed as the product of
+
+            num-accesses-of-a-value * time-to-build-the-value
+
+        The values with the least such weights are evicted
+        if the cache maxentries threshold is superceded.
+        For implementation flexibility more than one object
+        might be evicted at a time.
+    """
+    # time function to use for measuring build-times
+
+    def _build(self, key, builder):
+        start = gettime()
+        val = builder()
+        end = gettime()
+        return WeightedCountingEntry(val, end-start)
+
+
+class WeightedCountingEntry(object):
+    def __init__(self, value, oneweight):
+        self._value = value
+        self.weight = self._oneweight = oneweight 
+
+    def value(self):
+        self.weight += self._oneweight
+        return self._value
+    value = property(value)
+
+class AgingCache(BasicCache):
+    """ This cache prunes out cache entries that are too old.
+    """
+    def __init__(self, maxentries=128, maxseconds=10.0):
+        super(AgingCache, self).__init__(maxentries)
+        self.maxseconds = maxseconds
+
+    def _getentry(self, key):
+        entry = self._dict[key]
+        if entry.isexpired():
+            self.delentry(key) 
+            raise KeyError(key) 
+        return entry
+
+    def _build(self, key, builder):
+        val = builder()
+        entry = AgingEntry(val, gettime() + self.maxseconds)
+        return entry
+
+class AgingEntry(object):
+    def __init__(self, value, expirationtime):
+        self.value = value
+        self.weight = expirationtime
+
+    def isexpired(self):
+        t = gettime()
+        return t >= self.weight 

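A hedged sketch of the getorbuild() cache API defined above (direct import of the impl module is assumed):

    from py.impl.path.cacheutil import BuildcostAccessCache, AgingCache

    cache = BuildcostAccessCache(maxentries=64)

    def expensive():
        return sum(range(1000000))

    v1 = cache.getorbuild("total", expensive)   # builds the value and stores an entry
    v2 = cache.getorbuild("total", expensive)   # served from the cache, weight grows

    aging = AgingCache(maxentries=16, maxseconds=0.5)
    aging.getorbuild("total", expensive)        # entry expires ~0.5s after being built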
Added: pypy/trunk/py/impl/path/common.py
==============================================================================
--- (empty file)
+++ pypy/trunk/py/impl/path/common.py	Wed Nov 11 18:54:49 2009
@@ -0,0 +1,333 @@
+"""
+"""
+import os, sys
+import py
+
+class Checkers:
+    _depend_on_existence = 'exists', 'link', 'dir', 'file'
+
+    def __init__(self, path):
+        self.path = path
+
+    def dir(self):
+        raise NotImplementedError
+
+    def file(self):
+        raise NotImplementedError
+
+    def dotfile(self):
+        return self.path.basename.startswith('.')
+
+    def ext(self, arg):
+        if not arg.startswith('.'):
+            arg = '.' + arg
+        return self.path.ext == arg
+
+    def exists(self):
+        raise NotImplementedError
+
+    def basename(self, arg):
+        return self.path.basename == arg
+
+    def basestarts(self, arg):
+        return self.path.basename.startswith(arg)
+
+    def relto(self, arg):
+        return self.path.relto(arg)
+
+    def fnmatch(self, arg):
+        return FNMatcher(arg)(self.path)
+
+    def endswith(self, arg):
+        return str(self.path).endswith(arg)
+
+    def _evaluate(self, kw):
+        for name, value in kw.items():
+            invert = False
+            meth = None
+            try:
+                meth = getattr(self, name)
+            except AttributeError:
+                if name[:3] == 'not':
+                    invert = True
+                    try:
+                        meth = getattr(self, name[3:])
+                    except AttributeError:
+                        pass
+            if meth is None:
+                raise TypeError(
+                    "no %r checker available for %r" % (name, self.path))
+            try:
+                if py.code.getrawcode(meth).co_argcount > 1:
+                    if (not meth(value)) ^ invert:
+                        return False
+                else:
+                    if bool(value) ^ bool(meth()) ^ invert:
+                        return False
+            except (py.error.ENOENT, py.error.ENOTDIR):
+                for name in self._depend_on_existence:
+                    if name in kw:
+                        if kw.get(name):
+                            return False
+                    name = 'not' + name
+                    if name in kw:
+                        if not kw.get(name):
+                            return False
+        return True
+
+class NeverRaised(Exception): 
+    pass
+
+class PathBase(object):
+    """ shared implementation for filesystem path objects."""
+    Checkers = Checkers
+
+    def __div__(self, other):
+        return self.join(str(other))
+    __truediv__ = __div__ # py3k
+
+    def basename(self):
+        """ basename part of path. """
+        return self._getbyspec('basename')[0]
+    basename = property(basename, None, None, basename.__doc__)
+
+    def purebasename(self):
+        """ pure base name of the path."""
+        return self._getbyspec('purebasename')[0]
+    purebasename = property(purebasename, None, None, purebasename.__doc__)
+
+    def ext(self):
+        """ extension of the path (including the '.')."""
+        return self._getbyspec('ext')[0]
+    ext = property(ext, None, None, ext.__doc__)
+
+    def dirpath(self, *args, **kwargs):
+        """ return the directory Path of the current Path joined
+            with any given path arguments.
+        """
+        return self.new(basename='').join(*args, **kwargs)
+
+    def read(self, mode='r'):
+        """ read and return a bytestring from reading the path. """
+        if sys.version_info < (2,3):
+            for x in 'u', 'U':
+                if x in mode:
+                    mode = mode.replace(x, '')
+        f = self.open(mode)
+        try:
+            return f.read()
+        finally:
+            f.close()
+
+    def readlines(self, cr=1):
+        """ read and return a list of lines from the path. if cr is False, the
+newline will be removed from the end of each line. """
+        if not cr:
+            content = self.read('rU')
+            return content.split('\n')
+        else:
+            f = self.open('rU')
+            try:
+                return f.readlines()
+            finally:
+                f.close()
+
+    def load(self):
+        """ (deprecated) return object unpickled from self.read() """
+        f = self.open('rb')
+        try:
+            return py.error.checked_call(py.std.pickle.load, f)
+        finally:
+            f.close()
+
+    def move(self, target):
+        """ move this path to target. """
+        if target.relto(self):
+            raise py.error.EINVAL(target, 
+                "cannot move path into a subdirectory of itself")
+        try:
+            self.rename(target)
+        except py.error.EXDEV:  # invalid cross-device link
+            self.copy(target)
+            self.remove()
+
+    def __repr__(self):
+        """ return a string representation of this path. """
+        return repr(str(self))
+
+    def check(self, **kw):
+        """ check a path for existence, or query its properties
+
+            without arguments, this returns True if the path exists (on the
+            filesystem), False if not
+
+            with (keyword only) arguments, the object compares the value
+            of the argument with the value of a property with the same name
+            (if it has one, else it raises a TypeError)
+
+            when for example the keyword argument 'ext' is '.py', this will
+            return True if self.ext == '.py', False otherwise
+        """
+        if not kw:
+            kw = {'exists' : 1}
+        return self.Checkers(self)._evaluate(kw)
+
+    def relto(self, relpath):
+        """ return a string which is the relative part of the path
+        to the given 'relpath'. 
+        """
+        if not isinstance(relpath, (str, PathBase)): 
+            raise TypeError("%r: not a string or path object" %(relpath,))
+        strrelpath = str(relpath)
+        if strrelpath and strrelpath[-1] != self.sep:
+            strrelpath += self.sep
+        #assert strrelpath[-1] == self.sep
+        #assert strrelpath[-2] != self.sep
+        strself = str(self)
+        if sys.platform == "win32":
+            if os.path.normcase(strself).startswith(
+               os.path.normcase(strrelpath)):
+                return strself[len(strrelpath):]        
+        elif strself.startswith(strrelpath):
+            return strself[len(strrelpath):]
+        return ""
+
+    def bestrelpath(self, dest): 
+        """ return a string which is a relative path from self 
+            to dest such that self.join(bestrelpath) == dest and 
+            if not such path can be determined return dest. 
+        """ 
+        try:
+            base = self.common(dest)
+            if not base:  # can be the case on windows
+                return str(dest)
+            self2base = self.relto(base)
+            reldest = dest.relto(base)
+            if self2base:
+                n = self2base.count(self.sep) + 1
+            else:
+                n = 0
+            l = ['..'] * n
+            if reldest:
+                l.append(reldest)     
+            target = dest.sep.join(l)
+            return target 
+        except AttributeError:
+            return str(dest)
+
+
+    def parts(self, reverse=False):
+        """ return a root-first list of all ancestor directories
+            plus the path itself.
+        """
+        current = self
+        l = [self]
+        while 1:
+            last = current
+            current = current.dirpath()
+            if last == current:
+                break
+            l.insert(0, current)
+        if reverse:
+            l.reverse()
+        return l
+
+    def common(self, other):
+        """ return the common part shared with the other path
+            or None if there is no common part.
+        """
+        last = None
+        for x, y in zip(self.parts(), other.parts()):
+            if x != y:
+                return last
+            last = x
+        return last
+
+    def __add__(self, other):
+        """ return new path object with 'other' added to the basename"""
+        return self.new(basename=self.basename+str(other))
+
+    def __cmp__(self, other):
+        """ return sort value (-1, 0, +1). """
+        try:
+            return cmp(self.strpath, other.strpath)
+        except AttributeError:
+            return cmp(str(self), str(other)) # self.path, other.path)
+
+    def __lt__(self, other):
+        try:
+            return self.strpath < other.strpath 
+        except AttributeError:
+            return str(self) < str(other)
+
+    def visit(self, fil=None, rec=None, ignore=NeverRaised):
+        """ yields all paths below the current one
+
+            fil is a filter (glob pattern or callable), if not matching the
+            path will not be yielded, defaulting to None (everything is
+            returned)
+
+            rec is a filter (glob pattern or callable) that controls whether
+            a node is descended, defaulting to None
+
+            ignore is an Exception class that is ignored when calling listdir()
+            on any of the paths (by default, all exceptions are reported)
+        """
+        if isinstance(fil, str):
+            fil = FNMatcher(fil)
+        if rec: 
+            if isinstance(rec, str):
+                rec = FNMatcher(rec)
+            elif not hasattr(rec, '__call__'):
+                rec = None
+        try:
+            entries = self.listdir()
+        except ignore:
+            return
+        dirs = [p for p in entries 
+                    if p.check(dir=1) and (rec is None or rec(p))]
+        for subdir in dirs:
+            for p in subdir.visit(fil=fil, rec=rec, ignore=ignore):
+                yield p
+        for p in entries:
+            if fil is None or fil(p):
+                yield p
+
+    def _sortlist(self, res, sort):
+        if sort:
+            if hasattr(sort, '__call__'):
+                res.sort(sort)
+            else:
+                res.sort()
+
+    def samefile(self, other):
+        """ return True if other refers to the same stat object as self. """
+        return self.strpath == str(other)
+
+class FNMatcher:
+    def __init__(self, pattern):
+        self.pattern = pattern
+    def __call__(self, path):
+        """return true if the basename/fullname matches the glob-'pattern'.
+
+        *       matches everything
+        ?       matches any single character
+        [seq]   matches any character in seq
+        [!seq]  matches any char not in seq
+
+        if the pattern contains a path-separator then the full path
+        is used for pattern matching and a '*' is prepended to the
+        pattern.
+
+        if the pattern doesn't contain a path-separator the pattern
+        is only matched against the basename.
+        """
+        pattern = self.pattern
+        if pattern.find(path.sep) == -1:
+            name = path.basename
+        else:
+            name = str(path) # path.strpath # XXX svn?
+            pattern = '*' + path.sep + pattern
+        from fnmatch import fnmatch
+        return fnmatch(name, pattern)
+

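The check()/visit() query API above is easiest to see through the local path class added later in this commit; a short sketch (file names are only examples):

    import py

    p = py.path.local("setup.py")       # hypothetical file
    p.check()                           # True if the path exists
    p.check(file=1, ext=".py")          # exists, is a file, has a .py extension
    p.check(notdir=1)                   # a 'not' prefix negates a checker

    root = py.path.local(".")
    for x in root.visit(fil="*.py", rec=lambda d: d.basename != ".svn"):
        print(x.relto(root))            # all .py files below root, skipping .svn dirs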
Added: pypy/trunk/py/impl/path/gateway/__init__.py
==============================================================================
--- (empty file)
+++ pypy/trunk/py/impl/path/gateway/__init__.py	Wed Nov 11 18:54:49 2009
@@ -0,0 +1 @@
+#

Added: pypy/trunk/py/impl/path/gateway/channeltest.py
==============================================================================
--- (empty file)
+++ pypy/trunk/py/impl/path/gateway/channeltest.py	Wed Nov 11 18:54:49 2009
@@ -0,0 +1,65 @@
+import threading
+
+
+class PathServer:
+
+    def __init__(self, channel):
+        self.channel = channel
+        self.C2P = {}
+        self.next_id = 0
+        threading.Thread(target=self.serve).start()
+
+    def p2c(self, path):
+        id = self.next_id
+        self.next_id += 1
+        self.C2P[id] = path
+        return id
+
+    def command_LIST(self, id, *args):
+        path = self.C2P[id]
+        answer = [(self.p2c(p), p.basename) for p in path.listdir(*args)]
+        self.channel.send(answer)
+
+    def command_DEL(self, id):
+        del self.C2P[id]
+
+    def command_GET(self, id, spec):
+        path = self.C2P[id]
+        self.channel.send(path._getbyspec(spec))
+
+    def command_READ(self, id):
+        path = self.C2P[id]
+        self.channel.send(path.read())
+
+    def command_JOIN(self, id, resultid, *args):
+        path = self.C2P[id]
+        assert resultid not in self.C2P
+        self.C2P[resultid] = path.join(*args)
+
+    def command_DIRPATH(self, id, resultid):
+        path = self.C2P[id]
+        assert resultid not in self.C2P
+        self.C2P[resultid] = path.dirpath()
+
+    def serve(self):
+        try:
+            while 1:
+                msg = self.channel.receive()
+                meth = getattr(self, 'command_' + msg[0])
+                meth(*msg[1:])
+        except EOFError:
+            pass
+
+if __name__ == '__main__':
+    import py, execnet
+    gw = execnet.PopenGateway()
+    channel = gw._channelfactory.new()
+    srv = PathServer(channel)
+    c = gw.remote_exec("""
+        import remotepath
+        p = remotepath.RemotePath(channel.receive(), channel.receive())
+        channel.send(len(p.listdir()))
+    """)
+    c.send(channel)
+    c.send(srv.p2c(py.path.local('/tmp')))
+    print(c.receive())

Added: pypy/trunk/py/impl/path/gateway/channeltest2.py
==============================================================================
--- (empty file)
+++ pypy/trunk/py/impl/path/gateway/channeltest2.py	Wed Nov 11 18:54:49 2009
@@ -0,0 +1,21 @@
+import py, execnet
+from remotepath import RemotePath
+
+
+SRC = open('channeltest.py', 'r').read()
+
+SRC += '''
+import py
+srv = PathServer(channel.receive())
+channel.send(srv.p2c(py.path.local("/tmp")))
+'''
+
+
+#gw = execnet.SshGateway('codespeak.net')
+gw = execnet.PopenGateway()
+gw.remote_init_threads(5)
+c = gw.remote_exec(SRC, stdout=py.std.sys.stdout, stderr=py.std.sys.stderr)
+subchannel = gw._channelfactory.new()
+c.send(subchannel)
+
+p = RemotePath(subchannel, c.receive())

Added: pypy/trunk/py/impl/path/gateway/remotepath.py
==============================================================================
--- (empty file)
+++ pypy/trunk/py/impl/path/gateway/remotepath.py	Wed Nov 11 18:54:49 2009
@@ -0,0 +1,47 @@
+import py, itertools
+from py.impl.path import common
+
+COUNTER = itertools.count()
+
+class RemotePath(common.PathBase):
+    sep = '/'
+
+    def __init__(self, channel, id, basename=None):
+        self._channel = channel
+        self._id = id
+        self._basename = basename
+        self._specs = {}
+
+    def __del__(self):
+        self._channel.send(('DEL', self._id))
+
+    def __repr__(self):
+        return 'RemotePath(%s)' % self.basename
+
+    def listdir(self, *args):
+        self._channel.send(('LIST', self._id) + args)
+        return [RemotePath(self._channel, id, basename)
+                for (id, basename) in self._channel.receive()]
+
+    def dirpath(self):
+        id = ~COUNTER.next()
+        self._channel.send(('DIRPATH', self._id, id))
+        return RemotePath(self._channel, id)
+
+    def join(self, *args):
+        id = ~COUNTER.next()
+        self._channel.send(('JOIN', self._id, id) + args)
+        return RemotePath(self._channel, id)
+
+    def _getbyspec(self, spec):
+        parts = spec.split(',')
+        ask = [x for x in parts  if x not in self._specs]
+        if ask:
+            self._channel.send(('GET', self._id, ",".join(ask)))
+            for part, value in zip(ask, self._channel.receive()):
+                self._specs[part] = value
+        return [self._specs[x] for x in parts]
+
+    def read(self):
+        self._channel.send(('READ', self._id))
+        return self._channel.receive()

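The remote-path pair above speaks a small tuple protocol over an execnet-style channel. The following sketch is purely illustrative: it assumes the impl modules are importable directly, uses Python 3's queue module, and replaces a real gateway with a toy in-memory channel to show the round trip:

    import queue
    import py
    from py.impl.path.gateway.channeltest import PathServer
    from py.impl.path.gateway.remotepath import RemotePath

    class PipeEnd:
        """ toy stand-in for an execnet channel: send()/receive() over two queues. """
        def __init__(self, inbox, outbox):
            self._in, self._out = inbox, outbox
        def send(self, obj):
            self._out.put(obj)
        def receive(self):
            obj = self._in.get()
            if obj is None:
                raise EOFError
            return obj

    a, b = queue.Queue(), queue.Queue()
    server_end, client_end = PipeEnd(a, b), PipeEnd(b, a)
    srv = PathServer(server_end)                         # starts its serve() thread
    root = RemotePath(client_end, srv.p2c(py.path.local(".")))
    print([p.basename for p in root.listdir()])          # LIST round-trips over the queues
    client_end.send(None)                                # PipeEnd maps None to EOFError, stopping serve()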
Added: pypy/trunk/py/impl/path/local.py
==============================================================================
--- (empty file)
+++ pypy/trunk/py/impl/path/local.py	Wed Nov 11 18:54:49 2009
@@ -0,0 +1,799 @@
+"""
+local path implementation.
+"""
+import sys, os, stat, re, atexit
+import py
+from py.impl.path import common
+
+iswin32 = sys.platform == "win32"
+
+class Stat(object):
+    def __getattr__(self, name):
+        return getattr(self._osstatresult, "st_" + name)
+
+    def __init__(self, path, osstatresult): 
+        self.path = path 
+        self._osstatresult = osstatresult
+
+    def owner(self):
+        if iswin32:
+            raise NotImplementedError("XXX win32")
+        import pwd 
+        entry = py.error.checked_call(pwd.getpwuid, self.uid)
+        return entry[0]
+    owner = property(owner, None, None, "owner of path") 
+
+    def group(self):
+        """ return group name of file. """
+        if iswin32:
+            raise NotImplementedError("XXX win32")
+        import grp
+        entry = py.error.checked_call(grp.getgrgid, self.gid)
+        return entry[0]
+    group = property(group) 
+
+class PosixPath(common.PathBase):
+    def chown(self, user, group, rec=0):
+        """ change ownership to the given user and group.
+            user and group may be specified by a number or
+            by a name.  if rec is True change ownership
+            recursively.
+        """
+        uid = getuserid(user)
+        gid = getgroupid(group)
+        if rec:
+            for x in self.visit(rec=lambda x: x.check(link=0)): 
+                if x.check(link=0):
+                    py.error.checked_call(os.chown, str(x), uid, gid)
+        py.error.checked_call(os.chown, str(self), uid, gid)
+
+    def readlink(self):
+        """ return value of a symbolic link. """
+        return py.error.checked_call(os.readlink, self.strpath)
+
+    def mklinkto(self, oldname):
+        """ posix style hard link to another name. """
+        py.error.checked_call(os.link, str(oldname), str(self))
+
+    def mksymlinkto(self, value, absolute=1):
+        """ create a symbolic link with the given value (pointing to another name). """
+        if absolute:
+            py.error.checked_call(os.symlink, str(value), self.strpath)
+        else:
+            base = self.common(value)
+            # with posix local paths '/' is always a common base
+            relsource = self.__class__(value).relto(base)
+            reldest = self.relto(base)
+            n = reldest.count(self.sep)
+            target = self.sep.join(('..', )*n + (relsource, ))
+            py.error.checked_call(os.symlink, target, self.strpath)
+
+    def samefile(self, other):
+        """ return True if other refers to the same stat object as self. """
+        return py.error.checked_call(os.path.samefile, str(self), str(other))
+
+def getuserid(user):
+    import pwd
+    if not isinstance(user, int):
+        user = pwd.getpwnam(user)[2]
+    return user
+
+def getgroupid(group):
+    import grp
+    if not isinstance(group, int):
+        group = grp.getgrnam(group)[2]
+    return group
+
+FSBase = not iswin32 and PosixPath or common.PathBase
+
+class LocalPath(FSBase):
+    """ object oriented interface to os.path and other local filesystem 
+        related information. 
+    """
+    sep = os.sep
+    class Checkers(common.Checkers):
+        def _stat(self):
+            try:
+                return self._statcache
+            except AttributeError:
+                try:
+                    self._statcache = self.path.stat()
+                except py.error.ELOOP:
+                    self._statcache = self.path.lstat()
+                return self._statcache
+
+        def dir(self):
+            return stat.S_ISDIR(self._stat().mode)
+
+        def file(self):
+            return stat.S_ISREG(self._stat().mode)
+
+        def exists(self):
+            return self._stat()
+
+        def link(self):
+            st = self.path.lstat()
+            return stat.S_ISLNK(st.mode)
+
+    def __new__(cls, path=None):
+        """ Initialize and return a local Path instance.
+
+        Path can be relative to the current directory.
+        If it is None then the current working directory is taken.
+        Note that Path instances always carry an absolute path.
+        Note also that passing in a local path object will simply return
+        the exact same path object. Use new() to get a new copy.
+        """
+        if isinstance(path, common.PathBase):
+            if path.__class__ == cls:
+                return path
+            path = path.strpath
+        # initialize the path
+        self = object.__new__(cls)
+        if not path:
+            self.strpath = os.getcwd()
+        elif isinstance(path, py.builtin._basestring):
+            self.strpath = os.path.abspath(os.path.normpath(str(path)))
+        else:
+            raise ValueError("can only pass None, Path instances "
+                             "or non-empty strings to LocalPath")
+        assert isinstance(self.strpath, str)
+        return self
+
+    def __hash__(self):
+        return hash(self.strpath)
+
+    def __eq__(self, other):
+        s1 = str(self)
+        s2 = str(other)
+        if iswin32: 
+            s1 = s1.lower()
+            s2 = s2.lower()
+        return s1 == s2
+
+    def __ne__(self, other):
+        return not (self == other)
+
+    def __lt__(self, other):
+        return str(self) < str(other)
+
+    def remove(self, rec=1):
+        """ remove a file or directory (or a directory tree if rec=1).  """
+        if self.check(dir=1, link=0):
+            if rec:
+                # force remove of readonly files on windows 
+                if iswin32: 
+                    self.chmod(448, rec=1) # octal 0700
+                py.error.checked_call(py.std.shutil.rmtree, self.strpath)
+            else:
+                py.error.checked_call(os.rmdir, self.strpath)
+        else:
+            if iswin32: 
+                self.chmod(448) # octal 0700
+            py.error.checked_call(os.remove, self.strpath)
+
+    def computehash(self, hashtype="md5", chunksize=524288):
+        """ return hexdigest of hashvalue for this file. """
+        try:
+            try:
+                import hashlib as mod
+            except ImportError:
+                if hashtype == "sha1":
+                    hashtype = "sha"
+                mod = __import__(hashtype)
+            hash = getattr(mod, hashtype)()
+        except (AttributeError, ImportError):
+            raise ValueError("Don't know how to compute %r hash" %(hashtype,))
+        f = self.open('rb')
+        try:
+            while 1:
+                buf = f.read(chunksize)
+                if not buf:
+                    return hash.hexdigest()
+                hash.update(buf) 
+        finally:
+            f.close()
+
+    def new(self, **kw):
+        """ create a modified version of this path.
+            the following keyword arguments modify various path parts:
+
+              a:/some/path/to/a/file.ext
+              ||                            drive
+                |-------------|             dirname
+                                |------|    basename
+                                |--|        purebasename
+                                    |--|    ext
+        """
+        obj = object.__new__(self.__class__)
+        drive, dirname, basename, purebasename,ext = self._getbyspec(
+             "drive,dirname,basename,purebasename,ext")
+        if 'basename' in kw:
+            if 'purebasename' in kw or 'ext' in kw:
+                raise ValueError("invalid specification %r" % kw)
+        else:
+            pb = kw.setdefault('purebasename', purebasename)
+            try:
+                ext = kw['ext']
+            except KeyError:
+                pass
+            else:
+                if ext and not ext.startswith('.'):
+                    ext = '.' + ext
+            kw['basename'] = pb + ext
+
+        kw.setdefault('drive', drive)
+        kw.setdefault('dirname', dirname)
+        kw.setdefault('sep', self.sep)
+        obj.strpath = os.path.normpath(
+            "%(drive)s%(dirname)s%(sep)s%(basename)s" % kw)
+        return obj
+    
+    def _getbyspec(self, spec):
+        """ return a sequence of specified path parts.  'spec' is
+            a comma separated string containing path part names.
+            according to the following convention:
+            a:/some/path/to/a/file.ext
+            ||                            drive
+              |-------------|             dirname
+                              |------|    basename
+                              |--|        purebasename
+                                  |--|    ext
+        """
+        res = []
+        parts = self.strpath.split(self.sep)
+
+        args = filter(None, spec.split(',') )
+        append = res.append
+        for name in args:
+            if name == 'drive':
+                append(parts[0])
+            elif name == 'dirname':
+                append(self.sep.join(['']+parts[1:-1]))
+            else:
+                basename = parts[-1]
+                if name == 'basename':
+                    append(basename)
+                else:
+                    i = basename.rfind('.')
+                    if i == -1:
+                        purebasename, ext = basename, ''
+                    else:
+                        purebasename, ext = basename[:i], basename[i:]
+                    if name == 'purebasename':
+                        append(purebasename)
+                    elif name == 'ext':
+                        append(ext)
+                    else:
+                        raise ValueError("invalid part specification %r" % name)
+        return res
+
+    def join(self, *args, **kwargs):
+        """ return a new path by appending all 'args' as path
+        components.  if abs=1 is used restart from root if any
+        of the args is an absolute path.
+        """
+        if not args:
+            return self
+        strpath = self.strpath
+        sep = self.sep
+        strargs = [str(x) for x in args]
+        if kwargs.get('abs', 0):
+            for i in range(len(strargs)-1, -1, -1):
+                if os.path.isabs(strargs[i]):
+                    strpath = strargs[i]
+                    strargs = strargs[i+1:]
+                    break
+        for arg in strargs:
+            arg = arg.strip(sep)
+            if iswin32:
+                # allow unix style paths even on windows.
+                arg = arg.strip('/')
+                arg = arg.replace('/', sep)
+            if arg:
+                if not strpath.endswith(sep):
+                    strpath += sep
+                strpath += arg
+        obj = self.new()
+        obj.strpath = os.path.normpath(strpath)
+        return obj
+
+    def open(self, mode='r'):
+        """ return an opened file with the given mode. """
+        return py.error.checked_call(open, self.strpath, mode)
+
+    def listdir(self, fil=None, sort=None):
+        """ list directory contents, possibly filter by the given fil func
+            and possibly sorted.
+        """
+        if isinstance(fil, str):
+            fil = common.FNMatcher(fil)
+        res = []
+        for name in py.error.checked_call(os.listdir, self.strpath):
+            childurl = self.join(name)
+            if fil is None or fil(childurl):
+                res.append(childurl)
+        self._sortlist(res, sort)
+        return res 
+
+    def size(self):
+        """ return size of the underlying file object """
+        return self.stat().size
+
+    def mtime(self):
+        """ return last modification time of the path. """
+        return self.stat().mtime
+
+    def copy(self, target, archive=False):
+        """ copy path to target."""
+        assert not archive, "XXX archive-mode not supported"
+        if self.check(file=1):
+            if target.check(dir=1):
+                target = target.join(self.basename)
+            assert self!=target
+            copychunked(self, target)
+        else:
+            def rec(p):
+                return p.check(link=0)
+            for x in self.visit(rec=rec):
+                relpath = x.relto(self)
+                newx = target.join(relpath)
+                newx.dirpath().ensure(dir=1)
+                if x.check(link=1):
+                    newx.mksymlinkto(x.readlink())
+                elif x.check(file=1):
+                    copychunked(x, newx)
+                elif x.check(dir=1):
+                    newx.ensure(dir=1)
+
+    def rename(self, target):
+        """ rename this path to target. """
+        return py.error.checked_call(os.rename, str(self), str(target))
+
+    def dump(self, obj, bin=1):
+        """ pickle object into path location"""
+        f = self.open('wb')
+        try:
+            py.error.checked_call(py.std.pickle.dump, obj, f, bin)
+        finally:
+            f.close()
+
+    def mkdir(self, *args):
+        """ create & return the directory joined with args. """
+        p = self.join(*args)
+        py.error.checked_call(os.mkdir, str(p))
+        return p
+
+    def write(self, data, mode='w'):
+        """ write data into path. """
+        if 'b' in mode:
+            if not py.builtin._isbytes(data):
+                raise ValueError("can only process bytes")
+        else:
+            if not py.builtin._istext(data):
+                if not py.builtin._isbytes(data):
+                    data = str(data)
+                else:
+                    data = py.builtin._totext(data, sys.getdefaultencoding())
+        f = self.open(mode)
+        try:
+            f.write(data)
+        finally:
+            f.close()
+
+    def _ensuredirs(self):
+        parent = self.dirpath()
+        if parent == self:
+            return self
+        if parent.check(dir=0):
+            parent._ensuredirs()
+        if self.check(dir=0):
+            try:
+                self.mkdir()
+            except py.error.EEXIST:
+                # race condition: file/dir created by another thread/process.
+                # complain if it is not a dir
+                if self.check(dir=0):
+                    raise
+        return self
+
+    def ensure(self, *args, **kwargs):
+        """ ensure that an args-joined path exists (by default as
+            a file). if you specify a keyword argument 'dir=True'
+            then the path is forced to be a directory path.
+        """
+        p = self.join(*args)
+        if kwargs.get('dir', 0):
+            return p._ensuredirs()
+        else:
+            p.dirpath()._ensuredirs()
+            if not p.check(file=1):
+                p.open('w').close()
+            return p
+
+    def stat(self):
+        """ Return an os.stat() tuple. """
+        return Stat(self, py.error.checked_call(os.stat, self.strpath))
+
+    def lstat(self):
+        """ Return an os.lstat() tuple. """
+        return Stat(self, py.error.checked_call(os.lstat, self.strpath))
+
+    def setmtime(self, mtime=None):
+        """ set modification time for the given path.  if 'mtime' is None
+        (the default) then the file's mtime is set to current time.
+
+        Note that the resolution for 'mtime' is platform dependent.
+        """
+        if mtime is None:
+            return py.error.checked_call(os.utime, self.strpath, mtime)
+        try:
+            return py.error.checked_call(os.utime, self.strpath, (-1, mtime))
+        except py.error.EINVAL:
+            return py.error.checked_call(os.utime, self.strpath, (self.atime(), mtime))
+
+    def chdir(self):
+        """ change directory to self and return old current directory """
+        old = self.__class__()
+        py.error.checked_call(os.chdir, self.strpath)
+        return old
+
+    def realpath(self):
+        """ return a new path which contains no symbolic links."""
+        return self.__class__(os.path.realpath(self.strpath))
+
+    def atime(self):
+        """ return last access time of the path. """
+        return self.stat().atime
+
+    def __repr__(self):
+        return 'local(%r)' % self.strpath
+
+    def __str__(self):
+        """ return string representation of the Path. """
+        return self.strpath
+
+    def pypkgpath(self, pkgname=None):
+        """ return the path's package path by looking for the given
+            pkgname.  If pkgname is None then look for the last
+            directory upwards which still contains an __init__.py.
+            Return None if a pkgpath can not be determined.
+        """
+        pkgpath = None
+        for parent in self.parts(reverse=True):
+            if pkgname is None:
+                if parent.check(file=1):
+                    continue
+                if parent.join('__init__.py').check():
+                    pkgpath = parent
+                    continue
+                return pkgpath
+            else:
+                if parent.basename == pkgname:
+                    return parent
+        return pkgpath
+
+    def _prependsyspath(self, path):
+        s = str(path)
+        if s != sys.path[0]:
+            #print "prepending to sys.path", s
+            sys.path.insert(0, s)
+
+    def chmod(self, mode, rec=0):
+        """ change permissions to the given mode. If mode is an
+            integer it directly encodes the os-specific modes.
+            if rec is True perform recursively.
+        """
+        if not isinstance(mode, int):
+            raise TypeError("mode %r must be an integer" % (mode,))
+        if rec:
+            for x in self.visit(rec=rec):
+                py.error.checked_call(os.chmod, str(x), mode)
+        py.error.checked_call(os.chmod, str(self), mode)
+
+    def pyimport(self, modname=None, ensuresyspath=True):
+        """ return path as an imported python module.
+            if modname is None, look for the containing package
+            and construct an according module name.
+            The module will be put/looked up in sys.modules.
+        """
+        if not self.check():
+            raise py.error.ENOENT(self)
+        #print "trying to import", self
+        pkgpath = None
+        if modname is None:
+            pkgpath = self.pypkgpath()
+            if pkgpath is not None:
+                if ensuresyspath:
+                    self._prependsyspath(pkgpath.dirpath())
+                pkg = __import__(pkgpath.basename, None, None, [])
+                names = self.new(ext='').relto(pkgpath.dirpath())
+                names = names.split(self.sep)
+                modname = ".".join(names)
+            else:
+                # no package scope, still make it possible
+                if ensuresyspath:
+                    self._prependsyspath(self.dirpath())
+                modname = self.purebasename
+            mod = __import__(modname, None, None, ['__doc__'])
+            modfile = mod.__file__
+            if modfile[-4:] in ('.pyc', '.pyo'):
+                modfile = modfile[:-1]
+            elif modfile.endswith('$py.class'):
+                modfile = modfile[:-9] + '.py'
+            if not self.samefile(modfile):
+                raise EnvironmentError("mismatch:\n"
+                "imported module %r\n"
+                "does not stem from %r\n" 
+                "maybe __init__.py files are missing?" % (mod, str(self)))
+            return mod
+        else:
+            try:
+                return sys.modules[modname]
+            except KeyError:
+                # we have a custom modname, do a pseudo-import
+                mod = py.std.types.ModuleType(modname)
+                mod.__file__ = str(self)
+                sys.modules[modname] = mod
+                try:
+                    py.builtin.execfile(str(self), mod.__dict__)
+                except:
+                    del sys.modules[modname]
+                    raise
+                return mod
+
+    def sysexec(self, *argv, **popen_opts):
+        """ return stdout text from executing a system child process,
+            where the 'self' path points to executable. 
+            The process is directly invoked and not through a system shell. 
+        """
+        from subprocess import Popen, PIPE
+        argv = map(str, argv)
+        popen_opts['stdout'] = popen_opts['stderr'] = PIPE
+        proc = Popen([str(self)] + list(argv), **popen_opts)
+        stdout, stderr = proc.communicate()
+        ret = proc.wait()
+        if py.builtin._isbytes(stdout):
+            stdout = py.builtin._totext(stdout, sys.getdefaultencoding())
+        if ret != 0:
+            if py.builtin._isbytes(stderr):
+                stderr = py.builtin._totext(stderr, sys.getdefaultencoding())
+            raise py.process.cmdexec.Error(ret, ret, str(self),
+                                           stdout, stderr,)
+        return stdout
+
+    def sysfind(cls, name, checker=None):
+        """ return a path object found by looking at the systems
+            underlying PATH specification. If the checker is not None
+            it will be invoked to filter matching paths.  If a binary
+            cannot be found, None is returned
+            Note: This is probably not working on plain win32 systems
+            but may work on cygwin.
+        """
+        if os.path.isabs(name):
+            p = py.path.local(name)
+            if p.check(file=1):
+                return p
+        else:
+            if iswin32:
+                paths = py.std.os.environ['Path'].split(';')
+                if '' not in paths and '.' not in paths:
+                    paths.append('.')
+                try:
+                    systemroot = os.environ['SYSTEMROOT']
+                except KeyError:
+                    pass
+                else:
+                    paths = [re.sub('%SystemRoot%', systemroot, path)
+                             for path in paths]
+                tryadd = '', '.exe', '.com', '.bat' # XXX add more?
+            else:
+                paths = py.std.os.environ['PATH'].split(':')
+                tryadd = ('',)
+
+            for x in paths:
+                for addext in tryadd:
+                    p = py.path.local(x).join(name, abs=True) + addext
+                    try:
+                        if p.check(file=1):
+                            if checker:
+                                if not checker(p):
+                                    continue
+                            return p
+                    except py.error.EACCES:
+                        pass
+        return None
+    sysfind = classmethod(sysfind)
+
+    def _gethomedir(cls):
+        try:
+            x = os.environ['HOME']
+        except KeyError:
+            x = os.environ['HOMEPATH']
+        return cls(x)
+    _gethomedir = classmethod(_gethomedir)
+
+    #"""
+    #special class constructors for local filesystem paths
+    #"""
+    def get_temproot(cls):
+        """ return the system's temporary directory
+            (where tempfiles are usually created in)
+        """
+        return py.path.local(py.std.tempfile.gettempdir())
+    get_temproot = classmethod(get_temproot)
+
+    def mkdtemp(cls):
+        """ return a Path object pointing to a fresh new temporary directory
+            (which we created ourself).
+        """
+        import tempfile
+        tries = 10
+        for i in range(tries):
+            dname = tempfile.mktemp()
+            dpath = cls(dname)
+            try:
+                dpath.mkdir()
+            except (py.error.EEXIST, py.error.EPERM, py.error.EACCES):
+                continue
+            return dpath
+        raise py.error.ENOENT(dpath, "could not create tempdir, %d tries" % tries)
+    mkdtemp = classmethod(mkdtemp)
+
+    def make_numbered_dir(cls, prefix='session-', rootdir=None, keep=3,
+                          lock_timeout = 172800):   # two days
+        """ return unique directory with a number greater than the current
+            maximum one.  The number is assumed to start directly after prefix.
+            if keep is true directories with a number less than (maxnum-keep)
+            will be removed.
+        """
+        if rootdir is None:
+            rootdir = cls.get_temproot()
+
+        def parse_num(path):
+            """ parse the number out of a path (if it matches the prefix) """
+            bn = path.basename
+            if bn.startswith(prefix):
+                try:
+                    return int(bn[len(prefix):])
+                except ValueError:
+                    pass
+
+        # compute the maximum number currently in use with the
+        # prefix
+        lastmax = None
+        while True:
+            maxnum = -1
+            for path in rootdir.listdir():
+                num = parse_num(path)
+                if num is not None:
+                    maxnum = max(maxnum, num)
+
+            # make the new directory
+            try:
+                udir = rootdir.mkdir(prefix + str(maxnum+1))
+            except py.error.EEXIST:
+                # race condition: another thread/process created the dir
+                # in the meantime.  Try counting again
+                if lastmax == maxnum:
+                    raise
+                lastmax = maxnum
+                continue
+            break
+
+        # put a .lock file in the new directory that will be removed at
+        # process exit
+        if lock_timeout:
+            lockfile = udir.join('.lock')
+            mypid = os.getpid()
+            if hasattr(lockfile, 'mksymlinkto'):
+                lockfile.mksymlinkto(str(mypid))
+            else:
+                lockfile.write(str(mypid))
+            def try_remove_lockfile():
+                # in a fork() situation, only the last process should
+                # remove the .lock, otherwise the other processes run the
+                # risk of seeing their temporary dir disappear.  For now
+                # we remove the .lock in the parent only (i.e. we assume
+                # that the children finish before the parent).
+                if os.getpid() != mypid:
+                    return
+                try:
+                    lockfile.remove()
+                except py.error.Error:
+                    pass
+            atexit.register(try_remove_lockfile)
+
+        # prune old directories
+        if keep:
+            for path in rootdir.listdir():
+                num = parse_num(path)
+                if num is not None and num <= (maxnum - keep):
+                    lf = path.join('.lock')
+                    try:
+                        t1 = lf.lstat().mtime
+                        t2 = lockfile.lstat().mtime
+                        if not lock_timeout or abs(t2-t1) < lock_timeout:
+                            continue   # skip directories still locked
+                    except py.error.Error:
+                        pass   # assume that it means that there is no 'lf'
+                    try:
+                        path.remove(rec=1)
+                    except KeyboardInterrupt:
+                        raise
+                    except: # this might be py.error.Error, WindowsError ...
+                        pass
+        
+        # make link...
+        try:
+            username = os.environ['USER']           #linux, et al
+        except KeyError:
+            try:
+                username = os.environ['USERNAME']   #windows
+            except KeyError:
+                username = 'current'
+
+        src  = str(udir)
+        dest = src[:src.rfind('-')] + '-' + username
+        try:
+            os.unlink(dest)
+        except OSError:
+            pass
+        try:
+            os.symlink(src, dest)
+        except (OSError, AttributeError): # AttributeError on win32
+            pass
+
+        return udir
+    make_numbered_dir = classmethod(make_numbered_dir)
+
+def copychunked(src, dest):
+    chunksize = 524288 # half a meg of bytes
+    fsrc = src.open('rb')
+    try:
+        fdest = dest.open('wb')
+        try:
+            while 1:
+                buf = fsrc.read(chunksize)
+                if not buf:
+                    break
+                fdest.write(buf)
+        finally:
+            fdest.close()
+    finally:
+        fsrc.close()
+
+def autopath(globs=None):
+    """ (deprecated) return the (local) path of the "current" file pointed to by globals or - if it is none - alternatively the callers frame globals.
+
+        the path will always point to a .py file  or to None.
+        the path will have the following payload:
+        pkgdir   is the last parent directory path containing __init__.py 
+    """
+    py.log._apiwarn("1.1", "py.magic.autopath deprecated, "
+        "use py.path.local(__file__) and maybe pypkgpath/pyimport().")
+    if globs is None:
+        globs = sys._getframe(1).f_globals
+    try:
+        __file__ = globs['__file__']
+    except KeyError:
+        if not sys.argv[0]:
+            raise ValueError("cannot compute autopath in interactive mode")
+        __file__ = os.path.abspath(sys.argv[0])
+
+    ret = py.path.local(__file__)
+    if ret.ext in ('.pyc', '.pyo'):
+        ret = ret.new(ext='.py')
+    current = pkgdir = ret.dirpath()
+    while 1:
+        if current.join('__init__.py').check():
+            pkgdir = current
+            current = current.dirpath()
+            if pkgdir != current:
+                continue
+        elif str(current) not in sys.path:
+            sys.path.insert(0, str(current))
+        break
+    ret.pkgdir = pkgdir
+    return ret
+

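A hedged sketch of the LocalPath conveniences defined above, using the py.path.local name under which the py package exposes this class:

    import py

    tmp = py.path.local.mkdtemp()             # fresh temporary directory
    f = tmp.ensure("pkg", "mod.py")           # creates pkg/ and an empty mod.py
    f.write("x = 42\n")
    print(f.read())                           # -> x = 42

    numbered = py.path.local.make_numbered_dir(prefix="demo-", keep=3)
    print(numbered.basename)                  # e.g. demo-0, demo-1, ...

    python = py.path.local.sysfind("python")  # search PATH for an executable
    if python is not None:
        print(python.sysexec("-c", "print('hi')"))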
Added: pypy/trunk/py/impl/path/local.py.orig
==============================================================================
--- (empty file)
+++ pypy/trunk/py/impl/path/local.py.orig	Wed Nov 11 18:54:49 2009
@@ -0,0 +1,802 @@
+"""
+local path implementation.
+"""
+import sys, os, stat, re, atexit
+import py
+from py.impl.path import common
+
+iswin32 = sys.platform == "win32"
+
+class Stat(object):
+    def __getattr__(self, name):
+        return getattr(self._osstatresult, "st_" + name)
+
+    def __init__(self, path, osstatresult): 
+        self.path = path 
+        self._osstatresult = osstatresult
+
+    def owner(self):
+        if iswin32:
+            raise NotImplementedError("XXX win32")
+        import pwd 
+        entry = py.error.checked_call(pwd.getpwuid, self.uid)
+        return entry[0]
+    owner = property(owner, None, None, "owner of path") 
+
+    def group(self):
+        """ return group name of file. """
+        if iswin32:
+            raise NotImplementedError("XXX win32")
+        import grp
+        entry = py.error.checked_call(grp.getgrgid, self.gid)
+        return entry[0]
+    group = property(group) 
+
+class PosixPath(common.PathBase):
+    def chown(self, user, group, rec=0):
+        """ change ownership to the given user and group.
+            user and group may be specified by a number or
+            by a name.  if rec is True change ownership
+            recursively.
+        """
+        uid = getuserid(user)
+        gid = getgroupid(group)
+        if rec:
+            for x in self.visit(rec=lambda x: x.check(link=0)): 
+                if x.check(link=0):
+                    py.error.checked_call(os.chown, str(x), uid, gid)
+        py.error.checked_call(os.chown, str(self), uid, gid)
+
+    def readlink(self):
+        """ return value of a symbolic link. """
+        return py.error.checked_call(os.readlink, self.strpath)
+
+    def mklinkto(self, oldname):
+        """ posix style hard link to another name. """
+        py.error.checked_call(os.link, str(oldname), str(self))
+
+    def mksymlinkto(self, value, absolute=1):
+        """ create a symbolic link with the given value (pointing to another name). """
+        if absolute:
+            py.error.checked_call(os.symlink, str(value), self.strpath)
+        else:
+            base = self.common(value)
+            # with posix local paths '/' is always a common base
+            relsource = self.__class__(value).relto(base)
+            reldest = self.relto(base)
+            n = reldest.count(self.sep)
+            target = self.sep.join(('..', )*n + (relsource, ))
+            py.error.checked_call(os.symlink, target, self.strpath)
+
+    def samefile(self, other):
+        """ return True if other refers to the same stat object as self. """
+        return py.error.checked_call(os.path.samefile, str(self), str(other))
+
+def getuserid(user):
+    import pwd
+    if not isinstance(user, int):
+        user = pwd.getpwnam(user)[2]
+    return user
+
+def getgroupid(group):
+    import grp
+    if not isinstance(group, int):
+        group = grp.getgrnam(group)[2]
+    return group
+
+FSBase = not iswin32 and PosixPath or common.PathBase
+
+class LocalPath(FSBase):
+    """ object oriented interface to os.path and other local filesystem 
+        related information. 
+    """
+    sep = os.sep
+    class Checkers(common.Checkers):
+        def _stat(self):
+            try:
+                return self._statcache
+            except AttributeError:
+                try:
+                    self._statcache = self.path.stat()
+                except py.error.ELOOP:
+                    self._statcache = self.path.lstat()
+                return self._statcache
+
+        def dir(self):
+            return stat.S_ISDIR(self._stat().mode)
+
+        def file(self):
+            return stat.S_ISREG(self._stat().mode)
+
+        def exists(self):
+            return self._stat()
+
+        def link(self):
+            st = self.path.lstat()
+            return stat.S_ISLNK(st.mode)
+
+    def __new__(cls, path=None):
+        """ Initialize and return a local Path instance.
+
+        Path can be relative to the current directory.
+        If it is None then the current working directory is taken.
+        Note that Path instances always carry an absolute path.
+        Note also that passing in a local path object will simply return
+        the exact same path object. Use new() to get a new copy.
+        """
+        if isinstance(path, common.PathBase):
+            if path.__class__ == cls:
+                return path
+            path = path.strpath
+        # initialize the path
+        self = object.__new__(cls)
+        if not path:
+            self.strpath = os.getcwd()
+        elif isinstance(path, py.builtin._basestring):
+            self.strpath = os.path.abspath(os.path.normpath(str(path)))
+        else:
+            raise ValueError("can only pass None, Path instances "
+                             "or non-empty strings to LocalPath")
+        assert isinstance(self.strpath, str)
+        return self
+
+    def __hash__(self):
+        return hash(self.strpath)
+
+    def __eq__(self, other):
+        s1 = str(self)
+        s2 = str(other)
+        if iswin32: 
+            s1 = s1.lower()
+            s2 = s2.lower()
+        return s1 == s2
+
+    def __ne__(self, other):
+        return not (self == other)
+
+    def __lt__(self, other):
+        return str(self) < str(other)
+
+    def remove(self, rec=1):
+        """ remove a file or directory (or a directory tree if rec=1).  """
+        if self.check(dir=1, link=0):
+            if rec:
+                # force remove of readonly files on windows 
+                if iswin32: 
+                    self.chmod(448, rec=1) # octal 0700
+                py.error.checked_call(py.std.shutil.rmtree, self.strpath)
+            else:
+                py.error.checked_call(os.rmdir, self.strpath)
+        else:
+            if iswin32: 
+                self.chmod(448) # octal 0700
+            py.error.checked_call(os.remove, self.strpath)
+
+    def computehash(self, hashtype="md5", chunksize=524288):
+        """ return hexdigest of hashvalue for this file. """
+        try:
+            try:
+                import hashlib as mod
+            except ImportError:
+                if hashtype == "sha1":
+                    hashtype = "sha"
+                mod = __import__(hashtype)
+            hash = getattr(mod, hashtype)()
+        except (AttributeError, ImportError):
+            raise ValueError("Don't know how to compute %r hash" %(hashtype,))
+        f = self.open('rb')
+        try:
+            while 1:
+                buf = f.read(chunksize)
+                if not buf:
+                    return hash.hexdigest()
+                hash.update(buf) 
+        finally:
+            f.close()
+
+    def new(self, **kw):
+        """ create a modified version of this path.
+            the following keyword arguments modify various path parts:
+
+              a:/some/path/to/a/file.ext
+              ||                            drive
+                |-------------|             dirname
+                                |------|    basename
+                                |--|        purebasename
+                                    |--|    ext
+        """
+        obj = object.__new__(self.__class__)
+        drive, dirname, basename, purebasename,ext = self._getbyspec(
+             "drive,dirname,basename,purebasename,ext")
+        if 'basename' in kw:
+            if 'purebasename' in kw or 'ext' in kw:
+                raise ValueError("invalid specification %r" % kw)
+        else:
+            pb = kw.setdefault('purebasename', purebasename)
+            try:
+                ext = kw['ext']
+            except KeyError:
+                pass
+            else:
+                if ext and not ext.startswith('.'):
+                    ext = '.' + ext
+            kw['basename'] = pb + ext
+
+        kw.setdefault('drive', drive)
+        kw.setdefault('dirname', dirname)
+        kw.setdefault('sep', self.sep)
+        obj.strpath = os.path.normpath(
+            "%(drive)s%(dirname)s%(sep)s%(basename)s" % kw)
+        return obj
+    
+    def _getbyspec(self, spec):
+        """ return a sequence of specified path parts.  'spec' is
+            a comma separated string containing path part names.
+            according to the following convention:
+            a:/some/path/to/a/file.ext
+            ||                            drive
+              |-------------|             dirname
+                              |------|    basename
+                              |--|        purebasename
+                                  |--|    ext
+        """
+        res = []
+        parts = self.strpath.split(self.sep)
+
+        args = filter(None, spec.split(',') )
+        append = res.append
+        for name in args:
+            if name == 'drive':
+                append(parts[0])
+            elif name == 'dirname':
+                append(self.sep.join(['']+parts[1:-1]))
+            else:
+                basename = parts[-1]
+                if name == 'basename':
+                    append(basename)
+                else:
+                    i = basename.rfind('.')
+                    if i == -1:
+                        purebasename, ext = basename, ''
+                    else:
+                        purebasename, ext = basename[:i], basename[i:]
+                    if name == 'purebasename':
+                        append(purebasename)
+                    elif name == 'ext':
+                        append(ext)
+                    else:
+                        raise ValueError("invalid part specification %r" % name)
+        return res
+
+    def join(self, *args, **kwargs):
+        """ return a new path by appending all 'args' as path
+        components.  if abs=1 is used restart from root if any
+        of the args is an absolute path.
+        """
+        if not args:
+            return self
+        strpath = self.strpath
+        sep = self.sep
+        strargs = [str(x) for x in args]
+        if kwargs.get('abs', 0):
+            for i in range(len(strargs)-1, -1, -1):
+                if os.path.isabs(strargs[i]):
+                    strpath = strargs[i]
+                    strargs = strargs[i+1:]
+                    break
+        for arg in strargs:
+            arg = arg.strip(sep)
+            if iswin32:
+                # allow unix style paths even on windows.
+                arg = arg.strip('/')
+                arg = arg.replace('/', sep)
+            if arg:
+                if not strpath.endswith(sep):
+                    strpath += sep
+                strpath += arg
+        obj = self.new()
+        obj.strpath = os.path.normpath(strpath)
+        return obj
+
+    def open(self, mode='r'):
+        """ return an opened file with the given mode. """
+        return py.error.checked_call(open, self.strpath, mode)
+
+    def listdir(self, fil=None, sort=None):
+        """ list directory contents, possibly filter by the given fil func
+            and possibly sorted.
+        """
+        if isinstance(fil, str):
+            fil = common.FNMatcher(fil)
+        res = []
+        for name in py.error.checked_call(os.listdir, self.strpath):
+            childurl = self.join(name)
+            if fil is None or fil(childurl):
+                res.append(childurl)
+        self._sortlist(res, sort)
+        return res 
+
+    def size(self):
+        """ return size of the underlying file object """
+        return self.stat().size
+
+    def mtime(self):
+        """ return last modification time of the path. """
+        return self.stat().mtime
+
+    def copy(self, target, archive=False):
+        """ copy path to target."""
+        assert not archive, "XXX archive-mode not supported"
+        if self.check(file=1):
+            if target.check(dir=1):
+                target = target.join(self.basename)
+            assert self!=target
+            copychunked(self, target)
+        else:
+            def rec(p):
+                return p.check(link=0)
+            for x in self.visit(rec=rec):
+                relpath = x.relto(self)
+                newx = target.join(relpath)
+                newx.dirpath().ensure(dir=1)
+                if x.check(link=1):
+                    newx.mksymlinkto(x.readlink())
+                elif x.check(file=1):
+                    copychunked(x, newx)
+                elif x.check(dir=1):
+                    newx.ensure(dir=1)
+
+    def rename(self, target):
+        """ rename this path to target. """
+        return py.error.checked_call(os.rename, str(self), str(target))
+
+    def dump(self, obj, bin=1):
+        """ pickle object into path location"""
+        f = self.open('wb')
+        try:
+            py.error.checked_call(py.std.pickle.dump, obj, f, bin)
+        finally:
+            f.close()
+
+    def mkdir(self, *args):
+        """ create & return the directory joined with args. """
+        p = self.join(*args)
+        py.error.checked_call(os.mkdir, str(p))
+        return p
+
+    def write(self, data, mode='w'):
+        """ write data into path. """
+        if 'b' in mode:
+            if not py.builtin._isbytes(data):
+                raise ValueError("can only process bytes")
+        else:
+            if not py.builtin._istext(data):
+                if not py.builtin._isbytes(data):
+                    data = str(data)
+                else:
+                    try:
+                        data = py.builtin._totext(data, sys.getdefaultencoding())
+                    except UnicodeDecodeError:
+                        pass
+        f = self.open(mode)
+        try:
+            f.write(data)
+        finally:
+            f.close()
+
+    def _ensuredirs(self):
+        parent = self.dirpath()
+        if parent == self:
+            return self
+        if parent.check(dir=0):
+            parent._ensuredirs()
+        if self.check(dir=0):
+            try:
+                self.mkdir()
+            except py.error.EEXIST:
+                # race condition: file/dir created by another thread/process.
+                # complain if it is not a dir
+                if self.check(dir=0):
+                    raise
+        return self
+
+    def ensure(self, *args, **kwargs):
+        """ ensure that an args-joined path exists (by default as
+            a file). if you specify a keyword argument 'dir=True'
+            then the path is forced to be a directory path.
+        """
+        p = self.join(*args)
+        if kwargs.get('dir', 0):
+            return p._ensuredirs()
+        else:
+            p.dirpath()._ensuredirs()
+            if not p.check(file=1):
+                p.open('w').close()
+            return p
+
+    def stat(self):
+        """ Return an os.stat() tuple. """
+        return Stat(self, py.error.checked_call(os.stat, self.strpath))
+
+    def lstat(self):
+        """ Return an os.lstat() tuple. """
+        return Stat(self, py.error.checked_call(os.lstat, self.strpath))
+
+    def setmtime(self, mtime=None):
+        """ set modification time for the given path.  if 'mtime' is None
+        (the default) then the file's mtime is set to current time.
+
+        Note that the resolution for 'mtime' is platform dependent.
+        """
+        if mtime is None:
+            return py.error.checked_call(os.utime, self.strpath, mtime)
+        try:
+            return py.error.checked_call(os.utime, self.strpath, (-1, mtime))
+        except py.error.EINVAL:
+            return py.error.checked_call(os.utime, self.strpath, (self.atime(), mtime))
+
+    def chdir(self):
+        """ change directory to self and return old current directory """
+        old = self.__class__()
+        py.error.checked_call(os.chdir, self.strpath)
+        return old
+
+    def realpath(self):
+        """ return a new path which contains no symbolic links."""
+        return self.__class__(os.path.realpath(self.strpath))
+
+    def atime(self):
+        """ return last access time of the path. """
+        return self.stat().atime
+
+    def __repr__(self):
+        return 'local(%r)' % self.strpath
+
+    def __str__(self):
+        """ return string representation of the Path. """
+        return self.strpath
+
+    def pypkgpath(self, pkgname=None):
+        """ return the path's package path by looking for the given
+            pkgname.  If pkgname is None then look for the last
+            directory upwards which still contains an __init__.py.
+            Return None if a pkgpath can not be determined.
+        """
+        pkgpath = None
+        for parent in self.parts(reverse=True):
+            if pkgname is None:
+                if parent.check(file=1):
+                    continue
+                if parent.join('__init__.py').check():
+                    pkgpath = parent
+                    continue
+                return pkgpath
+            else:
+                if parent.basename == pkgname:
+                    return parent
+        return pkgpath
+
+    def _prependsyspath(self, path):
+        s = str(path)
+        if s != sys.path[0]:
+            #print "prepending to sys.path", s
+            sys.path.insert(0, s)
+
+    def chmod(self, mode, rec=0):
+        """ change permissions to the given mode. If mode is an
+            integer it directly encodes the os-specific modes.
+            if rec is True perform recursively.
+        """
+        if not isinstance(mode, int):
+            raise TypeError("mode %r must be an integer" % (mode,))
+        if rec:
+            for x in self.visit(rec=rec):
+                py.error.checked_call(os.chmod, str(x), mode)
+        py.error.checked_call(os.chmod, str(self), mode)
+
+    def pyimport(self, modname=None, ensuresyspath=True):
+        """ return path as an imported python module.
+            if modname is None, look for the containing package
+            and construct a corresponding module name.
+            The module will be put/looked up in sys.modules.
+        """
+        if not self.check():
+            raise py.error.ENOENT(self)
+        #print "trying to import", self
+        pkgpath = None
+        if modname is None:
+            pkgpath = self.pypkgpath()
+            if pkgpath is not None:
+                if ensuresyspath:
+                    self._prependsyspath(pkgpath.dirpath())
+                pkg = __import__(pkgpath.basename, None, None, [])
+                names = self.new(ext='').relto(pkgpath.dirpath())
+                names = names.split(self.sep)
+                modname = ".".join(names)
+            else:
+                # no package scope, still make it possible
+                if ensuresyspath:
+                    self._prependsyspath(self.dirpath())
+                modname = self.purebasename
+            mod = __import__(modname, None, None, ['__doc__'])
+            modfile = mod.__file__
+            if modfile[-4:] in ('.pyc', '.pyo'):
+                modfile = modfile[:-1]
+            elif modfile.endswith('$py.class'):
+                modfile = modfile[:-9] + '.py'
+            if not self.samefile(modfile):
+                raise EnvironmentError("mismatch:\n"
+                "imported module %r\n"
+                "does not stem from %r\n" 
+                "maybe __init__.py files are missing?" % (mod, str(self)))
+            return mod
+        else:
+            try:
+                return sys.modules[modname]
+            except KeyError:
+                # we have a custom modname, do a pseudo-import
+                mod = py.std.types.ModuleType(modname)
+                mod.__file__ = str(self)
+                sys.modules[modname] = mod
+                try:
+                    py.builtin.execfile(str(self), mod.__dict__)
+                except:
+                    del sys.modules[modname]
+                    raise
+                return mod
+
+    def sysexec(self, *argv, **popen_opts):
+        """ return stdout text from executing a system child process,
+            where the 'self' path points to executable. 
+            where the 'self' path points to the executable.
+        """
+        from subprocess import Popen, PIPE
+        argv = map(str, argv)
+        popen_opts['stdout'] = popen_opts['stderr'] = PIPE
+        proc = Popen([str(self)] + list(argv), **popen_opts)
+        stdout, stderr = proc.communicate()
+        ret = proc.wait()
+        if py.builtin._isbytes(stdout):
+            stdout = py.builtin._totext(stdout, sys.getdefaultencoding())
+        if ret != 0:
+            if py.builtin._isbytes(stderr):
+                stderr = py.builtin._totext(stderr, sys.getdefaultencoding())
+            raise py.process.cmdexec.Error(ret, ret, str(self),
+                                           stdout, stderr,)
+        return stdout
+
+    def sysfind(cls, name, checker=None):
+        """ return a path object found by looking at the systems
+            underlying PATH specification. If the checker is not None
+            it will be invoked to filter matching paths.  If a binary
+            cannot be found, None is returned
+            Note: This is probably not working on plain win32 systems
+            but may work on cygwin.
+        """
+        if os.path.isabs(name):
+            p = py.path.local(name)
+            if p.check(file=1):
+                return p
+        else:
+            if iswin32:
+                paths = py.std.os.environ['Path'].split(';')
+                if '' not in paths and '.' not in paths:
+                    paths.append('.')
+                try:
+                    systemroot = os.environ['SYSTEMROOT']
+                except KeyError:
+                    pass
+                else:
+                    paths = [re.sub('%SystemRoot%', systemroot, path)
+                             for path in paths]
+                tryadd = '', '.exe', '.com', '.bat' # XXX add more?
+            else:
+                paths = py.std.os.environ['PATH'].split(':')
+                tryadd = ('',)
+
+            for x in paths:
+                for addext in tryadd:
+                    p = py.path.local(x).join(name, abs=True) + addext
+                    try:
+                        if p.check(file=1):
+                            if checker:
+                                if not checker(p):
+                                    continue
+                            return p
+                    except py.error.EACCES:
+                        pass
+        return None
+    sysfind = classmethod(sysfind)
+
+    def _gethomedir(cls):
+        try:
+            x = os.environ['HOME']
+        except KeyError:
+            x = os.environ['HOMEPATH']
+        return cls(x)
+    _gethomedir = classmethod(_gethomedir)
+
+    #"""
+    #special class constructors for local filesystem paths
+    #"""
+    def get_temproot(cls):
+        """ return the system's temporary directory
+            (where tempfiles are usually created)
+        """
+        return py.path.local(py.std.tempfile.gettempdir())
+    get_temproot = classmethod(get_temproot)
+
+    def mkdtemp(cls):
+        """ return a Path object pointing to a fresh new temporary directory
+            (which we created ourselves).
+        """
+        import tempfile
+        tries = 10
+        for i in range(tries):
+            dname = tempfile.mktemp()
+            dpath = cls(dname)
+            try:
+                dpath.mkdir()
+            except (py.error.EEXIST, py.error.EPERM, py.error.EACCES):
+                continue
+            return dpath
+        raise py.error.ENOENT(dpath, "could not create tempdir, %d tries" % tries)
+    mkdtemp = classmethod(mkdtemp)
+
+    def make_numbered_dir(cls, prefix='session-', rootdir=None, keep=3,
+                          lock_timeout = 172800):   # two days
+        """ return unique directory with a number greater than the current
+            maximum one.  The number is assumed to start directly after prefix.
+            if keep is true, directories with a number not greater than
+            (maxnum-keep) will be removed.
+        """
+        if rootdir is None:
+            rootdir = cls.get_temproot()
+
+        def parse_num(path):
+            """ parse the number out of a path (if it matches the prefix) """
+            bn = path.basename
+            if bn.startswith(prefix):
+                try:
+                    return int(bn[len(prefix):])
+                except ValueError:
+                    pass
+
+        # compute the maximum number currently in use with the
+        # prefix
+        lastmax = None
+        while True:
+            maxnum = -1
+            for path in rootdir.listdir():
+                num = parse_num(path)
+                if num is not None:
+                    maxnum = max(maxnum, num)
+
+            # make the new directory
+            try:
+                udir = rootdir.mkdir(prefix + str(maxnum+1))
+            except py.error.EEXIST:
+                # race condition: another thread/process created the dir
+                # in the meantime.  Try counting again
+                if lastmax == maxnum:
+                    raise
+                lastmax = maxnum
+                continue
+            break
+
+        # put a .lock file in the new directory that will be removed at
+        # process exit
+        if lock_timeout:
+            lockfile = udir.join('.lock')
+            mypid = os.getpid()
+            if hasattr(lockfile, 'mksymlinkto'):
+                lockfile.mksymlinkto(str(mypid))
+            else:
+                lockfile.write(str(mypid))
+            def try_remove_lockfile():
+                # in a fork() situation, only the last process should
+                # remove the .lock, otherwise the other processes run the
+                # risk of seeing their temporary dir disappear.  For now
+                # we remove the .lock in the parent only (i.e. we assume
+                # that the children finish before the parent).
+                if os.getpid() != mypid:
+                    return
+                try:
+                    lockfile.remove()
+                except py.error.Error:
+                    pass
+            atexit.register(try_remove_lockfile)
+
+        # prune old directories
+        if keep:
+            for path in rootdir.listdir():
+                num = parse_num(path)
+                if num is not None and num <= (maxnum - keep):
+                    lf = path.join('.lock')
+                    try:
+                        t1 = lf.lstat().mtime
+                        t2 = lockfile.lstat().mtime
+                        if not lock_timeout or abs(t2-t1) < lock_timeout:
+                            continue   # skip directories still locked
+                    except py.error.Error:
+                        pass   # assume that it means that there is no 'lf'
+                    try:
+                        path.remove(rec=1)
+                    except KeyboardInterrupt:
+                        raise
+                    except: # this might be py.error.Error, WindowsError ...
+                        pass
+        
+        # make link...
+        try:
+            username = os.environ['USER']           #linux, et al
+        except KeyError:
+            try:
+                username = os.environ['USERNAME']   #windows
+            except KeyError:
+                username = 'current'
+
+        src  = str(udir)
+        dest = src[:src.rfind('-')] + '-' + username
+        try:
+            os.unlink(dest)
+        except OSError:
+            pass
+        try:
+            os.symlink(src, dest)
+        except (OSError, AttributeError): # AttributeError on win32
+            pass
+
+        return udir
+    make_numbered_dir = classmethod(make_numbered_dir)
+
+def copychunked(src, dest):
+    chunksize = 524288 # half a meg of bytes
+    fsrc = src.open('rb')
+    try:
+        fdest = dest.open('wb')
+        try:
+            while 1:
+                buf = fsrc.read(chunksize)
+                if not buf:
+                    break
+                fdest.write(buf)
+        finally:
+            fdest.close()
+    finally:
+        fsrc.close()
+
+def autopath(globs=None):
+    """ (deprecated) return the (local) path of the "current" file pointed to by globals or - if it is none - alternatively the callers frame globals.
+
+        the path will always point to a .py file  or to None.
+        the path will have the following payload:
+        pkgdir   is the last parent directory path containing __init__.py 
+    """
+    py.log._apiwarn("1.1", "py.magic.autopath deprecated, "
+        "use py.path.local(__file__) and maybe pypkgpath/pyimport().")
+    if globs is None:
+        globs = sys._getframe(1).f_globals
+    try:
+        __file__ = globs['__file__']
+    except KeyError:
+        if not sys.argv[0]:
+            raise ValueError("cannot compute autopath in interactive mode")
+        __file__ = os.path.abspath(sys.argv[0])
+
+    ret = py.path.local(__file__)
+    if ret.ext in ('.pyc', '.pyo'):
+        ret = ret.new(ext='.py')
+    current = pkgdir = ret.dirpath()
+    while 1:
+        if current.join('__init__.py').check():
+            pkgdir = current
+            current = current.dirpath()
+            if pkgdir != current:
+                continue
+        elif str(current) not in sys.path:
+            sys.path.insert(0, str(current))
+        break
+    ret.pkgdir = pkgdir
+    return ret
+
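
The deprecation warning in autopath() above points at the replacement pattern;
a minimal sketch of it (illustrative only, not part of this commit; the module
layout is assumed):

    import py

    here = py.path.local(__file__)       # instead of py.magic.autopath()
    pkgdir = here.pypkgpath()            # topmost parent dir that still has an __init__.py, or None
    if pkgdir is not None:
        mod = here.pyimport()            # import the file as a module of that package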

Added: pypy/trunk/py/impl/path/svnurl.py
==============================================================================
--- (empty file)
+++ pypy/trunk/py/impl/path/svnurl.py	Wed Nov 11 18:54:49 2009
@@ -0,0 +1,365 @@
+"""
+module defining a subversion path object based on the external
+command 'svn'. This module aims to work with svn 1.3 and higher
+but might also interact well with earlier versions. 
+"""
+
+import os, sys, time, re
+import py
+from py import path, process
+from py.impl.path import common
+from py.impl.path import svnwc as svncommon
+from py.impl.path.cacheutil import BuildcostAccessCache, AgingCache
+
+DEBUG=False 
+
+class SvnCommandPath(svncommon.SvnPathBase):
+    """ path implementation that offers access to (possibly remote) subversion
+    repositories. """
+
+    _lsrevcache = BuildcostAccessCache(maxentries=128)
+    _lsnorevcache = AgingCache(maxentries=1000, maxseconds=60.0)
+
+    def __new__(cls, path, rev=None, auth=None):
+        self = object.__new__(cls)
+        if isinstance(path, cls): 
+            rev = path.rev 
+            auth = path.auth
+            path = path.strpath 
+        svncommon.checkbadchars(path)
+        path = path.rstrip('/')
+        self.strpath = path
+        self.rev = rev
+        self.auth = auth
+        return self
+
+    def __repr__(self):
+        if self.rev == -1:
+            return 'svnurl(%r)' % self.strpath
+        else:
+            return 'svnurl(%r, %r)' % (self.strpath, self.rev)
+
+    def _svnwithrev(self, cmd, *args):
+        """ execute an svn command, append our own url and revision """
+        if self.rev is None:
+            return self._svnwrite(cmd, *args)
+        else:
+            args = ['-r', self.rev] + list(args)
+            return self._svnwrite(cmd, *args)
+
+    def _svnwrite(self, cmd, *args):
+        """ execute an svn command, append our own url """
+        l = ['svn %s' % cmd]
+        args = ['"%s"' % self._escape(item) for item in args]
+        l.extend(args)
+        l.append('"%s"' % self._encodedurl())
+        # fixing the locale because we can't otherwise parse
+        string = " ".join(l)
+        if DEBUG:
+            print("execing %s" % string)
+        out = self._svncmdexecauth(string)
+        return out
+
+    def _svncmdexecauth(self, cmd):
+        """ execute an svn command 'as is' """
+        cmd = svncommon.fixlocale() + cmd
+        if self.auth is not None:
+            cmd += ' ' + self.auth.makecmdoptions()
+        return self._cmdexec(cmd)
+
+    def _cmdexec(self, cmd):
+        try:
+            out = process.cmdexec(cmd)
+        except py.process.cmdexec.Error:
+            e = sys.exc_info()[1]
+            if (e.err.find('File Exists') != -1 or
+                            e.err.find('File already exists') != -1):
+                raise py.error.EEXIST(self)
+            raise
+        return out
+
+    def _svnpopenauth(self, cmd):
+        """ execute an svn command, return a pipe for reading stdin """
+        cmd = svncommon.fixlocale() + cmd
+        if self.auth is not None:
+            cmd += ' ' + self.auth.makecmdoptions()
+        return self._popen(cmd)
+
+    def _popen(self, cmd):
+        return os.popen(cmd)
+
+    def _encodedurl(self):
+        return self._escape(self.strpath)
+
+    def _norev_delentry(self, path):
+        auth = self.auth and self.auth.makecmdoptions() or None
+        self._lsnorevcache.delentry((str(path), auth))
+
+    def open(self, mode='r'):
+        """ return an opened file with the given mode. """
+        if mode not in ("r", "rU",): 
+            raise ValueError("mode %r not supported" % (mode,))
+        assert self.check(file=1) # svn cat returns an empty file otherwise
+        if self.rev is None:
+            return self._svnpopenauth('svn cat "%s"' % (
+                                      self._escape(self.strpath), ))
+        else:
+            return self._svnpopenauth('svn cat -r %s "%s"' % (
+                                      self.rev, self._escape(self.strpath)))
+
+    def dirpath(self, *args, **kwargs):
+        """ return the directory path of the current path joined
+            with any given path arguments.
+        """
+        l = self.strpath.split(self.sep) 
+        if len(l) < 4: 
+            raise py.error.EINVAL(self, "base is not valid") 
+        elif len(l) == 4: 
+            return self.join(*args, **kwargs) 
+        else: 
+            return self.new(basename='').join(*args, **kwargs)
+
+    # modifying methods (cache must be invalidated)
+    def mkdir(self, *args, **kwargs):
+        """ create & return the directory joined with args. 
+        pass a 'msg' keyword argument to set the commit message.
+        """
+        commit_msg = kwargs.get('msg', "mkdir by py lib invocation")
+        createpath = self.join(*args)
+        createpath._svnwrite('mkdir', '-m', commit_msg)
+        self._norev_delentry(createpath.dirpath())
+        return createpath
+
+    def copy(self, target, msg='copied by py lib invocation'):
+        """ copy path to target with checkin message msg."""
+        if getattr(target, 'rev', None) is not None:
+            raise py.error.EINVAL(target, "revisions are immutable")
+        self._svncmdexecauth('svn copy -m "%s" "%s" "%s"' %(msg,
+                             self._escape(self), self._escape(target)))
+        self._norev_delentry(target.dirpath())
+
+    def rename(self, target, msg="renamed by py lib invocation"):
+        """ rename this path to target with checkin message msg. """
+        if getattr(self, 'rev', None) is not None:
+            raise py.error.EINVAL(self, "revisions are immutable")
+        self._svncmdexecauth('svn move -m "%s" --force "%s" "%s"' %(
+                             msg, self._escape(self), self._escape(target)))
+        self._norev_delentry(self.dirpath())
+        self._norev_delentry(self)
+
+    def remove(self, rec=1, msg='removed by py lib invocation'):
+        """ remove a file or directory (or a directory tree if rec=1) with
+checkin message msg."""
+        if self.rev is not None:
+            raise py.error.EINVAL(self, "revisions are immutable")
+        self._svncmdexecauth('svn rm -m "%s" "%s"' %(msg, self._escape(self)))
+        self._norev_delentry(self.dirpath())
+
+    def export(self, topath):
+        """ export to a local path
+
+            topath should not exist prior to calling this, returns a
+            py.path.local instance
+        """
+        topath = py.path.local(topath)
+        args = ['"%s"' % (self._escape(self),),
+                '"%s"' % (self._escape(topath),)]
+        if self.rev is not None:
+            args = ['-r', str(self.rev)] + args
+        self._svncmdexecauth('svn export %s' % (' '.join(args),))
+        return topath
+
+    def ensure(self, *args, **kwargs):
+        """ ensure that an args-joined path exists (by default as
+            a file). If you specify a keyword argument 'dir=True'
+            then the path is forced to be a directory path.
+        """
+        if getattr(self, 'rev', None) is not None:
+            raise py.error.EINVAL(self, "revisions are immutable")
+        target = self.join(*args)
+        dir = kwargs.get('dir', 0) 
+        for x in target.parts(reverse=True): 
+            if x.check(): 
+                break 
+        else: 
+            raise py.error.ENOENT(target, "has no valid base!")
+        if x == target: 
+            if not x.check(dir=dir): 
+                raise dir and py.error.ENOTDIR(x) or py.error.EISDIR(x) 
+            return x 
+        tocreate = target.relto(x) 
+        basename = tocreate.split(self.sep, 1)[0]
+        tempdir = py.path.local.mkdtemp()
+        try:    
+            tempdir.ensure(tocreate, dir=dir) 
+            cmd = 'svn import -m "%s" "%s" "%s"' % (
+                    "ensure %s" % self._escape(tocreate), 
+                    self._escape(tempdir.join(basename)), 
+                    x.join(basename)._encodedurl())
+            self._svncmdexecauth(cmd) 
+            self._norev_delentry(x)
+        finally:    
+            tempdir.remove() 
+        return target
+
+    # end of modifying methods
+    def _propget(self, name):
+        res = self._svnwithrev('propget', name)
+        return res[:-1] # strip trailing newline
+
+    def _proplist(self):
+        res = self._svnwithrev('proplist')
+        lines = res.split('\n')
+        lines = [x.strip() for x in lines[1:]]
+        return svncommon.PropListDict(self, lines)
+
+    def _listdir_nameinfo(self):
+        """ return sequence of name-info directory entries of self """
+        def builder():
+            try:
+                res = self._svnwithrev('ls', '-v')
+            except process.cmdexec.Error:
+                e = sys.exc_info()[1]
+                if e.err.find('non-existent in that revision') != -1:
+                    raise py.error.ENOENT(self, e.err)
+                elif e.err.find('File not found') != -1:
+                    raise py.error.ENOENT(self, e.err)
+                elif e.err.find('not part of a repository')!=-1:
+                    raise py.error.ENOENT(self, e.err)
+                elif e.err.find('Unable to open')!=-1:
+                    raise py.error.ENOENT(self, e.err)
+                elif e.err.lower().find('method not allowed')!=-1:
+                    raise py.error.EACCES(self, e.err)
+                raise py.error.Error(e.err)
+            lines = res.split('\n')
+            nameinfo_seq = []
+            for lsline in lines:
+                if lsline:
+                    info = InfoSvnCommand(lsline)
+                    if info._name != '.':  # svn 1.5 produces '.' dirs, 
+                        nameinfo_seq.append((info._name, info))
+            nameinfo_seq.sort()
+            return nameinfo_seq
+        auth = self.auth and self.auth.makecmdoptions() or None
+        if self.rev is not None:
+            return self._lsrevcache.getorbuild((self.strpath, self.rev, auth),
+                                               builder)
+        else:
+            return self._lsnorevcache.getorbuild((self.strpath, auth),
+                                                 builder)
+
+    def listdir(self, fil=None, sort=None):
+        """ list directory contents, possibly filter by the given fil func
+            and possibly sorted.
+        """
+        if isinstance(fil, str):
+            fil = common.FNMatcher(fil)
+        nameinfo_seq = self._listdir_nameinfo()
+        if len(nameinfo_seq) == 1:
+            name, info = nameinfo_seq[0]
+            if name == self.basename and info.kind == 'file':
+                #if not self.check(dir=1):
+                raise py.error.ENOTDIR(self)
+        paths = [self.join(name) for (name, info) in nameinfo_seq]
+        if fil:
+            paths = [x for x in paths if fil(x)]
+        self._sortlist(paths, sort)
+        return paths
+
+
+    def log(self, rev_start=None, rev_end=1, verbose=False):
+        """ return a list of LogEntry instances for this path.
+rev_start is the starting revision (defaulting to the first one).
+rev_end is the last revision (defaulting to HEAD).
+if verbose is True, then the LogEntry instances also know which files changed.
+"""
+        assert self.check() #make it simpler for the pipe
+        rev_start = rev_start is None and "HEAD" or rev_start
+        rev_end = rev_end is None and "HEAD" or rev_end
+
+        if rev_start == "HEAD" and rev_end == 1:
+            rev_opt = ""
+        else:
+            rev_opt = "-r %s:%s" % (rev_start, rev_end)
+        verbose_opt = verbose and "-v" or ""
+        xmlpipe =  self._svnpopenauth('svn log --xml %s %s "%s"' %
+                                      (rev_opt, verbose_opt, self.strpath))
+        from xml.dom import minidom
+        tree = minidom.parse(xmlpipe)
+        result = []
+        for logentry in filter(None, tree.firstChild.childNodes):
+            if logentry.nodeType == logentry.ELEMENT_NODE:
+                result.append(svncommon.LogEntry(logentry))
+        return result
+
+#01234567890123456789012345678901234567890123467
+#   2256      hpk        165 Nov 24 17:55 __init__.py
+# XXX spotted by Guido, SVN 1.3.0 has different aligning, breaks the code!!!
+#   1312 johnny           1627 May 05 14:32 test_decorators.py
+#
+class InfoSvnCommand:
+    # the '0?' part in the middle is an indication of whether the resource is
+    # locked, see 'svn help ls'
+    lspattern = re.compile(
+        r'^ *(?P<rev>\d+) +(?P<author>.+?) +(0? *(?P<size>\d+))? '
+            r'*(?P<date>\w+ +\d{2} +[\d:]+) +(?P<file>.*)$')
+    def __init__(self, line):
+        # this is a typical line from 'svn ls http://...'
+        #_    1127      jum        0 Jul 13 15:28 branch/
+        match = self.lspattern.match(line)
+        data = match.groupdict()
+        self._name = data['file']
+        if self._name[-1] == '/':
+            self._name = self._name[:-1]
+            self.kind = 'dir'
+        else:
+            self.kind = 'file'
+        #self.has_props = l.pop(0) == 'P'
+        self.created_rev = int(data['rev'])
+        self.last_author = data['author']
+        self.size = data['size'] and int(data['size']) or 0
+        self.mtime = parse_time_with_missing_year(data['date'])
+        self.time = self.mtime * 1000000
+
+    def __eq__(self, other):
+        return self.__dict__ == other.__dict__
+
+
+#____________________________________________________
+#
+# helper functions
+#____________________________________________________
+def parse_time_with_missing_year(timestr):
+    """ analyze the time part from a single line of "svn ls -v"
+    the svn output doesn't show the year, which makes the 'timestr'
+    ambiguous.
+    """
+    import calendar
+    t_now = time.gmtime()
+
+    tparts = timestr.split()
+    month = time.strptime(tparts.pop(0), '%b')[1]
+    day = time.strptime(tparts.pop(0), '%d')[2]
+    last = tparts.pop(0) # year or hour:minute
+    try:
+        year = time.strptime(last, '%Y')[0]
+        hour = minute = 0
+    except ValueError:
+        hour, minute = time.strptime(last, '%H:%M')[3:5]
+        year = t_now[0]
+
+        t_result = (year, month, day, hour, minute, 0,0,0,0)
+        if t_result > t_now:
+            year -= 1
+    t_result = (year, month, day, hour, minute, 0,0,0,0)
+    return calendar.timegm(t_result)
+
+class PathEntry:
+    def __init__(self, ppart):
+        self.strpath = ppart.firstChild.nodeValue.encode('UTF-8')
+        self.action = ppart.getAttribute('action').encode('UTF-8')
+        if self.action == 'A':
+            self.copyfrom_path = ppart.getAttribute('copyfrom-path').encode('UTF-8')
+            if self.copyfrom_path:
+                self.copyfrom_rev = int(ppart.getAttribute('copyfrom-rev'))
+
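
A minimal read-only usage sketch of the SvnCommandPath API added above
(illustrative only, not part of this commit; the URL and file name are made up
and an installed 'svn' client is assumed):

    import py

    trunk = py.path.svnurl('http://example.com/svn/repo/trunk')  # hypothetical repository URL
    for entry in trunk.listdir():                                 # backed by a cached 'svn ls -v'
        print(entry.basename)
    readme = trunk.join('README.txt')                             # hypothetical file
    if readme.check(file=1):
        print(readme.info().last_author)                          # InfoSvnCommand attributes
        print(readme.open().read())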

Added: pypy/trunk/py/impl/path/svnwc.py
==============================================================================
--- (empty file)
+++ pypy/trunk/py/impl/path/svnwc.py	Wed Nov 11 18:54:49 2009
@@ -0,0 +1,1236 @@
+"""
+svn-Command based Implementation of a Subversion WorkingCopy Path.
+
+  SvnWCCommandPath  is the main class.
+
+"""
+
+import os, sys, time, re, calendar, string
+import py
+import subprocess
+from py.impl.path import common
+
+#-----------------------------------------------------------
+# Caching latest repository revision and repo-paths
+# (getting them is slow with the current implementations)
+#
+# XXX make mt-safe
+#-----------------------------------------------------------
+
+class cache:
+    proplist = {}
+    info = {}
+    entries = {}
+    prop = {}
+
+class RepoEntry:
+    def __init__(self, url, rev, timestamp):
+        self.url = url
+        self.rev = rev
+        self.timestamp = timestamp
+
+    def __str__(self):
+        return "repo: %s;%s  %s" %(self.url, self.rev, self.timestamp)
+
+class RepoCache:
+    """ The Repocache manages discovered repository paths
+    and their revisions.  If inside a timeout the cache
+    will even return the revision of the root.
+    """
+    timeout = 20 # seconds after which we forget that we know the last revision
+
+    def __init__(self):
+        self.repos = []
+
+    def clear(self):
+        self.repos = []
+
+    def put(self, url, rev, timestamp=None):
+        if rev is None:
+            return
+        if timestamp is None:
+            timestamp = time.time()
+
+        for entry in self.repos:
+            if url == entry.url:
+                entry.timestamp = timestamp
+                entry.rev = rev
+                #print "set repo", entry
+                break
+        else:
+            entry = RepoEntry(url, rev, timestamp)
+            self.repos.append(entry)
+            #print "appended repo", entry
+
+    def get(self, url):
+        now = time.time()
+        for entry in self.repos:
+            if url.startswith(entry.url):
+                if now < entry.timestamp + self.timeout:
+                    #print "returning immediate entry", entry
+                    return entry.url, entry.rev
+                return entry.url, -1
+        return url, -1
+
+repositories = RepoCache()
+
+
+# svn support code 
+
+ALLOWED_CHARS = "_ -/\\=$.~+" #add characters as necessary when tested
+if sys.platform == "win32":
+    ALLOWED_CHARS += ":"
+ALLOWED_CHARS_HOST = ALLOWED_CHARS + '@:'
+    
+def _getsvnversion(ver=[]):
+    try:
+        return ver[0]
+    except IndexError:
+        v = py.process.cmdexec("svn -q --version")
+        v.strip()
+        v = '.'.join(v.split('.')[:2])
+        ver.append(v)
+        return v
+
+def _escape_helper(text):
+    text = str(text)
+    if py.std.sys.platform != 'win32':
+        text = str(text).replace('$', '\\$')
+    return text
+
+def _check_for_bad_chars(text, allowed_chars=ALLOWED_CHARS):
+    for c in str(text):
+        if c.isalnum():
+            continue
+        if c in allowed_chars:
+            continue
+        return True
+    return False
+
+def checkbadchars(url):
+    # (hpk) not quite sure about the exact purpose, guido w.? 
+    proto, uri = url.split("://", 1)
+    if proto != "file":
+        host, uripath = uri.split('/', 1)
+        # only check for bad chars in the non-protocol parts
+        if (_check_for_bad_chars(host, ALLOWED_CHARS_HOST) \
+            or _check_for_bad_chars(uripath, ALLOWED_CHARS)):
+            raise ValueError("bad char in %r" % (url, ))
+            
+
+#_______________________________________________________________
+
+class SvnPathBase(common.PathBase):
+    """ Base implementation for SvnPath implementations. """
+    sep = '/'
+
+    def _geturl(self):
+        return self.strpath
+    url = property(_geturl, None, None, "url of this svn-path.")
+
+    def __str__(self):
+        """ return a string representation (including rev-number) """
+        return self.strpath
+
+    def __hash__(self):
+        return hash(self.strpath)
+
+    def new(self, **kw):
+        """ create a modified version of this path. A 'rev' argument
+            indicates a new revision.
+            the following keyword arguments modify various path parts:
+
+              http://host.com/repo/path/file.ext
+              |-----------------------|          dirname
+                                        |------| basename
+                                        |--|     purebasename
+                                            |--| ext
+        """
+        obj = object.__new__(self.__class__)
+        obj.rev = kw.get('rev', self.rev)
+        obj.auth = kw.get('auth', self.auth)
+        dirname, basename, purebasename, ext = self._getbyspec(
+             "dirname,basename,purebasename,ext")
+        if 'basename' in kw:
+            if 'purebasename' in kw or 'ext' in kw:
+                raise ValueError("invalid specification %r" % kw)
+        else:
+            pb = kw.setdefault('purebasename', purebasename)
+            ext = kw.setdefault('ext', ext)
+            if ext and not ext.startswith('.'):
+                ext = '.' + ext
+            kw['basename'] = pb + ext
+
+        kw.setdefault('dirname', dirname)
+        kw.setdefault('sep', self.sep)
+        if kw['basename']:
+            obj.strpath = "%(dirname)s%(sep)s%(basename)s" % kw
+        else:
+            obj.strpath = "%(dirname)s" % kw
+        return obj
+
+    def _getbyspec(self, spec):
+        """ get specified parts of the path.  'arg' is a string
+            with comma separated path parts. The parts are returned
+            in exactly the order of the specification.
+
+            you may specify the following parts:
+
+            http://host.com/repo/path/file.ext
+            |-----------------------|          dirname
+                                      |------| basename
+                                      |--|     purebasename
+                                          |--| ext
+        """
+        res = []
+        parts = self.strpath.split(self.sep)
+        for name in spec.split(','):
+            name = name.strip()
+            if name == 'dirname':
+                res.append(self.sep.join(parts[:-1]))
+            elif name == 'basename':
+                res.append(parts[-1])
+            else:
+                basename = parts[-1]
+                i = basename.rfind('.')
+                if i == -1:
+                    purebasename, ext = basename, ''
+                else:
+                    purebasename, ext = basename[:i], basename[i:]
+                if name == 'purebasename':
+                    res.append(purebasename)
+                elif name == 'ext':
+                    res.append(ext)
+                else:
+                    raise NameError("Don't know part %r" % name)
+        return res
+
+    def __eq__(self, other):
+        """ return true if path and rev attributes each match """
+        return (str(self) == str(other) and
+                self.rev == other.rev)
+
+    def __ne__(self, other):
+        return not self == other
+
+    def join(self, *args):
+        """ return a new Path (with the same revision) which is composed
+            of the self Path followed by 'args' path components.
+        """
+        if not args:
+            return self
+
+        args = tuple([arg.strip(self.sep) for arg in args])
+        parts = (self.strpath, ) + args
+        newpath = self.__class__(self.sep.join(parts), self.rev, self.auth)
+        return newpath
+
+    def propget(self, name):
+        """ return the content of the given property. """
+        value = self._propget(name)
+        return value
+
+    def proplist(self):
+        """ list all property names. """
+        content = self._proplist()
+        return content
+
+    def info(self):
+        """ return an Info structure with svn-provided information. """
+        parent = self.dirpath()
+        nameinfo_seq = parent._listdir_nameinfo()
+        bn = self.basename
+        for name, info in nameinfo_seq:
+            if name == bn:
+                return info
+        raise py.error.ENOENT(self)
+
+    def size(self):
+        """ Return the size of the file content of the Path. """
+        return self.info().size
+
+    def mtime(self):
+        """ Return the last modification time of the file. """
+        return self.info().mtime
+
+    # shared help methods
+
+    def _escape(self, cmd):
+        return _escape_helper(cmd)
+
+
+    #def _childmaxrev(self):
+    #    """ return maximum revision number of childs (or self.rev if no childs) """
+    #    rev = self.rev
+    #    for name, info in self._listdir_nameinfo():
+    #        rev = max(rev, info.created_rev)
+    #    return rev
+
+    #def _getlatestrevision(self):
+    #    """ return latest repo-revision for this path. """
+    #    url = self.strpath
+    #    path = self.__class__(url, None)
+    #
+    #    # we need a long walk to find the root-repo and revision
+    #    while 1:
+    #        try:
+    #            rev = max(rev, path._childmaxrev())
+    #            previous = path
+    #            path = path.dirpath()
+    #        except (IOError, process.cmdexec.Error):
+    #            break
+    #    if rev is None:
+    #        raise IOError, "could not determine newest repo revision for %s" % self
+    #    return rev
+
+    class Checkers(common.Checkers):
+        def dir(self):
+            try:
+                return self.path.info().kind == 'dir'
+            except py.error.Error:
+                return self._listdirworks()
+
+        def _listdirworks(self):
+            try:
+                self.path.listdir()
+            except py.error.ENOENT:
+                return False
+            else:
+                return True
+
+        def file(self):
+            try:
+                return self.path.info().kind == 'file'
+            except py.error.ENOENT:
+                return False
+
+        def exists(self):
+            try:
+                return self.path.info()
+            except py.error.ENOENT:
+                return self._listdirworks()
+
+def parse_apr_time(timestr):
+    i = timestr.rfind('.')
+    if i == -1:
+        raise ValueError("could not parse %s" % timestr)
+    timestr = timestr[:i]
+    parsedtime = time.strptime(timestr, "%Y-%m-%dT%H:%M:%S")
+    return time.mktime(parsedtime)
+
+class PropListDict(dict):
+    """ a Dictionary which fetches values (InfoSvnCommand instances) lazily"""
+    def __init__(self, path, keynames):
+        dict.__init__(self, [(x, None) for x in keynames])
+        self.path = path
+
+    def __getitem__(self, key):
+        value = dict.__getitem__(self, key)
+        if value is None:
+            value = self.path.propget(key)
+            dict.__setitem__(self, key, value)
+        return value
+
+def fixlocale():
+    if sys.platform != 'win32':
+        return 'LC_ALL=C '
+    return ''
+
+# some nasty chunk of code to solve path and url conversion and quoting issues
+ILLEGAL_CHARS = '* | \ / : < > ? \t \n \x0b \x0c \r'.split(' ')
+if os.sep in ILLEGAL_CHARS:
+    ILLEGAL_CHARS.remove(os.sep)
+ISWINDOWS = sys.platform == 'win32'
+_reg_allow_disk = re.compile(r'^([a-z]\:\\)?[^:]+$', re.I)
+def _check_path(path):
+    illegal = ILLEGAL_CHARS[:]
+    sp = path.strpath
+    if ISWINDOWS:
+        illegal.remove(':')
+        if not _reg_allow_disk.match(sp):
+            raise ValueError('path may not contain a colon (:)')
+    for char in sp:
+        if char not in string.printable or char in illegal:
+            raise ValueError('illegal character %r in path' % (char,))
+
+def path_to_fspath(path, addat=True):
+    _check_path(path)
+    sp = path.strpath
+    if addat and path.rev != -1:
+        sp = '%s@%s' % (sp, path.rev)
+    elif addat:
+        sp = '%s@HEAD' % (sp,)
+    return sp
+    
+def url_from_path(path):
+    fspath = path_to_fspath(path, False)
+    quote = py.std.urllib.quote
+    if ISWINDOWS:
+        match = _reg_allow_disk.match(fspath)
+        fspath = fspath.replace('\\', '/')
+        if match.group(1):
+            fspath = '/%s%s' % (match.group(1).replace('\\', '/'),
+                                quote(fspath[len(match.group(1)):]))
+        else:
+            fspath = quote(fspath)
+    else:
+        fspath = quote(fspath)
+    if path.rev != -1:
+        fspath = '%s@%s' % (fspath, path.rev)
+    else:
+        fspath = '%s@HEAD' % (fspath,)
+    return 'file://%s' % (fspath,)
+
+class SvnAuth(object):
+    """ container for auth information for Subversion """
+    def __init__(self, username, password, cache_auth=True, interactive=True):
+        self.username = username
+        self.password = password
+        self.cache_auth = cache_auth
+        self.interactive = interactive
+
+    def makecmdoptions(self):
+        uname = self.username.replace('"', '\\"')
+        passwd = self.password.replace('"', '\\"')
+        ret = []
+        if uname:
+            ret.append('--username="%s"' % (uname,))
+        if passwd:
+            ret.append('--password="%s"' % (passwd,))
+        if not self.cache_auth:
+            ret.append('--no-auth-cache')
+        if not self.interactive:
+            ret.append('--non-interactive')
+        return ' '.join(ret)
+
+    def __str__(self):
+        return "<SvnAuth username=%s ...>" %(self.username,)
+
+rex_blame = re.compile(r'\s*(\d+)\s*(\S+) (.*)')
+
+class SvnWCCommandPath(common.PathBase):
+    """ path implementation offering access/modification to svn working copies.
+        It has methods similar to the functions in os.path and similar to the
+        commands of the svn client.
+    """
+    sep = os.sep
+
+    def __new__(cls, wcpath=None, auth=None):
+        self = object.__new__(cls)
+        if isinstance(wcpath, cls):
+            if wcpath.__class__ == cls:
+                return wcpath
+            wcpath = wcpath.localpath
+        if _check_for_bad_chars(str(wcpath), ALLOWED_CHARS):
+            raise ValueError("bad char in wcpath %s" % (wcpath, ))
+        self.localpath = py.path.local(wcpath)
+        self.auth = auth
+        return self
+
+    strpath = property(lambda x: str(x.localpath), None, None, "string path")
+
+    def __eq__(self, other):
+        return self.localpath == getattr(other, 'localpath', None)
+
+    def _geturl(self):
+        if getattr(self, '_url', None) is None:
+            info = self.info()
+            self._url = info.url #SvnPath(info.url, info.rev)
+        assert isinstance(self._url, py.builtin._basestring)
+        return self._url
+
+    url = property(_geturl, None, None, "url of this WC item")
+
+    def _escape(self, cmd):
+        return _escape_helper(cmd)
+
+    def dump(self, obj):
+        """ pickle object into path location"""
+        return self.localpath.dump(obj)
+
+    def svnurl(self):
+        """ return current SvnPath for this WC-item. """
+        info = self.info()
+        return py.path.svnurl(info.url)
+
+    def __repr__(self):
+        return "svnwc(%r)" % (self.strpath) # , self._url)
+
+    def __str__(self):
+        return str(self.localpath)
+
+    def _makeauthoptions(self):
+        if self.auth is None:
+            return ''
+        return self.auth.makecmdoptions()
+
+    def _authsvn(self, cmd, args=None):
+        args = args and list(args) or []
+        args.append(self._makeauthoptions())
+        return self._svn(cmd, *args)
+        
+    def _svn(self, cmd, *args):
+        l = ['svn %s' % cmd]
+        args = [self._escape(item) for item in args]
+        l.extend(args)
+        l.append('"%s"' % self._escape(self.strpath))
+        # try fixing the locale because we can't otherwise parse
+        string = fixlocale() + " ".join(l)
+        try:
+            try:
+                key = 'LC_MESSAGES'
+                hold = os.environ.get(key)
+                os.environ[key] = 'C'
+                out = py.process.cmdexec(string)
+            finally:
+                if hold:
+                    os.environ[key] = hold
+                else:
+                    del os.environ[key]
+        except py.process.cmdexec.Error:
+            e = sys.exc_info()[1]
+            strerr = e.err.lower()
+            if strerr.find('file not found') != -1: 
+                raise py.error.ENOENT(self) 
+            if (strerr.find('file exists') != -1 or 
+                strerr.find('file already exists') != -1 or
+                strerr.find("can't create directory") != -1):
+                raise py.error.EEXIST(self)
+            raise
+        return out
+
+    def switch(self, url):
+        """ switch to given URL. """
+        self._authsvn('switch', [url])
+
+    def checkout(self, url=None, rev=None):
+        """ checkout from url to local wcpath. """
+        args = []
+        if url is None:
+            url = self.url
+        if rev is None or rev == -1:
+            if (py.std.sys.platform != 'win32' and
+                    _getsvnversion() == '1.3'):
+                url += "@HEAD" 
+        else:
+            if _getsvnversion() == '1.3':
+                url += "@%d" % rev
+            else:
+                args.append('-r' + str(rev))
+        args.append(url)
+        self._authsvn('co', args)
+
+    def update(self, rev='HEAD'):
+        """ update working copy item to given revision. (None -> HEAD). """
+        self._authsvn('up', ['-r', rev, "--non-interactive"],)
+
+    def write(self, content, mode='w'):
+        """ write content into local filesystem wc. """
+        self.localpath.write(content, mode)
+
+    def dirpath(self, *args):
+        """ return the directory Path of the current Path. """
+        return self.__class__(self.localpath.dirpath(*args), auth=self.auth)
+
+    def _ensuredirs(self):
+        parent = self.dirpath()
+        if parent.check(dir=0):
+            parent._ensuredirs()
+        if self.check(dir=0):
+            self.mkdir()
+        return self
+
+    def ensure(self, *args, **kwargs):
+        """ ensure that an args-joined path exists (by default as
+            a file). if you specify a keyword argument 'directory=True'
+            then the path is forced  to be a directory path.
+        """
+        p = self.join(*args)
+        if p.check():
+            if p.check(versioned=False):
+                p.add()
+            return p 
+        if kwargs.get('dir', 0):
+            return p._ensuredirs()
+        parent = p.dirpath()
+        parent._ensuredirs()
+        p.write("")
+        p.add()
+        return p
+
+    def mkdir(self, *args):
+        """ create & return the directory joined with args. """
+        if args:
+            return self.join(*args).mkdir()
+        else:
+            self._svn('mkdir')
+            return self
+
+    def add(self):
+        """ add ourself to svn """
+        self._svn('add')
+
+    def remove(self, rec=1, force=1):
+        """ remove a file or a directory tree. 'rec'ursive is
+            ignored and considered always true (because of
+            underlying svn semantics.
+        """
+        assert rec, "svn cannot remove non-recursively"
+        if not self.check(versioned=True):
+            # not added to svn (anymore?), just remove
+            py.path.local(self).remove()
+            return
+        flags = []
+        if force:
+            flags.append('--force')
+        self._svn('remove', *flags)
+
+    def copy(self, target):
+        """ copy path to target."""
+        py.process.cmdexec("svn copy %s %s" %(str(self), str(target)))
+
+    def rename(self, target):
+        """ rename this path to target. """
+        py.process.cmdexec("svn move --force %s %s" %(str(self), str(target)))
+
+    def lock(self):
+        """ set a lock (exclusive) on the resource """
+        out = self._authsvn('lock').strip()
+        if not out:
+            # no output usually indicates a warning or error
+            raise ValueError("unknown error in svn lock command")
+    
+    def unlock(self):
+        """ unset a previously set lock """
+        out = self._authsvn('unlock').strip()
+        if out.startswith('svn:'):
+            # warning or error, raise exception
+            raise Exception(out[4:])
+
+    def cleanup(self):
+        """ remove any locks from the resource """
+        # XXX should be fixed properly!!!
+        try:
+            self.unlock()
+        except:
+            pass
+
+    def status(self, updates=0, rec=0, externals=0):
+        """ return (collective) Status object for this file. """
+        # http://svnbook.red-bean.com/book.html#svn-ch-3-sect-4.3.1
+        #             2201     2192        jum   test
+        # XXX
+        if externals:
+            raise ValueError("XXX cannot perform status() "
+                             "on external items yet")
+        else:
+            #1.2 supports: externals = '--ignore-externals'
+            externals = ''
+        if rec:
+            rec = ''
+        else:
+            rec = '--non-recursive'
+
+        # XXX does not work on all subversion versions
+        #if not externals: 
+        #    externals = '--ignore-externals' 
+
+        if updates:
+            updates = '-u'
+        else:
+            updates = ''
+
+        try:
+            cmd = 'status -v --xml --no-ignore %s %s %s' % (
+                    updates, rec, externals)
+            out = self._authsvn(cmd)
+        except py.process.cmdexec.Error:
+            cmd = 'status -v --no-ignore %s %s %s' % (
+                    updates, rec, externals)
+            out = self._authsvn(cmd)
+            rootstatus = WCStatus(self).fromstring(out, self)
+        else:
+            rootstatus = XMLWCStatus(self).fromstring(out, self)
+        return rootstatus
+
+    def diff(self, rev=None):
+        """ return a diff of the current path against revision rev (defaulting
+            to the last one).
+        """
+        args = []
+        if rev is not None:
+            args.append("-r %d" % rev)
+        out = self._authsvn('diff', args)
+        return out
+
+    def blame(self):
+        """ return a list of tuples of three elements:
+            (revision, committer, line)
+        """
+        out = self._svn('blame')
+        result = []
+        blamelines = out.splitlines()
+        reallines = py.path.svnurl(self.url).readlines()
+        for i, (blameline, line) in enumerate(
+                zip(blamelines, reallines)):
+            m = rex_blame.match(blameline)
+            if not m:
+                raise ValueError("output line %r of svn blame does not match "
+                                 "expected format" % (line, ))
+            rev, name, _ = m.groups()
+            result.append((int(rev), name, line))
+        return result
+
+    _rex_commit = re.compile(r'.*Committed revision (\d+)\.$', re.DOTALL)
+    def commit(self, msg='', rec=1):
+        """ commit with support for non-recursive commits """
+        # XXX i guess escaping should be done better here?!?
+        cmd = 'commit -m "%s" --force-log' % (msg.replace('"', '\\"'),)
+        if not rec:
+            cmd += ' -N'
+        out = self._authsvn(cmd)
+        try:
+            del cache.info[self]
+        except KeyError:
+            pass
+        if out: 
+            m = self._rex_commit.match(out)
+            return int(m.group(1))
+
+    def propset(self, name, value, *args):
+        """ set property name to value on this path. """
+        d = py.path.local.mkdtemp() 
+        try: 
+            p = d.join('value') 
+            p.write(value) 
+            self._svn('propset', name, '--file', str(p), *args)
+        finally: 
+            d.remove() 
+
+    def propget(self, name):
+        """ get property name on this path. """
+        res = self._svn('propget', name)
+        return res[:-1] # strip trailing newline
+
+    def propdel(self, name):
+        """ delete property name on this path. """
+        res = self._svn('propdel', name)
+        return res[:-1] # strip trailing newline
+
+    def proplist(self, rec=0):
+        """ return a mapping of property names to property values.
+If rec is True, then return a dictionary mapping sub-paths to such mappings.
+"""
+        if rec:
+            res = self._svn('proplist -R')
+            return make_recursive_propdict(self, res)
+        else:
+            res = self._svn('proplist')
+            lines = res.split('\n')
+            lines = [x.strip() for x in lines[1:]]
+            return PropListDict(self, lines)
+
+    def revert(self, rec=0):
+        """ revert the local changes of this path. if rec is True, do so
+recursively. """
+        if rec:
+            result = self._svn('revert -R')
+        else:
+            result = self._svn('revert')
+        return result
+
+    def new(self, **kw):
+        """ create a modified version of this path. A 'rev' argument
+            indicates a new revision.
+            the following keyword arguments modify various path parts:
+
+              http://host.com/repo/path/file.ext
+              |-----------------------|          dirname
+                                        |------| basename
+                                        |--|     purebasename
+                                            |--| ext
+        """
+        if kw:
+            localpath = self.localpath.new(**kw)
+        else:
+            localpath = self.localpath
+        return self.__class__(localpath, auth=self.auth)
+
+    def join(self, *args, **kwargs):
+        """ return a new Path (with the same revision) which is composed
+            of the self Path followed by 'args' path components.
+        """
+        if not args:
+            return self
+        localpath = self.localpath.join(*args, **kwargs)
+        return self.__class__(localpath, auth=self.auth)
+
+    def info(self, usecache=1):
+        """ return an Info structure with svn-provided information. """
+        info = usecache and cache.info.get(self)
+        if not info:
+            try:
+                output = self._svn('info')
+            except py.process.cmdexec.Error:
+                e = sys.exc_info()[1]
+                if e.err.find('Path is not a working copy directory') != -1:
+                    raise py.error.ENOENT(self, e.err)
+                elif e.err.find("is not under version control") != -1:
+                    raise py.error.ENOENT(self, e.err)
+                raise
+            # XXX SVN 1.3 has output on stderr instead of stdout (while it does
+            # return 0!), so a bit nasty, but we assume no output is output
+            # to stderr...
+            if (output.strip() == '' or 
+                    output.lower().find('not a versioned resource') != -1):
+                raise py.error.ENOENT(self, output)
+            info = InfoSvnWCCommand(output)
+
+            # Can't reliably compare on Windows without access to win32api
+            if py.std.sys.platform != 'win32': 
+                if info.path != self.localpath: 
+                    raise py.error.ENOENT(self, "not a versioned resource:" + 
+                            " %s != %s" % (info.path, self.localpath)) 
+            cache.info[self] = info
+        self.rev = info.rev
+        return info
+
+    def listdir(self, fil=None, sort=None):
+        """ return a sequence of Paths.
+
+        listdir will return either a tuple or a list of paths
+        depending on implementation choices.
+        """
+        if isinstance(fil, str):
+            fil = common.FNMatcher(fil)
+        # XXX unify argument naming with LocalPath.listdir
+        def notsvn(path):
+            return path.basename != '.svn' 
+
+        paths = [self.__class__(p, auth=self.auth) 
+                    for p in self.localpath.listdir()
+                        if notsvn(p) and (not fil or fil(p))]
+        self._sortlist(paths, sort)
+        return paths
+
+    def open(self, mode='r'):
+        """ return an opened file with the given mode. """
+        return open(self.strpath, mode)
+
+    def _getbyspec(self, spec):
+        return self.localpath._getbyspec(spec)
+
+    class Checkers(py.path.local.Checkers):
+        def __init__(self, path):
+            self.svnwcpath = path
+            self.path = path.localpath
+        def versioned(self):
+            try:
+                s = self.svnwcpath.info()
+            except (py.error.ENOENT, py.error.EEXIST): 
+                return False 
+            except py.process.cmdexec.Error:
+                e = sys.exc_info()[1]
+                if e.err.find('is not a working copy')!=-1:
+                    return False
+                if e.err.lower().find('not a versioned resource') != -1:
+                    return False
+                raise
+            else:
+                return True 
+
+    def log(self, rev_start=None, rev_end=1, verbose=False):
+        """ return a list of LogEntry instances for this path.
+rev_start is the starting revision (defaulting to the first one).
+rev_end is the last revision (defaulting to HEAD).
+if verbose is True, then the LogEntry instances also know which files changed.
+"""
+        assert self.check()   # make it simpler for the pipe
+        rev_start = rev_start is None and "HEAD" or rev_start
+        rev_end = rev_end is None and "HEAD" or rev_end
+        if rev_start == "HEAD" and rev_end == 1:
+                rev_opt = ""
+        else:
+            rev_opt = "-r %s:%s" % (rev_start, rev_end)
+        verbose_opt = verbose and "-v" or ""
+        locale_env = fixlocale()
+        # some blather on stderr
+        auth_opt = self._makeauthoptions()
+        #stdin, stdout, stderr  = os.popen3(locale_env +
+        #                                   'svn log --xml %s %s %s "%s"' % (
+        #                                    rev_opt, verbose_opt, auth_opt,
+        #                                    self.strpath))
+        cmd = locale_env + 'svn log --xml %s %s %s "%s"' % (
+            rev_opt, verbose_opt, auth_opt, self.strpath)
+
+        popen = subprocess.Popen(cmd, 
+                    stdout=subprocess.PIPE, 
+                    stderr=subprocess.PIPE,  
+                    shell=True,
+        )
+        stdout, stderr = popen.communicate()
+        stdout = py.builtin._totext(stdout, sys.getdefaultencoding())
+        minidom,ExpatError = importxml()
+        try:
+            tree = minidom.parseString(stdout)
+        except ExpatError:
+            raise ValueError('no such revision')
+        result = []
+        for logentry in filter(None, tree.firstChild.childNodes):
+            if logentry.nodeType == logentry.ELEMENT_NODE:
+                result.append(LogEntry(logentry))
+        return result
+
+    def size(self):
+        """ Return the size of the file content of the Path. """
+        return self.info().size
+
+    def mtime(self):
+        """ Return the last modification time of the file. """
+        return self.info().mtime
+
+    def __hash__(self):
+        return hash((self.strpath, self.__class__, self.auth))
+
+
+class WCStatus:
+    attrnames = ('modified','added', 'conflict', 'unchanged', 'external',
+                'deleted', 'prop_modified', 'unknown', 'update_available',
+                'incomplete', 'kindmismatch', 'ignored', 'locked', 'replaced'
+                )
+
+    def __init__(self, wcpath, rev=None, modrev=None, author=None):
+        self.wcpath = wcpath
+        self.rev = rev
+        self.modrev = modrev
+        self.author = author
+
+        for name in self.attrnames:
+            setattr(self, name, [])
+
+    def allpath(self, sort=True, **kw):
+        d = {}
+        for name in self.attrnames:
+            if name not in kw or kw[name]:
+                for path in getattr(self, name):
+                    d[path] = 1
+        l = d.keys()
+        if sort:
+            l.sort()
+        return l
+
+    # XXX a bit scary to assume there's always 2 spaces between username and
+    # path, however with win32 allowing spaces in user names there doesn't
+    # seem to be a more solid approach :(
+    _rex_status = re.compile(r'\s+(\d+|-)\s+(\S+)\s+(.+?)\s{2,}(.*)')
+
+    def fromstring(data, rootwcpath, rev=None, modrev=None, author=None):
+        """ return a new WCStatus object from data 's'
+        """
+        rootstatus = WCStatus(rootwcpath, rev, modrev, author)
+        update_rev = None
+        for line in data.split('\n'):
+            if not line.strip():
+                continue
+            #print "processing %r" % line
+            flags, rest = line[:8], line[8:]
+            # first column
+            c0,c1,c2,c3,c4,c5,x6,c7 = flags
+            #if '*' in line:
+            #    print "flags", repr(flags), "rest", repr(rest)
+
+            if c0 in '?XI':
+                fn = line.split(None, 1)[1]
+                if c0 == '?':
+                    wcpath = rootwcpath.join(fn, abs=1)
+                    rootstatus.unknown.append(wcpath)
+                elif c0 == 'X':
+                    wcpath = rootwcpath.__class__(
+                        rootwcpath.localpath.join(fn, abs=1),
+                        auth=rootwcpath.auth)
+                    rootstatus.external.append(wcpath)
+                elif c0 == 'I':
+                    wcpath = rootwcpath.join(fn, abs=1)
+                    rootstatus.ignored.append(wcpath)
+
+                continue
+
+            #elif c0 in '~!' or c4 == 'S':
+            #    raise NotImplementedError("received flag %r" % c0)
+
+            m = WCStatus._rex_status.match(rest)
+            if not m:
+                if c7 == '*':
+                    fn = rest.strip()
+                    wcpath = rootwcpath.join(fn, abs=1)
+                    rootstatus.update_available.append(wcpath)
+                    continue
+                if line.lower().find('against revision:')!=-1:
+                    update_rev = int(rest.split(':')[1].strip())
+                    continue
+                if line.lower().find('status on external') > -1:
+                    # XXX not sure what to do here... perhaps we want to
+                    # store some state instead of just continuing, as right
+                    # now it makes the top-level external get added twice
+                    # (once as external, once as 'normal' unchanged item)
+                    # because of the way SVN presents external items
+                    continue
+                # give up: unrecognized status line format
+                raise ValueError("could not parse line %r" % line)
+            else:
+                rev, modrev, author, fn = m.groups()
+            wcpath = rootwcpath.join(fn, abs=1)
+            #assert wcpath.check()
+            if c0 == 'M':
+                assert wcpath.check(file=1), "didn't expect a directory with changed content here"
+                rootstatus.modified.append(wcpath)
+            elif c0 == 'A' or c3 == '+' :
+                rootstatus.added.append(wcpath)
+            elif c0 == 'D':
+                rootstatus.deleted.append(wcpath)
+            elif c0 == 'C':
+                rootstatus.conflict.append(wcpath)
+            elif c0 == '~':
+                rootstatus.kindmismatch.append(wcpath)
+            elif c0 == '!':
+                rootstatus.incomplete.append(wcpath)
+            elif c0 == 'R':
+                rootstatus.replaced.append(wcpath)
+            elif not c0.strip():
+                rootstatus.unchanged.append(wcpath)
+            else:
+                raise NotImplementedError("received flag %r" % c0)
+
+            if c1 == 'M':
+                rootstatus.prop_modified.append(wcpath)
+            # XXX do we cover all client versions here?
+            if c2 == 'L' or c5 == 'K':
+                rootstatus.locked.append(wcpath)
+            if c7 == '*':
+                rootstatus.update_available.append(wcpath)
+
+            if wcpath == rootwcpath:
+                rootstatus.rev = rev
+                rootstatus.modrev = modrev
+                rootstatus.author = author
+                if update_rev:
+                    rootstatus.update_rev = update_rev
+                continue
+        return rootstatus
+    fromstring = staticmethod(fromstring)
+
+class XMLWCStatus(WCStatus):
+    def fromstring(data, rootwcpath, rev=None, modrev=None, author=None):
+        """ parse 'data' (XML string as outputted by svn st) into a status obj
+        """
+        # XXX for externals, the path is shown twice: once
+        # with external information, and once with full info as if
+        # the item was a normal non-external... the current way of
+        # dealing with this issue is by ignoring it - this does make
+        # externals appear as external items as well as 'normal',
+        # unchanged ones in the status object so this is far from ideal
+        rootstatus = WCStatus(rootwcpath, rev, modrev, author)
+        update_rev = None
+        minidom, ExpatError = importxml()
+        try:
+            doc = minidom.parseString(data)
+        except ExpatError:
+            e = sys.exc_info()[1]
+            raise ValueError(str(e))
+        urevels = doc.getElementsByTagName('against')
+        if urevels:
+            rootstatus.update_rev = urevels[-1].getAttribute('revision')
+        for entryel in doc.getElementsByTagName('entry'):
+            path = entryel.getAttribute('path')
+            statusel = entryel.getElementsByTagName('wc-status')[0]
+            itemstatus = statusel.getAttribute('item')
+
+            if itemstatus == 'unversioned':
+                wcpath = rootwcpath.join(path, abs=1)
+                rootstatus.unknown.append(wcpath)
+                continue
+            elif itemstatus == 'external':
+                wcpath = rootwcpath.__class__(
+                    rootwcpath.localpath.join(path, abs=1),
+                    auth=rootwcpath.auth)
+                rootstatus.external.append(wcpath)
+                continue
+            elif itemstatus == 'ignored':
+                wcpath = rootwcpath.join(path, abs=1)
+                rootstatus.ignored.append(wcpath)
+                continue
+            elif itemstatus == 'incomplete':
+                wcpath = rootwcpath.join(path, abs=1)
+                rootstatus.incomplete.append(wcpath)
+                continue
+
+            rev = statusel.getAttribute('revision')
+            if itemstatus == 'added' or itemstatus == 'none':
+                rev = '0'
+                modrev = '?'
+                author = '?'
+                date = ''
+            else:
+                #print entryel.toxml()
+                commitel = entryel.getElementsByTagName('commit')[0]
+                if commitel:
+                    modrev = commitel.getAttribute('revision')
+                    author = ''
+                    author_els = commitel.getElementsByTagName('author')
+                    if author_els:
+                        for c in author_els[0].childNodes:
+                            author += c.nodeValue
+                    date = ''
+                    for c in commitel.getElementsByTagName('date')[0]\
+                            .childNodes:
+                        date += c.nodeValue
+
+            wcpath = rootwcpath.join(path, abs=1)
+
+            assert itemstatus != 'modified' or wcpath.check(file=1), (
+                "didn't expect a directory with changed content here")
+
+            itemattrname = {
+                'normal': 'unchanged',
+                'unversioned': 'unknown',
+                'conflicted': 'conflict',
+                'none': 'added',
+            }.get(itemstatus, itemstatus)
+
+            attr = getattr(rootstatus, itemattrname)
+            attr.append(wcpath)
+
+            propsstatus = statusel.getAttribute('props')
+            if propsstatus not in ('none', 'normal'):
+                rootstatus.prop_modified.append(wcpath)
+
+            if wcpath == rootwcpath:
+                rootstatus.rev = rev
+                rootstatus.modrev = modrev
+                rootstatus.author = author
+                rootstatus.date = date
+
+            # handle repos-status element (remote info)
+            rstatusels = entryel.getElementsByTagName('repos-status')
+            if rstatusels:
+                rstatusel = rstatusels[0]
+                ritemstatus = rstatusel.getAttribute('item')
+                if ritemstatus in ('added', 'modified'):
+                    rootstatus.update_available.append(wcpath)
+
+            lockels = entryel.getElementsByTagName('lock')
+            if len(lockels):
+                rootstatus.locked.append(wcpath)
+
+        return rootstatus
+    fromstring = staticmethod(fromstring)
+
+class InfoSvnWCCommand:
+    def __init__(self, output):
+        # Path: test
+        # URL: http://codespeak.net/svn/std.path/trunk/dist/std.path/test
+        # Repository UUID: fd0d7bf2-dfb6-0310-8d31-b7ecfe96aada
+        # Revision: 2151
+        # Node Kind: directory
+        # Schedule: normal
+        # Last Changed Author: hpk
+        # Last Changed Rev: 2100
+        # Last Changed Date: 2003-10-27 20:43:14 +0100 (Mon, 27 Oct 2003)
+        # Properties Last Updated: 2003-11-03 14:47:48 +0100 (Mon, 03 Nov 2003)
+
+        d = {}
+        for line in output.split('\n'):
+            if not line.strip():
+                continue
+            key, value = line.split(':', 1)
+            key = key.lower().replace(' ', '')
+            value = value.strip()
+            d[key] = value
+        try:
+            self.url = d['url']
+        except KeyError:
+            raise  ValueError("Not a versioned resource")
+            #raise ValueError, "Not a versioned resource %r" % path
+        self.kind = d['nodekind'] == 'directory' and 'dir' or d['nodekind']
+        self.rev = int(d['revision'])
+        self.path = py.path.local(d['path'])
+        self.size = self.path.size()
+        if 'lastchangedrev' in d:
+            self.created_rev = int(d['lastchangedrev'])
+        if 'lastchangedauthor' in d:
+            self.last_author = d['lastchangedauthor']
+        if 'lastchangeddate' in d:
+            self.mtime = parse_wcinfotime(d['lastchangeddate'])
+            self.time = self.mtime * 1000000
+
+    def __eq__(self, other):
+        return self.__dict__ == other.__dict__
+
+def parse_wcinfotime(timestr):
+    """ Returns seconds since epoch, UTC. """
+    # example: 2003-10-27 20:43:14 +0100 (Mon, 27 Oct 2003)
+    m = re.match(r'(\d+-\d+-\d+ \d+:\d+:\d+) ([+-]\d+) .*', timestr)
+    if not m:
+        raise ValueError("timestring %r does not match" % timestr)
+    timestr, timezone = m.groups()
+    # do not handle timezone specially, return value should be UTC
+    parsedtime = time.strptime(timestr, "%Y-%m-%d %H:%M:%S")
+    return calendar.timegm(parsedtime)
+
+def make_recursive_propdict(wcroot,
+                            output,
+                            rex = re.compile("Properties on '(.*)':")):
+    """ Return a dictionary of path->PropListDict mappings. """
+    lines = [x for x in output.split('\n') if x]
+    pdict = {}
+    while lines:
+        line = lines.pop(0)
+        m = rex.match(line)
+        if not m:
+            raise ValueError("could not parse propget-line: %r" % line)
+        path = m.groups()[0]
+        wcpath = wcroot.join(path, abs=1)
+        propnames = []
+        while lines and lines[0].startswith('  '):
+            propname = lines.pop(0).strip()
+            propnames.append(propname)
+        assert propnames, "must have found properties!"
+        pdict[wcpath] = PropListDict(wcpath, propnames)
+    return pdict
+
+
+def importxml(cache=[]):
+    if cache:
+        return cache
+    from xml.dom import minidom
+    from xml.parsers.expat import ExpatError
+    cache.extend([minidom, ExpatError])
+    return cache
+
+class LogEntry:
+    def __init__(self, logentry):
+        self.rev = int(logentry.getAttribute('revision'))
+        for lpart in filter(None, logentry.childNodes):
+            if lpart.nodeType == lpart.ELEMENT_NODE:
+                if lpart.nodeName == 'author':
+                    self.author = lpart.firstChild.nodeValue
+                elif lpart.nodeName == 'msg':
+                    if lpart.firstChild:
+                        self.msg = lpart.firstChild.nodeValue
+                    else:
+                        self.msg = ''
+                elif lpart.nodeName == 'date':
+                    #2003-07-29T20:05:11.598637Z
+                    timestr = lpart.firstChild.nodeValue
+                    self.date = parse_apr_time(timestr)
+                elif lpart.nodeName == 'paths':
+                    self.strpaths = []
+                    for ppart in filter(None, lpart.childNodes):
+                        if ppart.nodeType == ppart.ELEMENT_NODE:
+                            self.strpaths.append(PathEntry(ppart))
+    def __repr__(self):
+        return '<Logentry rev=%d author=%s date=%s>' % (
+            self.rev, self.author, self.date)
+
+
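
For orientation, a small usage sketch (not part of the committed file) of how the SvnAuth container and the working-copy path class above are meant to be combined. The working-copy location and credentials are invented; it assumes a local svn client, an existing checkout, and that the class is reached via py.path.svnwc as elsewhere in the py API:

import py

# hypothetical credentials and checkout path, for illustration only
auth = SvnAuth("someuser", "secret", cache_auth=False, interactive=False)
wc = py.path.svnwc("/tmp/example-checkout", auth=auth)

wc.update()                       # runs: svn up -r HEAD --non-interactive ...
info = wc.info()                  # InfoSvnWCCommand with .url, .rev, .mtime
st = wc.status(rec=1)             # WCStatus/XMLWCStatus with .modified, .added, ...
for p in st.modified:
    print("locally modified: %s" % p)

sub = wc.join("doc", "index.txt") # joined path, same auth object
print("%s@%s" % (info.url, info.rev))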

Added: pypy/trunk/py/impl/process/__init__.py
==============================================================================
--- (empty file)
+++ pypy/trunk/py/impl/process/__init__.py	Wed Nov 11 18:54:49 2009
@@ -0,0 +1 @@
+""" high-level sub-process handling """

Added: pypy/trunk/py/impl/process/cmdexec.py
==============================================================================
--- (empty file)
+++ pypy/trunk/py/impl/process/cmdexec.py	Wed Nov 11 18:54:49 2009
@@ -0,0 +1,44 @@
+"""
+
+"""
+
+import os, sys
+import subprocess
+import py
+from subprocess import Popen, PIPE
+
+def cmdexec(cmd):
+    """ return output of executing 'cmd' in a separate process.
+
+    raises a cmdexec.ExecutionFailed exception if the command failed.
+    the exception will provide an 'err' attribute containing
+    the error-output from the command.
+    """
+    process = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+    out, err = process.communicate()
+    out = py.builtin._totext(out, sys.getdefaultencoding())
+    err = py.builtin._totext(err, sys.getdefaultencoding())
+    status = process.poll()
+    if status:
+        raise ExecutionFailed(status, status, cmd, out, err)
+    return out
+
+class ExecutionFailed(py.error.Error):
+    def __init__(self, status, systemstatus, cmd, out, err):
+        Exception.__init__(self)
+        self.status = status
+        self.systemstatus = systemstatus
+        self.cmd = cmd
+        self.err = err
+        self.out = out
+
+    def __str__(self):
+        return "ExecutionFailed: %d  %s\n%s" %(self.status, self.cmd, self.err)
+
+# export the exception under the name 'py.process.cmdexec.Error'
+cmdexec.Error = ExecutionFailed
+try:
+    ExecutionFailed.__module__ = 'py.process.cmdexec'
+    ExecutionFailed.__name__ = 'Error'
+except (AttributeError, TypeError):
+    pass
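
A brief sketch of the contract documented above: on success the decoded stdout is returned, and a non-zero exit status raises ExecutionFailed (exported below as cmdexec.Error) carrying the command, status and captured output. The shell commands are arbitrary POSIX examples:

import py

out = py.process.cmdexec("echo hello")          # returns decoded stdout
assert out.strip() == "hello"

try:
    py.process.cmdexec("ls /no/such/directory")
except py.process.cmdexec.Error as e:           # alias for ExecutionFailed
    print("command failed with status %d" % e.status)
    print("stderr was: %s" % e.err.strip())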

Added: pypy/trunk/py/impl/process/forkedfunc.py
==============================================================================
--- (empty file)
+++ pypy/trunk/py/impl/process/forkedfunc.py	Wed Nov 11 18:54:49 2009
@@ -0,0 +1,108 @@
+
+""" 
+    ForkedFunc provides a way to run a function in a forked process
+    and get at its return value, stdout and stderr output as well
+    as signals and exit status.
+
+    XXX see if tempdir handling is sane 
+"""
+
+import py
+import os
+import sys
+import marshal
+
+class ForkedFunc(object):
+    EXITSTATUS_EXCEPTION = 3
+    def __init__(self, fun, args=None, kwargs=None, nice_level=0):
+        if args is None:
+            args = []
+        if kwargs is None:
+            kwargs = {}
+        self.fun = fun
+        self.args = args
+        self.kwargs = kwargs
+        self.tempdir = tempdir = py.path.local.mkdtemp()
+        self.RETVAL = tempdir.ensure('retval')
+        self.STDOUT = tempdir.ensure('stdout')
+        self.STDERR = tempdir.ensure('stderr')
+
+        pid = os.fork()
+        if pid: # in parent process
+            self.pid = pid 
+        else: # in child process 
+            self._child(nice_level)
+
+    def _child(self, nice_level):
+        # right now we need to call a function, but first we need to
+        # map all IO that might happen
+        # make sure sys.stdout points to file descriptor one
+        sys.stdout = stdout = self.STDOUT.open('w')
+        sys.stdout.flush()
+        fdstdout = stdout.fileno()
+        if fdstdout != 1:
+            os.dup2(fdstdout, 1)
+        sys.stderr = stderr = self.STDERR.open('w')
+        fdstderr = stderr.fileno()
+        if fdstderr != 2:
+            os.dup2(fdstderr, 2)
+        retvalf = self.RETVAL.open("wb")
+        EXITSTATUS = 0
+        try:
+            if nice_level:
+                os.nice(nice_level)
+            try:
+                retval = self.fun(*self.args, **self.kwargs)
+                retvalf.write(marshal.dumps(retval))
+            except:
+                excinfo = py.code.ExceptionInfo()
+                stderr.write(excinfo.exconly())
+                EXITSTATUS = self.EXITSTATUS_EXCEPTION
+        finally:
+            stdout.close()
+            stderr.close()
+            retvalf.close()
+        os.close(1)
+        os.close(2)
+        os._exit(EXITSTATUS)
+    
+    def waitfinish(self, waiter=os.waitpid):
+        pid, systemstatus = waiter(self.pid, 0)
+        if systemstatus:
+            if os.WIFSIGNALED(systemstatus):
+                exitstatus = os.WTERMSIG(systemstatus) + 128
+            else:
+                exitstatus = os.WEXITSTATUS(systemstatus)
+            #raise ExecutionFailed(status, systemstatus, cmd,
+            #                      ''.join(out), ''.join(err))
+        else:
+            exitstatus = 0
+        signal = systemstatus & 0x7f
+        if not exitstatus and not signal:
+            retval = self.RETVAL.open('rb')
+            try:
+                retval_data = retval.read()
+            finally:
+                retval.close()
+            retval = marshal.loads(retval_data)
+        else:
+            retval = None
+        stdout = self.STDOUT.read()
+        stderr = self.STDERR.read()
+        self._removetemp()
+        return Result(exitstatus, signal, retval, stdout, stderr)
+
+    def _removetemp(self):
+        if self.tempdir.check():
+            self.tempdir.remove()
+
+    def __del__(self):
+        self._removetemp()
+
+class Result(object):
+    def __init__(self, exitstatus, signal, retval, stdout, stderr):
+        self.exitstatus = exitstatus
+        self.signal = signal
+        self.retval = retval
+        self.out = stdout
+        self.err = stderr
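
A usage sketch of the class above (POSIX only, since it relies on os.fork). The worker function is made up; its return value must be marshal-serializable because the child writes it to the 'retval' tempfile with marshal. It assumes the class is reached as py.process.ForkedFunc, like the other helpers in this package:

import py

def add(x, y):
    print("adding in the child")        # captured via the child's stdout file
    return x + y

ff = py.process.ForkedFunc(add, args=(3, 4))
result = ff.waitfinish()
assert result.exitstatus == 0 and result.signal == 0
assert result.retval == 7               # unmarshalled from the 'retval' tempfile
assert "adding" in result.out           # child's captured stdout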

Added: pypy/trunk/py/impl/process/killproc.py
==============================================================================
--- (empty file)
+++ pypy/trunk/py/impl/process/killproc.py	Wed Nov 11 18:54:49 2009
@@ -0,0 +1,23 @@
+import py
+import os, sys
+
+if sys.platform == "win32":
+    try:
+        import ctypes
+    except ImportError:
+        def dokill(pid):
+            py.process.cmdexec("taskkill /F /PID %d" %(pid,))
+    else: 
+        def dokill(pid):
+            PROCESS_TERMINATE = 1
+            handle = ctypes.windll.kernel32.OpenProcess(
+                        PROCESS_TERMINATE, False, pid)
+            ctypes.windll.kernel32.TerminateProcess(handle, -1)
+            ctypes.windll.kernel32.CloseHandle(handle)
+else:
+    def dokill(pid):
+        os.kill(pid, 15)     
+
+def kill(pid):
+    """ kill process by id. """
+    dokill(pid)
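
A minimal sketch of the dispatch above: on win32 the ctypes/taskkill path is taken, elsewhere os.kill sends signal 15 (SIGTERM). The child command is an arbitrary POSIX example, and it assumes the function is exposed as py.process.kill:

import subprocess
import py

proc = subprocess.Popen(["sleep", "30"])   # example long-running child (POSIX)
py.process.kill(proc.pid)                  # SIGTERM here, TerminateProcess/taskkill on win32
proc.wait()
print("child exited with returncode %s" % proc.returncode)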

Added: pypy/trunk/py/impl/std.py
==============================================================================
--- (empty file)
+++ pypy/trunk/py/impl/std.py	Wed Nov 11 18:54:49 2009
@@ -0,0 +1,18 @@
+import sys
+
+class Std(object):
+    """ makes top-level python modules available as an attribute, 
+        importing them on first access. 
+    """ 
+
+    def __init__(self):
+        self.__dict__ = sys.modules
+
+    def __getattr__(self, name):
+        try:
+            m = __import__(name)
+        except ImportError:
+            raise AttributeError("py.std: could not import %s" % name)
+        return m
+
+std = Std()
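
A short sketch of the lazy-import behaviour: because the instance's __dict__ is sys.modules, already-imported modules are plain attribute hits, and anything else is imported on first access (or turned into an AttributeError). The module names below are arbitrary examples:

std = Std()
print(std.textwrap.dedent("    indented"))   # textwrap imported on first access
print(std.sys is __import__("sys"))          # True: served straight from sys.modules
try:
    std.no_such_module_for_illustration
except AttributeError as e:
    print(e)                                 # "py.std: could not import ..."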

Added: pypy/trunk/py/impl/test/__init__.py
==============================================================================
--- (empty file)
+++ pypy/trunk/py/impl/test/__init__.py	Wed Nov 11 18:54:49 2009
@@ -0,0 +1 @@
+""" versatile unit-testing tool + libraries """

Added: pypy/trunk/py/impl/test/cmdline.py
==============================================================================
--- (empty file)
+++ pypy/trunk/py/impl/test/cmdline.py	Wed Nov 11 18:54:49 2009
@@ -0,0 +1,23 @@
+import py
+import sys
+
+#
+# main entry point
+#
+
+def main(args=None):
+    if args is None:
+        args = sys.argv[1:]
+    config = py.test.config
+    try:
+        config.parse(args) 
+        config.pluginmanager.do_configure(config)
+        session = config.initsession()
+        exitstatus = session.main()
+        config.pluginmanager.do_unconfigure(config)
+        raise SystemExit(exitstatus)
+    except config.Error:
+        e = sys.exc_info()[1]
+        sys.stderr.write("ERROR: %s\n" %(e.args[0],))
+        raise SystemExit(3)
+
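
A sketch of calling the entry point above programmatically; the argument list is an arbitrary example. main() always ends in SystemExit, carrying the session's exit status on a normal run and 3 on a configuration error:

try:
    main(["--collectonly"])
except SystemExit as exc:
    print("py.test finished with exit status %s" % exc.code)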

Added: pypy/trunk/py/impl/test/collect.py
==============================================================================
--- (empty file)
+++ pypy/trunk/py/impl/test/collect.py	Wed Nov 11 18:54:49 2009
@@ -0,0 +1,466 @@
+"""
+base test collection objects.  
+Collectors and test Items form a tree
+that is usually built iteratively.  
+""" 
+import py
+
+def configproperty(name):
+    def fget(self):
+        #print "retrieving %r property from %s" %(name, self.fspath)
+        return self.config.getvalue(name, self.fspath) 
+    return property(fget)
+
+class Node(object): 
+    """ base class for Nodes in the collection tree.  
+        Collector nodes have children and 
+        Item nodes are terminal. 
+
+        All nodes of the collection tree carry a 'config'
+        attribute for these reasons:
+        - to access custom Collection Nodes from a project
+          (defined in conftest files)
+        - to pickle themselves relatively to the "topdir" 
+        - configuration/options for setup/teardown 
+          stdout/stderr capturing and execution of test items 
+    """
+    def __init__(self, name, parent=None):
+        self.name = name 
+        self.parent = parent
+        self.config = getattr(parent, 'config', None)
+        self.fspath = getattr(parent, 'fspath', None) 
+
+    def _checkcollectable(self):
+        if not hasattr(self, 'fspath'):
+            self.parent._memocollect() # to reraise exception
+            
+    # 
+    # note to myself: Pickling is uh.
+    # 
+    def __getstate__(self):
+        return (self.name, self.parent)
+    def __setstate__(self, nameparent):
+        name, parent = nameparent
+        try:
+            colitems = parent._memocollect()
+        except KeyboardInterrupt:
+            raise
+        except Exception:
+            # seems our parent can't collect us 
+            # so let's be somewhat operable 
+            # _checkcollectable() is to tell outsiders about the fact
+            self.name = name 
+            self.parent = parent 
+            self.config = parent.config
+            #self._obj = "could not unpickle" 
+        else:
+            for colitem in colitems:
+                if colitem.name == name:
+                    # we are a copy that will not be returned
+                    # by our parent 
+                    self.__dict__ = colitem.__dict__
+                    break
+
+    def __repr__(self): 
+        if getattr(self.config.option, 'debug', False):
+            return "<%s %r %0x>" %(self.__class__.__name__, 
+                getattr(self, 'name', None), id(self))
+        else:
+            return "<%s %r>" %(self.__class__.__name__, 
+                getattr(self, 'name', None))
+
+    # methods for ordering nodes
+
+    def __eq__(self, other): 
+        if not isinstance(other, Node):
+            return False 
+        return self.name == other.name and self.parent == other.parent 
+
+    def __ne__(self, other):
+        return not self == other
+    
+    def __hash__(self):
+        return hash((self.name, self.parent))
+ 
+    def setup(self): 
+        pass
+
+    def teardown(self): 
+        pass
+
+    def _memoizedcall(self, attrname, function):
+        exattrname = "_ex_" + attrname 
+        failure = getattr(self, exattrname, None)
+        if failure is not None:
+            py.builtin._reraise(failure[0], failure[1], failure[2])
+        if hasattr(self, attrname):
+            return getattr(self, attrname)
+        try:
+            res = function()
+        except (KeyboardInterrupt, SystemExit):
+            raise
+        except:
+            failure = py.std.sys.exc_info()
+            setattr(self, exattrname, failure)
+            raise
+        setattr(self, attrname, res)
+        return res 
+
+    def listchain(self, rootfirst=False):
+        """ return list of all parent collectors up to self, 
+            starting form root of collection tree. """ 
+        l = [self]
+        while 1: 
+            x = l[-1]
+            if x.parent is not None: 
+                l.append(x.parent) 
+            else: 
+                if not rootfirst:
+                    l.reverse() 
+                return l 
+
+    def listnames(self): 
+        return [x.name for x in self.listchain()]
+
+    def getparent(self, cls):
+        current = self
+        while current and not isinstance(current, cls):
+            current = current.parent
+        return current 
+
+    def _getitembynames(self, namelist):
+        cur = self
+        for name in namelist:
+            if name:
+                next = cur.collect_by_name(name)
+                if next is None: 
+                    existingnames = [x.name for x in self._memocollect()]
+                    msg = ("Collector %r does not have name %r "
+                           "existing names are: %s" %
+                           (cur, name, existingnames))
+                    raise AssertionError(msg) 
+                cur = next
+        return cur
+
+    
+    def _getfsnode(self, path):
+        # this method is usually called from
+        # config.getfsnode() which returns a colitem 
+        # from filename arguments
+        #
+        # pytest's collector tree does not necessarily
+        # follow the filesystem and we thus need to do 
+        # some special matching code here because
+        # _getitembynames() works by colitem names, not
+        # basenames. 
+        if path == self.fspath:
+            return self 
+        basenames = path.relto(self.fspath).split(path.sep)
+        cur = self
+        while basenames:
+            basename = basenames.pop(0)
+            assert basename
+            fspath = cur.fspath.join(basename)
+            colitems = cur._memocollect()
+            l = []
+            for colitem in colitems:
+                if colitem.fspath == fspath or colitem.name == basename:
+                    l.append(colitem)
+            if not l:
+                raise self.config.Error("can't collect: %s" %(fspath,))
+            if basenames:
+                if len(l) > 1:
+                    msg = ("Collector %r has more than one %r colitem "
+                           "existing colitems are: %s" %
+                           (cur, fspath, colitems))
+                    raise self.config.Error("xxx-too many test types for: %s" % (fspath, ))
+                cur = l[0]
+            else:
+                if len(l) > 1:
+                    cur = l
+                else:
+                    cur = l[0]
+                break
+        return cur 
+
+    def readkeywords(self):
+        return dict([(x, True) for x in self._keywords()])
+
+    def _keywords(self):
+        return [self.name]
+
+    def _skipbykeyword(self, keywordexpr): 
+        """ return True if they given keyword expression means to 
+            skip this collector/item. 
+        """
+        if not keywordexpr:
+            return
+        chain = self.listchain()
+        for key in filter(None, keywordexpr.split()):
+            eor = key[:1] == '-'
+            if eor:
+                key = key[1:]
+            if not (eor ^ self._matchonekeyword(key, chain)):
+                return True
+
+    def _matchonekeyword(self, key, chain):
+        elems = key.split(".")
+        # XXX O(n^2), anyone cares?
+        chain = [item.readkeywords() for item in chain if item._keywords()]
+        for start, _ in enumerate(chain):
+            if start + len(elems) > len(chain):
+                return False
+            for num, elem in enumerate(elems):
+                for keyword in chain[num + start]:
+                    ok = False
+                    if elem in keyword:
+                        ok = True
+                        break
+                if not ok:
+                    break
+            if num == len(elems) - 1 and ok:
+                return True
+        return False
+
+    def _prunetraceback(self, traceback):
+        return traceback 
+
+    def _totrail(self):
+        """ provide a trail relative to the topdir, 
+            which can be used to reconstruct the
+            collector (possibly on a different host
+            starting from a different topdir). 
+        """ 
+        chain = self.listchain()
+        topdir = self.config.topdir 
+        relpath = chain[0].fspath.relto(topdir)
+        if not relpath:
+            if chain[0].fspath == topdir:
+                relpath = "."
+            else:
+                raise ValueError("%r not relative to topdir %s" 
+                         %(chain[0].fspath, topdir))
+        return relpath, tuple([x.name for x in chain[1:]])
+
+    def _fromtrail(trail, config):
+        relpath, names = trail
+        fspath = config.topdir.join(relpath)
+        col = config.getfsnode(fspath)
+        return col._getitembynames(names)
+    _fromtrail = staticmethod(_fromtrail)
+
+    def _repr_failure_py(self, excinfo, outerr=None):
+        assert outerr is None, "XXX deprecated"
+        excinfo.traceback = self._prunetraceback(excinfo.traceback)
+        # XXX temporary hack: getrepr() should not take a 'style' argument
+        # at all; it should record all data in all cases, and the style
+        # should be parametrized in toterminal().
+        if self.config.option.tbstyle == "short":
+            style = "short"
+        else:
+            style = "long"
+        return excinfo.getrepr(funcargs=True, 
+                               showlocals=self.config.option.showlocals,
+                               style=style)
+
+    repr_failure = _repr_failure_py
+    shortfailurerepr = "F"
+
+class Collector(Node):
+    """ 
+        Collector instances create children through collect()
+        and thus iteratively build a tree.  attributes::
+
+        parent: attribute pointing to the parent collector
+                (or None if this is the root collector)
+        name:   basename of this collector object
+    """
+    Directory = configproperty('Directory')
+    Module = configproperty('Module')
+
+    def collect(self):
+        """ returns a list of children (items and collectors) 
+            for this collection node. 
+        """
+        raise NotImplementedError("abstract")
+
+    def collect_by_name(self, name):
+        """ return a child matching the given name, else None. """
+        for colitem in self._memocollect():
+            if colitem.name == name:
+                return colitem
+
+    def repr_failure(self, excinfo, outerr=None):
+        """ represent a failure. """
+        assert outerr is None, "XXX deprecated"
+        return self._repr_failure_py(excinfo)
+
+    def _memocollect(self):
+        """ internal helper method to cache results of calling collect(). """
+        return self._memoizedcall('_collected', self.collect)
+
+    # **********************************************************************
+    # DEPRECATED METHODS 
+    # **********************************************************************
+    
+    def _deprecated_collect(self):
+        # avoid recursion:
+        # collect -> _deprecated_collect -> custom run() ->
+        # super().run() -> collect
+        attrname = '_depcollectentered'
+        if hasattr(self, attrname):
+            return
+        setattr(self, attrname, True)
+        method = getattr(self.__class__, 'run', None)
+        if method is not None and method != Collector.run:
+            warnoldcollect(function=method)
+            names = self.run()
+            return [x for x in [self.join(name) for name in names] if x]
+
+    def run(self):
+        """ DEPRECATED: returns a list of names available from this collector.
+            You can return an empty list.  Callers of this method
+            must take care to catch exceptions properly.  
+        """
+        return [colitem.name for colitem in self._memocollect()]
+
+    def join(self, name): 
+        """  DEPRECATED: return a child collector or item for the given name.  
+             If the return value is None there is no such child. 
+        """
+        return self.collect_by_name(name)
+
+    def _prunetraceback(self, traceback):
+        if hasattr(self, 'fspath'):
+            path = self.fspath 
+            ntraceback = traceback.cut(path=self.fspath)
+            if ntraceback == traceback:
+                ntraceback = ntraceback.cut(excludepath=py._dir)
+            traceback = ntraceback.filter()
+        return traceback 
+
+class FSCollector(Collector): 
+    def __init__(self, fspath, parent=None):
+        fspath = py.path.local(fspath) 
+        super(FSCollector, self).__init__(fspath.basename, parent)
+        self.fspath = fspath 
+
+    def __getstate__(self):
+        if self.parent is None:
+            # the root node needs to pickle more context info 
+            topdir = self.config.topdir
+            relpath = self.fspath.relto(topdir)
+            if not relpath:
+                if self.fspath == topdir:
+                    relpath = "."
+                else:
+                    raise ValueError("%r not relative to topdir %s" 
+                            %(self.fspath, topdir))
+            return (self.name, self.config, relpath)
+        else:
+            return (self.name, self.parent)
+
+    def __setstate__(self, picklestate):
+        if len(picklestate) == 3:
+            # root node
+            name, config, relpath = picklestate
+            fspath = config.topdir.join(relpath)
+            fsnode = config.getfsnode(fspath)
+            self.__dict__.update(fsnode.__dict__)
+        else:
+            name, parent = picklestate
+            self.__init__(parent.fspath.join(name), parent=parent)
+
+class File(FSCollector):
+    """ base class for collecting tests from a file. """
+
+class Directory(FSCollector): 
+    def recfilter(self, path): 
+        if path.check(dir=1, dotfile=0):
+            return path.basename not in ('CVS', '_darcs', '{arch}')
+
+    def collect(self):
+        l = self._deprecated_collect() 
+        if l is not None:
+            return l 
+        l = []
+        for path in self.fspath.listdir(sort=True): 
+            res = self.consider(path)
+            if res is not None:
+                if isinstance(res, (list, tuple)):
+                    l.extend(res)
+                else:
+                    l.append(res)
+        return l
+
+    def _ignore(self, path):
+        ignore_paths = self.config.getconftest_pathlist("collect_ignore", path=path)
+        return ignore_paths and path in ignore_paths
+        # XXX more refined would be: 
+        if ignore_paths:
+            for p in ignore_paths:
+                if path == p or path.relto(p):
+                    return True
+
+    def consider(self, path):
+        if self._ignore(path):
+            return
+        if path.check(file=1):
+            res = self.consider_file(path)
+        elif path.check(dir=1):
+            res = self.consider_dir(path)
+        else:
+            res = None            
+        if isinstance(res, list):
+            # throw out identical results
+            l = []
+            for x in res:
+                if x not in l:
+                    assert x.parent == self, "wrong collection tree construction"
+                    l.append(x)
+            res = l 
+        return res
+
+    def consider_file(self, path):
+        return self.config.hook.pytest_collect_file(path=path, parent=self)
+
+    def consider_dir(self, path, usefilters=None):
+        if usefilters is not None:
+            py.log._apiwarn("0.99", "usefilters argument not needed")
+        return self.config.hook.pytest_collect_directory(
+            path=path, parent=self)
+
+class Item(Node): 
+    """ a basic test item. """
+    def _deprecated_testexecution(self):
+        if self.__class__.run != Item.run:
+            warnoldtestrun(function=self.run)
+        elif self.__class__.execute != Item.execute:
+            warnoldtestrun(function=self.execute)
+        else:
+            return False
+        self.run()
+        return True
+
+    def run(self):
+        """ deprecated, here because subclasses might call it. """
+        return self.execute(self.obj)
+
+    def execute(self, obj):
+        """ deprecated, here because subclasses might call it. """
+        return obj()
+
+    def reportinfo(self):
+        return self.fspath, None, ""
+        
+def warnoldcollect(function=None):
+    py.log._apiwarn("1.0", 
+        "implement collector.collect() instead of "
+        "collector.run() and collector.join()",
+        stacklevel=2, function=function)
+
+def warnoldtestrun(function=None):
+    py.log._apiwarn("1.0", 
+        "implement item.runtest() instead of "
+        "item.run() and item.execute()",
+        stacklevel=2, function=function)

Added: pypy/trunk/py/impl/test/compat.py
==============================================================================
--- (empty file)
+++ pypy/trunk/py/impl/test/compat.py	Wed Nov 11 18:54:49 2009
@@ -0,0 +1,58 @@
+import py
+
+from py.test.collect import Function
+
+class TestCaseUnit(Function):
+    """ compatibility Unit executor for TestCase methods
+        honouring setUp and tearDown semantics.
+    """
+    def runtest(self, _deprecated=None):
+        boundmethod = self.obj 
+        instance = py.builtin._getimself(boundmethod)
+        instance.setUp()
+        try:
+            boundmethod()
+        finally:
+            instance.tearDown()
+
+class TestCase(object):
+    """compatibility class of unittest's TestCase. """
+    Function = TestCaseUnit
+
+    def setUp(self):
+        pass
+
+    def tearDown(self):
+        pass
+
+    def fail(self, msg=None):
+        """ fail immediate with given message. """
+        py.test.fail(msg)
+
+    def assertRaises(self, excclass, func, *args, **kwargs):
+        py.test.raises(excclass, func, *args, **kwargs)
+    failUnlessRaises = assertRaises
+
+    # dynamically construct (redundant) methods
+    aliasmap = [
+        ('x',   'not x', 'assert_, failUnless'),
+        ('x',   'x',     'failIf'),
+        ('x,y', 'x!=y',  'failUnlessEqual,assertEqual, assertEquals'),
+        ('x,y', 'x==y',  'failIfEqual,assertNotEqual, assertNotEquals'),
+        ]
+    items = []
+    for sig, expr, names in aliasmap:
+        names = map(str.strip, names.split(','))
+        sigsubst = expr.replace('y', '%s').replace('x', '%s')
+        for name in names:
+            items.append("""
+                def %(name)s(self, %(sig)s, msg=""):
+                    __tracebackhide__ = True
+                    if %(expr)s:
+                        py.test.fail(msg=msg + (%(sigsubst)r %% (%(sig)s)))
+            """ % locals() )
+
+    source = "".join(items)
+    exec(py.code.Source(source).compile())
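+    # For illustration only, the generated assertEqual roughly expands to:
+    #     def assertEqual(self, x, y, msg=""):
+    #         __tracebackhide__ = True
+    #         if x!=y:
+    #             py.test.fail(msg=msg + ('%s!=%s' % (x, y)))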
+
+__all__ = ['TestCase']

Added: pypy/trunk/py/impl/test/config.py
==============================================================================
--- (empty file)
+++ pypy/trunk/py/impl/test/config.py	Wed Nov 11 18:54:49 2009
@@ -0,0 +1,307 @@
+import py, os
+from py.impl.test.conftesthandle import Conftest
+
+from py.impl.test import parseopt
+
+def ensuretemp(string, dir=1): 
+    """ return temporary directory path with
+        the given string as the trailing part. 
+    """ 
+    return py.test.config.ensuretemp(string, dir=dir)
+  
+class CmdOptions(object):
+    """ pure container instance for holding cmdline options 
+        as attributes. 
+    """
+    def __repr__(self):
+        return "<CmdOptions %r>" %(self.__dict__,)
+
+class Error(Exception):
+    """ Test Configuration Error. """
+
+class Config(object): 
+    """ test configuration object, provides access to config valueso, 
+        the pluginmanager and plugin api. 
+    """
+    Option = py.std.optparse.Option 
+    Error = Error
+    basetemp = None
+    _sessionclass = None
+
+    def __init__(self, pluginmanager=None, topdir=None): 
+        self.option = CmdOptions()
+        self.topdir = topdir
+        self._parser = parseopt.Parser(
+            usage="usage: %prog [options] [file_or_dir] [file_or_dir] [...]",
+            processopt=self._processopt,
+        )
+        if pluginmanager is None:
+            pluginmanager = py.test._PluginManager()
+        assert isinstance(pluginmanager, py.test._PluginManager)
+        self.pluginmanager = pluginmanager
+        self._conftest = Conftest(onimport=self._onimportconftest)
+        self.hook = pluginmanager.hook
+
+    def _onimportconftest(self, conftestmodule):
+        self.trace("loaded conftestmodule %r" %(conftestmodule,))
+        self.pluginmanager.consider_conftest(conftestmodule)
+
+    def trace(self, msg):
+        if getattr(self.option, 'traceconfig', None):
+            self.hook.pytest_trace(category="config", msg=msg)
+
+    def _processopt(self, opt):
+        if hasattr(opt, 'default') and opt.dest:
+            val = os.environ.get("PYTEST_OPTION_" + opt.dest.upper(), None)
+            if val is not None:
+                if opt.type == "int":
+                    val = int(val)
+                elif opt.type == "long":
+                    val = long(val)
+                elif opt.type == "float":
+                    val = float(val)
+                elif not opt.type and opt.action in ("store_true", "store_false"):
+                    val = eval(val)
+                opt.default = val 
+            else:
+                name = "option_" + opt.dest
+                try:
+                    opt.default = self._conftest.rget(name)
+                except (ValueError, KeyError):
+                    pass
+            if not hasattr(self.option, opt.dest):
+                setattr(self.option, opt.dest, opt.default)
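+    # Illustration (option names hypothetical): an environment variable
+    # PYTEST_OPTION_VERBOSE would override the default of an option whose
+    # dest is "verbose"; failing that, an "option_verbose" value from a
+    # conftest file is used as the default.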
+
+    def _preparse(self, args):
+        self._conftest.setinitial(args) 
+        self.pluginmanager.consider_preparse(args)
+        self.pluginmanager.consider_env()
+        self.pluginmanager.do_addoption(self._parser)
+
+    def parse(self, args): 
+        """ parse cmdline arguments into this config object. 
+            Note that this can only be called once per testing process. 
+        """ 
+        assert not hasattr(self, 'args'), (
+                "can only parse cmdline args at most once per Config object")
+        self._preparse(args)
+        args = self._parser.parse_setoption(args, self.option)
+        if not args:
+            args.append(py.std.os.getcwd())
+        self.topdir = gettopdir(args)
+        self.args = [py.path.local(x) for x in args]
+
+    # config objects are usually pickled across system
+    # barriers but they contain filesystem paths. 
+    # upon getstate/setstate we take care to do everything
+    # relative to "topdir". 
+    def __getstate__(self):
+        l = []
+        for path in self.args:
+            path = py.path.local(path)
+            l.append(path.relto(self.topdir)) 
+        return l, self.option
+
+    def __setstate__(self, repr):
+        # warning global side effects:
+        # * registering to py lib plugins 
+        # * setting py.test.config 
+        self.__init__(
+            pluginmanager=py.test._PluginManager(py._com.comregistry),
+            topdir=py.path.local(),
+        )
+        # we have to set py.test.config because preparse()
+        # might load conftest files which have
+        # py.test.config.addoptions() lines in them 
+        py.test.config = self 
+        args, cmdlineopts = repr 
+        args = [self.topdir.join(x) for x in args]
+        self.option = cmdlineopts
+        self._preparse(args)
+        self.args = args 
+
+    def ensuretemp(self, string, dir=True):
+        return self.getbasetemp().ensure(string, dir=dir) 
+
+    def getbasetemp(self):
+        if self.basetemp is None:
+            basetemp = self.option.basetemp 
+            if basetemp:
+                basetemp = py.path.local(basetemp)
+                if not basetemp.check(dir=1):
+                    basetemp.mkdir()
+            else:
+                basetemp = py.path.local.make_numbered_dir(prefix='pytest-')
+            self.basetemp = basetemp
+        return self.basetemp 
+
+    def mktemp(self, basename, numbered=False):
+        basetemp = self.getbasetemp()
+        if not numbered:
+            return basetemp.mkdir(basename)
+        else:
+            return py.path.local.make_numbered_dir(prefix=basename + "-", 
+                keep=0, rootdir=basetemp, lock_timeout=None)
+
+    def getcolitems(self):
+        return [self.getfsnode(arg) for arg in self.args]
+
+    def getfsnode(self, path):
+        path = py.path.local(path)
+        if not path.check():
+            raise self.Error("file not found: %s" %(path,))
+        # we want our possibly custom collection tree to start at pkgroot 
+        pkgpath = path.pypkgpath()
+        if pkgpath is None:
+            pkgpath = path.check(file=1) and path.dirpath() or path
+        Dir = self._conftest.rget("Directory", pkgpath)
+        col = Dir(pkgpath)
+        col.config = self 
+        return col._getfsnode(path)
+
+    def getconftest_pathlist(self, name, path=None):
+        """ return a matching value, which needs to be sequence
+            of filenames that will be returned as a list of Path
+            objects (they can be relative to the location 
+            where they were found).
+        """
+        try:
+            mod, relroots = self._conftest.rget_with_confmod(name, path)
+        except KeyError:
+            return None
+        modpath = py.path.local(mod.__file__).dirpath()
+        l = []
+        for relroot in relroots:
+            relroot = relroot.replace("/", py.path.local.sep)
+            l.append(modpath.join(relroot, abs=True))
+        return l 
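+    # Illustrative example (hypothetical layout): a /proj/conftest.py containing
+    #     rsyncdirs = ['mypkg', 'doc']
+    # makes getconftest_pathlist("rsyncdirs") return the local paths
+    # /proj/mypkg and /proj/doc, anchored at the conftest's own directory.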
+             
+    def addoptions(self, groupname, *specs): 
+        """ add a named group of options to the current testing session. 
+            This function gets invoked during testing session initialization. 
+        """ 
+        py.log._apiwarn("1.0", "define plugins to add options", stacklevel=2)
+        group = self._parser.addgroup(groupname)
+        for opt in specs:
+            group._addoption_instance(opt)
+        return self.option 
+
+    def addoption(self, *optnames, **attrs):
+        return self._parser.addoption(*optnames, **attrs)
+
+    def getvalueorskip(self, name, path=None): 
+        """ return getvalue() or call py.test.skip if no value exists. """
+        try:
+            val = self.getvalue(name, path)
+            if val is None:
+                raise KeyError(name)
+            return val
+        except KeyError:
+            py.test.skip("no %r value found" %(name,))
+
+    def getvalue(self, name, path=None): 
+        """ return 'name' value looked up from the 'options'
+            and then from the first conftest file found up 
+            the path (including the path itself). 
+            if path is None, lookup the value in the initial
+            conftest modules found during command line parsing. 
+        """
+        try:
+            return getattr(self.option, name)
+        except AttributeError:
+            return self._conftest.rget(name, path)
+
+    def setsessionclass(self, cls):
+        if self._sessionclass is not None:
+            raise ValueError("sessionclass already set to: %r" %(
+                self._sessionclass))
+        self._sessionclass = cls
+
+    def initsession(self):
+        """ return an initialized session object. """
+        cls = self._sessionclass 
+        if cls is None:
+            from py.impl.test.session import Session
+            cls = Session
+        session = cls(self)
+        self.trace("instantiated session %r" % session)
+        return session
+
+    def _reparse(self, args):
+        """ this is used from tests that want to re-invoke parse(). """
+        #assert args # XXX should not be empty
+        global config_per_process
+        oldconfig = py.test.config
+        try:
+            config_per_process = py.test.config = Config()
+            config_per_process.basetemp = self.mktemp("reparse", numbered=True)
+            config_per_process.parse(args) 
+            return config_per_process
+        finally: 
+            config_per_process = py.test.config = oldconfig 
+
+    def getxspecs(self):
+        xspeclist = []
+        for xspec in self.getvalue("tx"):
+            i = xspec.find("*")
+            try:
+                num = int(xspec[:i])
+            except ValueError:
+                xspeclist.append(xspec)
+            else:
+                xspeclist.extend([xspec[i+1:]] * num)
+        if not xspeclist:
+            raise self.Error("MISSING test execution (tx) nodes: please specify --tx")
+        import execnet
+        return [execnet.XSpec(x) for x in xspeclist]
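+    # Illustration: a --tx value of "3*popen" expands to three "popen" specs,
+    # while entries without a "<num>*" prefix are passed through unchanged.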
+
+    def getrsyncdirs(self):
+        config = self
+        roots = config.option.rsyncdir
+        conftestroots = config.getconftest_pathlist("rsyncdirs")
+        if conftestroots:
+            roots.extend(conftestroots)
+        pydirs = [x.realpath() for x in py._pydirs]
+        roots = [py.path.local(root) for root in roots]
+        for root in roots:
+            if not root.check():
+                raise config.Error("rsyncdir doesn't exist: %r" %(root,))
+            if pydirs is not None and root.basename in ("py", "_py"):
+                pydirs.remove(root) # otherwise it's a conflict
+        roots.extend(pydirs)
+        return roots
+
+#
+# helpers
+#
+
+def checkmarshal(name, value):
+    try:
+        py.std.marshal.dumps(value)
+    except ValueError:
+        raise ValueError("%s=%r is not marshallable" %(name, value))
+
+def gettopdir(args): 
+    """ return the top directory for the given paths.
+        if the common base dir resides in a python package, 
+        the parent directory of the root package is returned. 
+    """
+    args = [py.path.local(arg) for arg in args]
+    p = args and args[0] or None
+    for x in args[1:]:
+        p = p.common(x)
+    assert p, "cannot determine common basedir of %s" %(args,)
+    pkgdir = p.pypkgpath()
+    if pkgdir is None:
+        if p.check(file=1):
+            p = p.dirpath()
+        return p
+    else:
+        return pkgdir.dirpath()
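+# Example (hypothetical layout): for args ["/proj/mypkg/test_x.py"] where
+# /proj/mypkg contains an __init__.py, the package root is /proj/mypkg and
+# the returned topdir is its parent /proj; without a surrounding package the
+# file's own directory is used.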
+
+
+# this is the one per-process instance of py.test configuration 
+config_per_process = Config(
+    pluginmanager=py.test._PluginManager(py._com.comregistry)
+)
+

Added: pypy/trunk/py/impl/test/conftesthandle.py
==============================================================================
--- (empty file)
+++ pypy/trunk/py/impl/test/conftesthandle.py	Wed Nov 11 18:54:49 2009
@@ -0,0 +1,82 @@
+import py
+defaultconftestpath = py.path.local(__file__).dirpath("defaultconftest.py")
+
+class Conftest(object):
+    """ the single place for accessing values and interacting 
+        with conftest modules from py.test objects. 
+
+        Note that triggering Conftest instances to import 
+        conftest.py files may result in added cmdline options. 
+        XXX
+    """ 
+    def __init__(self, path=None, onimport=None):
+        self._path2confmods = {}
+        self._onimport = onimport
+        if path is not None:
+            self.setinitial([path])
+
+    def setinitial(self, args):
+        """ try to find a first anchor path for looking up global values
+            from conftests. This function is usually called _before_  
+            argument parsing.  conftest files may add command line options
+            and we thus have no completely safe way of determining
+            which parts of the arguments are actually related to options
+            and which are file system paths.  We just try here to get 
+            bootstrapped ... 
+        """
+        current = py.path.local()
+        for arg in args + [current]:
+            anchor = current.join(arg, abs=1)
+            if anchor.check(): # we found some file object 
+                self._path2confmods[None] = self.getconftestmodules(anchor)
+                break
+        else:
+            assert 0, "no root of filesystem?"
+
+    def getconftestmodules(self, path):
+        """ return a list of imported conftest modules for the given path.  """ 
+        try:
+            clist = self._path2confmods[path]
+        except KeyError:
+            if path is None:
+                raise ValueError("missing default conftest.")
+            dp = path.dirpath()
+            if dp == path: 
+                return [self.importconftest(defaultconftestpath)]
+            clist = self.getconftestmodules(dp)
+            conftestpath = path.join("conftest.py")
+            if conftestpath.check(file=1):
+                clist.append(self.importconftest(conftestpath))
+            self._path2confmods[path] = clist
+        # be defensive: avoid changes from caller side to
+        # affect us by always returning a copy of the actual list 
+        return clist[:]
+
+    def rget(self, name, path=None):
+        mod, value = self.rget_with_confmod(name, path)
+        return value
+
+    def rget_with_confmod(self, name, path=None):
+        modules = self.getconftestmodules(path)
+        modules.reverse()
+        for mod in modules:
+            try:
+                return mod, getattr(mod, name)
+            except AttributeError:
+                continue
+        raise KeyError(name)
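+    # Sketch of the lookup (paths illustrative): for /proj/sub the module list
+    # is [defaultconftest, /proj/conftest.py, /proj/sub/conftest.py] if those
+    # files exist; rget() walks it in reverse, so the conftest closest to the
+    # given path wins.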
+
+    def importconftest(self, conftestpath):
+        # Using caching here looks redundant since ultimately
+        # sys.modules caches already 
+        assert conftestpath.check(), conftestpath
+        if not conftestpath.dirpath('__init__.py').check(file=1): 
+            # HACK: we don't want any "globally" imported conftest.py, 
+            #       prone to conflicts and subtle problems 
+            modname = str(conftestpath).replace('.', conftestpath.sep)
+            mod = conftestpath.pyimport(modname=modname)
+        else:
+            mod = conftestpath.pyimport()
+        if self._onimport:
+            self._onimport(mod)
+        return mod

Added: pypy/trunk/py/impl/test/defaultconftest.py
==============================================================================
--- (empty file)
+++ pypy/trunk/py/impl/test/defaultconftest.py	Wed Nov 11 18:54:49 2009
@@ -0,0 +1,14 @@
+import py
+
+Module = py.test.collect.Module
+Directory = py.test.collect.Directory
+File = py.test.collect.File
+
+# python collectors 
+Class = py.test.collect.Class
+Generator = py.test.collect.Generator
+Function = py.test.collect.Function
+Instance = py.test.collect.Instance
+
+pytest_plugins = "default runner capture terminal mark skipping tmpdir monkeypatch recwarn pdb pastebin unittest helpconfig nose assertion".split()
+

Added: pypy/trunk/py/impl/test/dist/__init__.py
==============================================================================
--- (empty file)
+++ pypy/trunk/py/impl/test/dist/__init__.py	Wed Nov 11 18:54:49 2009
@@ -0,0 +1 @@
+#

Added: pypy/trunk/py/impl/test/dist/dsession.py
==============================================================================
--- (empty file)
+++ pypy/trunk/py/impl/test/dist/dsession.py	Wed Nov 11 18:54:49 2009
@@ -0,0 +1,280 @@
+""" 
+
+    EXPERIMENTAL dsession session  (for dist/non-dist unification)
+
+"""
+
+import py
+from py.impl.test.session import Session
+from py.impl.test import outcome 
+from py.impl.test.dist.nodemanage import NodeManager
+queue = py.builtin._tryimport('queue', 'Queue')
+
+debug_file = None # open('/tmp/loop.log', 'w')
+def debug(*args):
+    if debug_file is not None:
+        s = " ".join(map(str, args))
+        debug_file.write(s+"\n")
+        debug_file.flush()
+
+class LoopState(object):
+    def __init__(self, dsession, colitems):
+        self.dsession = dsession
+        self.colitems = colitems
+        self.exitstatus = None 
+        # loopstate.dowork is False after reschedule events 
+        # because otherwise we might very busily loop 
+        # waiting for a host to become ready.  
+        self.dowork = True
+        self.shuttingdown = False
+        self.testsfailed = False
+
+    def __repr__(self):
+        return "<LoopState exitstatus=%r shuttingdown=%r len(colitems)=%d>" % (
+            self.exitstatus, self.shuttingdown, len(self.colitems))
+
+    def pytest_runtest_logreport(self, report):
+        if report.item in self.dsession.item2nodes:
+            if report.when != "teardown": # otherwise we already managed it
+                self.dsession.removeitem(report.item, report.node)
+        if report.failed:
+            self.testsfailed = True
+
+    def pytest_collectreport(self, report):
+        if report.passed:
+            self.colitems.extend(report.result)
+
+    def pytest_testnodeready(self, node):
+        self.dsession.addnode(node)
+
+    def pytest_testnodedown(self, node, error=None):
+        pending = self.dsession.removenode(node)
+        if pending:
+            if error:
+                crashitem = pending[0]
+                debug("determined crashitem", crashitem)
+                self.dsession.handle_crashitem(crashitem, node)
+                # XXX recovery handling for "each"? 
+                # currently pending items are not retried 
+                if self.dsession.config.option.dist == "load":
+                    self.colitems.extend(pending[1:])
+
+    def pytest_rescheduleitems(self, items):
+        self.colitems.extend(items)
+        self.dowork = False # avoid busywait
+
+class DSession(Session):
+    """ 
+        Session drives the collection and running of tests
+        and generates test events for reporters. 
+    """ 
+    MAXITEMSPERHOST = 15
+    
+    def __init__(self, config):
+        self.queue = queue.Queue()
+        self.node2pending = {}
+        self.item2nodes = {}
+        super(DSession, self).__init__(config=config)
+
+    #def pytest_configure(self, __multicall__, config):
+    #    __multicall__.execute()
+    #    try:
+    #        config.getxspecs()
+    #    except config.Error:
+    #        print
+    #        raise config.Error("dist mode %r needs test execution environments, "
+    #                           "none found." %(config.option.dist))
+
+    def main(self, colitems=None):
+        colitems = self.getinitialitems(colitems)
+        self.sessionstarts()
+        self.setup()
+        exitstatus = self.loop(colitems)
+        self.teardown()
+        self.sessionfinishes(exitstatus=exitstatus) 
+        return exitstatus
+
+    def loop_once(self, loopstate):
+        if loopstate.shuttingdown:
+            return self.loop_once_shutdown(loopstate)
+        colitems = loopstate.colitems 
+        if loopstate.dowork and colitems:
+            self.triggertesting(loopstate.colitems) 
+            colitems[:] = []
+        # we use a timeout here so that control-C gets through 
+        while 1:
+            try:
+                eventcall = self.queue.get(timeout=2.0)
+                break
+            except queue.Empty:
+                continue
+        loopstate.dowork = True 
+          
+        callname, args, kwargs = eventcall
+        if callname is not None:
+            call = getattr(self.config.hook, callname)
+            assert not args
+            call(**kwargs)
+
+        # termination conditions
+        if ((loopstate.testsfailed and self.config.option.exitfirst) or 
+            (not self.item2nodes and not colitems and not self.queue.qsize())):
+            self.triggershutdown()
+            loopstate.shuttingdown = True
+        elif not self.node2pending:
+            loopstate.exitstatus = outcome.EXIT_NOHOSTS
+           
+    def loop_once_shutdown(self, loopstate):
+        # once we are in shutdown mode we don't send 
+        # events other than HostDown upstream 
+        eventname, args, kwargs = self.queue.get()
+        if eventname == "pytest_testnodedown":
+            self.config.hook.pytest_testnodedown(**kwargs)
+            self.removenode(kwargs['node'])
+        elif eventname == "pytest_runtest_logreport":
+            # might be some teardown report
+            self.config.hook.pytest_runtest_logreport(**kwargs)
+        if not self.node2pending:
+            # finished
+            if loopstate.testsfailed:
+                loopstate.exitstatus = outcome.EXIT_TESTSFAILED
+            else:
+                loopstate.exitstatus = outcome.EXIT_OK
+        #self.config.pluginmanager.unregister(loopstate)
+
+    def _initloopstate(self, colitems):
+        loopstate = LoopState(self, colitems)
+        self.config.pluginmanager.register(loopstate)
+        return loopstate
+
+    def loop(self, colitems):
+        try:
+            loopstate = self._initloopstate(colitems)
+            loopstate.dowork = False # first receive at least one HostUp event
+            while 1:
+                self.loop_once(loopstate)
+                if loopstate.exitstatus is not None:
+                    exitstatus = loopstate.exitstatus
+                    break 
+        except KeyboardInterrupt:
+            excinfo = py.code.ExceptionInfo()
+            self.config.hook.pytest_keyboard_interrupt(excinfo=excinfo)
+            exitstatus = outcome.EXIT_INTERRUPTED
+        except:
+            self.config.pluginmanager.notify_exception()
+            exitstatus = outcome.EXIT_INTERNALERROR
+        self.config.pluginmanager.unregister(loopstate)
+        if exitstatus == 0 and self._testsfailed:
+            exitstatus = outcome.EXIT_TESTSFAILED
+        return exitstatus
+
+    def triggershutdown(self):
+        for node in self.node2pending:
+            node.shutdown()
+
+    def addnode(self, node):
+        assert node not in self.node2pending
+        self.node2pending[node] = []
+
+    def removenode(self, node):
+        try:
+            pending = self.node2pending.pop(node)
+        except KeyError:
+            # this happens if we didn't receive a testnodeready event yet
+            return []
+        for item in pending:
+            l = self.item2nodes[item]
+            l.remove(node)
+            if not l:
+                del self.item2nodes[item]
+        return pending
+
+    def triggertesting(self, colitems):
+        colitems = self.filteritems(colitems)
+        senditems = []
+        for next in colitems:
+            if isinstance(next, py.test.collect.Item):
+                senditems.append(next)
+            else:
+                self.config.hook.pytest_collectstart(collector=next)
+                colrep = self.config.hook.pytest_make_collect_report(collector=next)
+                self.queueevent("pytest_collectreport", report=colrep)
+        if self.config.option.dist == "each":
+            self.senditems_each(senditems)
+        else:
+            # XXX assert self.config.option.dist == "load"
+            self.senditems_load(senditems)
+
+    def queueevent(self, eventname, **kwargs):
+        self.queue.put((eventname, (), kwargs)) 
+
+    def senditems_each(self, tosend):
+        if not tosend:
+            return 
+        room = self.MAXITEMSPERHOST
+        for node, pending in self.node2pending.items():
+            room = min(self.MAXITEMSPERHOST - len(pending), room)
+        sending = tosend[:room]
+        for node, pending in self.node2pending.items():
+            node.sendlist(sending)
+            pending.extend(sending)
+            for item in sending:
+                nodes = self.item2nodes.setdefault(item, [])
+                assert node not in nodes
+                nodes.append(node)
+                self.config.hook.pytest_itemstart(item=item, node=node)
+        tosend[:] = tosend[room:]  # update inplace
+        if tosend:
+            # we have some left, give it to the main loop
+            self.queueevent("pytest_rescheduleitems", items=tosend)
+
+    def senditems_load(self, tosend):
+        if not tosend:
+            return 
+        for node, pending in self.node2pending.items():
+            room = self.MAXITEMSPERHOST - len(pending)
+            if room > 0:
+                sending = tosend[:room]
+                node.sendlist(sending)
+                for item in sending:
+                    #assert item not in self.item2node, (
+                    #    "sending same item %r to multiple "
+                    #    "not implemented" %(item,))
+                    self.item2nodes.setdefault(item, []).append(node)
+                    self.config.hook.pytest_itemstart(item=item, node=node)
+                pending.extend(sending)
+                tosend[:] = tosend[room:]  # update inplace
+                if not tosend:
+                    break
+        if tosend:
+            # we have some left, give it to the main loop
+            self.queueevent("pytest_rescheduleitems", items=tosend)
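+    # Numeric sketch: with MAXITEMSPERHOST = 15 and two nodes that each have
+    # 10 pending items, a batch of 12 new items is sent as 5 + 5 and the
+    # remaining 2 are requeued via a "pytest_rescheduleitems" event.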
+
+    def removeitem(self, item, node):
+        if item not in self.item2nodes:
+            raise AssertionError(item, self.item2nodes)
+        nodes = self.item2nodes[item]
+        if node in nodes: # the node might have gone down already
+            nodes.remove(node)
+        if not nodes:
+            del self.item2nodes[item]
+        pending = self.node2pending[node]
+        pending.remove(item)
+
+    def handle_crashitem(self, item, node):
+        runner = item.config.pluginmanager.getplugin("runner") 
+        info = "!!! Node %r crashed during running of test %r" %(node, item)
+        rep = runner.ItemTestReport(item=item, excinfo=info, when="???")
+        rep.node = node
+        self.config.hook.pytest_runtest_logreport(report=rep)
+
+    def setup(self):
+        """ setup any neccessary resources ahead of the test run. """
+        self.nodemanager = NodeManager(self.config)
+        self.nodemanager.setup_nodes(putevent=self.queue.put)
+        if self.config.option.dist == "each":
+            self.nodemanager.wait_nodesready(5.0)
+
+    def teardown(self):
+        """ teardown any resources after a test run. """ 
+        self.nodemanager.teardown_nodes()

Added: pypy/trunk/py/impl/test/dist/gwmanage.py
==============================================================================
--- (empty file)
+++ pypy/trunk/py/impl/test/dist/gwmanage.py	Wed Nov 11 18:54:49 2009
@@ -0,0 +1,124 @@
+"""
+    instantiating, managing and rsyncing to test hosts
+"""
+
+import py
+import sys, os
+import execnet
+from execnet.gateway_base import RemoteError
+
+class GatewayManager:
+    RemoteError = RemoteError
+    def __init__(self, specs, hook, defaultchdir="pyexecnetcache"):
+        self.gateways = []
+        self.specs = []
+        self.hook = hook
+        for spec in specs:
+            if not isinstance(spec, execnet.XSpec):
+                spec = execnet.XSpec(spec)
+            if not spec.chdir and not spec.popen:
+                spec.chdir = defaultchdir
+            self.specs.append(spec)
+
+    def makegateways(self):
+        assert not self.gateways
+        for spec in self.specs:
+            gw = execnet.makegateway(spec)
+            self.gateways.append(gw)
+            gw.id = "[%s]" % len(self.gateways)
+            self.hook.pytest_gwmanage_newgateway(
+                gateway=gw, platinfo=gw._rinfo())
+
+    def getgateways(self, remote=True, inplacelocal=True):
+        if not self.gateways and self.specs:
+            self.makegateways()
+        l = []
+        for gw in self.gateways:
+            if gw.spec._samefilesystem():
+                if inplacelocal:
+                    l.append(gw)
+            else:
+                if remote:
+                    l.append(gw)
+        return execnet.MultiGateway(gateways=l)
+
+    def multi_exec(self, source, inplacelocal=True):
+        """ remote execute code on all gateways. 
+            @param inplacelocal=False: don't send code to inplacelocal hosts. 
+        """
+        multigw = self.getgateways(inplacelocal=inplacelocal)
+        return multigw.remote_exec(source)
+
+    def multi_chdir(self, basename, inplacelocal=True):
+        """ perform a remote chdir to the given path, may be relative. 
+            @param inplacelocal=False: don't send code to inplacelocal hosts. 
+        """ 
+        self.multi_exec("import os ; os.chdir(%r)" % basename, 
+                        inplacelocal=inplacelocal).waitclose()
+
+    def rsync(self, source, notify=None, verbose=False, ignores=None):
+        """ perform rsync to all remote hosts. 
+        """ 
+        rsync = HostRSync(source, verbose=verbose, ignores=ignores)
+        seen = py.builtin.set()
+        gateways = []
+        for gateway in self.gateways:
+            spec = gateway.spec
+            if not spec._samefilesystem():
+                if spec not in seen:
+                    def finished():
+                        if notify:
+                            notify("rsyncrootready", spec, source)
+                    rsync.add_target_host(gateway, finished=finished)
+                    seen.add(spec)
+                    gateways.append(gateway)
+        if seen:
+            self.hook.pytest_gwmanage_rsyncstart(
+                source=source, 
+                gateways=gateways, 
+            )
+            rsync.send()
+            self.hook.pytest_gwmanage_rsyncfinish(
+                source=source, 
+                gateways=gateways, 
+            )
+
+    def exit(self):
+        while self.gateways:
+            gw = self.gateways.pop()
+            gw.exit()
+
+class HostRSync(execnet.RSync):
+    """ RSyncer that filters out common files 
+    """
+    def __init__(self, sourcedir, *args, **kwargs):
+        self._synced = {}
+        ignores= None
+        if 'ignores' in kwargs:
+            ignores = kwargs.pop('ignores')
+        self._ignores = ignores or []
+        super(HostRSync, self).__init__(sourcedir=sourcedir, **kwargs)
+
+    def filter(self, path):
+        path = py.path.local(path)
+        if not path.ext in ('.pyc', '.pyo'):
+            if not path.basename.endswith('~'): 
+                if path.check(dotfile=0):
+                    for x in self._ignores:
+                        if path == x:
+                            break
+                    else:
+                        return True
+
+    def add_target_host(self, gateway, finished=None):
+        remotepath = os.path.basename(self._sourcedir)
+        super(HostRSync, self).add_target(gateway, remotepath, 
+                                          finishedcallback=finished,
+                                          delete=True,)
+
+    def _report_send_file(self, gateway, modified_rel_path):
+        if self._verbose:
+            path = os.path.basename(self._sourcedir) + "/" + modified_rel_path
+            remotepath = gateway.spec.chdir
+            py.builtin.print_('%s:%s <= %s' %
+                              (gateway.spec, remotepath, path))

Added: pypy/trunk/py/impl/test/dist/mypickle.py
==============================================================================
--- (empty file)
+++ pypy/trunk/py/impl/test/dist/mypickle.py	Wed Nov 11 18:54:49 2009
@@ -0,0 +1,191 @@
+"""
+
+   Pickling support for two processes that want to exchange 
+   *immutable* object instances.  Immutable in the sense 
+   that the receiving side of an object can modify its 
+   copy but when it sends it back the original sending 
+   side will continue to see its unmodified version
+   (and no actual state will go over the wire).
+
+   This module also implements an experimental 
+   execnet pickling channel using this idea. 
+
+"""
+
+import py
+import sys, os, struct
+#debug = open("log-mypickle-%d" % os.getpid(), 'w')
+
+if sys.version_info >= (3,0):
+    makekey = lambda x: x
+    fromkey = lambda x: x 
+    from pickle import _Pickler as Pickler
+    from pickle import _Unpickler as Unpickler
+else:
+    makekey = str
+    fromkey = int
+    from pickle import Pickler, Unpickler
+
+
+class MyPickler(Pickler):
+    """ Pickler with a custom memoize()
+        to take care of unique ID creation. 
+        See the usage in ImmutablePickler
+        XXX we could probably extend Pickler 
+            and Unpickler classes to directly
+            update the other's memos. 
+    """
+    def __init__(self, file, protocol, uneven):
+        Pickler.__init__(self, file, protocol)
+        self.uneven = uneven
+        
+    def memoize(self, obj):
+        if self.fast:
+            return
+        assert id(obj) not in self.memo
+        memo_len = len(self.memo)
+        key = memo_len * 2 + self.uneven
+        self.write(self.put(key))
+        self.memo[id(obj)] = key, obj
+
+    #if sys.version_info < (3,0):
+    #    def save_string(self, obj, pack=struct.pack):
+    #        obj = unicode(obj)
+    #        self.save_unicode(obj, pack=pack)
+    #    Pickler.dispatch[str] = save_string 
+
+class ImmutablePickler:
+    def __init__(self, uneven, protocol=0):
+        """ ImmutablePicklers are instantiated in Pairs. 
+            The two sides need to create unique IDs
+            while pickling their objects.  This is
+            done by using either even or uneven 
+            numbers, depending on the instantiation
+            parameter.
+        """
+        self._picklememo = {}
+        self._unpicklememo = {}
+        self._protocol = protocol
+        self.uneven = uneven and 1 or 0
+
+    def selfmemoize(self, obj):
+        # this is for feeding objects to ourselves,
+        # which can be the case e.g. if you want to pickle 
+        # from a forked process back to the original one
+        f = py.io.BytesIO()
+        pickler = MyPickler(f, self._protocol, uneven=self.uneven)
+        pickler.memo = self._picklememo
+        pickler.memoize(obj)
+        self._updateunpicklememo()
+
+    def dumps(self, obj):
+        f = py.io.BytesIO()
+        pickler = MyPickler(f, self._protocol, uneven=self.uneven)
+        pickler.memo = self._picklememo
+        pickler.dump(obj)
+        if obj is not None:
+            self._updateunpicklememo()
+        #print >>debug, "dumped", obj 
+        #print >>debug, "picklememo", self._picklememo
+        return f.getvalue()
+
+    def loads(self, string):
+        f = py.io.BytesIO(string)
+        unpickler = Unpickler(f)
+        unpickler.memo = self._unpicklememo
+        res = unpickler.load()
+        self._updatepicklememo()
+        #print >>debug, "loaded", res
+        #print >>debug, "unpicklememo", self._unpicklememo
+        return res
+
+    def _updatepicklememo(self):
+        for x, obj in self._unpicklememo.items():
+            self._picklememo[id(obj)] = (fromkey(x), obj)
+
+    def _updateunpicklememo(self):
+        for key,obj in self._picklememo.values():
+            key = makekey(key) 
+            if key in self._unpicklememo:
+                assert self._unpicklememo[key] is obj
+            self._unpicklememo[key] = obj
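+    # Minimal pairing sketch (names illustrative): the two sides create
+    #     side_a = ImmutablePickler(uneven=0)
+    #     side_b = ImmutablePickler(uneven=1)
+    # and exchange bytes via side_b.loads(side_a.dumps(obj)); memo keys never
+    # collide because one side allocates only even ids, the other only uneven.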
+
+NO_ENDMARKER_WANTED = object()
+
+class UnpickleError(Exception):
+    """ Problems while unpickling. """
+    def __init__(self, formatted):
+        self.formatted = formatted
+        Exception.__init__(self, formatted)
+    def __str__(self):
+        return self.formatted
+
+class PickleChannel(object):
+    """ PickleChannels wrap execnet channels 
+        and allow to send/receive by using
+        "immutable pickling". 
+    """
+    _unpicklingerror = None
+    def __init__(self, channel):
+        self._channel = channel
+        # we use the fact that each side of a 
+        # gateway connection counts with uneven
+        # or even numbers depending on which 
+        # side it is (for the purpose of creating
+        # unique ids - which is what we need them for here)
+        uneven = channel.gateway._channelfactory.count % 2 
+        self._ipickle = ImmutablePickler(uneven=uneven) 
+        self.RemoteError = channel.RemoteError
+
+    def send(self, obj):
+        from execnet.gateway_base import Channel
+        if not isinstance(obj, Channel):
+            pickled_obj = self._ipickle.dumps(obj)
+            self._channel.send(pickled_obj)
+        else:
+            self._channel.send(obj)
+
+    def receive(self):
+        pickled_obj = self._channel.receive()
+        return self._unpickle(pickled_obj)
+
+    def _unpickle(self, pickled_obj):
+        if isinstance(pickled_obj, self._channel.__class__):
+            return pickled_obj
+        return self._ipickle.loads(pickled_obj)
+
+    def _getremoteerror(self):
+        return self._unpicklingerror or self._channel._getremoteerror()
+
+    def close(self):
+        return self._channel.close()
+
+    def isclosed(self):
+        return self._channel.isclosed()
+
+    def waitclose(self, timeout=None):
+        return self._channel.waitclose(timeout=timeout)
+
+    def setcallback(self, callback, endmarker=NO_ENDMARKER_WANTED):
+        if endmarker is NO_ENDMARKER_WANTED:
+            def unpickle_callback(pickled_obj):
+                obj = self._unpickle(pickled_obj)
+                callback(obj)
+            self._channel.setcallback(unpickle_callback)
+            return
+        uniqueendmarker = object()
+        def unpickle_callback(pickled_obj):
+            if pickled_obj is uniqueendmarker:
+                return callback(endmarker)
+            try:
+                obj = self._unpickle(pickled_obj)
+            except KeyboardInterrupt:
+                raise
+            except:
+                excinfo = py.code.ExceptionInfo()
+                formatted = str(excinfo.getrepr(showlocals=True,funcargs=True))
+                self._unpicklingerror = UnpickleError(formatted)
+                callback(endmarker)
+            else:
+                callback(obj)
+        self._channel.setcallback(unpickle_callback, uniqueendmarker)

Added: pypy/trunk/py/impl/test/dist/nodemanage.py
==============================================================================
--- (empty file)
+++ pypy/trunk/py/impl/test/dist/nodemanage.py	Wed Nov 11 18:54:49 2009
@@ -0,0 +1,81 @@
+import py
+import sys, os
+from py.impl.test.dist.txnode import TXNode
+from py.impl.test.dist.gwmanage import GatewayManager
+
+    
+class NodeManager(object):
+    def __init__(self, config, specs=None):
+        self.config = config 
+        if specs is None:
+            specs = self.config.getxspecs()
+        self.roots = self.config.getrsyncdirs()
+        self.gwmanager = GatewayManager(specs, config.hook)
+        self.nodes = []
+        self._nodesready = py.std.threading.Event()
+
+    def trace(self, msg):
+        self.config.hook.pytest_trace(category="nodemanage", msg=msg)
+
+    def config_getignores(self):
+        return self.config.getconftest_pathlist("rsyncignore")
+
+    def rsync_roots(self):
+        """ make sure that all remote gateways
+            have the same set of roots in their
+            current directory. 
+        """
+        self.makegateways()
+        options = {
+            'ignores': self.config_getignores(), 
+            'verbose': self.config.option.verbose,
+        }
+        if self.roots:
+            # send each rsync root
+            for root in self.roots:
+                self.gwmanager.rsync(root, **options)
+        else: 
+            XXX # do we want to care for situations without explicit rsyncdirs? 
+            # we transfer our topdir as the root
+            self.gwmanager.rsync(self.config.topdir, **options)
+            # and cd into it 
+            self.gwmanager.multi_chdir(self.config.topdir.basename, inplacelocal=False)
+
+    def makegateways(self):
+        # we change to the topdir so that 
+        # PopenGateways will have their cwd 
+        # set such that unpickling configs will 
+        # pick it up as the right topdir 
+        # (for other gateways this chdir is irrelevant)
+        self.trace("making gateways")
+        old = self.config.topdir.chdir()  
+        try:
+            self.gwmanager.makegateways()
+        finally:
+            old.chdir()
+
+    def setup_nodes(self, putevent):
+        self.rsync_roots()
+        self.trace("setting up nodes")
+        for gateway in self.gwmanager.gateways:
+            node = TXNode(gateway, self.config, putevent, slaveready=self._slaveready)
+            gateway.node = node  # to keep node alive 
+            self.trace("started node %r" % node)
+
+    def _slaveready(self, node):
+        #assert node.gateway == node.gateway
+        #assert node.gateway.node == node
+        self.nodes.append(node)
+        self.trace("%s slave node ready %r" % (node.gateway.id, node))
+        if len(self.nodes) == len(self.gwmanager.gateways):
+            self._nodesready.set()
+   
+    def wait_nodesready(self, timeout=None):
+        self._nodesready.wait(timeout)
+        if not self._nodesready.isSet():
+            raise IOError("nodes did not get ready for %r secs" % timeout)
+
+    def teardown_nodes(self):
+        # XXX do teardown nodes? 
+        self.gwmanager.exit()
+

Added: pypy/trunk/py/impl/test/dist/txnode.py
==============================================================================
--- (empty file)
+++ pypy/trunk/py/impl/test/dist/txnode.py	Wed Nov 11 18:54:49 2009
@@ -0,0 +1,153 @@
+"""
+    Manage setup, running and local representation of remote nodes/processes. 
+"""
+import py
+from py.impl.test.dist.mypickle import PickleChannel
+
+class TXNode(object):
+    """ Represents a Test Execution environment in the controlling process. 
+        - sets up a slave node through an execnet gateway 
+        - manages sending of test-items and receipt of results and events
+        - creates events when the remote side crashes 
+    """
+    ENDMARK = -1
+
+    def __init__(self, gateway, config, putevent, slaveready=None):
+        self.config = config 
+        self.putevent = putevent 
+        self.gateway = gateway
+        self.channel = install_slave(gateway, config)
+        self._sendslaveready = slaveready
+        self.channel.setcallback(self.callback, endmarker=self.ENDMARK)
+        self._down = False
+
+    def __repr__(self):
+        id = self.gateway.id
+        status = self._down and 'true' or 'false'
+        return "<TXNode %r down=%s>" %(id, status)
+
+    def notify(self, eventname, *args, **kwargs):
+        assert not args
+        self.putevent((eventname, args, kwargs))
+      
+    def callback(self, eventcall):
+        """ this gets called for each object we receive from 
+            the other side and when the channel closes. 
+
+            Note that channel callbacks run in the receiver
+            thread of execnet gateways - we need to 
+            avoid raising exceptions or doing heavy work.
+        """
+        try:
+            if eventcall == self.ENDMARK:
+                err = self.channel._getremoteerror()
+                if not self._down:
+                    if not err:
+                        err = "Not properly terminated"
+                    self.notify("pytest_testnodedown", node=self, error=err)
+                    self._down = True
+                return
+            eventname, args, kwargs = eventcall 
+            if eventname == "slaveready":
+                if self._sendslaveready:
+                    self._sendslaveready(self)
+                self.notify("pytest_testnodeready", node=self)
+            elif eventname == "slavefinished":
+                self._down = True
+                self.notify("pytest_testnodedown", error=None, node=self)
+            elif eventname == "pytest_runtest_logreport":
+                rep = kwargs['report']
+                rep.node = self
+                self.notify("pytest_runtest_logreport", report=rep)
+            else:
+                self.notify(eventname, *args, **kwargs)
+        except KeyboardInterrupt: 
+            # should not land in receiver-thread
+            raise 
+        except:
+            excinfo = py.code.ExceptionInfo()
+            py.builtin.print_("!" * 20, excinfo)
+            self.config.pluginmanager.notify_exception(excinfo)
+
+    def send(self, item):
+        assert item is not None
+        self.channel.send(item)
+
+    def sendlist(self, itemlist):
+        self.channel.send(itemlist)
+
+    def shutdown(self):
+        self.channel.send(None)
+
+# setting up slave code 
+def install_slave(gateway, config):
+    channel = gateway.remote_exec(source="""
+        import os, sys 
+        sys.path.insert(0, os.getcwd()) 
+        from py.impl.test.dist.mypickle import PickleChannel
+        from py.impl.test.dist.txnode import SlaveNode
+        channel = PickleChannel(channel)
+        slavenode = SlaveNode(channel)
+        slavenode.run()
+    """)
+    channel = PickleChannel(channel)
+    basetemp = None
+    if gateway.spec.popen:
+        popenbase = config.ensuretemp("popen")
+        basetemp = py.path.local.make_numbered_dir(prefix="slave-", 
+            keep=0, rootdir=popenbase)
+        basetemp = str(basetemp)
+    channel.send((config, basetemp))
+    return channel
+
+class SlaveNode(object):
+    def __init__(self, channel):
+        self.channel = channel
+
+    def __repr__(self):
+        return "<%s channel=%s>" %(self.__class__.__name__, self.channel)
+
+    def sendevent(self, eventname, *args, **kwargs):
+        self.channel.send((eventname, args, kwargs))
+
+    def pytest_runtest_logreport(self, report):
+        self.sendevent("pytest_runtest_logreport", report=report)
+
+    def run(self):
+        channel = self.channel
+        self.config, basetemp = channel.receive()
+        if basetemp:
+            self.config.basetemp = py.path.local(basetemp)
+        self.config.pluginmanager.do_configure(self.config)
+        self.config.pluginmanager.register(self)
+        self.runner = self.config.pluginmanager.getplugin("pytest_runner")
+        self.sendevent("slaveready")
+        try:
+            while 1:
+                task = channel.receive()
+                if task is None: 
+                    self.sendevent("slavefinished")
+                    break
+                if isinstance(task, list):
+                    for item in task:
+                        self.run_single(item=item)
+                else:
+                    self.run_single(item=task)
+        except KeyboardInterrupt:
+            raise
+        except:
+            er = py.code.ExceptionInfo().getrepr(funcargs=True, showlocals=True)
+            self.sendevent("pytest_internalerror", excrepr=er)
+            raise
+
+    def run_single(self, item):
+        call = self.runner.CallInfo(item._checkcollectable, when='setup')
+        if call.excinfo:
+            # likely it is not collectable here because of
+            # platform/import-dependency induced skips 
+            # we fake a setup-error report with the obtained exception
+            # and do not care about capturing or non-runner hooks 
+            rep = self.runner.pytest_runtest_makereport(item=item, call=call)
+            self.pytest_runtest_logreport(rep)
+            return
+        item.config.hook.pytest_runtest_protocol(item=item) 

Added: pypy/trunk/py/impl/test/funcargs.py
==============================================================================
--- (empty file)
+++ pypy/trunk/py/impl/test/funcargs.py	Wed Nov 11 18:54:49 2009
@@ -0,0 +1,194 @@
+import py
+
+def getfuncargnames(function):
+    argnames = py.std.inspect.getargs(py.code.getrawcode(function))[0]
+    startindex = py.std.inspect.ismethod(function) and 1 or 0
+    defaults = getattr(function, 'func_defaults', 
+                       getattr(function, '__defaults__', None)) or ()
+    numdefaults = len(defaults)
+    if numdefaults:
+        return argnames[startindex:-numdefaults]
+    return argnames[startindex:]
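+# Example: for a bound method "def test_send(self, tmpdir, extra=None)" this
+# returns ["tmpdir"] - "self" is skipped for methods and arguments with
+# defaults are not treated as funcargs.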
+    
+def fillfuncargs(function):
+    """ fill missing funcargs. """ 
+    request = FuncargRequest(pyfuncitem=function)
+    request._fillfuncargs()
+
+
+_notexists = object()
+class CallSpec:
+    def __init__(self, funcargs, id, param):
+        self.funcargs = funcargs 
+        self.id = id
+        if param is not _notexists:
+            self.param = param 
+    def __repr__(self):
+        return "<CallSpec id=%r param=%r funcargs=%r>" %(
+            self.id, getattr(self, 'param', '?'), self.funcargs)
+
+class Metafunc:
+    def __init__(self, function, config=None, cls=None, module=None):
+        self.config = config
+        self.module = module 
+        self.function = function
+        self.funcargnames = getfuncargnames(function)
+        self.cls = cls
+        self.module = module
+        self._calls = []
+        self._ids = py.builtin.set()
+
+    def addcall(self, funcargs=None, id=_notexists, param=_notexists):
+        assert funcargs is None or isinstance(funcargs, dict)
+        if id is None:
+            raise ValueError("id=None not allowed") 
+        if id is _notexists:
+            id = len(self._calls)
+        id = str(id)
+        if id in self._ids:
+            raise ValueError("duplicate id %r" % id)
+        self._ids.add(id)
+        self._calls.append(CallSpec(funcargs, id, param))
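+    # Hypothetical generate-tests hook driving addcall():
+    #     def pytest_generate_tests(metafunc):
+    #         for value in (0, 1, 2):
+    #             metafunc.addcall(funcargs={"myarg": value}, id=str(value))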
+
+class FunctionCollector(py.test.collect.Collector):
+    def __init__(self, name, parent, calls):
+        super(FunctionCollector, self).__init__(name, parent)
+        self.calls = calls 
+        self.obj = getattr(self.parent.obj, name) 
+       
+    def collect(self):
+        l = []
+        for callspec in self.calls:
+            name = "%s[%s]" %(self.name, callspec.id)
+            function = self.parent.Function(name=name, parent=self, 
+                callspec=callspec, callobj=self.obj)
+            l.append(function)
+        return l
+
+    def reportinfo(self):
+        try:
+            return self._fslineno, self.name
+        except AttributeError:
+            pass        
+        fspath, lineno = py.code.getfslineno(self.obj)
+        self._fslineno = fspath, lineno
+        return fspath, lineno, self.name
+    
+
+class FuncargRequest:
+    _argprefix = "pytest_funcarg__"
+    _argname = None
+
+    class Error(LookupError):
+        """ error on performing funcarg request. """ 
+
+    def __init__(self, pyfuncitem):
+        self._pyfuncitem = pyfuncitem
+        self.function = pyfuncitem.obj
+        self.module = pyfuncitem.getparent(py.test.collect.Module).obj
+        clscol = pyfuncitem.getparent(py.test.collect.Class)
+        self.cls = clscol and clscol.obj or None
+        self.instance = py.builtin._getimself(self.function)
+        self.config = pyfuncitem.config
+        self.fspath = pyfuncitem.fspath
+        if hasattr(pyfuncitem, '_requestparam'):
+            self.param = pyfuncitem._requestparam 
+        self._plugins = self.config.pluginmanager.getplugins()
+        self._plugins.append(self.module)
+        if self.instance is not None:
+            self._plugins.append(self.instance)
+        self._funcargs  = self._pyfuncitem.funcargs.copy()
+        self._name2factory = {}
+        self._currentarg = None
+
+    def _fillfuncargs(self):
+        argnames = getfuncargnames(self.function)
+        if argnames:
+            assert not getattr(self._pyfuncitem, '_args', None), (
+                "yielded functions cannot have funcargs")
+        for argname in argnames:
+            if argname not in self._pyfuncitem.funcargs:
+                self._pyfuncitem.funcargs[argname] = self.getfuncargvalue(argname)
+
+    def cached_setup(self, setup, teardown=None, scope="module", extrakey=None):
+        """ cache and return result of calling setup().  
+
+        The requested argument name, the scope and the ``extrakey`` 
+        determine the cache key.  The scope also determines when 
+        teardown(result) will be called.  Valid scopes are: 
+        scope == 'function': when the single test function run finishes. 
+        scope == 'module': when tests in a different module are run. 
+        scope == 'session': when the tests of the session have run. 
+        """
+        if not hasattr(self.config, '_setupcache'):
+            self.config._setupcache = {} # XXX weakref? 
+        cachekey = (self._currentarg, self._getscopeitem(scope), extrakey)
+        cache = self.config._setupcache
+        try:
+            val = cache[cachekey]
+        except KeyError:
+            val = setup()
+            cache[cachekey] = val 
+            if teardown is not None:
+                def finalizer():
+                    del cache[cachekey]
+                    teardown(val)
+                self._addfinalizer(finalizer, scope=scope)
+        return val 
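+    # Illustrative factory (names hypothetical) built on cached_setup:
+    #     def pytest_funcarg__db(request):
+    #         return request.cached_setup(setup=make_db, teardown=close_db,
+    #                                     scope="session")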
+
+    def getfuncargvalue(self, argname):
+        try:
+            return self._funcargs[argname]
+        except KeyError:
+            pass
+        if argname not in self._name2factory:
+            self._name2factory[argname] = self.config.pluginmanager.listattr(
+                    plugins=self._plugins, 
+                    attrname=self._argprefix + str(argname)
+            )
+        #else: we are called recursively  
+        if not self._name2factory[argname]:
+            self._raiselookupfailed(argname)
+        funcargfactory = self._name2factory[argname].pop()
+        oldarg = self._currentarg
+        self._currentarg = argname 
+        try:
+            self._funcargs[argname] = res = funcargfactory(request=self)
+        finally:
+            self._currentarg = oldarg
+        return res
+
+    def _getscopeitem(self, scope):
+        if scope == "function":
+            return self._pyfuncitem
+        elif scope == "module":
+            return self._pyfuncitem.getparent(py.test.collect.Module)
+        elif scope == "session":
+            return None
+        raise ValueError("unknown finalization scope %r" %(scope,))
+
+    def _addfinalizer(self, finalizer, scope):
+        colitem = self._getscopeitem(scope)
+        self.config._setupstate.addfinalizer(
+            finalizer=finalizer, colitem=colitem)
+
+    def addfinalizer(self, finalizer):
+        """ call the given finalizer after test function finished execution. """ 
+        self._addfinalizer(finalizer, scope="function") 
+
+    def __repr__(self):
+        return "<FuncargRequest for %r>" % (self._pyfuncitem,)
+
+    def _raiselookupfailed(self, argname):
+        available = []
+        for plugin in self._plugins:
+            for name in vars(plugin):
+                if name.startswith(self._argprefix):
+                    name = name[len(self._argprefix):]
+                    if name not in available:
+                        available.append(name) 
+        fspath, lineno, msg = self._pyfuncitem.reportinfo()
+        line = "%s:%s" %(fspath, lineno)
+        msg = "funcargument %r not found for: %s" %(argname, line)
+        msg += "\n available funcargs: %s" %(", ".join(available),)
+        raise self.Error(msg)

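For orientation while reading the diff: the FuncargRequest above is the `request` object handed to `pytest_funcarg__<name>` factories (the prefix checked further down in pluginmanager.py). A hedged sketch of how cached_setup() might be used from a hypothetical conftest.py; the resource name "db" and its contents are illustrative, not part of this commit:

    # hypothetical conftest.py -- the "db" resource is illustrative only
    def pytest_funcarg__db(request):
        # cache key is (argname, scope item, extrakey); the teardown callback
        # runs when the chosen scope (here: module) is torn down
        return request.cached_setup(
            setup=lambda: {"connected": True},      # stand-in for a costly resource
            teardown=lambda db: db.clear(),
            scope="module")

    def test_uses_db(db):
        assert db["connected"]
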
Added: pypy/trunk/py/impl/test/looponfail/__init__.py
==============================================================================
--- (empty file)
+++ pypy/trunk/py/impl/test/looponfail/__init__.py	Wed Nov 11 18:54:49 2009
@@ -0,0 +1 @@
+#

Added: pypy/trunk/py/impl/test/looponfail/remote.py
==============================================================================
--- (empty file)
+++ pypy/trunk/py/impl/test/looponfail/remote.py	Wed Nov 11 18:54:49 2009
@@ -0,0 +1,162 @@
+"""
+    LooponfailingSession and Helpers. 
+
+    NOTE: one really has to avoid loading and depending on
+    application modules within the controlling process
+    (the one that repeatedly starts test processes);
+    otherwise changes to source code can crash
+    the controlling process, which should never happen.
+"""
+import py
+import sys
+import execnet
+from py.impl.test.session import Session
+from py.impl.test.dist.mypickle import PickleChannel
+from py.impl.test.looponfail import util
+
+class LooponfailingSession(Session):
+    def __init__(self, config):
+        super(LooponfailingSession, self).__init__(config=config)
+        self.rootdirs = [self.config.topdir] # xxx dist_rsync_roots? 
+        self.statrecorder = util.StatRecorder(self.rootdirs) 
+        self.remotecontrol = RemoteControl(self.config)
+        self.out = py.io.TerminalWriter()
+
+    def main(self, initialitems=None):
+        try:
+            self.loopstate = loopstate = LoopState(initialitems)
+            self.remotecontrol.setup()
+            while 1:
+                self.loop_once(loopstate)
+                if not loopstate.colitems and loopstate.wasfailing:
+                    continue # the last failures passed, let's rerun all
+                self.statrecorder.waitonchange(checkinterval=2.0) 
+        except KeyboardInterrupt:
+            print
+
+    def loop_once(self, loopstate):
+        colitems = loopstate.colitems
+        loopstate.wasfailing = colitems and len(colitems)
+        loopstate.colitems = self.remotecontrol.runsession(colitems or ())
+        self.remotecontrol.setup()
+
+class LoopState:
+    def __init__(self, colitems=None):
+        self.colitems = colitems
+
+class RemoteControl(object):
+    def __init__(self, config):
+        self.config = config
+
+    def trace(self, *args):
+        if self.config.option.debug:
+            msg = " ".join([str(x) for x in args])
+            py.builtin.print_("RemoteControl:", msg)
+
+    def initgateway(self):
+        return execnet.PopenGateway()
+
+    def setup(self, out=None):
+        if out is None:
+            out = py.io.TerminalWriter()
+        if hasattr(self, 'gateway'):
+            raise ValueError("already have gateway %r" % self.gateway)
+        self.trace("setting up slave session")
+        old = self.config.topdir.chdir()
+        try:
+            self.gateway = self.initgateway()
+        finally:
+            old.chdir()
+        channel = self.gateway.remote_exec(source="""
+            from py.impl.test.dist.mypickle import PickleChannel
+            from py.impl.test.looponfail.remote import slave_runsession
+            outchannel = channel.gateway.newchannel()
+            channel.send(outchannel)
+            channel = PickleChannel(channel)
+            config, fullwidth, hasmarkup = channel.receive()
+            import sys
+            sys.stdout = sys.stderr = outchannel.makefile('w')
+            slave_runsession(channel, config, fullwidth, hasmarkup) 
+        """)
+        remote_outchannel = channel.receive()
+        def write(s):
+            out._file.write(s)
+            out._file.flush()
+        remote_outchannel.setcallback(write)
+        channel = self.channel = PickleChannel(channel)
+        channel.send((self.config, out.fullwidth, out.hasmarkup))
+        self.trace("set up of slave session complete")
+
+    def ensure_teardown(self):
+        if hasattr(self, 'channel'):
+            if not self.channel.isclosed():
+                self.trace("closing", self.channel)
+                self.channel.close()
+            del self.channel
+        if hasattr(self, 'gateway'):
+            self.trace("exiting", self.gateway)
+            self.gateway.exit()
+            del self.gateway
+
+    def runsession(self, colitems=()):
+        try:
+            self.trace("sending", colitems)
+            trails = colitems
+            self.channel.send(trails)
+            try:
+                return self.channel.receive()
+            except self.channel.RemoteError:
+                e = sys.exc_info()[1]
+                self.trace("ERROR", e)
+                raise
+        finally:
+            self.ensure_teardown()
+
+def slave_runsession(channel, config, fullwidth, hasmarkup):
+    """ we run this on the other side. """
+    if config.option.debug:
+        def DEBUG(*args): 
+            print(" ".join(map(str, args)))
+    else:
+        def DEBUG(*args): pass
+
+    DEBUG("SLAVE: received configuration, using topdir:", config.topdir)
+    #config.option.session = None
+    config.option.looponfail = False 
+    config.option.usepdb = False 
+    trails = channel.receive()
+    config.pluginmanager.do_configure(config)
+    DEBUG("SLAVE: initsession()")
+    session = config.initsession()
+    # XXX configure the reporter object's terminal writer more directly
+    # XXX and write a test for this remote-terminal setting logic 
+    config.pytest_terminal_hasmarkup = hasmarkup
+    config.pytest_terminal_fullwidth = fullwidth
+    if trails:
+        colitems = []
+        for trail in trails:
+            try:
+                colitem = py.test.collect.Collector._fromtrail(trail, config)
+            except AssertionError:  
+                #XXX send info for "test disappeared" or so
+                continue 
+            colitems.append(colitem)
+    else:
+        colitems = None
+    session.shouldclose = channel.isclosed 
+   
+    class Failures(list):
+        def pytest_runtest_logreport(self, report):
+            if report.failed:
+                self.append(report)
+        pytest_collectreport = pytest_runtest_logreport
+        
+    failreports = Failures()
+    session.pluginmanager.register(failreports)
+
+    DEBUG("SLAVE: starting session.main()")
+    session.main(colitems)
+    session.config.hook.pytest_looponfailinfo(
+        failreports=list(failreports), 
+        rootdirs=[config.topdir])
+    channel.send([rep.getnode()._totrail() for rep in failreports])

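The controlling/slave split above is built on execnet gateways and channels. As a hedged aside, assuming only that the execnet package is importable (remote.py itself imports it), the underlying pattern looks roughly like this:

    # minimal sketch of the execnet pattern used by RemoteControl.setup()
    import execnet

    gw = execnet.PopenGateway()          # spawn a fresh slave interpreter
    channel = gw.remote_exec("""
        import os
        channel.send("slave pid: %s" % os.getpid())
    """)
    print(channel.receive())             # text produced in the slave process
    gw.exit()
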
Added: pypy/trunk/py/impl/test/looponfail/util.py
==============================================================================
--- (empty file)
+++ pypy/trunk/py/impl/test/looponfail/util.py	Wed Nov 11 18:54:49 2009
@@ -0,0 +1,53 @@
+import py
+
+class StatRecorder:
+    def __init__(self, rootdirlist):
+        self.rootdirlist = rootdirlist
+        self.statcache = {}
+        self.check() # snapshot state
+
+    def fil(self, p): 
+        return p.ext in ('.py', '.txt', '.c', '.h')
+    def rec(self, p):
+        return p.check(dotfile=0)
+
+    def waitonchange(self, checkinterval=1.0):
+        while 1:
+            changed = self.check()
+            if changed:
+                return
+            py.std.time.sleep(checkinterval)
+
+    def check(self, removepycfiles=True):
+        changed = False
+        statcache = self.statcache
+        newstat = {}
+        for rootdir in self.rootdirlist:
+            for path in rootdir.visit(self.fil, self.rec):
+                oldstat = statcache.get(path, None)
+                if oldstat is not None:
+                    del statcache[path]
+                try:
+                    newstat[path] = curstat = path.stat()
+                except py.error.ENOENT:
+                    if oldstat:
+                        del statcache[path]
+                        changed = True
+                else:
+                    if oldstat:
+                       if oldstat.mtime != curstat.mtime or \
+                          oldstat.size != curstat.size:
+                            changed = True
+                            py.builtin.print_("# MODIFIED", path)
+                            if removepycfiles and path.ext == ".py":
+                                pycfile = path + "c"
+                                if pycfile.check():
+                                    pycfile.remove()
+                                
+                    else:
+                        changed = True
+        if statcache:
+            changed = True
+        self.statcache = newstat
+        return changed
+

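StatRecorder is what LooponfailingSession polls between test runs. A small usage sketch, assuming the import path introduced by this commit; the watched directory is illustrative:

    import py
    from py.impl.test.looponfail.util import StatRecorder

    rec = StatRecorder([py.path.local(".")])   # snapshot of .py/.txt/.c/.h files
    rec.waitonchange(checkinterval=1.0)        # blocks until a tracked file changes
    print("change detected, time to rerun the failing tests")
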
Added: pypy/trunk/py/impl/test/outcome.py
==============================================================================
--- (empty file)
+++ pypy/trunk/py/impl/test/outcome.py	Wed Nov 11 18:54:49 2009
@@ -0,0 +1,122 @@
+"""
+    Test OutcomeExceptions and helpers for creating them. 
+    py.test.skip|fail|raises helper implementations 
+
+"""
+
+import py
+import sys
+
+class OutcomeException(Exception): 
+    """ OutcomeException and its subclass instances indicate and 
+        contain info about test and collection outcomes. 
+    """ 
+    def __init__(self, msg=None, excinfo=None): 
+        self.msg = msg 
+        self.excinfo = excinfo
+
+    def __repr__(self):
+        if self.msg: 
+            return repr(self.msg) 
+        return "<%s instance>" %(self.__class__.__name__,)
+    __str__ = __repr__
+
+class Passed(OutcomeException): 
+    pass
+
+class Skipped(OutcomeException): 
+    # XXX slightly hackish: on 3k we fake to live in the builtins
+    # in order to have Skipped exception printing shorter/nicer
+    __module__ = 'builtins'
+
+class Failed(OutcomeException): 
+    pass
+
+class ExceptionFailure(Failed): 
+    def __init__(self, expr, expected, msg=None, excinfo=None): 
+        Failed.__init__(self, msg=msg, excinfo=excinfo) 
+        self.expr = expr 
+        self.expected = expected
+
+class Exit(KeyboardInterrupt):
+    """ for immediate program exits without tracebacks and reporter/summary. """
+    def __init__(self, msg="unknown reason"):
+        self.msg = msg 
+        KeyboardInterrupt.__init__(self, msg)
+
+# exposed helper methods 
+
+def exit(msg): 
+    """ exit testing process immediately. """ 
+    __tracebackhide__ = True
+    raise Exit(msg)
+
+def skip(msg=""):
+    """ skip with the given message. """
+    __tracebackhide__ = True
+    raise Skipped(msg=msg) 
+
+def fail(msg="unknown failure"):
+    """ fail with the given message. """
+    __tracebackhide__ = True
+    raise Failed(msg=msg) 
+
+def raises(ExpectedException, *args, **kwargs):
+    """ raise AssertionError if the target code does not raise the expected
+        exception.
+    """
+    __tracebackhide__ = True 
+    assert args
+    if isinstance(args[0], str):
+        code, = args
+        assert isinstance(code, str)
+        frame = sys._getframe(1)
+        loc = frame.f_locals.copy()
+        loc.update(kwargs)
+        #print "raises frame scope: %r" % frame.f_locals
+        try:
+            code = py.code.Source(code).compile()
+            py.builtin.exec_(code, frame.f_globals, loc)
+            # XXX didn't f_globals == f_locals mean something special?
+            #     this is destroyed here ...
+        except ExpectedException:
+            return py.code.ExceptionInfo()
+    else:
+        func = args[0]
+        try:
+            func(*args[1:], **kwargs)
+        except ExpectedException:
+            return py.code.ExceptionInfo()
+        k = ", ".join(["%s=%r" % x for x in kwargs.items()])
+        if k:
+            k = ', ' + k
+        expr = '%s(%r%s)' %(func.__name__, args, k)
+    raise ExceptionFailure(msg="DID NOT RAISE", 
+                           expr=args, expected=ExpectedException) 
+
+def importorskip(modname, minversion=None):
+    """ return imported module or perform a dynamic skip() """
+    compile(modname, '', 'eval') # to catch syntax errors
+    try:
+        mod = __import__(modname, None, None, ['__doc__'])
+    except ImportError:
+        py.test.skip("could not import %r" %(modname,))
+    if minversion is None:
+        return mod
+    verattr = getattr(mod, '__version__', None)
+    if isinstance(minversion, str):
+        minver = minversion.split(".")
+    else:
+        minver = list(minversion)
+    if verattr is None or verattr.split(".") < minver:
+        py.test.skip("module %r has __version__ %r, required is: %r" %(
+                     modname, verattr, minversion))
+    return mod
+
+
+# exitcodes for the command line
+EXIT_OK = 0
+EXIT_TESTSFAILED = 1
+EXIT_INTERRUPTED = 2
+EXIT_INTERNALERROR = 3
+EXIT_NOHOSTS = 4

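Assuming these helpers are exposed under the py.test namespace, as the module docstring states, typical use might look like the following sketch; the module name passed to importorskip is only an example:

    import py

    def test_raises_callable_form():
        excinfo = py.test.raises(ValueError, int, "not-a-number")
        assert excinfo.type is ValueError

    def test_raises_string_form():
        py.test.raises(ZeroDivisionError, "1 / 0")

    def test_optional_dependency():
        json = py.test.importorskip("json", minversion="1.0")
        assert json is not None
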
Added: pypy/trunk/py/impl/test/parseopt.py
==============================================================================
--- (empty file)
+++ pypy/trunk/py/impl/test/parseopt.py	Wed Nov 11 18:54:49 2009
@@ -0,0 +1,103 @@
+"""
+thin wrapper around Python's optparse.py,
+adding some extra checks and ways to systematically
+have environment variables provide default values
+for options.  Basic usage:
+
+   >>> parser = Parser()
+   >>> parser.addoption("--hello", action="store_true", dest="hello")
+   >>> option, args = parser.parse(['--hello'])
+   >>> option.hello 
+   True
+   >>> args
+   []
+    
+"""
+import py
+import optparse 
+
+class Parser:
+    """ Parser for command line arguments. """ 
+
+    def __init__(self, usage=None, processopt=None):
+        self._anonymous = OptionGroup("custom options", parser=self)
+        self._groups = []
+        self._processopt = processopt
+        self._usage = usage
+        self._notes = []  # collected via addnote()
+        self.epilog = ""
+
+    def processoption(self, option):
+        if self._processopt:
+            if option.dest:
+                self._processopt(option)
+
+    def addnote(self, note):
+        self._notes.append(note)
+
+    def getgroup(self, name, description="", after=None):
+        for group in self._groups:
+            if group.name == name:
+                return group
+        group = OptionGroup(name, description, parser=self)
+        i = 0
+        for i, grp in enumerate(self._groups):
+            if grp.name == after:
+                break
+        self._groups.insert(i+1, group)
+        return group 
+
+    addgroup = getgroup 
+    def addgroup(self, name, description=""):
+        py.log._apiwarn("1.1", "use getgroup() which gets-or-creates")
+        return self.getgroup(name, description)
+
+    def addoption(self, *opts, **attrs):
+        """ add an optparse-style option. """
+        self._anonymous.addoption(*opts, **attrs)
+
+    def parse(self, args):
+        optparser = optparse.OptionParser(usage=self._usage)
+        # make sure the anonymous group is at the end
+        optparser.epilog = self.epilog
+        groups = self._groups + [self._anonymous]
+        for group in groups:
+            if group.options:
+                desc = group.description or group.name 
+                optgroup = optparse.OptionGroup(optparser, desc)
+                optgroup.add_options(group.options)
+                optparser.add_option_group(optgroup)
+        return optparser.parse_args([str(x) for x in args])
+
+    def parse_setoption(self, args, option):
+        parsedoption, args = self.parse(args)
+        for name, value in parsedoption.__dict__.items():
+            setattr(option, name, value)
+        return args
+
+
+class OptionGroup:
+    def __init__(self, name, description="", parser=None):
+        self.name = name
+        self.description = description
+        self.options = []
+        self.parser = parser 
+
+    def addoption(self, *optnames, **attrs):
+        """ add an option to this group. """
+        option = optparse.Option(*optnames, **attrs)
+        self._addoption_instance(option, shortupper=False)
+
+    def _addoption(self, *optnames, **attrs):
+        option = optparse.Option(*optnames, **attrs)
+        self._addoption_instance(option, shortupper=True)
+
+    def _addoption_instance(self, option, shortupper=False):
+        if not shortupper:
+            for opt in option._short_opts:
+                if opt[0] == '-' and opt[1].islower(): 
+                    raise ValueError("lowercase shortoptions reserved")
+        if self.parser:
+            self.parser.processoption(option)
+        self.options.append(option)
+
+        

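Building on the doctest in the module docstring, a hedged sketch showing how an option group might be added and parsed; the group and option names are illustrative:

    from py.impl.test.parseopt import Parser

    parser = Parser(usage="usage: %prog [options]")
    group = parser.getgroup("reporting", "reporting options")
    group.addoption("--verbose", action="store_true", dest="verbose", default=False)
    option, args = parser.parse(["--verbose"])
    assert option.verbose and args == []
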
Added: pypy/trunk/py/impl/test/pluginmanager.py
==============================================================================
--- (empty file)
+++ pypy/trunk/py/impl/test/pluginmanager.py	Wed Nov 11 18:54:49 2009
@@ -0,0 +1,300 @@
+"""
+managing loading and interacting with pytest plugins. 
+"""
+import py
+from py.plugin import hookspec
+from py.impl.test.outcome import Skipped
+
+def check_old_use(mod, modname):
+    clsname = modname[len('pytest_'):].capitalize() + "Plugin" 
+    assert not hasattr(mod, clsname), (mod, clsname)
+
+class PluginManager(object):
+    class Error(Exception):
+        """signals a plugin specific error."""
+    def __init__(self, comregistry=None):
+        if comregistry is None: 
+            comregistry = py._com.Registry()
+        self.comregistry = comregistry 
+        self._name2plugin = {}
+
+        self.hook = py._com.HookRelay(
+            hookspecs=hookspec, 
+            registry=self.comregistry) 
+
+    def _getpluginname(self, plugin, name):
+        if name is None:
+            if hasattr(plugin, '__name__'):
+                name = plugin.__name__.split(".")[-1]
+            else:
+                name = id(plugin) 
+        return name 
+
+    def register(self, plugin, name=None):
+        assert not self.isregistered(plugin)
+        name = self._getpluginname(plugin, name)
+        if name in self._name2plugin:
+            return False
+        self._name2plugin[name] = plugin
+        self.hook.pytest_plugin_registered(plugin=plugin)
+        self._checkplugin(plugin)
+        self.comregistry.register(plugin)
+        return True
+
+    def unregister(self, plugin):
+        self.hook.pytest_plugin_unregistered(plugin=plugin)
+        self.comregistry.unregister(plugin)
+        for name, value in list(self._name2plugin.items()):
+            if value == plugin:
+                del self._name2plugin[name]
+
+    def isregistered(self, plugin, name=None):
+        if self._getpluginname(plugin, name) in self._name2plugin:
+            return True
+        for val in self._name2plugin.values():
+            if plugin == val:
+                return True
+
+    def getplugins(self):
+        return list(self.comregistry)
+
+    def getplugin(self, name):
+        try:
+            return self._name2plugin[name]
+        except KeyError:
+            impname = canonical_importname(name)
+            return self._name2plugin[impname]
+
+    # API for bootstrapping 
+    #
+    def _envlist(self, varname):
+        val = py.std.os.environ.get(varname, None)
+        if val is not None:
+            return val.split(',')
+        return ()
+    
+    def consider_env(self):
+        for spec in self._envlist("PYTEST_PLUGINS"):
+            self.import_plugin(spec)
+
+    def consider_preparse(self, args):
+        for opt1,opt2 in zip(args, args[1:]):
+            if opt1 == "-p": 
+                self.import_plugin(opt2)
+
+    def consider_conftest(self, conftestmodule):
+        cls = getattr(conftestmodule, 'ConftestPlugin', None)
+        if cls is not None:
+            raise ValueError("%r: 'ConftestPlugins' only existed till 1.0.0b1, "
+                "were removed in 1.0.0b2" % (cls,))
+        if self.register(conftestmodule, name=conftestmodule.__file__):
+            self.consider_module(conftestmodule)
+
+    def consider_module(self, mod):
+        attr = getattr(mod, "pytest_plugins", ())
+        if attr:
+            if not isinstance(attr, (list, tuple)):
+                attr = (attr,)
+            for spec in attr:
+                self.import_plugin(spec) 
+
+    def import_plugin(self, spec):
+        assert isinstance(spec, str)
+        modname = canonical_importname(spec)
+        if modname in self._name2plugin:
+            return
+        try:
+            mod = importplugin(modname)
+        except KeyboardInterrupt:
+            raise
+        except Skipped:
+            e = py.std.sys.exc_info()[1]
+            self._warn("could not import plugin %r, reason: %r" %(
+                (modname, e.msg)))
+        else:
+            check_old_use(mod, modname) 
+            self.register(mod)
+            self.consider_module(mod)
+
+    def _warn(self, msg):
+        print ("===WARNING=== %s" % (msg,))
+
+    def _checkplugin(self, plugin):
+        # =====================================================
+        # check plugin hooks 
+        # =====================================================
+        methods = collectattr(plugin)
+        hooks = collectattr(hookspec)
+        stringio = py.io.TextIO()
+        def Print(*args):
+            if args:
+                stringio.write(" ".join(map(str, args)))
+            stringio.write("\n")
+
+        fail = False
+        while methods:
+            name, method = methods.popitem()
+            #print "checking", name
+            if isgenerichook(name):
+                continue
+            if name not in hooks: 
+                Print("found unknown hook:", name)
+                fail = True
+            else:
+                method_args = getargs(method)
+                if '__multicall__' in method_args:
+                    method_args.remove('__multicall__')
+                hook = hooks[name]
+                hookargs = getargs(hook)
+                for arg in method_args:
+                    if arg not in hookargs:
+                        Print("argument %r not available"  %(arg, ))
+                        Print("actual definition: %s" %(formatdef(method)))
+                        Print("available hook arguments: %s" % 
+                                ", ".join(hookargs))
+                        fail = True
+                        break 
+                #if not fail:
+                #    print "matching hook:", formatdef(method)
+            if fail:
+                name = getattr(plugin, '__name__', plugin)
+                raise self.Error("%s:\n%s" %(name, stringio.getvalue()))
+    # 
+    #
+    # API for interacting with registered and instantiated plugin objects 
+    #
+    # 
+    def listattr(self, attrname, plugins=None, extra=()):
+        return self.comregistry.listattr(attrname, plugins=plugins, extra=extra)
+
+    def notify_exception(self, excinfo=None):
+        if excinfo is None:
+            excinfo = py.code.ExceptionInfo()
+        excrepr = excinfo.getrepr(funcargs=True, showlocals=True)
+        return self.hook.pytest_internalerror(excrepr=excrepr)
+
+    def do_addoption(self, parser):
+        mname = "pytest_addoption"
+        methods = self.comregistry.listattr(mname, reverse=True)
+        mc = py._com.MultiCall(methods, {'parser': parser})
+        mc.execute()
+
+    def pytest_plugin_registered(self, plugin):
+        if hasattr(self, '_config'):
+            self.call_plugin(plugin, "pytest_addoption", 
+                {'parser': self._config._parser})
+            self.call_plugin(plugin, "pytest_configure", 
+                {'config': self._config})
+            #dic = self.call_plugin(plugin, "pytest_namespace")
+            #self._updateext(dic)
+
+    def call_plugin(self, plugin, methname, kwargs):
+        return py._com.MultiCall(
+                methods=self.listattr(methname, plugins=[plugin]), 
+                kwargs=kwargs, firstresult=True).execute()
+
+    def _updateext(self, dic):
+        if dic:
+            for name, value in dic.items():
+                setattr(py.test, name, value)
+
+    def do_configure(self, config):
+        assert not hasattr(self, '_config')
+        config.pluginmanager.register(self)
+        self._config = config
+        config.hook.pytest_configure(config=self._config)
+        for dic in config.hook.pytest_namespace() or []:
+            self._updateext(dic)
+
+    def do_unconfigure(self, config):
+        config = self._config 
+        del self._config 
+        config.hook.pytest_unconfigure(config=config)
+        config.pluginmanager.unregister(self)
+
+# 
+#  XXX old code to automatically load classes
+#
+def canonical_importname(name):
+    name = name.lower()
+    modprefix = "pytest_"
+    if not name.startswith(modprefix):
+        name = modprefix + name 
+    return name 
+
+def importplugin(importspec):
+    try:
+        return __import__(importspec) 
+    except ImportError:
+        e = py.std.sys.exc_info()[1]
+        if str(e).find(importspec) == -1:
+            raise
+        try:
+            return __import__("py.plugin.%s" %(importspec), 
+                None, None, '__doc__')
+        except ImportError:
+            e = py.std.sys.exc_info()[1]
+            if str(e).find(importspec) == -1:
+                raise
+            #print "syspath:", py.std.sys.path
+            #print "curdir:", py.std.os.getcwd()
+            return __import__(importspec)  # show the original exception
+
+
+
+def isgenerichook(name):
+    return name == "pytest_plugins" or \
+           name.startswith("pytest_funcarg__")
+
+def getargs(func):
+    args = py.std.inspect.getargs(py.code.getrawcode(func))[0]
+    startindex = py.std.inspect.ismethod(func) and 1 or 0
+    return args[startindex:]
+
+def collectattr(obj, prefixes=("pytest_",)):
+    methods = {}
+    for apiname in dir(obj):
+        for prefix in prefixes:
+            if apiname.startswith(prefix):
+                methods[apiname] = getattr(obj, apiname) 
+    return methods 
+
+def formatdef(func):
+    return "%s%s" %(
+        func.__name__, 
+        py.std.inspect.formatargspec(*py.std.inspect.getargspec(func))
+    )
+
+if __name__ == "__main__":
+    import py.plugin
+    basedir = py._dir.join('_plugin')
+    name2text = {}
+    for p in basedir.listdir("pytest_*"):
+        if p.ext == ".py" or (
+           p.check(dir=1) and p.join("__init__.py").check()):
+            impname = p.purebasename 
+            if impname.find("__") != -1:
+                continue
+            try:
+                plugin = importplugin(impname)
+            except (ImportError, py.impl.test.outcome.Skipped):
+                name2text[impname] = "IMPORT ERROR"
+            else:
+                doc = plugin.__doc__ or ""
+                doc = doc.strip()
+                name2text[impname] = doc
+           
+    for name in sorted(name2text.keys()):
+        text = name2text[name]
+        if name[0] == "_":
+            continue
+        print ("%-20s %s" % (name, text.split("\n")[0]))
+
+        #text = py.std.textwrap.wrap(name2text[name], 
+        #    width = 80,
+        #    initial_indent="%s: " % name, 
+        #    replace_whitespace = False)
+        #for line in text:
+        #    print line
+     
+

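A hedged sketch of registering a minimal plugin object; the class name is hypothetical, and the hook method must match a name and signature from py/plugin/hookspec.py (shown later in this commit) or _checkplugin() raises PluginManager.Error:

    from py.impl.test.pluginmanager import PluginManager

    class ExamplePlugin:
        def pytest_configure(self, config):
            print("configure seen for %r" % (config,))

    pm = PluginManager()
    pm.register(ExamplePlugin())     # validated against hookspec by _checkplugin()
    print(pm.getplugins())
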
Added: pypy/trunk/py/impl/test/pycollect.py
==============================================================================
--- (empty file)
+++ pypy/trunk/py/impl/test/pycollect.py	Wed Nov 11 18:54:49 2009
@@ -0,0 +1,371 @@
+"""
+Python related collection nodes.  Here is an example of 
+a tree of collectors and test items that this modules provides:: 
+
+        Module                  # File
+            Class 
+                Instance   
+                    Function  
+                    Generator 
+                        ... 
+            Function 
+            Generator 
+                Function 
+
+        DoctestFile              # File
+            DoctestFileContent   # acts as Item 
+
+""" 
+import py
+import inspect
+from py.impl.test.collect import configproperty, warnoldcollect
+from py.impl.test import funcargs
+
+class PyobjMixin(object):
+    def obj(): 
+        def fget(self):
+            try: 
+                return self._obj   
+            except AttributeError: 
+                self._obj = obj = self._getobj() 
+                return obj 
+        def fset(self, value): 
+            self._obj = value 
+        return property(fget, fset, None, "underlying python object") 
+    obj = obj()
+
+    def _getobj(self):
+        return getattr(self.parent.obj, self.name)
+
+    def getmodpath(self, stopatmodule=True, includemodule=False):
+        """ return python path relative to the containing module. """
+        chain = self.listchain()
+        chain.reverse()
+        parts = []
+        for node in chain:
+            if isinstance(node, Instance):
+                continue
+            name = node.name 
+            if isinstance(node, Module):
+                assert name.endswith(".py")
+                name = name[:-3]
+                if stopatmodule:
+                    if includemodule:
+                        parts.append(name)
+                    break
+            parts.append(name)
+        parts.reverse()
+        s = ".".join(parts)
+        return s.replace(".[", "[")
+
+    def _getfslineno(self):
+        try:
+            return self._fslineno
+        except AttributeError:
+            pass
+        obj = self.obj
+        # xxx let decorators etc specify a sane ordering
+        if hasattr(obj, 'place_as'):
+            obj = obj.place_as
+
+        self._fslineno = py.code.getfslineno(obj)
+        return self._fslineno
+
+    def reportinfo(self):
+        fspath, lineno = self._getfslineno()
+        modpath = self.getmodpath()
+        return fspath, lineno, modpath 
+
+class PyCollectorMixin(PyobjMixin, py.test.collect.Collector): 
+    Class = configproperty('Class')
+    Instance = configproperty('Instance')
+    Function = configproperty('Function')
+    Generator = configproperty('Generator')
+    
+    def funcnamefilter(self, name): 
+        return name.startswith('test') 
+    def classnamefilter(self, name): 
+        return name.startswith('Test')
+
+    def collect(self):
+        l = self._deprecated_collect()
+        if l is not None:
+            return l
+        name2items = self._buildname2items()
+        colitems = list(name2items.values())
+        colitems.sort(key=lambda item: item.reportinfo()[:2])
+        return colitems
+
+    def _buildname2items(self): 
+        # NB. we avoid random getattrs and peek in the __dict__ instead
+        d = {}
+        dicts = [getattr(self.obj, '__dict__', {})]
+        for basecls in inspect.getmro(self.obj.__class__):
+            dicts.append(basecls.__dict__)
+        seen = {}
+        for dic in dicts:
+            for name, obj in dic.items():
+                if name in seen:
+                    continue
+                seen[name] = True
+                if name[0] != "_":
+                    res = self.makeitem(name, obj)
+                    if res is not None:
+                        d[name] = res 
+        return d
+
+    def _deprecated_join(self, name):
+        if self.__class__.join != py.test.collect.Collector.join:
+            warnoldcollect()
+            return self.join(name)
+
+    def makeitem(self, name, obj):
+        return self.config.hook.pytest_pycollect_makeitem(
+            collector=self, name=name, obj=obj)
+
+    def _istestclasscandidate(self, name, obj):
+        if self.classnamefilter(name) and \
+           inspect.isclass(obj):
+            if hasinit(obj):
+                # XXX WARN 
+                return False
+            return True
+
+    def _genfunctions(self, name, funcobj):
+        module = self.getparent(Module).obj
+        clscol = self.getparent(Class)
+        cls = clscol and clscol.obj or None
+        metafunc = funcargs.Metafunc(funcobj, config=self.config, 
+            cls=cls, module=module)
+        gentesthook = self.config.hook._makecall(
+            "pytest_generate_tests", extralookup=module)
+        gentesthook(metafunc=metafunc)
+        if not metafunc._calls:
+            return self.Function(name, parent=self)
+        return funcargs.FunctionCollector(name=name, 
+            parent=self, calls=metafunc._calls)
+
+        
+class Module(py.test.collect.File, PyCollectorMixin):
+    def _getobj(self):
+        return self._memoizedcall('_obj', self._importtestmodule)
+
+    def _importtestmodule(self):
+        # we assume we are only called once per module 
+        mod = self.fspath.pyimport()
+        #print "imported test module", mod
+        self.config.pluginmanager.consider_module(mod)
+        return mod
+
+    def setup(self): 
+        if getattr(self.obj, 'disabled', 0):
+            py.test.skip("%r is disabled" %(self.obj,))
+        if hasattr(self.obj, 'setup_module'): 
+            #XXX: nose compat hack, move to nose plugin
+            # if it takes a positional arg, it's probably a py.test style one
+            # so we pass the current module object
+            if inspect.getargspec(self.obj.setup_module)[0]:
+                self.obj.setup_module(self.obj)
+            else:
+                self.obj.setup_module()
+
+    def teardown(self): 
+        if hasattr(self.obj, 'teardown_module'): 
+            #XXX: nose compat hack, move to nose plugin
+            # if it takes a positional arg, it's probably a py.test style one
+            # so we pass the current module object
+            if inspect.getargspec(self.obj.teardown_module)[0]:
+                self.obj.teardown_module(self.obj)
+            else:
+                self.obj.teardown_module()
+
+class Class(PyCollectorMixin, py.test.collect.Collector): 
+
+    def collect(self):
+        l = self._deprecated_collect()
+        if l is not None:
+            return l
+        return [self.Instance(name="()", parent=self)]
+
+    def setup(self): 
+        if getattr(self.obj, 'disabled', 0):
+            py.test.skip("%r is disabled" %(self.obj,))
+        setup_class = getattr(self.obj, 'setup_class', None)
+        if setup_class is not None: 
+            setup_class = getattr(setup_class, 'im_func', setup_class) 
+            setup_class(self.obj) 
+
+    def teardown(self): 
+        teardown_class = getattr(self.obj, 'teardown_class', None) 
+        if teardown_class is not None: 
+            teardown_class = getattr(teardown_class, 'im_func', teardown_class) 
+            teardown_class(self.obj) 
+
+class Instance(PyCollectorMixin, py.test.collect.Collector): 
+    def _getobj(self): 
+        return self.parent.obj()  
+    def Function(self): 
+        return getattr(self.obj, 'Function', 
+                       PyCollectorMixin.Function.__get__(self)) # XXX for python 2.2
+    def _keywords(self):
+        return []
+    Function = property(Function)
+
+    #def __repr__(self):
+    #    return "<%s of '%s'>" %(self.__class__.__name__, 
+    #                         self.parent.obj.__name__)
+
+    def newinstance(self):  
+        self.obj = self._getobj()
+        return self.obj
+
+class FunctionMixin(PyobjMixin):
+    """ mixin for the code common to Function and Generator.
+    """
+
+    def setup(self): 
+        """ perform setup for this test function. """
+        if inspect.ismethod(self.obj):
+            name = 'setup_method' 
+        else: 
+            name = 'setup_function' 
+        if isinstance(self.parent, Instance):
+            obj = self.parent.newinstance()
+            self.obj = self._getobj()
+        else:
+            obj = self.parent.obj 
+        setup_func_or_method = getattr(obj, name, None)
+        if setup_func_or_method is not None: 
+            setup_func_or_method(self.obj) 
+
+    def teardown(self): 
+        """ perform teardown for this test function. """
+        if inspect.ismethod(self.obj):
+            name = 'teardown_method' 
+        else: 
+            name = 'teardown_function' 
+        obj = self.parent.obj 
+        teardown_func_or_meth = getattr(obj, name, None)
+        if teardown_func_or_meth is not None: 
+            teardown_func_or_meth(self.obj) 
+
+    def _prunetraceback(self, traceback):
+        if hasattr(self, '_obj') and not self.config.option.fulltrace: 
+            code = py.code.Code(self.obj) 
+            path, firstlineno = code.path, code.firstlineno 
+            ntraceback = traceback.cut(path=path, firstlineno=firstlineno)
+            if ntraceback == traceback:
+                ntraceback = ntraceback.cut(path=path)
+                if ntraceback == traceback:
+                    ntraceback = ntraceback.cut(excludepath=py._dir)
+            traceback = ntraceback.filter()
+        return traceback 
+
+    def repr_failure(self, excinfo, outerr=None):
+        assert outerr is None, "XXX outerr usage is deprecated"
+        return self._repr_failure_py(excinfo)
+
+    shortfailurerepr = "F"
+
+class Generator(FunctionMixin, PyCollectorMixin, py.test.collect.Collector): 
+    def collect(self):
+        # test generators are seen as collectors but they also 
+        # invoke setup/teardown on popular request 
+        # (induced by the common "test_*" naming shared with normal tests)
+        self.config._setupstate.prepare(self) 
+        l = []
+        seen = {}
+        for i, x in enumerate(self.obj()): 
+            name, call, args = self.getcallargs(x)
+            if not py.builtin.callable(call): 
+                raise TypeError("%r yielded non callable test %r" %(self.obj, call,))
+            if name is None:
+                name = "[%d]" % i
+            else:
+                name = "['%s']" % name
+            if name in seen:
+                raise ValueError("%r generated tests with non-unique name %r" %(self, name))
+            seen[name] = True
+            l.append(self.Function(name, self, args=args, callobj=call))
+        return l
+        
+    def getcallargs(self, obj):
+        if not isinstance(obj, (tuple, list)):
+            obj = (obj,)
+        # explicit naming
+        if isinstance(obj[0], py.builtin._basestring):
+            name = obj[0]
+            obj = obj[1:]
+        else:
+            name = None
+        call, args = obj[0], obj[1:]
+        return name, call, args 
+    
+
+#
+#  Test Items 
+#
+_dummy = object()
+class Function(FunctionMixin, py.test.collect.Item): 
+    """ a Function Item is responsible for setting up  
+        and executing a Python callable test object.
+    """
+    _genid = None
+    def __init__(self, name, parent=None, args=None, 
+                 callspec=None, callobj=_dummy):
+        super(Function, self).__init__(name, parent)
+        self._args = args 
+        if self._isyieldedfunction():
+            assert not callspec, "yielded functions (deprecated) cannot have funcargs" 
+        else:
+            if callspec is not None:
+                self.funcargs = callspec.funcargs or {}
+                self._genid = callspec.id 
+                if hasattr(callspec, "param"):
+                    self._requestparam = callspec.param
+            else:
+                self.funcargs = {}
+        if callobj is not _dummy: 
+            self._obj = callobj 
+
+    def _isyieldedfunction(self):
+        return self._args is not None
+
+    def readkeywords(self):
+        d = super(Function, self).readkeywords()
+        d.update(py.builtin._getfuncdict(self.obj))
+        return d
+
+    def runtest(self):
+        """ execute the underlying test function. """
+        self.config.hook.pytest_pyfunc_call(pyfuncitem=self)
+
+    def setup(self):
+        super(Function, self).setup()
+        if hasattr(self, 'funcargs'): 
+            funcargs.fillfuncargs(self)
+
+    def __eq__(self, other):
+        try:
+            return (self.name == other.name and 
+                    self._args == other._args and
+                    self.parent == other.parent and
+                    self.obj == other.obj and 
+                    getattr(self, '_genid', None) == 
+                    getattr(other, '_genid', None) 
+            )
+        except AttributeError:
+            pass
+        return False
+
+    def __ne__(self, other):
+        return not self == other
+    
+    def __hash__(self):
+        return hash((self.parent, self.name))
+
+def hasinit(obj):
+    init = getattr(obj, '__init__', None)
+    if init:
+        if not isinstance(init, type(object.__init__)):
+            return True

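To illustrate what these collectors pick up, here is a sketch of a test module that matches the funcnamefilter/classnamefilter rules above, including a yield-based test handled by the Generator collector; all names are examples:

    # example_test.py -- picked up via the default naming filters above
    def test_simple():
        assert 1 + 1 == 2

    class TestThing:
        def test_method(self):
            assert isinstance(self, TestThing)

    def test_generated():
        # each yielded (callable, arg) pair becomes a sub-item via Generator
        def check(x):
            assert x < 3
        for i in range(3):
            yield check, i
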
Added: pypy/trunk/py/impl/test/session.py
==============================================================================
--- (empty file)
+++ pypy/trunk/py/impl/test/session.py	Wed Nov 11 18:54:49 2009
@@ -0,0 +1,126 @@
+""" basic test session implementation. 
+
+* drives collection of tests 
+* triggers executions of tests   
+* produces events used by reporting 
+"""
+
+import py
+from py.impl.test import outcome
+
+# imports used for genitems()
+Item = py.test.collect.Item
+Collector = py.test.collect.Collector
+
+class Session(object): 
+    """ 
+        Session drives the collection and running of tests
+        and generates test events for reporters. 
+    """ 
+    def __init__(self, config):
+        self.config = config
+        self.pluginmanager = config.pluginmanager # shortcut 
+        self.pluginmanager.register(self)
+        self._testsfailed = False
+        self._nomatch = False
+        self.shouldstop = False
+
+    def genitems(self, colitems, keywordexpr=None):
+        """ yield Items from iterating over the given colitems. """
+        while colitems: 
+            next = colitems.pop(0)
+            if isinstance(next, (tuple, list)):
+                colitems[:] = list(next) + colitems 
+                continue
+            assert self.pluginmanager is next.config.pluginmanager
+            if isinstance(next, Item):
+                remaining = self.filteritems([next])
+                if remaining:
+                    self.config.hook.pytest_itemstart(item=next)
+                    yield next 
+            else:
+                assert isinstance(next, Collector)
+                self.config.hook.pytest_collectstart(collector=next)
+                rep = self.config.hook.pytest_make_collect_report(collector=next)
+                if rep.passed:
+                    for x in self.genitems(rep.result, keywordexpr):
+                        yield x 
+                self.config.hook.pytest_collectreport(report=rep)
+            if self.shouldstop:
+                break
+
+    def filteritems(self, colitems):
+        """ return items to process (some may be deselected)"""
+        keywordexpr = self.config.option.keyword 
+        if not keywordexpr or self._nomatch:
+            return colitems
+        if keywordexpr[-1] == ":": 
+            keywordexpr = keywordexpr[:-1]
+        remaining = []
+        deselected = []
+        for colitem in colitems:
+            if isinstance(colitem, Item):
+                if colitem._skipbykeyword(keywordexpr):
+                    deselected.append(colitem)
+                    continue
+            remaining.append(colitem)
+        if deselected: 
+            self.config.hook.pytest_deselected(items=deselected)
+            if self.config.option.keyword.endswith(":"):
+                self._nomatch = True
+        return remaining 
+
+    def collect(self, colitems): 
+        keyword = self.config.option.keyword
+        for x in self.genitems(colitems, keyword):
+            yield x
+
+    def sessionstarts(self):
+        """ set up any necessary resources ahead of the test run. """
+        self.config.hook.pytest_sessionstart(session=self)
+        
+    def pytest_runtest_logreport(self, report):
+        if report.failed:
+            self._testsfailed = True
+            if self.config.option.exitfirst:
+                self.shouldstop = True
+    pytest_collectreport = pytest_runtest_logreport
+
+    def sessionfinishes(self, exitstatus):
+        """ tear down any resources after a test run. """
+        self.config.hook.pytest_sessionfinish(
+            session=self, 
+            exitstatus=exitstatus, 
+        )
+
+    def getinitialitems(self, colitems):
+        if colitems is None:
+            colitems = [self.config.getfsnode(arg) 
+                            for arg in self.config.args]
+        return colitems
+
+    def main(self, colitems=None):
+        """ main loop for running tests. """
+        colitems = self.getinitialitems(colitems)
+        self.shouldstop = False 
+        self.sessionstarts()
+        exitstatus = outcome.EXIT_OK
+        captured_excinfo = None
+        try:
+            for item in self.collect(colitems): 
+                if self.shouldstop: 
+                    break 
+                if not self.config.option.collectonly: 
+                    item.config.hook.pytest_runtest_protocol(item=item)
+        except KeyboardInterrupt:
+            excinfo = py.code.ExceptionInfo()
+            self.config.hook.pytest_keyboard_interrupt(excinfo=excinfo)
+            exitstatus = outcome.EXIT_INTERRUPTED
+        except:
+            excinfo = py.code.ExceptionInfo()
+            self.config.pluginmanager.notify_exception(captured_excinfo)
+            exitstatus = outcome.EXIT_INTERNALERROR
+        if exitstatus == 0 and self._testsfailed:
+            exitstatus = outcome.EXIT_TESTSFAILED
+        self.sessionfinishes(exitstatus=exitstatus)
+        return exitstatus

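Session itself tracks failures via pytest_runtest_logreport; the same pattern can be reused by any registered plugin, as the Failures helper in remote.py above does. A minimal sketch (class name hypothetical):

    class FailureCollector(list):
        def pytest_runtest_logreport(self, report):
            if report.failed:
                self.append(report)
        pytest_collectreport = pytest_runtest_logreport

    # would typically be registered via config.pluginmanager.register(FailureCollector())
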
Added: pypy/trunk/py/impl/xmlgen.py
==============================================================================
--- (empty file)
+++ pypy/trunk/py/impl/xmlgen.py	Wed Nov 11 18:54:49 2009
@@ -0,0 +1,243 @@
+"""
+module for generating and serializing xml and html structures
+by using simple python objects. 
+
+(c) holger krekel, holger at merlinux eu. 2009
+""" 
+import py
+import sys, re
+
+if sys.version_info >= (3,0):
+    def u(s): 
+        return s
+    def unicode(x):
+        if hasattr(x, '__unicode__'):
+            return x.__unicode__()
+        return str(x)
+else:
+    def u(s):
+        return unicode(s)
+    unicode = unicode 
+    
+
+class NamespaceMetaclass(type): 
+    def __getattr__(self, name): 
+        if name[:1] == '_': 
+            raise AttributeError(name) 
+        if self == Namespace: 
+            raise ValueError("Namespace class is abstract") 
+        tagspec = self.__tagspec__
+        if tagspec is not None and name not in tagspec: 
+            raise AttributeError(name) 
+        classattr = {}
+        if self.__stickyname__: 
+            classattr['xmlname'] = name 
+        cls = type(name, (self.__tagclass__,), classattr) 
+        setattr(self, name, cls) 
+        return cls 
+
+class Tag(list):
+    class Attr(object): 
+        def __init__(self, **kwargs): 
+            self.__dict__.update(kwargs) 
+
+    def __init__(self, *args, **kwargs):
+        super(Tag, self).__init__(args)
+        self.attr = self.Attr(**kwargs) 
+
+    def __unicode__(self):
+        return self.unicode(indent=0) 
+    __str__ = __unicode__
+
+    def unicode(self, indent=2):
+        l = []
+        SimpleUnicodeVisitor(l.append, indent).visit(self) 
+        return "".join(l) 
+
+    def __repr__(self):
+        name = self.__class__.__name__ 
+        return "<%r tag object %d>" % (name, id(self))
+    
+Namespace = NamespaceMetaclass('Namespace', (object, ), {
+    '__tagspec__': None, 
+    '__tagclass__': Tag, 
+    '__stickyname__': False, 
+})
+
+class HtmlTag(Tag): 
+    def unicode(self, indent=2):
+        l = []
+        HtmlVisitor(l.append, indent, shortempty=False).visit(self) 
+        return u("").join(l) 
+
+# exported plain html namespace 
+class html(Namespace):
+    __tagclass__ = HtmlTag
+    __stickyname__ = True 
+    __tagspec__ = dict([(x,1) for x in ( 
+        'a,abbr,acronym,address,applet,area,b,bdo,big,blink,'
+        'blockquote,body,br,button,caption,center,cite,code,col,'
+        'colgroup,comment,dd,del,dfn,dir,div,dl,dt,em,embed,'
+        'fieldset,font,form,frameset,h1,h2,h3,h4,h5,h6,head,html,'
+        'i,iframe,img,input,ins,kbd,label,legend,li,link,listing,'
+        'map,marquee,menu,meta,multicol,nobr,noembed,noframes,'
+        'noscript,object,ol,optgroup,option,p,pre,q,s,script,'
+        'select,small,span,strike,strong,style,sub,sup,table,'
+        'tbody,td,textarea,tfoot,th,thead,title,tr,tt,u,ul,xmp,'
+        'base,basefont,frame,hr,isindex,param,samp,var'
+    ).split(',') if x])
+
+    class Style(object): 
+        def __init__(self, **kw): 
+            for x, y in kw.items():
+                x = x.replace('_', '-')
+                setattr(self, x, y) 
+
+
+class raw(object):
+    """just a box that can contain a unicode string that will be
+    included directly in the output"""
+    def __init__(self, uniobj):
+        self.uniobj = uniobj
+
+class SimpleUnicodeVisitor(object):
+    """ recursive visitor to write unicode. """
+    def __init__(self, write, indent=0, curindent=0, shortempty=True): 
+        self.write = write
+        self.cache = {}
+        self.visited = {} # for detection of recursion
+        self.indent = indent 
+        self.curindent = curindent
+        self.parents = []
+        self.shortempty = shortempty  # short empty tags or not 
+
+    def visit(self, node): 
+        """ dispatcher on node's class/bases name. """
+        cls = node.__class__
+        try:
+            visitmethod = self.cache[cls]   
+        except KeyError:
+            for subclass in cls.__mro__: 
+                visitmethod = getattr(self, subclass.__name__, None)
+                if visitmethod is not None:
+                    break
+            else:
+                visitmethod = self.object 
+            self.cache[cls] = visitmethod
+        visitmethod(node) 
+
+    def object(self, obj):
+        #self.write(obj) 
+        self.write(escape(unicode(obj)))
+
+    def raw(self, obj):
+        self.write(obj.uniobj) 
+
+    def list(self, obj):  
+        assert id(obj) not in self.visited
+        self.visited[id(obj)] = 1
+        list(map(self.visit, obj))  # list() forces iteration under Python 3
+
+    def Tag(self, tag):
+        assert id(tag) not in self.visited
+        try: 
+            tag.parent = self.parents[-1]
+        except IndexError: 
+            tag.parent = None 
+        self.visited[id(tag)] = 1
+        tagname = getattr(tag, 'xmlname', tag.__class__.__name__)
+        if self.curindent and not self._isinline(tagname):
+            self.write("\n" + u(' ') * self.curindent) 
+        if tag:
+            self.curindent += self.indent 
+            self.write(u('<%s%s>') % (tagname, self.attributes(tag)))
+            self.parents.append(tag) 
+            for x in tag:
+                self.visit(x)
+            self.parents.pop() 
+            self.write(u('</%s>') % tagname) 
+            self.curindent -= self.indent 
+        else:
+            nameattr = tagname+self.attributes(tag) 
+            if self._issingleton(tagname): 
+                self.write(u('<%s/>') % (nameattr,))
+            else: 
+                self.write(u('<%s></%s>') % (nameattr, tagname))
+
+    def attributes(self, tag):
+        # serialize attributes
+        attrlist = dir(tag.attr) 
+        attrlist.sort() 
+        l = []
+        for name in attrlist: 
+            res = self.repr_attribute(tag.attr, name)
+            if res is not None: 
+                l.append(res) 
+        l.extend(self.getstyle(tag))
+        return u("").join(l)
+
+    def repr_attribute(self, attrs, name): 
+        if name[:2] != '__': 
+            value = getattr(attrs, name) 
+            if name.endswith('_'): 
+                name = name[:-1]
+            return ' %s="%s"' % (name, escape(unicode(value)))
+
+    def getstyle(self, tag): 
+        """ return attribute list suitable for styling. """ 
+        try: 
+            styledict = tag.style.__dict__
+        except AttributeError: 
+            return [] 
+        else: 
+            stylelist = [x+': ' + y for x,y in styledict.items()]
+            return [u(' style="%s"') % u('; ').join(stylelist)]
+
+    def _issingleton(self, tagname):
+        """can (and will) be overridden in subclasses"""
+        return self.shortempty
+
+    def _isinline(self, tagname):
+        """can (and will) be overridden in subclasses"""
+        return False
+
+class HtmlVisitor(SimpleUnicodeVisitor): 
+    
+    single = dict([(x, 1) for x in 
+                ('br,img,area,param,col,hr,meta,link,base,'
+                    'input,frame').split(',')])
+    inline = dict([(x, 1) for x in
+                ('a abbr acronym b basefont bdo big br cite code dfn em font '
+                 'i img input kbd label q s samp select small span strike '
+                 'strong sub sup textarea tt u var'.split(' '))])
+
+    def repr_attribute(self, attrs, name): 
+        if name == 'class_':
+            value = getattr(attrs, name) 
+            if value is None: 
+                return
+        return super(HtmlVisitor, self).repr_attribute(attrs, name) 
+
+    def _issingleton(self, tagname):
+        return tagname in self.single
+
+    def _isinline(self, tagname):
+        return tagname in self.inline
+
+       
+class _escape:
+    def __init__(self):
+        self.escape = {
+            u('"') : u('&quot;'), u('<') : u('&lt;'), u('>') : u('&gt;'), 
+            u('&') : u('&amp;'), u("'") : u('&apos;'),
+            }
+        self.charef_rex = re.compile(u("|").join(self.escape.keys()))
+
+    def _replacer(self, match):
+        return self.escape[match.group(0)]
+
+    def __call__(self, ustring):
+        """ xml-escape the given unicode string. """
+        return self.charef_rex.sub(self._replacer, ustring)
+
+escape = _escape()

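A usage sketch for the html namespace defined above; the import path follows this commit's layout and the document content is illustrative:

    from py.impl.xmlgen import html

    doc = html.html(
        html.head(html.title("demo")),
        html.body(
            html.p("hello ", html.a("a link", href="http://example.com")),
            class_="main"))
    print(doc.unicode(indent=2))
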
Added: pypy/trunk/py/plugin/__init__.py
==============================================================================
--- (empty file)
+++ pypy/trunk/py/plugin/__init__.py	Wed Nov 11 18:54:49 2009
@@ -0,0 +1 @@
+#

Added: pypy/trunk/py/plugin/hookspec.py
==============================================================================
--- (empty file)
+++ pypy/trunk/py/plugin/hookspec.py	Wed Nov 11 18:54:49 2009
@@ -0,0 +1,175 @@
+"""
+hook specifications for py.test plugins 
+"""
+
+# -------------------------------------------------------------------------
+# Command line and configuration 
+# -------------------------------------------------------------------------
+
+def pytest_addoption(parser):
+    """ called before commandline parsing.  """
+
+def pytest_namespace():
+    """ return dict of name->object which will get stored in the py.test namespace """
+
+def pytest_configure(config):
+    """ called after command line options have been parsed
+        and all plugins and initial conftest files have been loaded.
+    """
+
+def pytest_unconfigure(config):
+    """ called before test process is exited.  """
+
+# -------------------------------------------------------------------------
+# collection hooks
+# -------------------------------------------------------------------------
+
+def pytest_collect_directory(path, parent):
+    """ return Collection node or None for the given path. """
+
+def pytest_collect_file(path, parent):
+    """ return Collection node or None for the given path. """
+
+def pytest_collectstart(collector):
+    """ collector starts collecting. """
+
+def pytest_collectreport(report):
+    """ collector finished collecting. """
+
+def pytest_deselected(items):
+    """ called for test items deselected by keyword. """
+
+def pytest_make_collect_report(collector):
+    """ perform a collection and return a collection report. """
+pytest_make_collect_report.firstresult = True
+
+# XXX rename to item_collected()?  meaning in distribution context? 
+def pytest_itemstart(item, node=None):
+    """ test item gets collected. """
+
+# -------------------------------------------------------------------------
+# Python test function related hooks
+# -------------------------------------------------------------------------
+
+def pytest_pycollect_makeitem(collector, name, obj):
+    """ return custom item/collector for a python object in a module, or None.  """
+pytest_pycollect_makeitem.firstresult = True
+
+def pytest_pyfunc_call(pyfuncitem):
+    """ perform function call to the with the given function arguments. """ 
+pytest_pyfunc_call.firstresult = True
+
+def pytest_generate_tests(metafunc):
+    """ generate (multiple) parametrized calls to a test function."""
+
+# -------------------------------------------------------------------------
+# generic runtest related hooks 
+# -------------------------------------------------------------------------
+
+def pytest_runtest_protocol(item):
+    """ implement fixture, run and report protocol. """
+pytest_runtest_protocol.firstresult = True
+
+def pytest_runtest_setup(item):
+    """ called before pytest_runtest_call(). """ 
+
+def pytest_runtest_call(item):
+    """ execute test item. """ 
+
+def pytest_runtest_teardown(item):
+    """ called after pytest_runtest_call(). """ 
+
+def pytest_runtest_makereport(item, call):
+    """ make ItemTestReport for the given item and call outcome. """ 
+pytest_runtest_makereport.firstresult = True
+
+def pytest_runtest_logreport(report):
+    """ process item test report. """ 
+
+# special handling for final teardown - somewhat internal for now
+def pytest__teardown_final(session):
+    """ called before test session finishes. """
+pytest__teardown_final.firstresult = True
+
+def pytest__teardown_final_logerror(report):
+    """ called if runtest_teardown_final failed. """ 
+
+# -------------------------------------------------------------------------
+# test session related hooks 
+# -------------------------------------------------------------------------
+
+def pytest_sessionstart(session):
+    """ before session.main() is called. """
+
+def pytest_sessionfinish(session, exitstatus):
+    """ whole test run finishes. """
+
+# -------------------------------------------------------------------------
+# hooks for influencing reporting (invoked from pytest_terminal)
+# -------------------------------------------------------------------------
+
+def pytest_report_teststatus(report):
+    """ return result-category, shortletter and verbose word for reporting."""
+pytest_report_teststatus.firstresult = True
+
+def pytest_terminal_summary(terminalreporter):
+    """ add additional section in terminal summary reporting. """
+
+def pytest_report_iteminfo(item):
+    """ return (fspath, lineno, name) for the item.
+        the information is used for result display and to sort tests
+    """
+pytest_report_iteminfo.firstresult = True
+
+# -------------------------------------------------------------------------
+# doctest hooks 
+# -------------------------------------------------------------------------
+
+def pytest_doctest_prepare_content(content):
+    """ return processed content for a given doctest"""
+pytest_doctest_prepare_content.firstresult = True
+
+# -------------------------------------------------------------------------
+# distributed testing 
+# -------------------------------------------------------------------------
+
+def pytest_gwmanage_newgateway(gateway, platinfo):
+    """ called on new raw gateway creation. """ 
+
+def pytest_gwmanage_rsyncstart(source, gateways):
+    """ called before rsyncing a directory to remote gateways takes place. """
+
+def pytest_gwmanage_rsyncfinish(source, gateways):
+    """ called after rsyncing a directory to remote gateways takes place. """
+
+def pytest_testnodeready(node):
+    """ Test Node is ready to operate. """
+
+def pytest_testnodedown(node, error):
+    """ Test Node is down. """
+
+def pytest_rescheduleitems(items):
+    """ reschedule Items from a node that went down. """
+
+def pytest_looponfailinfo(failreports, rootdirs):
+    """ info for repeating failing tests. """
+
+
+# -------------------------------------------------------------------------
+# error handling and internal debugging hooks 
+# -------------------------------------------------------------------------
+
+def pytest_plugin_registered(plugin):
+    """ a new py lib plugin got registered. """
+
+def pytest_plugin_unregistered(plugin):
+    """ a py lib plugin got unregistered. """
+
+def pytest_internalerror(excrepr):
+    """ called for internal errors. """
+
+def pytest_keyboard_interrupt(excinfo):
+    """ called for keyboard interrupt. """
+
+def pytest_trace(category, msg):
+    """ called for debug info. """ 

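A hypothetical sketch of a local conftest.py plugin implementing two of the
hook specifications above; the option name and the skip condition are made up
for illustration::

    # conftest.py -- hypothetical local plugin
    import py

    def pytest_addoption(parser):
        # called before command line parsing (see hookspec above)
        parser.addoption("--smoke", action="store_true", default=False,
                         help="only run tests that carry a 'smoke' attribute.")

    def pytest_runtest_setup(item):
        # called before pytest_runtest_call() for each test item
        if item.config.getvalue("smoke"):
            if not hasattr(getattr(item, "obj", None), "smoke"):
                py.test.skip("skipped in --smoke run")
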
Added: pypy/trunk/py/plugin/pytest__pytest.py
==============================================================================
--- (empty file)
+++ pypy/trunk/py/plugin/pytest__pytest.py	Wed Nov 11 18:54:49 2009
@@ -0,0 +1,101 @@
+import py
+
+def pytest_funcarg___pytest(request):
+    return PytestArg(request)
+
+class PytestArg:
+    def __init__(self, request):
+        self.request = request 
+        self.monkeypatch = self.request.getfuncargvalue("monkeypatch")
+        self.comregistry = py._com.Registry()
+        self.monkeypatch.setattr(py._com, 'comregistry', self.comregistry)
+
+    def gethookrecorder(self, hookspecs, registry=None):
+        if registry is not None:
+            self.monkeypatch.setattr(py._com, 'comregistry', registry) 
+            self.comregistry = registry 
+        hookrecorder = HookRecorder(self.comregistry) 
+        hookrecorder.start_recording(hookspecs)
+        self.request.addfinalizer(hookrecorder.finish_recording)
+        return hookrecorder 
+
+class ParsedCall:
+    def __init__(self, name, locals):
+        assert '_name' not in locals 
+        self.__dict__.update(locals)
+        self.__dict__.pop('self')
+        self._name = name 
+
+    def __repr__(self):
+        d = self.__dict__.copy()
+        del d['_name']
+        return "<ParsedCall %r(**%r)>" %(self._name, d)
+
+class HookRecorder:
+    def __init__(self, comregistry):
+        self._comregistry = comregistry
+        self.calls = []
+        self._recorders = {}
+        
+    def start_recording(self, hookspecs):
+        assert hookspecs not in self._recorders 
+        class RecordCalls: 
+            _recorder = self 
+        for name, method in vars(hookspecs).items():
+            if name[0] != "_":
+                setattr(RecordCalls, name, self._makecallparser(method))
+        recorder = RecordCalls()
+        self._recorders[hookspecs] = recorder
+        self._comregistry.register(recorder)
+        self.hook = py._com.HookRelay(hookspecs, registry=self._comregistry)
+
+    def finish_recording(self):
+        for recorder in self._recorders.values():
+            self._comregistry.unregister(recorder)
+        self._recorders.clear()
+
+    def _makecallparser(self, method):
+        name = method.__name__
+        args, varargs, varkw, default = py.std.inspect.getargspec(method)
+        if not args or args[0] != "self":
+            args.insert(0, 'self') 
+        fspec = py.std.inspect.formatargspec(args, varargs, varkw, default)
+        # we use exec because we want to have early type
+        # errors on wrong input arguments, using
+        # *args/**kwargs delays this and gives errors
+        # elsewhere
+        exec (py.code.compile("""
+            def %(name)s%(fspec)s: 
+                        self._recorder.calls.append(
+                            ParsedCall(%(name)r, locals()))
+        """ % locals()))
+        return locals()[name]
+
+    def getcalls(self, names):
+        if isinstance(names, str):
+            names = names.split()
+        for name in names:
+            for cls in self._recorders:
+                if name in vars(cls):
+                    break
+            else:
+                raise ValueError("callname %r not found in %r" %(
+                name, self._recorders.keys()))
+        l = []
+        for call in self.calls:
+            if call._name in names:
+                l.append(call)
+        return l
+
+    def popcall(self, name):
+        for i, call in enumerate(self.calls):
+            if call._name == name:
+                del self.calls[i]
+                return call 
+        raise ValueError("could not find call %r" %(name, ))
+
+    def getcall(self, name):
+        l = self.getcalls(name)
+        assert len(l) == 1, (name, l)
+        return l[0]
+

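A rough sketch of how a test in py.test's own test suite might use the
``_pytest`` funcarg and HookRecorder defined above; the code that would
actually trigger hook calls is elided::

    # hypothetical usage inside py.test's own tests
    from py.plugin import hookspec

    def test_hooks_recorded(_pytest):
        rec = _pytest.gethookrecorder(hookspec)
        # ... exercise code that invokes hooks through the recorded registry ...
        calls = rec.getcalls("pytest_collectstart pytest_collectreport")
        for call in calls:
            print(call)   # each entry is a ParsedCall holding the hook arguments
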
Added: pypy/trunk/py/plugin/pytest_assertion.py
==============================================================================
--- (empty file)
+++ pypy/trunk/py/plugin/pytest_assertion.py	Wed Nov 11 18:54:49 2009
@@ -0,0 +1,31 @@
+import py
+import sys
+
+def pytest_addoption(parser):
+    group = parser.getgroup("debugconfig")
+    group._addoption('--no-assert', action="store_true", default=False, 
+        dest="noassert", 
+        help="disable python assert expression reinterpretation."),
+
+def pytest_configure(config):
+    #if sys.platform.startswith("java"):
+    #    return # XXX assertions don't work yet with jython 2.5.1
+
+    if not config.getvalue("noassert") and not config.getvalue("nomagic"):
+        warn_about_missing_assertion()
+        config._oldassertion = py.builtin.builtins.AssertionError
+        py.builtin.builtins.AssertionError = py.code._AssertionError 
+
+def pytest_unconfigure(config):
+    if hasattr(config, '_oldassertion'):
+        py.builtin.builtins.AssertionError = config._oldassertion
+        del config._oldassertion
+
+def warn_about_missing_assertion():
+    try:
+        assert False
+    except AssertionError:
+        pass
+    else:
+        py.std.warnings.warn("Assertions are turned off!"
+                             " (are you using python -O?)")

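With reinterpretation active (the default; ``--no-assert`` disables it), a
plain assert statement in a failing test can be reported with the values of
the expression instead of a bare AssertionError.  A hypothetical example::

    # test_example.py -- hypothetical test module
    def test_answer():
        expected = 43
        # on failure the report can show the reinterpreted expression,
        # e.g. "assert 42 == 43", rather than just "AssertionError"
        assert 6 * 7 == expected
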
Added: pypy/trunk/py/plugin/pytest_capture.py
==============================================================================
--- (empty file)
+++ pypy/trunk/py/plugin/pytest_capture.py	Wed Nov 11 18:54:49 2009
@@ -0,0 +1,277 @@
+"""
+configurable per-test stdout/stderr capturing mechanisms. 
+
+This plugin captures stdout/stderr output for each test separately. 
+In case of test failures this captured output is shown grouped 
+together with the test. 
+
+The plugin also provides test function arguments that help to
+assert stdout/stderr output from within your tests, see the 
+`funcarg example`_. 
+
+
+Capturing of input/output streams during tests 
+---------------------------------------------------
+
+By default ``sys.stdout`` and ``sys.stderr`` are substituted with
+temporary streams during the execution of tests and setup/teardown code.  
+The same temporary streams are re-used during the whole testing 
+process; this plays well with the logging module, which easily
+takes ownership of these streams. 
+
+Also, ``sys.stdin`` is substituted with a file-like "null" object that 
+does not return any values, so that tests which try to read from 
+stdin error out immediately instead of blocking. 
+
+You can influence output capturing mechanisms from the command line::
+
+    py.test -s            # disable all capturing
+    py.test --capture=sys # replace sys.stdout/stderr with in-mem files
+    py.test --capture=fd  # point filedescriptors 1 and 2 to temp file
+
+If you set capturing values in a conftest file like this::
+
+    # conftest.py
+    option_capture = 'fd'
+
+then all tests in that directory will execute with "fd" style capturing. 
+
+sys-level capturing 
+------------------------------------------
+
+Capturing on 'sys' level means that ``sys.stdout`` and ``sys.stderr`` 
+will be replaced with in-memory files (``py.io.TextIO`` to be precise)  
+that capture writes and decode non-unicode strings to a unicode object
+(using a default, usually UTF-8, encoding). 
+
+FD-level capturing and subprocesses
+------------------------------------------
+
+The ``fd`` based method means that writes going to system level files
+based on the standard file descriptors will be captured, for example 
+writes such as ``os.write(1, 'hello')`` will be captured properly. 
+Capturing on fd-level will include output generated from 
+any subprocesses created during a test. 
+
+.. _`funcarg example`:
+
+Example Usage of the capturing Function arguments
+---------------------------------------------------
+
+You can use the `capsys funcarg`_ and `capfd funcarg`_ to 
+capture writes to stdout and stderr streams.  Using the
+funcargs frees your test from having to care about setting/resetting 
+the old streams and also interacts well with py.test's own 
+per-test capturing.  Here is an example test function:
+
+.. sourcecode:: python
+
+    def test_myoutput(capsys):
+        print ("hello")
+        sys.stderr.write("world\\n")
+        out, err = capsys.readouterr()
+        assert out == "hello\\n"
+        assert err == "world\\n"
+        print "next"
+        out, err = capsys.readouterr()
+        assert out == "next\\n" 
+
+The ``readouterr()`` call snapshots the output so far, 
+and capturing continues afterwards.  After the test 
+function finishes, the original streams will 
+be restored.  If you want to capture on 
+the file descriptor level you can use the ``capfd`` function
+argument, which offers the same interface. 
+"""
+
+import py
+import os
+
+def pytest_addoption(parser):
+    group = parser.getgroup("general")
+    group._addoption('--capture', action="store", default=None,
+        metavar="method", type="choice", choices=['fd', 'sys', 'no'],
+        help="set capturing method during tests: fd (default)|sys|no.")
+    group._addoption('-s', action="store_const", const="no", dest="capture", 
+        help="shortcut for --capture=no.")
+
+def addouterr(rep, outerr):
+    repr = getattr(rep, 'longrepr', None)
+    if not hasattr(repr, 'addsection'):
+        return
+    for secname, content in zip(["out", "err"], outerr):
+        if content:
+            repr.addsection("Captured std%s" % secname, content.rstrip())
+
+def pytest_configure(config):
+    config.pluginmanager.register(CaptureManager(), 'capturemanager')
+
+class CaptureManager:
+    def __init__(self):
+        self._method2capture = {}
+
+    def _maketempfile(self):
+        f = py.std.tempfile.TemporaryFile()
+        newf = py.io.dupfile(f, encoding="UTF-8") 
+        return newf
+
+    def _makestringio(self):
+        return py.io.TextIO() 
+
+    def _startcapture(self, method):
+        if method == "fd": 
+            return py.io.StdCaptureFD(
+                out=self._maketempfile(), err=self._maketempfile()
+            )
+        elif method == "sys":
+            return py.io.StdCapture(
+                out=self._makestringio(), err=self._makestringio()
+            )
+        else:
+            raise ValueError("unknown capturing method: %r" % method)
+
+    def _getmethod(self, config, fspath):
+        if config.option.capture:
+            method = config.option.capture
+        else:
+            try: 
+                method = config._conftest.rget("option_capture", path=fspath)
+            except KeyError:
+                method = "fd"
+        if method == "fd" and not hasattr(os, 'dup'): # e.g. jython 
+            method = "sys" 
+        return method
+
+    def resumecapture_item(self, item):
+        method = self._getmethod(item.config, item.fspath)
+        if not hasattr(item, 'outerr'):
+            item.outerr = ('', '') # we accumulate outerr on the item
+        return self.resumecapture(method)
+
+    def resumecapture(self, method):
+        if hasattr(self, '_capturing'):
+            raise ValueError("cannot resume, already capturing with %r" % 
+                (self._capturing,))
+        if method != "no":
+            cap = self._method2capture.get(method)
+            if cap is None:
+                cap = self._startcapture(method)
+                self._method2capture[method] = cap 
+            else:
+                cap.resume()
+        self._capturing = method 
+
+    def suspendcapture(self):
+        self.deactivate_funcargs()
+        method = self._capturing
+        if method != "no":
+            cap = self._method2capture[method]
+            outerr = cap.suspend()
+        else:
+            outerr = "", ""
+        del self._capturing
+        return outerr 
+
+    def activate_funcargs(self, pyfuncitem):
+        if not hasattr(pyfuncitem, 'funcargs'):
+            return
+        assert not hasattr(self, '_capturing_funcargs')
+        l = []
+        for name, obj in pyfuncitem.funcargs.items():
+            if name in ('capsys', 'capfd'):
+                obj._start()
+                l.append(obj)
+        if l:
+            self._capturing_funcargs = l
+
+    def deactivate_funcargs(self):
+        if hasattr(self, '_capturing_funcargs'):
+            for capfuncarg in self._capturing_funcargs:
+                capfuncarg._finalize()
+            del self._capturing_funcargs
+
+    def pytest_make_collect_report(self, __multicall__, collector):
+        method = self._getmethod(collector.config, collector.fspath)
+        self.resumecapture(method)
+        try:
+            rep = __multicall__.execute()
+        finally:
+            outerr = self.suspendcapture()
+        addouterr(rep, outerr)
+        return rep
+
+    def pytest_runtest_setup(self, item):
+        self.resumecapture_item(item)
+
+    def pytest_runtest_call(self, item):
+        self.resumecapture_item(item)
+        self.activate_funcargs(item)
+
+    def pytest_runtest_teardown(self, item):
+        self.resumecapture_item(item)
+
+    def pytest__teardown_final(self, __multicall__, session):
+        method = self._getmethod(session.config, None)
+        self.resumecapture(method)
+        try:
+            rep = __multicall__.execute()
+        finally:
+            outerr = self.suspendcapture()
+        if rep:
+            addouterr(rep, outerr)
+        return rep
+
+    def pytest_keyboard_interrupt(self, excinfo):
+        if hasattr(self, '_capturing'):
+            self.suspendcapture()
+
+    def pytest_runtest_makereport(self, __multicall__, item, call):
+        self.deactivate_funcargs()
+        rep = __multicall__.execute()
+        outerr = self.suspendcapture()
+        outerr = (item.outerr[0] + outerr[0], item.outerr[1] + outerr[1])
+        if not rep.passed:
+            addouterr(rep, outerr)
+        if not rep.passed or rep.when == "teardown":
+            outerr = ('', '')
+        item.outerr = outerr 
+        return rep
+
+def pytest_funcarg__capsys(request):
+    """captures writes to sys.stdout/sys.stderr and makes 
+    them available successively via a ``capsys.readouterr()`` method 
+    which returns a ``(out, err)`` tuple of captured snapshot strings. 
+    """ 
+    return CaptureFuncarg(request, py.io.StdCapture)
+
+def pytest_funcarg__capfd(request):
+    """captures writes to file descriptors 1 and 2 and makes 
+    snapshotted ``(out, err)`` string tuples available 
+    via the ``capfd.readouterr()`` method. 
+    """ 
+    return CaptureFuncarg(request, py.io.StdCaptureFD)
+
+
+class CaptureFuncarg:
+    def __init__(self, request, captureclass):
+        self._cclass = captureclass
+        #request.addfinalizer(self._finalize)
+
+    def _start(self):
+        self.capture = self._cclass()
+
+    def _finalize(self):
+        if hasattr(self, 'capture'):
+            self.capture.reset()
+            del self.capture 
+
+    def readouterr(self):
+        return self.capture.readouterr()
+
+    def close(self):
+        self.capture.reset()
+        del self.capture
+

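A short sketch complementing the ``capsys`` example in the docstring above,
using the ``capfd`` funcarg to snapshot writes made directly to file
descriptors 1 and 2; the test name is hypothetical::

    import os

    def test_fd_capture(capfd):
        # capfd captures on the file descriptor level, so plain os.write()
        # calls (and output of subprocesses) are caught as well
        os.write(1, "written to fd 1\n")
        out, err = capfd.readouterr()
        assert out == "written to fd 1\n"
        assert err == ""
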
Added: pypy/trunk/py/plugin/pytest_default.py
==============================================================================
--- (empty file)
+++ pypy/trunk/py/plugin/pytest_default.py	Wed Nov 11 18:54:49 2009
@@ -0,0 +1,149 @@
+""" default hooks and general py.test options. """ 
+
+import sys
+import py
+
+try:
+    import execnet
+except ImportError:
+    execnet = None
+
+def pytest_pyfunc_call(__multicall__, pyfuncitem):
+    if not __multicall__.execute():
+        testfunction = pyfuncitem.obj 
+        if pyfuncitem._isyieldedfunction():
+            testfunction(*pyfuncitem._args)
+        else:
+            funcargs = pyfuncitem.funcargs
+            testfunction(**funcargs)
+
+def pytest_collect_file(path, parent):
+    ext = path.ext 
+    pb = path.purebasename
+    if pb.startswith("test_") or pb.endswith("_test") or \
+       path in parent.config.args:
+        if ext == ".py":
+            return parent.Module(path, parent=parent) 
+
+def pytest_collect_directory(path, parent):
+    # XXX reconsider the following comment 
+    # we do not use parent.Directory here because we generally 
+    # want dir/conftest.py to be able to 
+    # define Directory(dir) already 
+    if not parent.recfilter(path): # by default special ".cvs", ... 
+        # check if cmdline specified this dir or a subdir directly
+        for arg in parent.config.args:
+            if path == arg or arg.relto(path):
+                break
+        else:
+            return 
+    Directory = parent.config.getvalue('Directory', path) 
+    return Directory(path, parent=parent)
+
+def pytest_report_iteminfo(item):
+    return item.reportinfo()
+
+def pytest_addoption(parser):
+    group = parser.getgroup("general", "running and selection options")
+    group._addoption('-x', '--exitfirst',
+               action="store_true", dest="exitfirst", default=False,
+               help="exit instantly on first error or failed test."),
+    group._addoption('-k',
+        action="store", dest="keyword", default='',
+        help="only run test items matching the given "
+             "space separated keywords.  precede a keyword with '-' to negate. "
+             "Terminate the expression with ':' to treat a match as a signal "
+             "to run all subsequent tests. ")
+    group._addoption('-p', action="append", dest="plugins", default = [],
+               help=("load the specified plugin after command line parsing. "))
+    if execnet:
+        group._addoption('-f', '--looponfail',
+                   action="store_true", dest="looponfail", default=False,
+                   help="run tests, re-run failing test set until all pass.")
+
+    group = parser.getgroup("debugconfig", 
+        "test process debugging and configuration")
+    group.addoption('--basetemp', dest="basetemp", default=None, metavar="dir",
+               help="base temporary directory for this test run.")
+
+    if execnet:
+        add_dist_options(parser)
+    else:
+        parser.epilog = (
+        "'execnet' package required for --looponfailing / distributed testing.")
+
+def add_dist_options(parser):
+    # see http://pytest.org/help/dist
+    group = parser.getgroup("dist", "distributed testing") 
+    group._addoption('--dist', metavar="distmode", 
+               action="store", choices=['load', 'each', 'no'], 
+               type="choice", dest="dist", default="no", 
+               help=("set mode for distributing tests to exec environments.\n\n"
+                     "each: send each test to each available environment.\n\n"
+                     "load: send each test to available environment.\n\n"
+                     "(default) no: run tests inprocess, don't distribute."))
+    group._addoption('--tx', dest="tx", action="append", default=[], metavar="xspec",
+               help=("add a test execution environment. some examples: "
+                     "--tx popen//python=python2.5 --tx socket=192.168.1.102:8888 "
+                     "--tx ssh=user at codespeak.net//chdir=testcache"))
+    group._addoption('-d', 
+               action="store_true", dest="distload", default=False,
+               help="load-balance tests.  shortcut for '--dist=load'")
+    group._addoption('-n', dest="numprocesses", metavar="numprocesses", 
+               action="store", type="int", 
+               help="shortcut for '--dist=load --tx=NUM*popen'")
+    group.addoption('--rsyncdir', action="append", default=[], metavar="dir1", 
+               help="add directory for rsyncing to remote tx nodes.")
+
+def pytest_configure(config):
+    fixoptions(config)
+    setsession(config)
+
+def fixoptions(config):
+    if execnet:
+        if config.option.numprocesses:
+            config.option.dist = "load"
+            config.option.tx = ['popen'] * int(config.option.numprocesses)
+        if config.option.distload:
+            config.option.dist = "load"
+
+def setsession(config):
+    val = config.getvalue
+    if val("collectonly"):
+        from py.impl.test.session import Session
+        config.setsessionclass(Session)
+    elif execnet:
+        if val("looponfail"):
+            from py.impl.test.looponfail.remote import LooponfailingSession
+            config.setsessionclass(LooponfailingSession)
+        elif val("dist") != "no":
+            from py.impl.test.dist.dsession import  DSession
+            config.setsessionclass(DSession)
+      
+# pycollect related hooks and code, should move to pytest_pycollect.py
+ 
+def pytest_pycollect_makeitem(__multicall__, collector, name, obj):
+    res = __multicall__.execute()
+    if res is not None:
+        return res
+    if collector._istestclasscandidate(name, obj):
+        res = collector._deprecated_join(name)
+        if res is not None:
+            return res 
+        return collector.Class(name, parent=collector)
+    elif collector.funcnamefilter(name) and hasattr(obj, '__call__'):
+        res = collector._deprecated_join(name)
+        if res is not None:
+            return res 
+        if is_generator(obj):
+            # XXX deprecation warning 
+            return collector.Generator(name, parent=collector)
+        else:
+            return collector._genfunctions(name, obj) 
+
+def is_generator(func):
+    try:
+        return py.code.getrawcode(func).co_flags & 32 # generator function 
+    except AttributeError: # builtin functions have no bytecode
+        # assume them to not be generators
+        return False 

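Projects can hook into the same collection protocol from a conftest.py.  A
hypothetical sketch that additionally collects ``check_*.py`` files as regular
test modules (the naming pattern is made up)::

    # conftest.py -- hypothetical collection extension
    def pytest_collect_file(path, parent):
        if path.ext == ".py" and path.purebasename.startswith("check_"):
            # reuse the standard Module collector, as pytest_collect_file above does
            return parent.Module(path, parent=parent)
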
Added: pypy/trunk/py/plugin/pytest_doctest.py
==============================================================================
--- (empty file)
+++ pypy/trunk/py/plugin/pytest_doctest.py	Wed Nov 11 18:54:49 2009
@@ -0,0 +1,86 @@
+"""
+collect and execute doctests from modules and test files. 
+
+Usage
+-------------
+
+By default all files matching the ``test_*.txt`` pattern will 
+be run with the ``doctest`` module.  If you issue::
+
+    py.test --doctest-modules
+
+all python files in your project will be doctest-run 
+as well. 
+"""
+
+import py
+from py.impl.code.code import TerminalRepr, ReprFileLocation
+import doctest
+
+def pytest_addoption(parser):
+    group = parser.getgroup("doctest options")
+    group.addoption("--doctest-modules", 
+        action="store_true", default=False,
+        help="search all python files for doctests", 
+        dest="doctestmodules")
+    
+def pytest_collect_file(path, parent):
+    if path.ext == ".py":
+        if parent.config.getvalue("doctestmodules"):
+            return DoctestModule(path, parent)
+    if path.check(fnmatch="test_*.txt"):
+        return DoctestTextfile(path, parent)
+
+class ReprFailDoctest(TerminalRepr):
+    def __init__(self, reprlocation, lines):
+        self.reprlocation = reprlocation
+        self.lines = lines
+    def toterminal(self, tw):
+        for line in self.lines:
+            tw.line(line)
+        self.reprlocation.toterminal(tw)
+             
+class DoctestItem(py.test.collect.Item):
+    def __init__(self, path, parent):
+        name = self.__class__.__name__ + ":" + path.basename
+        super(DoctestItem, self).__init__(name=name, parent=parent)
+        self.fspath = path 
+
+    def repr_failure(self, excinfo):
+        if excinfo.errisinstance(doctest.DocTestFailure):
+            doctestfailure = excinfo.value
+            example = doctestfailure.example
+            test = doctestfailure.test
+            filename = test.filename 
+            lineno = test.lineno + example.lineno + 1
+            message = excinfo.type.__name__
+            reprlocation = ReprFileLocation(filename, lineno, message)
+            checker = doctest.OutputChecker() 
+            REPORT_UDIFF = doctest.REPORT_UDIFF
+            filelines = py.path.local(filename).readlines(cr=0)
+            i = max(test.lineno, max(0, lineno - 10)) # XXX? 
+            lines = []
+            for line in filelines[i:lineno]:
+                lines.append("%03d %s" % (i+1, line))
+                i += 1
+            lines += checker.output_difference(example, 
+                    doctestfailure.got, REPORT_UDIFF).split("\n")
+            return ReprFailDoctest(reprlocation, lines)
+        elif excinfo.errisinstance(doctest.UnexpectedException):
+            excinfo = py.code.ExceptionInfo(excinfo.value.exc_info)
+            return super(DoctestItem, self).repr_failure(excinfo)
+        else: 
+            return super(DoctestItem, self).repr_failure(excinfo)
+
+class DoctestTextfile(DoctestItem):
+    def runtest(self):
+        if not self._deprecated_testexecution():
+            failed, tot = doctest.testfile(
+                str(self.fspath), module_relative=False, 
+                raise_on_error=True, verbose=0)
+
+class DoctestModule(DoctestItem):
+    def runtest(self):
+        module = self.fspath.pyimport()
+        failed, tot = doctest.testmod(
+            module, raise_on_error=True, verbose=0)

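Per the docstring above, text files matching ``test_*.txt`` are collected and
run through the ``doctest`` module.  A hypothetical ``test_greeting.txt``::

    This prose is ignored; only the interactive examples below are checked.

    >>> text = "hello " + "world"
    >>> text
    'hello world'
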
Added: pypy/trunk/py/plugin/pytest_figleaf.py
==============================================================================
--- (empty file)
+++ pypy/trunk/py/plugin/pytest_figleaf.py	Wed Nov 11 18:54:49 2009
@@ -0,0 +1,51 @@
+"""
+write and report coverage data with 'figleaf'. 
+
+"""
+import py
+
+py.test.importorskip("figleaf.annotate_html")
+import figleaf
+
+def pytest_addoption(parser):
+    group = parser.getgroup('figleaf options')
+    group.addoption('-F', action='store_true', default=False,
+            dest = 'figleaf',
+            help=('trace python coverage with figleaf and write HTML '
+                 'for files below the current working dir'))
+    group.addoption('--figleaf-data', action='store', default='.figleaf',
+            dest='figleafdata',
+            help='path to coverage tracing file.')
+    group.addoption('--figleaf-html', action='store', default='html',
+            dest='figleafhtml', 
+            help='path to the coverage html dir.')
+
+def pytest_configure(config):
+    figleaf.start()
+
+def pytest_terminal_summary(terminalreporter):
+    config = terminalreporter.config
+    datafile = py.path.local(config.getvalue('figleafdata'))
+    tw = terminalreporter._tw
+    tw.sep('-', 'figleaf')
+    tw.line('Writing figleaf data to %s' % (datafile))
+    figleaf.stop()
+    figleaf.write_coverage(str(datafile))
+    coverage = get_coverage(datafile, config)
+    reportdir = py.path.local(config.getvalue('figleafhtml'))
+    tw.line('Writing figleaf html to file://%s' % (reportdir))
+    figleaf.annotate_html.prepare_reportdir(str(reportdir))
+    exclude = []
+    figleaf.annotate_html.report_as_html(coverage, 
+            str(reportdir), exclude, {})
+
+def get_coverage(datafile, config):
+    # basepath = config.topdir
+    basepath = py.path.local()
+    data = figleaf.read_coverage(str(datafile))
+    d = {}
+    coverage = figleaf.combine_coverage(d, data)
+    for path in coverage.keys():
+        if not py.path.local(path).relto(basepath):
+            del coverage[path]
+    return coverage

Added: pypy/trunk/py/plugin/pytest_helpconfig.py
==============================================================================
--- (empty file)
+++ pypy/trunk/py/plugin/pytest_helpconfig.py	Wed Nov 11 18:54:49 2009
@@ -0,0 +1,63 @@
+""" provide version info, conftest/environment config names. 
+"""
+import py
+import sys
+
+def pytest_addoption(parser):
+    group = parser.getgroup('debugconfig')
+    group.addoption("--help-config", action="store_true", dest="helpconfig", 
+            help="show available conftest.py and ENV-variable names.")
+    group.addoption('--version', action="store_true", 
+            help="display py lib version and import information.")
+
+def pytest_configure(__multicall__, config):
+    if config.option.version:
+        p = py.path.local(py.__file__).dirpath()
+        sys.stderr.write("This is py.test version %s, imported from %s\n" % 
+            (py.__version__, p))
+        sys.exit(0)
+    if not config.option.helpconfig:
+        return
+    __multicall__.execute()
+    options = []
+    for group in config._parser._groups:
+        options.extend(group.options)
+    widths = [0] * 10 
+    tw = py.io.TerminalWriter()
+    tw.sep("-")
+    tw.line("%-13s | %-18s | %-25s | %s" %(
+            "cmdline name", "conftest.py name", "ENV-variable name", "help"))
+    tw.sep("-")
+
+    options = [opt for opt in options if opt._long_opts]
+    options.sort(key=lambda x: x._long_opts)
+    for opt in options:
+        if not opt._long_opts:
+            continue
+        optstrings = list(opt._long_opts) # + list(opt._short_opts)
+        optstrings = filter(None, optstrings)
+        optstring = "|".join(optstrings)
+        line = "%-13s | %-18s | %-25s | %s" %(
+            optstring, 
+            "option_%s" % opt.dest, 
+            "PYTEST_OPTION_%s" % opt.dest.upper(),
+            opt.help and opt.help or "", 
+            )
+        tw.line(line[:tw.fullwidth])
+    for name, help in conftest_options:
+        line = "%-13s | %-18s | %-25s | %s" %(
+            "", 
+            name, 
+            "",
+            help, 
+            )
+        tw.line(line[:tw.fullwidth])
+        
+    tw.sep("-")
+    sys.exit(0)
+
+conftest_options = (
+    ('pytest_plugins', 'list of plugin names to load'),
+    ('collect_ignore', '(relative) paths ignored during collection'), 
+    ('rsyncdirs', 'to-be-rsynced directories for dist-testing'), 
+)

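The conftest.py names listed in ``conftest_options`` above can be set like
this; the concrete values are hypothetical::

    # conftest.py -- hypothetical values
    pytest_plugins = ['doctest']       # list of plugin names to load
    collect_ignore = ['setup.py']      # (relative) paths ignored during collection
    rsyncdirs = ['.']                  # to-be-rsynced directories for dist-testing
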
Added: pypy/trunk/py/plugin/pytest_hooklog.py
==============================================================================
--- (empty file)
+++ pypy/trunk/py/plugin/pytest_hooklog.py	Wed Nov 11 18:54:49 2009
@@ -0,0 +1,33 @@
+""" log invocations of extension hooks to a file. """ 
+import py
+
+def pytest_addoption(parser):
+    parser.addoption("--hooklog", dest="hooklog", default=None, 
+        help="write hook calls to the given file.")
+
+def pytest_configure(config):
+    hooklog = config.getvalue("hooklog")
+    if hooklog:
+        config._hooklogfile = open(hooklog, 'w')
+        config._hooklog_oldperformcall = config.hook._performcall
+        config.hook._performcall = (lambda name, multicall: 
+            logged_call(name=name, multicall=multicall, config=config))
+
+def logged_call(name, multicall, config):
+    f = config._hooklogfile
+    f.write("%s(**%s)\n" % (name, multicall.kwargs))
+    try:
+        res = config._hooklog_oldperformcall(name=name, multicall=multicall)
+    except:
+        f.write("-> exception")
+        raise
+    f.write("-> %r" % (res,))
+    return res
+
+def pytest_unconfigure(config):
+    try:
+        del config.hook.__dict__['_performcall'] 
+    except KeyError:
+        pass
+    else:
+        config._hooklogfile.close()

Added: pypy/trunk/py/plugin/pytest_mark.py
==============================================================================
--- (empty file)
+++ pypy/trunk/py/plugin/pytest_mark.py	Wed Nov 11 18:54:49 2009
@@ -0,0 +1,148 @@
+"""
+generic mechanism for marking python functions. 
+
+By using the ``py.test.mark`` helper you can instantiate
+decorators that will set named meta data on test functions. 
+
+Marking a single function 
+----------------------------------------------------
+
+You can "mark" a test function with meta data like this::
+
+    @py.test.mark.webtest
+    def test_send_http():
+        ... 
+
+This will set a "Marker" instance as a function attribute named "webtest". 
+You can also specify parametrized meta data like this::
+
+    @py.test.mark.webtest(firefox=30)
+    def test_receive():
+        ...
+
+The named marker can be accessed like this later::
+
+    test_receive.webtest.kwargs['firefox'] == 30
+
+In addition to setting key-value pairs you can also pass positional arguments::
+
+    @py.test.mark.webtest("triangular")
+    def test_receive():
+        ...
+
+and later access it with ``test_receive.webtest.args[0] == 'triangular'``.
+
+.. _`scoped-marking`:
+
+Marking classes or modules 
+----------------------------------------------------
+
+To mark all methods of a class set a ``pytestmark`` attribute like this::
+
+    import py
+
+    class TestClass:
+        pytestmark = py.test.mark.webtest
+
+You can re-use the same markers that you would use for decorating
+a function - in fact this marker decorator will be applied
+to all test methods of the class. 
+
+You can also set a module level marker::
+
+    import py
+    pytestmark = py.test.mark.webtest
+
+in which case the marker decorator will be applied to all functions and 
+methods defined in the module.  
+
+The order in which marker functions are called is this::
+
+    per-function (upon import of module already) 
+    per-class
+    per-module 
+
+Later called markers may overwrite previous key-value settings. 
+Positional arguments are all appended to the same 'args' list 
+of the Marker object. 
+
+Using "-k MARKNAME" to select tests
+----------------------------------------------------
+
+You can use the ``-k`` command line option to select
+tests::
+
+    py.test -k webtest  # will only run tests marked as webtest
+
+"""
+import py
+
+def pytest_namespace():
+    return {'mark': Mark()}
+
+
+class Mark(object):
+    def __getattr__(self, name):
+        if name[0] == "_":
+            raise AttributeError(name)
+        return MarkerDecorator(name)
+
+class MarkerDecorator:
+    """ decorator for setting function attributes. """
+    def __init__(self, name):
+        self.markname = name
+        self.kwargs = {}
+        self.args = []
+
+    def __repr__(self):
+        d = self.__dict__.copy()
+        name = d.pop('markname')
+        return "<MarkerDecorator %r %r>" %(name, d)
+
+    def __call__(self, *args, **kwargs):
+        if args:
+            if len(args) == 1 and hasattr(args[0], '__call__'):
+                func = args[0]
+                holder = getattr(func, self.markname, None)
+                if holder is None:
+                    holder = Marker(self.markname, self.args, self.kwargs)
+                    setattr(func, self.markname, holder)
+                else:
+                    holder.kwargs.update(self.kwargs)
+                    holder.args.extend(self.args)
+                return func
+            else:
+                self.args.extend(args)
+        self.kwargs.update(kwargs)
+        return self
+        
+class Marker:
+    def __init__(self, name, args, kwargs):
+        self._name = name
+        self.args = args
+        self.kwargs = kwargs
+
+    def __getattr__(self, name):
+        if name[0] != '_' and name in self.kwargs:
+            py.log._apiwarn("1.1", "use .kwargs attribute to access key-values")
+            return self.kwargs[name]
+        raise AttributeError(name)
+
+    def __repr__(self):
+        return "<Marker %r args=%r kwargs=%r>" % (
+                self._name, self.args, self.kwargs)
+            
+
+def pytest_pycollect_makeitem(__multicall__, collector, name, obj):
+    item = __multicall__.execute()
+    if isinstance(item, py.test.collect.Function):
+        cls = collector.getparent(py.test.collect.Class)
+        mod = collector.getparent(py.test.collect.Module)
+        func = item.obj
+        func = getattr(func, '__func__', func) # py3
+        func = getattr(func, 'im_func', func)  # py2
+        for parent in [x for x in (mod, cls) if x]:
+            marker = getattr(parent.obj, 'pytestmark', None)
+            if isinstance(marker, MarkerDecorator):
+                marker(func)
+    return item

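Following the docstring above, a marker stores both positional and keyword
arguments on the decorated function; a hypothetical illustration of the
resulting attributes::

    import py

    @py.test.mark.webtest("triangular", firefox=30)
    def test_receive():
        pass

    # the Marker instance is stored as a function attribute named after the marker
    assert test_receive.webtest.args == ["triangular"]
    assert test_receive.webtest.kwargs["firefox"] == 30
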
Added: pypy/trunk/py/plugin/pytest_monkeypatch.py
==============================================================================
--- (empty file)
+++ pypy/trunk/py/plugin/pytest_monkeypatch.py	Wed Nov 11 18:54:49 2009
@@ -0,0 +1,142 @@
+"""
+safely patch object attributes, dicts and environment variables. 
+
+Usage 
+----------------
+
+Use the `monkeypatch funcarg`_ to tweak your global test environment 
+for running a particular test.  You can safely set/del an attribute, 
+dictionary item or environment variable by respective methods
+on the monkeypatch funcarg.  If you want e.g. to set an ENV1 variable 
+and have os.path.expanduser return a particular directory, you can 
+write it down like this:
+
+.. sourcecode:: python 
+
+    def test_mytest(monkeypatch):
+        monkeypatch.setenv('ENV1', 'myval')
+        monkeypatch.setattr(os.path, 'expanduser', lambda x: '/tmp/xyz')
+        ... # your test code that uses those patched values implicitly
+
+After the test function has finished, all modifications will be undone, 
+because the ``monkeypatch.undo()`` method is registered as a finalizer. 
+
+``monkeypatch.setattr/delattr/delitem/delenv()`` all 
+by default raise an Exception if the target does not exist. 
+Pass ``raising=False`` if you want to skip this check. 
+
+prepending to PATH or other environment variables 
+---------------------------------------------------------
+
+To prepend a value to an already existing environment parameter:
+
+.. sourcecode:: python 
+
+    def test_mypath_finding(monkeypatch):
+        monkeypatch.setenv('PATH', 'x/y', prepend=":")
+        # in bash language: export PATH=x/y:$PATH 
+
+calling "undo" finalization explicitely
+-----------------------------------------
+
+At the end of function execution py.test invokes
+a teardown hook which undoes all monkeypatch changes. 
+If you do not want to wait that long you can call 
+finalization explicitly::
+
+    monkeypatch.undo()  
+
+This will undo previous changes.  This call consumes the
+undo stack.  Calling it a second time has no effect unless
+you start monkeypatching after the undo call. 
+
+.. _`monkeypatch blog post`: http://tetamap.wordpress.com/2009/03/03/monkeypatching-in-unit-tests-done-right/
+"""
+
+import py, os, sys
+
+def pytest_funcarg__monkeypatch(request):
+    """The returned ``monkeypatch`` funcarg provides these 
+    helper methods to modify objects, dictionaries or os.environ::
+
+        monkeypatch.setattr(obj, name, value, raising=True)  
+        monkeypatch.delattr(obj, name, raising=True)
+        monkeypatch.setitem(mapping, name, value) 
+        monkeypatch.delitem(mapping, name, raising=True)
+        monkeypatch.setenv(name, value, prepend=False) 
+        monkeypatch.delenv(name, raising=True)
+        monkeypatch.syspath_prepend(path)
+
+    All modifications will be undone when the requesting 
+    test function has finished its execution.  For the ``del`` 
+    methods the ``raising`` parameter determines if a
+    KeyError or AttributeError will be raised if the
+    deletion has no target. 
+    """
+    monkeypatch = MonkeyPatch()
+    request.addfinalizer(monkeypatch.undo)
+    return monkeypatch
+
+notset = object()
+
+class MonkeyPatch:
+    def __init__(self):
+        self._setattr = []
+        self._setitem = []
+
+    def setattr(self, obj, name, value, raising=True):
+        oldval = getattr(obj, name, notset)
+        if raising and oldval is notset:
+            raise AttributeError("%r has no attribute %r" %(obj, name))
+        self._setattr.insert(0, (obj, name, oldval))
+        setattr(obj, name, value)
+
+    def delattr(self, obj, name, raising=True):
+        if not hasattr(obj, name):
+            if raising:
+                raise AttributeError(name) 
+        else:
+            self._setattr.insert(0, (obj, name, getattr(obj, name, notset)))
+            delattr(obj, name)
+
+    def setitem(self, dic, name, value):
+        self._setitem.insert(0, (dic, name, dic.get(name, notset)))
+        dic[name] = value
+
+    def delitem(self, dic, name, raising=True):
+        if name not in dic:
+            if raising:
+                raise KeyError(name) 
+        else:    
+            self._setitem.insert(0, (dic, name, dic.get(name, notset)))
+            del dic[name]
+
+    def setenv(self, name, value, prepend=None):
+        value = str(value)
+        if prepend and name in os.environ:
+            value = value + prepend + os.environ[name]
+        self.setitem(os.environ, name, value)
+
+    def delenv(self, name, raising=True):
+        self.delitem(os.environ, name, raising=raising)
+
+    def syspath_prepend(self, path):
+        if not hasattr(self, '_savesyspath'):
+            self._savesyspath = sys.path[:]
+        sys.path.insert(0, str(path))
+
+    def undo(self):
+        for obj, name, value in self._setattr:
+            if value is not notset:
+                setattr(obj, name, value)
+            else:
+                delattr(obj, name)
+        self._setattr[:] = []
+        for dictionary, name, value in self._setitem:
+            if value is notset:
+                del dictionary[name]
+            else:
+                dictionary[name] = value
+        self._setitem[:] = []
+        if hasattr(self, '_savesyspath'):
+            sys.path[:] = self._savesyspath

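A small sketch combining several of the MonkeyPatch methods defined above;
the environment values and path are hypothetical::

    import os

    def test_fake_home(monkeypatch):
        # each change is recorded and undone by the finalizer registered above
        monkeypatch.setenv('HOME', '/tmp/fakehome')
        monkeypatch.delenv('TEMP', raising=False)        # no error if unset
        monkeypatch.syspath_prepend('/tmp/fakehome/modules')
        assert os.environ['HOME'] == '/tmp/fakehome'
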
Added: pypy/trunk/py/plugin/pytest_nose.py
==============================================================================
--- (empty file)
+++ pypy/trunk/py/plugin/pytest_nose.py	Wed Nov 11 18:54:49 2009
@@ -0,0 +1,98 @@
+"""nose-compatibility plugin: allow to run nose test suites natively. 
+
+This is an experimental plugin for allowing to run tests written 
+in 'nosetests style with py.test.   
+
+Usage
+-------------
+
+type::
+
+    py.test  # instead of 'nosetests'
+
+and you should be able to run nose style tests while at the same
+time making full use of py.test's capabilities.  
+
+Supported nose Idioms
+----------------------
+
+* setup and teardown at module/class/method level
+* SkipTest exceptions and markers 
+* setup/teardown decorators
+* yield-based tests and their setup 
+* general usage of nose utilities 
+
+Unsupported idioms / issues
+----------------------------------
+
+- nose-style doctests are not collected and executed correctly;
+  fixtures don't work either. 
+
+- no nose-configuration is recognized 
+
+If you find other issues or have suggestions please run:: 
+
+    py.test --pastebin=all 
+
+and send the resulting URL to a py.test contact channel,
+preferably the mailing list. 
+"""
+import py
+import inspect
+import sys
+
+def pytest_runtest_makereport(__multicall__, item, call):
+    SkipTest = getattr(sys.modules.get('nose', None), 'SkipTest', None)
+    if SkipTest:
+        if call.excinfo and call.excinfo.errisinstance(SkipTest):
+            # let's substitute the excinfo with a py.test.skip one 
+            call2 = call.__class__(lambda: py.test.skip(str(call.excinfo.value)), call.when)
+            call.excinfo = call2.excinfo 
+
+def pytest_report_iteminfo(item):
+    # nose 0.11.1 uses decorators for "raises" and other helpers. 
+    # for reporting progress by filename we fish for the filename 
+    if isinstance(item, py.test.collect.Function):
+        obj = item.obj
+        if hasattr(obj, 'compat_co_firstlineno'):
+            fn = sys.modules[obj.__module__].__file__ 
+            if fn.endswith(".pyc"):
+                fn = fn[:-1]
+            #assert 0
+            #fn = inspect.getsourcefile(obj) or inspect.getfile(obj)
+            lineno = obj.compat_co_firstlineno    
+            return py.path.local(fn), lineno, obj.__module__
+    
+def pytest_runtest_setup(item):
+    if isinstance(item, (py.test.collect.Function)):
+        if isinstance(item.parent, py.test.collect.Generator):
+            gen = item.parent 
+            if not hasattr(gen, '_nosegensetup'):
+                call_optional(gen.obj, 'setup')
+                if isinstance(gen.parent, py.test.collect.Instance):
+                    call_optional(gen.parent.obj, 'setup')
+                gen._nosegensetup = True
+        if not call_optional(item.obj, 'setup'):
+            # call module level setup if there is no object level one
+            call_optional(item.parent.obj, 'setup')
+
+def pytest_runtest_teardown(item):
+    if isinstance(item, py.test.collect.Function):
+        if not call_optional(item.obj, 'teardown'):
+            call_optional(item.parent.obj, 'teardown')
+        #if hasattr(item.parent, '_nosegensetup'):
+        #    #call_optional(item._nosegensetup, 'teardown')
+        #    del item.parent._nosegensetup
+
+def pytest_make_collect_report(collector):
+    if isinstance(collector, py.test.collect.Generator):
+        call_optional(collector.obj, 'setup')
+
+def call_optional(obj, name):
+    method = getattr(obj, name, None)
+    if method:
+        ismethod = inspect.ismethod(method)
+        rawcode = py.code.getrawcode(method)
+        if not rawcode.co_varnames[ismethod:]:
+            method()
+            return True

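Per the "Supported nose Idioms" section above, module-level setup and teardown
functions are honored.  A hypothetical nose-style module that py.test would
run via the hooks defined in this plugin::

    # test_nosestyle.py -- hypothetical nose-style module
    state = []

    def setup():
        # module level setup, invoked by pytest_runtest_setup above
        state.append("ready")

    def teardown():
        state.remove("ready")

    def test_module_was_set_up():
        assert state == ["ready"]
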
Added: pypy/trunk/py/plugin/pytest_pastebin.py
==============================================================================
--- (empty file)
+++ pypy/trunk/py/plugin/pytest_pastebin.py	Wed Nov 11 18:54:49 2009
@@ -0,0 +1,83 @@
+"""
+submit failure or test session information to a pastebin service. 
+
+Usage
+----------
+
+**Creating a URL for each test failure**::
+
+    py.test --pastebin=failed 
+
+This will submit test run information to a remote Paste service and
+provide a URL for each failure.  You may select tests as usual or add
+for example ``-x`` if you only want to send one particular failure. 
+
+**Creating a URL for a whole test session log**::
+
+    py.test --pastebin=all 
+
+Currently only pasting to the http://paste.pocoo.org service is implemented.  
+
+"""
+import py, sys
+
+class url:
+    base = "http://paste.pocoo.org"
+    xmlrpc = base + "/xmlrpc/"
+    show = base + "/show/"
+
+def pytest_addoption(parser):
+    group = parser.getgroup("general")
+    group._addoption('--pastebin', metavar="mode",
+        action='store', dest="pastebin", default=None, 
+        type="choice", choices=['failed', 'all'], 
+        help="send failed|all info to Pocoo pastebin service.")
+
+def pytest_configure(__multicall__, config):
+    import tempfile
+    __multicall__.execute()
+    if config.option.pastebin == "all":
+        config._pastebinfile = tempfile.TemporaryFile('w+')
+        tr = config.pluginmanager.getplugin('terminalreporter')
+        oldwrite = tr._tw.write 
+        def tee_write(s, **kwargs):
+            oldwrite(s, **kwargs)
+            config._pastebinfile.write(str(s))
+        tr._tw.write = tee_write 
+
+def pytest_unconfigure(config): 
+    if hasattr(config, '_pastebinfile'):
+        config._pastebinfile.seek(0)
+        sessionlog = config._pastebinfile.read()
+        config._pastebinfile.close()
+        del config._pastebinfile
+        proxyid = getproxy().newPaste("python", sessionlog)
+        pastebinurl = "%s%s" % (url.show, proxyid)
+        sys.stderr.write("pastebin session-log: %s\n" % pastebinurl)
+        tr = config.pluginmanager.getplugin('terminalreporter')
+        del tr._tw.__dict__['write']
+        
+def getproxy():
+    return py.std.xmlrpclib.ServerProxy(url.xmlrpc).pastes
+
+def pytest_terminal_summary(terminalreporter):
+    if terminalreporter.config.option.pastebin != "failed":
+        return
+    tr = terminalreporter
+    if 'failed' in tr.stats:
+        terminalreporter.write_sep("=", "Sending information to Paste Service")
+        if tr.config.option.debug:
+            terminalreporter.write_line("xmlrpcurl: %s" %(url.xmlrpc,))
+        serverproxy = getproxy()
+        for rep in terminalreporter.stats.get('failed'):
+            try:
+                msg = rep.longrepr.reprtraceback.reprentries[-1].reprfileloc
+            except AttributeError:
+                msg = tr._getfailureheadline(rep)
+            tw = py.io.TerminalWriter(stringio=True)
+            rep.toterminal(tw)
+            s = tw.stringio.getvalue()
+            assert len(s)
+            proxyid = serverproxy.newPaste("python", s)
+            pastebinurl = "%s%s" % (url.show, proxyid)
+            tr.write_line("%s --> %s" %(msg, pastebinurl))

Added: pypy/trunk/py/plugin/pytest_pdb.py
==============================================================================
--- (empty file)
+++ pypy/trunk/py/plugin/pytest_pdb.py	Wed Nov 11 18:54:49 2009
@@ -0,0 +1,114 @@
+"""
+interactive debugging with the Python Debugger.
+"""
+import py
+import pdb, sys, linecache
+from py.impl.test.outcome import Skipped
+try:
+    import execnet
+except ImportError:
+    execnet = None
+
+def pytest_addoption(parser):
+    group = parser.getgroup("general") 
+    group._addoption('--pdb',
+               action="store_true", dest="usepdb", default=False,
+               help="start pdb (the Python debugger) on errors.")
+
+
+def pytest_configure(config):
+    if config.option.usepdb:
+        if execnet:
+            if config.getvalue("looponfail"):
+                raise config.Error("--pdb incompatible with --looponfail.")
+            if config.option.dist != "no":
+                raise config.Error("--pdb incompatible with distributing tests.")
+        config.pluginmanager.register(PdbInvoke())
+
+class PdbInvoke:
+    def pytest_runtest_makereport(self, item, call):
+        if call.excinfo and not call.excinfo.errisinstance(Skipped): 
+            # play well with capturing, slightly hackish
+            capman = item.config.pluginmanager.getplugin('capturemanager')
+            capman.suspendcapture() 
+
+            tw = py.io.TerminalWriter()
+            repr = call.excinfo.getrepr()
+            repr.toterminal(tw) 
+            post_mortem(call.excinfo._excinfo[2])
+
+            capman.resumecapture_item(item)
+
+class Pdb(py.std.pdb.Pdb):
+    def do_list(self, arg):
+        self.lastcmd = 'list'
+        last = None
+        if arg:
+            try:
+                x = eval(arg, {}, {})
+                if type(x) == type(()):
+                    first, last = x
+                    first = int(first)
+                    last = int(last)
+                    if last < first:
+                        # Assume it's a count
+                        last = first + last
+                else:
+                    first = max(1, int(x) - 5)
+            except:
+                print ('*** Error in argument: %s' % repr(arg))
+                return
+        elif self.lineno is None:
+            first = max(1, self.curframe.f_lineno - 5)
+        else:
+            first = self.lineno + 1
+        if last is None:
+            last = first + 10
+        filename = self.curframe.f_code.co_filename
+        breaklist = self.get_file_breaks(filename)
+        try:
+            for lineno in range(first, last+1):
+                # start difference from normal do_line
+                line = self._getline(filename, lineno)
+                # end difference from normal do_line
+                if not line:
+                    print ('[EOF]')
+                    break
+                else:
+                    s = repr(lineno).rjust(3)
+                    if len(s) < 4: s = s + ' '
+                    if lineno in breaklist: s = s + 'B'
+                    else: s = s + ' '
+                    if lineno == self.curframe.f_lineno:
+                        s = s + '->'
+                    sys.stdout.write(s + '\t' + line)
+                    self.lineno = lineno
+        except KeyboardInterrupt:
+            pass
+    do_l = do_list
+
+    def _getline(self, filename, lineno):
+        if hasattr(filename, "__source__"):
+            try:
+                return filename.__source__.lines[lineno - 1] + "\n"
+            except IndexError:
+                return None
+        return linecache.getline(filename, lineno)
+
+    def get_stack(self, f, t):
+        # Modified from bdb.py to be able to walk the stack beyond generators,
+        # which does not work in the normal pdb :-(
+        stack, i = pdb.Pdb.get_stack(self, f, t)
+        if f is None:
+            i = max(0, len(stack) - 1)
+        return stack, i
+
+def post_mortem(t):
+    # modified from pdb.py for the new get_stack() implementation
+    p = Pdb()
+    p.reset()
+    p.interaction(None, t)
+
+def set_trace():
+    # again, a copy of the version in pdb.py
+    Pdb().set_trace(sys._getframe().f_back)

Added: pypy/trunk/py/plugin/pytest_pylint.py
==============================================================================
--- (empty file)
+++ pypy/trunk/py/plugin/pytest_pylint.py	Wed Nov 11 18:54:49 2009
@@ -0,0 +1,36 @@
+"""pylint plugin
+
+XXX: Currently in progress, NOT IN WORKING STATE.
+"""
+import sys
+import py
+
+pylint = py.test.importorskip("pylint.lint") 
+
+def pytest_addoption(parser):
+    group = parser.getgroup('pylint options')
+    group.addoption('--pylint', action='store_true',
+                    default=False, dest='pylint',
+                    help='run pylint on python files.')
+
+def pytest_collect_file(path, parent):
+    if path.ext == ".py":
+        if parent.config.getvalue('pylint'):
+            return PylintItem(path, parent)
+
+#def pytest_terminal_summary(terminalreporter):
+#    print 'placeholder for pylint output'
+
+class PylintItem(py.test.collect.Item):
+    def runtest(self):
+        capture = py.io.StdCaptureFD()
+        try:
+            linter = pylint.lint.PyLinter()
+            linter.check(str(self.fspath))
+        finally:
+            out, err = capture.reset()
+        rating = out.strip().split('\n')[-1]
+        sys.stdout.write(">>>")
+        print(rating)
+        assert 0
+
+

Added: pypy/trunk/py/plugin/pytest_pytester.py
==============================================================================
--- (empty file)
+++ pypy/trunk/py/plugin/pytest_pytester.py	Wed Nov 11 18:54:49 2009
@@ -0,0 +1,459 @@
+"""
+funcargs and support code for testing py.test's own functionality. 
+"""
+
+import py
+import sys, os
+import inspect
+from py.impl.test.config import Config as pytestConfig
+from py.plugin import hookspec
+from py.builtin import print_
+
+pytest_plugins = '_pytest'
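+
+# Usage sketch (an illustrative assumption, using only helpers defined below):
+# a test of py.test functionality can request the "testdir" funcarg, write a
+# test file and run it, roughly like this:
+#
+#     def test_example(testdir):
+#         testdir.makepyfile("def test_ok(): pass")
+#         result = testdir.runpytest()
+#         assert result.ret == 0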
+
+def pytest_funcarg__linecomp(request):
+    return LineComp()
+
+def pytest_funcarg__LineMatcher(request):
+    return LineMatcher
+
+def pytest_funcarg__testdir(request):
+    tmptestdir = TmpTestdir(request)
+    return tmptestdir
+
+def pytest_funcarg__reportrecorder(request):
+    reprec = ReportRecorder(py._com.comregistry)
+    request.addfinalizer(lambda: reprec.comregistry.unregister(reprec))
+    return reprec
+
+class RunResult:
+    def __init__(self, ret, outlines, errlines):
+        self.ret = ret
+        self.outlines = outlines
+        self.errlines = errlines
+        self.stdout = LineMatcher(outlines)
+        self.stderr = LineMatcher(errlines)
+
+class TmpTestdir:
+    def __init__(self, request):
+        self.request = request
+        self._pytest = request.getfuncargvalue("_pytest")
+        # XXX remove duplication with tmpdir plugin 
+        basetmp = request.config.ensuretemp("testdir")
+        name = request.function.__name__
+        for i in range(100):
+            try:
+                tmpdir = basetmp.mkdir(name + str(i))
+            except py.error.EEXIST:
+                continue
+            break
+        # we need to create another subdir
+        # because Directory.collect() currently loads
+        # conftest.py from sibling directories
+        self.tmpdir = tmpdir.mkdir(name)
+        self.plugins = []
+        self._syspathremove = []
+        self.chdir() # always chdir
+        assert hasattr(self, '_olddir')
+        self.request.addfinalizer(self.finalize)
+
+    def __repr__(self):
+        return "<TmpTestdir %r>" % (self.tmpdir,)
+
+    def Config(self, comregistry=None, topdir=None):
+        if topdir is None:
+            topdir = self.tmpdir.dirpath()
+        return pytestConfig(comregistry, topdir=topdir)
+
+    def finalize(self):
+        for p in self._syspathremove:
+            py.std.sys.path.remove(p)
+        if hasattr(self, '_olddir'):
+            self._olddir.chdir()
+        # delete modules that have been loaded from tmpdir
+        for name, mod in list(sys.modules.items()):
+            if mod:
+                fn = getattr(mod, '__file__', None)
+                if fn and fn.startswith(str(self.tmpdir)):
+                    del sys.modules[name]
+
+    def getreportrecorder(self, obj):
+        if isinstance(obj, py._com.Registry):
+            registry = obj
+        elif hasattr(obj, 'comregistry'):
+            registry = obj.comregistry
+        elif hasattr(obj, 'pluginmanager'):
+            registry = obj.pluginmanager.comregistry
+        elif hasattr(obj, 'config'):
+            registry = obj.config.pluginmanager.comregistry
+        else:
+            raise ValueError("obj %r provides no comregistry" %(obj,))
+        assert isinstance(registry, py._com.Registry)
+        reprec = ReportRecorder(registry)
+        reprec.hookrecorder = self._pytest.gethookrecorder(hookspec, registry)
+        reprec.hook = reprec.hookrecorder.hook
+        return reprec
+
+    def chdir(self):
+        old = self.tmpdir.chdir()
+        if not hasattr(self, '_olddir'):
+            self._olddir = old 
+
+    def _makefile(self, ext, args, kwargs):
+        items = list(kwargs.items())
+        if args:
+            source = "\n".join(map(str, args))
+            basename = self.request.function.__name__
+            items.insert(0, (basename, source))
+        ret = None
+        for name, value in items:
+            p = self.tmpdir.join(name).new(ext=ext)
+            source = py.code.Source(value)
+            p.write(str(py.code.Source(value)).lstrip())
+            if ret is None:
+                ret = p
+        return ret 
+
+
+    def makefile(self, ext, *args, **kwargs):
+        return self._makefile(ext, args, kwargs)
+
+    def makeconftest(self, source):
+        return self.makepyfile(conftest=source)
+
+    def makepyfile(self, *args, **kwargs):
+        return self._makefile('.py', args, kwargs)
+
+    def maketxtfile(self, *args, **kwargs):
+        return self._makefile('.txt', args, kwargs)
+
+    def syspathinsert(self, path=None):
+        if path is None:
+            path = self.tmpdir
+        py.std.sys.path.insert(0, str(path))
+        self._syspathremove.append(str(path))
+            
+    def mkdir(self, name):
+        return self.tmpdir.mkdir(name)
+
+    def mkpydir(self, name):
+        p = self.mkdir(name)
+        p.ensure("__init__.py")
+        return p
+
+    def genitems(self, colitems):
+        return list(self.session.genitems(colitems))
+
+    def inline_genitems(self, *args):
+        #config = self.parseconfig(*args)
+        config = self.parseconfig(*args)
+        session = config.initsession()
+        rec = self.getreportrecorder(config)
+        colitems = [config.getfsnode(arg) for arg in config.args]
+        items = list(session.genitems(colitems))
+        return items, rec 
+
+    def runitem(self, source):
+        # used from runner functional tests 
+        item = self.getitem(source)
+        # the test class where we are called from wants to provide the runner 
+        testclassinstance = py.builtin._getimself(self.request.function)
+        runner = testclassinstance.getrunner()
+        return runner(item)
+
+    def inline_runsource(self, source, *cmdlineargs):
+        p = self.makepyfile(source)
+        l = list(cmdlineargs) + [p]
+        return self.inline_run(*l)
+
+    def inline_runsource1(self, *args):
+        args = list(args)
+        source = args.pop()
+        p = self.makepyfile(source)
+        l = list(args) + [p]
+        reprec = self.inline_run(*l)
+        reports = reprec.getreports("pytest_runtest_logreport")
+        assert len(reports) == 1, reports 
+        return reports[0]
+
+    def inline_run(self, *args):
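+        # run a test session in-process (unlike run()/runpytest() which spawn
+        # a subprocess) and return a ReportRecorder capturing the hook calls.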
+        config = self.parseconfig(*args)
+        config.pluginmanager.do_configure(config)
+        session = config.initsession()
+        reprec = self.getreportrecorder(config)
+        session.main()
+        config.pluginmanager.do_unconfigure(config)
+        return reprec 
+
+    def config_preparse(self):
+        config = self.Config()
+        for plugin in self.plugins:
+            if isinstance(plugin, str):
+                config.pluginmanager.import_plugin(plugin)
+            else:
+                if isinstance(plugin, dict):
+                    plugin = PseudoPlugin(plugin) 
+                if not config.pluginmanager.isregistered(plugin):
+                    config.pluginmanager.register(plugin)
+        return config
+
+    def parseconfig(self, *args):
+        if not args:
+            args = (self.tmpdir,)
+        config = self.config_preparse()
+        args = list(args) + ["--basetemp=%s" % self.tmpdir.dirpath('basetemp')]
+        config.parse(args)
+        return config 
+
+    def parseconfigure(self, *args):
+        config = self.parseconfig(*args)
+        config.pluginmanager.do_configure(config)
+        return config
+
+    def getitem(self,  source, funcname="test_func"):
+        modcol = self.getmodulecol(source)
+        for item in modcol.collect():
+            if item.name == funcname:
+                return item 
+        else:
+            assert 0, "%r item not found in module:\n%s" %(funcname, source)
+
+    def getitems(self,  source):
+        modcol = self.getmodulecol(source)
+        return list(modcol.config.initsession().genitems([modcol]))
+        #assert item is not None, "%r item not found in module:\n%s" %(funcname, source)
+        #return item 
+
+    def getfscol(self,  path, configargs=()):
+        self.config = self.parseconfig(path, *configargs)
+        self.session = self.config.initsession()
+        return self.config.getfsnode(path)
+
+    def getmodulecol(self,  source, configargs=(), withinit=False):
+        kw = {self.request.function.__name__: py.code.Source(source).strip()}
+        path = self.makepyfile(**kw)
+        if withinit:
+            self.makepyfile(__init__ = "#")
+        self.config = self.parseconfig(path, *configargs)
+        self.session = self.config.initsession()
+        #self.config.pluginmanager.do_configure(config=self.config)
+        # XXX 
+        self.config.pluginmanager.import_plugin("runner") 
+        plugin = self.config.pluginmanager.getplugin("runner") 
+        plugin.pytest_configure(config=self.config)
+
+        return self.config.getfsnode(path)
+
+    def prepare(self):
+        p = self.tmpdir.join("conftest.py") 
+        if not p.check():
+            plugins = [x for x in self.plugins if isinstance(x, str)]
+            if not plugins:
+                return
+            p.write("import py ; pytest_plugins = %r" % plugins)
+        else:
+            if self.plugins:
+                print ("warning: ignoring plugins, reusing existing %s" % p)
+
+    def popen(self, cmdargs, stdout, stderr, **kw):
+        if not hasattr(py.std, 'subprocess'):
+            py.test.skip("no subprocess module")
+        env = os.environ.copy()
+        env['PYTHONPATH'] = ":".join(filter(None, [
+            str(os.getcwd()), env.get('PYTHONPATH', '')]))
+        kw['env'] = env
+        #print "env", env
+        return py.std.subprocess.Popen(cmdargs, stdout=stdout, stderr=stderr, **kw)
+
+    def run(self, *cmdargs):
+        self.prepare()
+        old = self.tmpdir.chdir()
+        #print "chdir", self.tmpdir
+        try:
+            return self._run(*cmdargs)
+        finally:
+            old.chdir()
+
+    def _run(self, *cmdargs):
+        cmdargs = [str(x) for x in cmdargs]
+        p1 = py.path.local("stdout")
+        p2 = py.path.local("stderr")
+        print_("running", cmdargs, "curdir=", py.path.local())
+        f1 = p1.open("w")
+        f2 = p2.open("w")
+        popen = self.popen(cmdargs, stdout=f1, stderr=f2, 
+            close_fds=(sys.platform != "win32"))
+        ret = popen.wait()
+        f1.close()
+        f2.close()
+        out, err = p1.readlines(cr=0), p2.readlines(cr=0)
+        if err:
+            for line in err: 
+                py.builtin.print_(line, file=sys.stderr)
+        if out:
+            for line in out: 
+                py.builtin.print_(line, file=sys.stdout)
+        return RunResult(ret, out, err)
+
+    def runpybin(self, scriptname, *args):
+        fullargs = self._getpybinargs(scriptname) + args
+        return self.run(*fullargs)
+
+    def _getpybinargs(self, scriptname):
+        bindir = py._dir.dirpath('bin')
+        if not bindir.check():
+            script = py.path.local.sysfind(scriptname)
+        else:
+            script = bindir.join(scriptname)
+        assert script.check()
+        return py.std.sys.executable, script
+
+    def runpython(self, script):
+        return self.run(py.std.sys.executable, script)
+
+    def runpytest(self, *args):
+        p = py.path.local.make_numbered_dir(prefix="runpytest-", 
+            keep=None, rootdir=self.tmpdir)
+        args = ('--basetemp=%s' % p, ) + args 
+        return self.runpybin("py.test", *args)
+
+    def spawn_pytest(self, string, expect_timeout=10.0):
+        pexpect = py.test.importorskip("pexpect", "2.3")
+        basetemp = self.tmpdir.mkdir("pexpect")
+        invoke = "%s %s" % self._getpybinargs("py.test")
+        cmd = "%s --basetemp=%s %s" % (invoke, basetemp, string)
+        child = pexpect.spawn(cmd, logfile=basetemp.join("spawn.out").open("w"))
+        child.timeout = expect_timeout
+        return child
+
+class PseudoPlugin:
+    def __init__(self, vars):
+        self.__dict__.update(vars) 
+
+class ReportRecorder(object):
+    def __init__(self, comregistry):
+        self.comregistry = comregistry
+        comregistry.register(self)
+
+    def getcall(self, name):
+        return self.hookrecorder.getcall(name)
+
+    def popcall(self, name):
+        return self.hookrecorder.popcall(name)
+
+    def getcalls(self, names):
+        """ return list of ParsedCall instances matching the given eventname. """
+        return self.hookrecorder.getcalls(names)
+
+    # functionality for test reports 
+
+    def getreports(self, names="pytest_runtest_logreport pytest_collectreport"):
+        return [x.report for x in self.getcalls(names)]
+
+    def matchreport(self, inamepart="", names="pytest_runtest_logreport pytest_collectreport"):
+        """ return a testreport whose dotted import path matches """
+        l = []
+        for rep in self.getreports(names=names):
+            colitem = rep.getnode()
+            if not inamepart or inamepart in colitem.listnames():
+                l.append(rep)
+        if not l:
+            raise ValueError("could not find test report matching %r: no test reports at all!" %
+                (inamepart,))
+        if len(l) > 1:
+            raise ValueError("found more than one testreport matching %r: %s" %(
+                             inamepart, l))
+        return l[0]
+
+    def getfailures(self, names='pytest_runtest_logreport pytest_collectreport'):
+        return [rep for rep in self.getreports(names) if rep.failed]
+
+    def getfailedcollections(self):
+        return self.getfailures('pytest_collectreport')
+
+    def listoutcomes(self):
+        passed = []
+        skipped = []
+        failed = []
+        for rep in self.getreports("pytest_runtest_logreport"):
+            if rep.passed: 
+                if rep.when == "call": 
+                    passed.append(rep) 
+            elif rep.skipped: 
+                skipped.append(rep) 
+            elif rep.failed:
+                failed.append(rep) 
+        return passed, skipped, failed 
+
+    def countoutcomes(self):
+        return [len(x) for x in self.listoutcomes()]
+
+    def assertoutcome(self, passed=0, skipped=0, failed=0):
+        realpassed, realskipped, realfailed = self.listoutcomes()
+        assert passed == len(realpassed)
+        assert skipped == len(realskipped)
+        assert failed == len(realfailed)
+
+    def clear(self):
+        self.hookrecorder.calls[:] = []
+
+    def unregister(self):
+        self.comregistry.unregister(self)
+        self.hookrecorder.finish_recording()
+
+class LineComp:
+    def __init__(self):
+        self.stringio = py.io.TextIO()
+
+    def assert_contains_lines(self, lines2):
+        """ assert that lines2 are contained (linearly) in the captured output.
+            return a list of extra lines found.
+        """
+        __tracebackhide__ = True
+        val = self.stringio.getvalue()
+        self.stringio.truncate(0)  # remove what we got 
+        lines1 = val.split("\n")
+        return LineMatcher(lines1).fnmatch_lines(lines2)
+            
+class LineMatcher:
+    def __init__(self,  lines):
+        self.lines = lines
+
+    def str(self):
+        return "\n".join(self.lines)
+
+    def fnmatch_lines(self, lines2):
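+        # check that lines2 occur in self.lines in order, allowing intervening
+        # lines; each expected line may be an fnmatch pattern.  raises
+        # AssertionError on the first expected line that is not found.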
+        if isinstance(lines2, str):
+            lines2 = py.code.Source(lines2)
+        if isinstance(lines2, py.code.Source):
+            lines2 = lines2.strip().lines
+
+        from fnmatch import fnmatch
+        __tracebackhide__ = True
+        lines1 = self.lines[:]
+        nextline = None
+        extralines = []
+        for line in lines2:
+            nomatchprinted = False
+            while lines1:
+                nextline = lines1.pop(0)
+                if line == nextline:
+                    print_("exact match:", repr(line))
+                    break 
+                elif fnmatch(nextline, line):
+                    print_("fnmatch:", repr(line))
+                    print_("   with:", repr(nextline))
+                    break
+                else:
+                    if not nomatchprinted:
+                        print_("nomatch:", repr(line))
+                        nomatchprinted = True
+                    print_("    and:", repr(nextline))
+                extralines.append(nextline)
+            else:
+                if line != nextline:
+                    #__tracebackhide__ = True
+                    raise AssertionError("expected line not found: %r" % line)
+        extralines.extend(lines1)
+        return extralines 

Added: pypy/trunk/py/plugin/pytest_recwarn.py
==============================================================================
--- (empty file)
+++ pypy/trunk/py/plugin/pytest_recwarn.py	Wed Nov 11 18:54:49 2009
@@ -0,0 +1,120 @@
+"""
+helpers for asserting deprecation and other warnings. 
+
+Example usage 
+---------------------
+
+You can use the ``recwarn`` funcarg to track 
+warnings within a test function:
+
+.. sourcecode:: python
+
+    def test_hello(recwarn):
+        from warnings import warn
+        warn("hello", DeprecationWarning)
+        w = recwarn.pop(DeprecationWarning)
+        assert issubclass(w.category, DeprecationWarning)
+        assert 'hello' in str(w.message)
+        assert w.filename
+        assert w.lineno
+
+You can also call a global helper for checking
+that a certain function call yields a DeprecationWarning:
+
+.. sourcecode:: python
+
+    import py
+            
+    def test_global():
+        py.test.deprecated_call(myfunction, 17)
+        
+        
+"""
+
+import py
+import os
+
+def pytest_funcarg__recwarn(request):
+    """Return a WarningsRecorder instance that provides these methods:
+
+    * ``pop(category=None)``: return the first recorded warning matching the category.
+    * ``clear()``: clear list of warnings 
+    """
+    warnings = WarningsRecorder()
+    request.addfinalizer(warnings.finalize)
+    return warnings
+
+def pytest_namespace():
+    return {'deprecated_call': deprecated_call}
+
+def deprecated_call(func, *args, **kwargs):
+    """ assert that calling func(*args, **kwargs)
+        triggers a DeprecationWarning. 
+    """ 
+    warningmodule = py.std.warnings
+    l = []
+    oldwarn_explicit = getattr(warningmodule, 'warn_explicit')
+    def warn_explicit(*args, **kwargs): 
+        l.append(args) 
+        oldwarn_explicit(*args, **kwargs)
+    oldwarn = getattr(warningmodule, 'warn')
+    def warn(*args, **kwargs): 
+        l.append(args) 
+        oldwarn(*args, **kwargs)
+        
+    warningmodule.warn_explicit = warn_explicit
+    warningmodule.warn = warn
+    try:
+        ret = func(*args, **kwargs)
+    finally:
+        warningmodule.warn_explicit = oldwarn_explicit
+        warningmodule.warn = oldwarn
+    if not l:
+        #print warningmodule
+        raise AssertionError("%r did not produce DeprecationWarning" %(func,))
+    return ret
+
+
+class RecordedWarning:
+    def __init__(self, message, category, filename, lineno, line):
+        self.message = message
+        self.category = category
+        self.filename = filename
+        self.lineno = lineno
+        self.line = line
+
+class WarningsRecorder:
+    def __init__(self):
+        warningmodule = py.std.warnings
+        self.list = []
+        def showwarning(message, category, filename, lineno, line=0):
+            self.list.append(RecordedWarning(
+                message, category, filename, lineno, line))
+            try:
+                self.old_showwarning(message, category, 
+                    filename, lineno, line=line)
+            except TypeError:
+                # < python2.6 
+                self.old_showwarning(message, category, filename, lineno)
+        self.old_showwarning = warningmodule.showwarning
+        warningmodule.showwarning = showwarning
+
+    def pop(self, cls=Warning):
+        """ pop the first recorded warning matching the class; raise if none exists."""
+        for i, w in enumerate(self.list):
+            if issubclass(w.category, cls):
+                return self.list.pop(i)
+        __tracebackhide__ = True
+        assert 0, "%r not found in %r" %(cls, self.list)
+
+    #def resetregistry(self):
+    #    import warnings
+    #    warnings.onceregistry.clear()
+    #    warnings.__warningregistry__.clear()
+
+    def clear(self): 
+        self.list[:] = []
+
+    def finalize(self):
+        py.std.warnings.showwarning = self.old_showwarning

Added: pypy/trunk/py/plugin/pytest_restdoc.py
==============================================================================
--- (empty file)
+++ pypy/trunk/py/plugin/pytest_restdoc.py	Wed Nov 11 18:54:49 2009
@@ -0,0 +1,432 @@
+"""
+perform ReST syntax, local and remote reference tests on .rst/.txt files. 
+"""
+import py
+import sys, os, re
+
+def pytest_addoption(parser):
+    group = parser.getgroup("ReST", "ReST documentation check options")
+    group.addoption('-R', '--urlcheck',
+           action="store_true", dest="urlcheck", default=False, 
+           help="urlopen() remote links found in ReST text files.") 
+    group.addoption('--urltimeout', action="store", metavar="secs",
+        type="int", dest="urlcheck_timeout", default=5,
+        help="timeout in seconds for remote urlchecks")
+    group.addoption('--forcegen',
+           action="store_true", dest="forcegen", default=False,
+           help="force generation of html files.")
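+
+# Example invocation (a sketch; assumes a confrest.py providing a Project
+# class is found above the checked .txt/.rst files, see getproject() below):
+#
+#     py.test -R --urltimeout=10 path/to/doc/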
+
+def pytest_collect_file(path, parent):
+    if path.ext in (".txt", ".rst"):
+        project = getproject(path)
+        if project is not None:
+            return ReSTFile(path, parent=parent, project=project)
+
+def getproject(path):
+    for parent in path.parts(reverse=True):
+        confrest = parent.join("confrest.py")
+        if confrest.check():
+            Project = confrest.pyimport().Project
+            return Project(parent)
+
+class ReSTFile(py.test.collect.File):
+    def __init__(self, fspath, parent, project=None):
+        super(ReSTFile, self).__init__(fspath=fspath, parent=parent)
+        if project is None:
+            project = getproject(fspath)
+            assert project is not None
+        self.project = project
+
+    def collect(self):
+        return [
+            ReSTSyntaxTest(self.project, "ReSTSyntax", parent=self),
+            LinkCheckerMaker("checklinks", parent=self),
+            DoctestText("doctest", parent=self),
+        ]
+
+def deindent(s, sep='\n'):
+    leastspaces = -1
+    lines = s.split(sep)
+    for line in lines:
+        if not line.strip():
+            continue
+        spaces = len(line) - len(line.lstrip())
+        if leastspaces == -1 or spaces < leastspaces:
+            leastspaces = spaces
+    if leastspaces == -1:
+        return s
+    for i, line in enumerate(lines):
+        if not line.strip():
+            lines[i] = ''
+        else:
+            lines[i] = line[leastspaces:]
+    return sep.join(lines)
+
+class ReSTSyntaxTest(py.test.collect.Item): 
+    def __init__(self, project, *args, **kwargs):
+        super(ReSTSyntaxTest, self).__init__(*args, **kwargs)
+        self.project = project
+
+    def reportinfo(self):
+        return self.fspath, None, "syntax check"
+
+    def runtest(self):
+        self.restcheck(py.path.svnwc(self.fspath))
+
+    def restcheck(self, path):
+        py.test.importorskip("docutils")
+        self.register_linkrole()
+        from docutils.utils import SystemMessage
+        try: 
+            self._checkskip(path, self.project.get_htmloutputpath(path))
+            self.project.process(path)
+        except KeyboardInterrupt: 
+            raise 
+        except SystemMessage: 
+            # we assume docutils printed info on stdout 
+            py.test.fail("docutils processing failed, see captured stderr") 
+
+    def register_linkrole(self):
+        #directive.register_linkrole('api', self.resolve_linkrole)
+        #directive.register_linkrole('source', self.resolve_linkrole)
+#
+#        # XXX fake sphinx' "toctree" and refs
+#        directive.register_linkrole('ref', self.resolve_linkrole)
+        
+        from docutils.parsers.rst import directives
+        def toctree_directive(name, arguments, options, content, lineno,
+                      content_offset, block_text, state, state_machine):
+            return []
+        toctree_directive.content = 1
+        toctree_directive.options = {'maxdepth': int, 'glob': directives.flag,
+                             'hidden': directives.flag}
+        directives.register_directive('toctree', toctree_directive)
+        self.register_pygments()
+
+    def register_pygments(self):
+        # taken from pygments-main/external/rst-directive.py 
+        from docutils.parsers.rst import directives
+        try:
+            from pygments.formatters import HtmlFormatter
+        except ImportError:
+            def pygments_directive(name, arguments, options, content, lineno,
+                                   content_offset, block_text, state, state_machine):
+                return []
+            pygments_directive.options = {}
+        else:
+            # The default formatter
+            DEFAULT = HtmlFormatter(noclasses=True)
+            # Add name -> formatter pairs for every variant you want to use
+            VARIANTS = {
+                # 'linenos': HtmlFormatter(noclasses=INLINESTYLES, linenos=True),
+            }
+
+            from docutils import nodes
+
+            from pygments import highlight
+            from pygments.lexers import get_lexer_by_name, TextLexer
+
+            def pygments_directive(name, arguments, options, content, lineno,
+                                   content_offset, block_text, state, state_machine):
+                try:
+                    lexer = get_lexer_by_name(arguments[0])
+                except ValueError:
+                    # no lexer found - use the text one instead of an exception
+                    lexer = TextLexer()
+                # take an arbitrary option if more than one is given
+                formatter = options and VARIANTS[options.keys()[0]] or DEFAULT
+                parsed = highlight('\n'.join(content), lexer, formatter)
+                return [nodes.raw('', parsed, format='html')]
+
+            pygments_directive.options = dict([(key, directives.flag) for key in VARIANTS])
+
+        pygments_directive.arguments = (1, 0, 1)
+        pygments_directive.content = 1
+        directives.register_directive('sourcecode', pygments_directive)
+
+    def resolve_linkrole(self, name, text, check=True):
+        apigen_relpath = self.project.apigen_relpath
+    
+        if name == 'api':
+            if text == 'py':
+                return ('py', apigen_relpath + 'api/index.html')
+            else:
+                assert text.startswith('py.'), (
+                    'api link "%s" does not point to the py package') % (text,)
+                dotted_name = text
+                if dotted_name.find('(') > -1:
+                    dotted_name = dotted_name[:text.find('(')]
+                # remove pkg root
+                path = dotted_name.split('.')[1:]
+                dotted_name = '.'.join(path)
+                obj = py
+                if check:
+                    for chunk in path:
+                        try:
+                            obj = getattr(obj, chunk)
+                        except AttributeError:
+                            raise AssertionError(
+                                'problem with linkrole :api:`%s`: can not resolve '
+                                'dotted name %s' % (text, dotted_name,))
+                return (text, apigen_relpath + 'api/%s.html' % (dotted_name,))
+        elif name == 'source':
+            assert text.startswith('py/'), ('source link "%s" does not point '
+                                            'to the py package') % (text,)
+            relpath = '/'.join(text.split('/')[1:])
+            if check:
+                pkgroot = py._impldir
+                abspath = pkgroot.join(relpath)
+                assert pkgroot.join(relpath).check(), (
+                        'problem with linkrole :source:`%s`: '
+                        'path %s does not exist' % (text, relpath))
+            if relpath.endswith('/') or not relpath:
+                relpath += 'index.html'
+            else:
+                relpath += '.html'
+            return (text, apigen_relpath + 'source/%s' % (relpath,))
+        elif name == 'ref':
+            return ("", "") 
+
+    def _checkskip(self, lpath, htmlpath=None):
+        if not self.config.getvalue("forcegen"):
+            lpath = py.path.local(lpath)
+            if htmlpath is not None:
+                htmlpath = py.path.local(htmlpath)
+            if lpath.ext == '.txt': 
+                htmlpath = htmlpath or lpath.new(ext='.html')
+                if htmlpath.check(file=1) and htmlpath.mtime() >= lpath.mtime(): 
+                    py.test.skip("html file is up to date, use --forcegen to regenerate")
+                    #return [] # no need to rebuild 
+
+class DoctestText(py.test.collect.Item): 
+    def reportinfo(self):
+        return self.fspath, None, "doctest"
+
+    def runtest(self): 
+        content = self._normalize_linesep()
+        newcontent = self.config.hook.pytest_doctest_prepare_content(content=content)
+        if newcontent is not None:
+            content = newcontent 
+        s = content 
+        l = []
+        prefix = '.. >>> '
+        mod = py.std.types.ModuleType(self.fspath.purebasename) 
+        skipchunk = False
+        for line in deindent(s).split('\n'):
+            stripped = line.strip()
+            if skipchunk and line.startswith(skipchunk):
+                py.builtin.print_("skipping", line)
+                continue
+            skipchunk = False 
+            if stripped.startswith(prefix):
+                try:
+                    py.builtin.exec_(py.code.Source(
+                            stripped[len(prefix):]).compile(),  mod.__dict__)
+                except ValueError:
+                    e = sys.exc_info()[1]
+                    if e.args and e.args[0] == "skipchunk":
+                        skipchunk = " " * (len(line) - len(line.lstrip()))
+                    else:
+                        raise
+            else:
+                l.append(line)
+        docstring = "\n".join(l)
+        mod.__doc__ = docstring 
+        failed, tot = py.std.doctest.testmod(mod, verbose=1)
+        if failed: 
+            py.test.fail("doctest %s: %s failed out of %s" %(
+                         self.fspath, failed, tot))
+
+    def _normalize_linesep(self):
+        # XXX quite nasty... but it works (fixes win32 issues)
+        s = self.fspath.read()
+        linesep = '\n'
+        if '\r' in s:
+            if '\n' not in s:
+                linesep = '\r'
+            else:
+                linesep = '\r\n'
+        s = s.replace(linesep, '\n')
+        return s
+        
+class LinkCheckerMaker(py.test.collect.Collector): 
+    def collect(self):
+        return list(self.genlinkchecks())
+
+    def genlinkchecks(self):
+        path = self.fspath
+        # generating functions + args as single tests 
+        timeout = self.config.getvalue("urlcheck_timeout")
+        for lineno, line in enumerate(path.readlines()): 
+            line = line.strip()
+            if line.startswith('.. _'): 
+                if line.startswith('.. _`'):
+                    delim = '`:'
+                else:
+                    delim = ':'
+                l = line.split(delim, 1)
+                if len(l) != 2: 
+                    continue
+                tryfn = l[1].strip() 
+                name = "%s:%d" %(tryfn, lineno)
+                if tryfn.startswith('http:') or tryfn.startswith('https'): 
+                    if self.config.getvalue("urlcheck"):
+                        yield CheckLink(name, parent=self, 
+                            args=(tryfn, path, lineno, timeout), checkfunc=urlcheck)
+                elif tryfn.startswith('webcal:'):
+                    continue
+                else: 
+                    i = tryfn.find('#') 
+                    if i != -1: 
+                        checkfn = tryfn[:i]
+                    else: 
+                        checkfn = tryfn 
+                    if checkfn.strip() and (1 or checkfn.endswith('.html')): 
+                        yield CheckLink(name, parent=self, 
+                            args=(tryfn, path, lineno), checkfunc=localrefcheck)
+        
+class CheckLink(py.test.collect.Item):
+    def __init__(self, name, parent, args, checkfunc):
+        super(CheckLink, self).__init__(name, parent)
+        self.args = args
+        self.checkfunc = checkfunc
+
+    def runtest(self):
+        return self.checkfunc(*self.args)
+
+    def reportinfo(self, basedir=None):
+        return (self.fspath, self.args[2], "checklink: %s" % self.args[0])
+
+def urlcheck(tryfn, path, lineno, TIMEOUT_URLOPEN): 
+    old = py.std.socket.getdefaulttimeout()
+    py.std.socket.setdefaulttimeout(TIMEOUT_URLOPEN)
+    try:
+        try: 
+            py.builtin.print_("trying remote", tryfn)
+            py.std.urllib2.urlopen(tryfn)
+        finally:
+            py.std.socket.setdefaulttimeout(old)
+    except (py.std.urllib2.URLError, py.std.urllib2.HTTPError): 
+        e = sys.exc_info()[1]
+        if getattr(e, 'code', None) in (401, 403): # authorization required, forbidden
+            py.test.skip("%s: %s" %(tryfn, str(e)))
+        else:
+            py.test.fail("remote reference error %r in %s:%d\n%s" %(
+                         tryfn, path.basename, lineno+1, e))
+
+def localrefcheck(tryfn, path, lineno): 
+    # assume it should be a file 
+    i = tryfn.find('#')
+    if tryfn.startswith('javascript:'):
+        return # don't check JS refs
+    if i != -1: 
+        anchor = tryfn[i+1:]
+        tryfn = tryfn[:i]
+    else: 
+        anchor = ''
+    fn = path.dirpath(tryfn) 
+    ishtml = fn.ext == '.html' 
+    fn = ishtml and fn.new(ext='.txt') or fn
+    py.builtin.print_("filename is", fn)
+    if not fn.check(): # not ishtml or not fn.check(): 
+        if not py.path.local(tryfn).check(): # the html could be there 
+            py.test.fail("reference error %r in %s:%d" %(
+                          tryfn, path.basename, lineno+1))
+    if anchor: 
+        source = unicode(fn.read(), 'latin1')
+        source = source.lower().replace('-', ' ') # aehem
+
+        anchor = anchor.replace('-', ' ') 
+        match2 = ".. _`%s`:" % anchor 
+        match3 = ".. _%s:" % anchor 
+        candidates = (anchor, match2, match3)
+        py.builtin.print_("candidates", repr(candidates))
+        for line in source.split('\n'): 
+            line = line.strip()
+            if line in candidates: 
+                break 
+        else: 
+            py.test.fail("anchor reference error %s#%s in %s:%d" %(
+                tryfn, anchor, path.basename, lineno+1))
+
+if hasattr(sys.stdout, 'fileno') and os.isatty(sys.stdout.fileno()):
+    def log(msg):
+        print(msg)
+else:
+    def log(msg):
+        pass
+
+def convert_rest_html(source, source_path, stylesheet=None, encoding='latin1'):
+    """ return an html document, encoded per `encoding`, for the given input.
+        source       a ReST string
+        source_path  where to look for includes (basically)
+        stylesheet   path of a stylesheet to use, if any
+    """
+    from docutils.core import publish_string
+    kwargs = {
+        'stylesheet' : stylesheet, 
+        'stylesheet_path': None,
+        'traceback' : 1, 
+        'embed_stylesheet': 0,
+        'output_encoding' : encoding, 
+        #'halt' : 0, # 'info',
+        'halt_level' : 2, 
+    }
+    # docutils uses os.getcwd() :-(
+    source_path = os.path.abspath(str(source_path))
+    prevdir = os.getcwd()
+    try:
+        #os.chdir(os.path.dirname(source_path))
+        return publish_string(source, source_path, writer_name='html',
+                              settings_overrides=kwargs)
+    finally:
+        os.chdir(prevdir)
+
+def process(txtpath, encoding='latin1'):
+    """ process a textfile """
+    log("processing %s" % txtpath)
+    assert txtpath.check(ext='.txt')
+    if isinstance(txtpath, py.path.svnwc):
+        txtpath = txtpath.localpath
+    htmlpath = txtpath.new(ext='.html')
+    #svninfopath = txtpath.localpath.new(ext='.svninfo')
+
+    style = txtpath.dirpath('style.css')
+    if style.check():
+        stylesheet = style.basename
+    else:
+        stylesheet = None
+    content = unicode(txtpath.read(), encoding)
+    doc = convert_rest_html(content, txtpath, stylesheet=stylesheet, encoding=encoding)
+    htmlpath.open('wb').write(doc)
+    #log("wrote %r" % htmlpath)
+    #if txtpath.check(svnwc=1, versioned=1): 
+    #    info = txtpath.info()
+    #    svninfopath.dump(info) 
+
+if sys.version_info > (3, 0):
+    def _uni(s): return s
+else:
+    def _uni(s):
+        return unicode(s)
+
+rex1 = re.compile(r'.*<body>(.*)</body>.*', re.MULTILINE | re.DOTALL)
+rex2 = re.compile(r'.*<div class="document">(.*)</div>.*', re.MULTILINE | re.DOTALL)
+
+def strip_html_header(string, encoding='utf8'):
+    """ return the content of the body-tag """ 
+    uni = unicode(string, encoding)
+    for rex in rex1,rex2: 
+        match = rex.search(uni) 
+        if not match: 
+            break 
+        uni = match.group(1) 
+    return uni 
+
+class Project: # used for confrest.py files 
+    def __init__(self, sourcepath):
+        self.sourcepath = sourcepath
+    def process(self, path):
+        return process(path)
+    def get_htmloutputpath(self, path):
+        return path.new(ext='html')

Added: pypy/trunk/py/plugin/pytest_resultlog.py
==============================================================================
--- (empty file)
+++ pypy/trunk/py/plugin/pytest_resultlog.py	Wed Nov 11 18:54:49 2009
@@ -0,0 +1,94 @@
+"""resultlog plugin for machine-readable logging of test results. 
+   Useful for buildbot integration code. 
+""" 
+
+import py
+from py.builtin import print_
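+
+# Format sketch of the resulting log (derived from write_log_entry below):
+# one line per test, "<status letter> <generic test path>", with the failure
+# representation indented by one space for failing tests, e.g. roughly:
+#
+#     . somedir/test_foo.py:test_ok
+#     F somedir/test_foo.py:test_fail
+#      <failure longrepr lines>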
+
+def pytest_addoption(parser):
+    group = parser.getgroup("resultlog", "resultlog plugin options")
+    group.addoption('--resultlog', action="store", dest="resultlog", metavar="path", default=None,
+           help="path for machine-readable result log.")
+
+def pytest_configure(config):
+    resultlog = config.option.resultlog
+    if resultlog:
+        logfile = open(resultlog, 'w', 1) # line buffered
+        config._resultlog = ResultLog(config, logfile) 
+        config.pluginmanager.register(config._resultlog)
+
+def pytest_unconfigure(config):
+    resultlog = getattr(config, '_resultlog', None)
+    if resultlog:
+        resultlog.logfile.close()
+        del config._resultlog 
+        config.pluginmanager.unregister(resultlog)
+
+def generic_path(item):
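+    # build a compact path for the item, e.g. (roughly)
+    # "testdir/test_foo.py:TestClass().test_method": '/' separates filesystem
+    # parts, ':' the first non-filesystem part, '.' further python names.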
+    chain = item.listchain()
+    gpath = [chain[0].name]
+    fspath = chain[0].fspath
+    fspart = False
+    for node in chain[1:]:
+        newfspath = node.fspath
+        if newfspath == fspath:
+            if fspart:
+                gpath.append(':')
+                fspart = False
+            else:
+                gpath.append('.')            
+        else:
+            gpath.append('/')
+            fspart = True
+        name = node.name
+        if name[0] in '([':
+            gpath.pop()
+        gpath.append(name)
+        fspath = newfspath
+    return ''.join(gpath)
+        
+class ResultLog(object):
+    def __init__(self, config, logfile):
+        self.config = config
+        self.logfile = logfile # preferably line buffered
+
+    def write_log_entry(self, testpath, shortrepr, longrepr):
+        print_("%s %s" % (shortrepr, testpath), file=self.logfile)
+        for line in longrepr.splitlines():
+            print_(" %s" % line, file=self.logfile)
+
+    def log_outcome(self, node, shortrepr, longrepr):
+        testpath = generic_path(node)
+        self.write_log_entry(testpath, shortrepr, longrepr) 
+
+    def pytest_runtest_logreport(self, report):
+        res = self.config.hook.pytest_report_teststatus(report=report)
+        if res is not None:
+            code = res[1]
+        else:
+            code = report.shortrepr
+        if code == 'x':
+            longrepr = str(report.longrepr)
+        elif code == 'P':
+            longrepr = ''
+        elif report.passed:
+            longrepr = ""
+        elif report.failed:
+            longrepr = str(report.longrepr) 
+        elif report.skipped:
+            longrepr = str(report.longrepr.reprcrash.message)
+        self.log_outcome(report.item, code, longrepr) 
+
+    def pytest_collectreport(self, report):
+        if not report.passed:
+            if report.failed: 
+                code = "F"
+            else:
+                assert report.skipped
+                code = "S"
+            longrepr = str(report.longrepr.reprcrash)
+            self.log_outcome(report.collector, code, longrepr)    
+
+    def pytest_internalerror(self, excrepr):
+        path = excrepr.reprcrash.path 
+        self.write_log_entry(path, '!', str(excrepr))

Added: pypy/trunk/py/plugin/pytest_runner.py
==============================================================================
--- (empty file)
+++ pypy/trunk/py/plugin/pytest_runner.py	Wed Nov 11 18:54:49 2009
@@ -0,0 +1,294 @@
+""" 
+collect and run test items and create reports. 
+"""
+
+import py
+from py.impl.test.outcome import Skipped
+
+#
+# pytest plugin hooks 
+
+def pytest_addoption(parser):
+    group = parser.getgroup("general") 
+    group.addoption('--boxed',
+               action="store_true", dest="boxed", default=False,
+               help="box each test run in a separate process") 
+
+# XXX move to pytest_sessionstart and fix py.test owns tests 
+def pytest_configure(config):
+    config._setupstate = SetupState()
+
+def pytest_sessionfinish(session, exitstatus):
+    if hasattr(session.config, '_setupstate'):
+        hook = session.config.hook
+        rep = hook.pytest__teardown_final(session=session)
+        if rep:
+            hook.pytest__teardown_final_logerror(report=rep)
+
+def pytest_make_collect_report(collector):
+    result = excinfo = None
+    try:
+        result = collector._memocollect()
+    except KeyboardInterrupt:
+        raise
+    except:
+        excinfo = py.code.ExceptionInfo()
+    return CollectReport(collector, result, excinfo)
+
+def pytest_runtest_protocol(item):
+    if item.config.getvalue("boxed"):
+        reports = forked_run_report(item) 
+        for rep in reports:
+            item.config.hook.pytest_runtest_logreport(report=rep)
+    else:
+        runtestprotocol(item)
+    return True
+
+def runtestprotocol(item, log=True):
+    rep = call_and_report(item, "setup", log)
+    reports = [rep]
+    if rep.passed:
+        reports.append(call_and_report(item, "call", log))
+    reports.append(call_and_report(item, "teardown", log))
+    return reports
+
+def pytest_runtest_setup(item):
+    item.config._setupstate.prepare(item)
+
+def pytest_runtest_call(item):
+    if not item._deprecated_testexecution():
+        item.runtest()
+
+def pytest_runtest_makereport(item, call):
+    return ItemTestReport(item, call.excinfo, call.when)
+
+def pytest_runtest_teardown(item):
+    item.config._setupstate.teardown_exact(item)
+
+def pytest__teardown_final(session):
+    call = CallInfo(session.config._setupstate.teardown_all, when="teardown")
+    if call.excinfo:
+        rep = TeardownErrorReport(call.excinfo)
+        return rep 
+
+def pytest_report_teststatus(report):
+    if report.when in ("setup", "teardown"):
+        if report.failed:
+            #      category, shortletter, verbose-word 
+            return "error", "E", "ERROR"
+        elif report.skipped:
+            return "skipped", "s", "SKIPPED"
+        else:
+            return "", "", ""
+#
+# Implementation
+
+def call_and_report(item, when, log=True):
+    call = call_runtest_hook(item, when)
+    hook = item.config.hook
+    report = hook.pytest_runtest_makereport(item=item, call=call)
+    if log and (when == "call" or not report.passed):
+        hook.pytest_runtest_logreport(report=report) 
+    return report
+
+def call_runtest_hook(item, when):
+    hookname = "pytest_runtest_" + when 
+    hook = getattr(item.config.hook, hookname)
+    return CallInfo(lambda: hook(item=item), when=when)
+
+class CallInfo:
+    excinfo = None 
+    def __init__(self, func, when):
+        self.when = when 
+        try:
+            self.result = func()
+        except KeyboardInterrupt:
+            raise
+        except:
+            self.excinfo = py.code.ExceptionInfo()
+
+    def __repr__(self):
+        if self.excinfo:
+            status = "exception: %s" % str(self.excinfo.value)
+        else:
+            status = "result: %r" % (self.result,)
+        return "<CallInfo when=%r %s>" % (self.when, status)
+
+def forked_run_report(item):
+    # for now, we run setup/teardown in the subprocess 
+    # XXX optionally allow sharing of setup/teardown 
+    EXITSTATUS_TESTEXIT = 4
+    from py.impl.test.dist.mypickle import ImmutablePickler
+    ipickle = ImmutablePickler(uneven=0)
+    ipickle.selfmemoize(item.config)
+    # XXX workaround the issue that 2.6 cannot pickle 
+    # instances of classes defined in global conftest.py files
+    ipickle.selfmemoize(item) 
+    def runforked():
+        try:
+            reports = runtestprotocol(item, log=False)
+        except KeyboardInterrupt: 
+            py.std.os._exit(EXITSTATUS_TESTEXIT)
+        return ipickle.dumps(reports)
+
+    ff = py.process.ForkedFunc(runforked)
+    result = ff.waitfinish()
+    if result.retval is not None:
+        return ipickle.loads(result.retval)
+    else:
+        if result.exitstatus == EXITSTATUS_TESTEXIT:
+            py.test.exit("forked test item %s raised Exit" %(item,))
+        return [report_process_crash(item, result)]
+
+def report_process_crash(item, result):
+    path, lineno = item._getfslineno()
+    info = "%s:%s: running the test CRASHED with signal %d" %(
+            path, lineno, result.signal)
+    return ItemTestReport(item, excinfo=info, when="???")
+
+class BaseReport(object):
+    def __repr__(self):
+        l = ["%s=%s" %(key, value)
+           for key, value in self.__dict__.items()]
+        return "<%s %s>" %(self.__class__.__name__, " ".join(l),)
+
+    def toterminal(self, out):
+        longrepr = self.longrepr 
+        if hasattr(longrepr, 'toterminal'):
+            longrepr.toterminal(out)
+        else:
+            out.line(str(longrepr))
+   
+class ItemTestReport(BaseReport):
+    failed = passed = skipped = False
+
+    def __init__(self, item, excinfo=None, when=None):
+        self.item = item 
+        self.when = when
+        if item and when != "setup":
+            self.keywords = item.readkeywords() 
+        else:
+            # if we fail during setup it might mean 
+            # we are not able to access the underlying object
+            # this might e.g. happen if we are unpickled 
+            # and our parent collector did not collect us 
+            # (because it e.g. skipped for platform reasons)
+            self.keywords = {}  
+        if not excinfo:
+            self.passed = True
+            self.shortrepr = "." 
+        else:
+            if not isinstance(excinfo, py.code.ExceptionInfo):
+                self.failed = True
+                shortrepr = "?"
+                longrepr = excinfo 
+            elif excinfo.errisinstance(Skipped):
+                self.skipped = True 
+                shortrepr = "s"
+                longrepr = self.item._repr_failure_py(excinfo)
+            else:
+                self.failed = True
+                shortrepr = self.item.shortfailurerepr
+                if self.when == "call":
+                    longrepr = self.item.repr_failure(excinfo)
+                else: # exception in setup or teardown 
+                    longrepr = self.item._repr_failure_py(excinfo)
+                    shortrepr = shortrepr.lower()
+            self.shortrepr = shortrepr 
+            self.longrepr = longrepr 
+
+    def __repr__(self):
+        status = (self.passed and "passed" or 
+                  self.skipped and "skipped" or 
+                  self.failed and "failed" or 
+                  "CORRUPT")
+        l = [repr(self.item.name), "when=%r" % self.when, "outcome %r" % status,]
+        if hasattr(self, 'node'):
+            l.append("txnode=%s" % self.node.gateway.id)
+        info = " " .join(map(str, l))
+        return "<ItemTestReport %s>" % info 
+
+    def getnode(self):
+        return self.item 
+
+class CollectReport(BaseReport):
+    skipped = failed = passed = False 
+
+    def __init__(self, collector, result, excinfo=None):
+        self.collector = collector 
+        if not excinfo:
+            self.passed = True
+            self.result = result 
+        else:
+            self.longrepr = self.collector._repr_failure_py(excinfo)
+            if excinfo.errisinstance(Skipped):
+                self.skipped = True
+                self.reason = str(excinfo.value)
+            else:
+                self.failed = True
+
+    def getnode(self):
+        return self.collector 
+
+class TeardownErrorReport(BaseReport):
+    skipped = passed = False 
+    failed = True
+    when = "teardown"
+    def __init__(self, excinfo):
+        self.longrepr = excinfo.getrepr(funcargs=True)
+
+class SetupState(object):
+    """ shared state for setting up/tearing down test items or collectors. """
+    def __init__(self):
+        self.stack = []
+        self._finalizers = {}
+
+    def addfinalizer(self, finalizer, colitem):
+        """ attach a finalizer to the given colitem. 
+        if colitem is None, this will add a finalizer that 
+        is called at the end of teardown_all(). 
+        """
+        assert hasattr(finalizer, '__call__')
+        #assert colitem in self.stack
+        self._finalizers.setdefault(colitem, []).append(finalizer)
+
+    def _pop_and_teardown(self):
+        colitem = self.stack.pop()
+        self._teardown_with_finalization(colitem)
+
+    def _callfinalizers(self, colitem):
+        finalizers = self._finalizers.pop(colitem, None)
+        while finalizers:
+            fin = finalizers.pop()
+            fin()
+
+    def _teardown_with_finalization(self, colitem): 
+        self._callfinalizers(colitem) 
+        if colitem: 
+            colitem.teardown()
+        for colitem in self._finalizers:
+            assert colitem is None or colitem in self.stack
+
+    def teardown_all(self): 
+        while self.stack: 
+            self._pop_and_teardown()
+        self._teardown_with_finalization(None)
+        assert not self._finalizers
+
+    def teardown_exact(self, item):
+        if self.stack and item == self.stack[-1]:
+            self._pop_and_teardown()
+        else:
+            self._callfinalizers(item)
+     
+    def prepare(self, colitem): 
+        """ setup objects along the collector chain to the test-method
+            and teardown previously setup objects."""
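+        # tear down stack entries that are not parents of the new item,
+        # then call setup() on the remaining collectors leading to it.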
+        needed_collectors = colitem.listchain() 
+        while self.stack: 
+            if self.stack == needed_collectors[:len(self.stack)]: 
+                break 
+            self._pop_and_teardown()
+        for col in needed_collectors[len(self.stack):]: 
+            col.setup() 
+            self.stack.append(col) 

Added: pypy/trunk/py/plugin/pytest_skipping.py
==============================================================================
--- (empty file)
+++ pypy/trunk/py/plugin/pytest_skipping.py	Wed Nov 11 18:54:49 2009
@@ -0,0 +1,232 @@
+"""
+advanced skipping for python test functions, classes or modules.
+
+With this plugin you can mark test functions for conditional skipping 
+or as "xfail", expected-to-fail.  Skipping a test will avoid running it
+at all while xfail-marked tests will run and result in an inverted outcome:
+a pass becomes a failure and a fail becomes a semi-passing one. 
+
+The need for skipping a test is usually connected to a condition.  
+If a test fails under all conditions then it's probably better
+to mark your test as 'xfail'. 
+
+By passing ``--report=xfailed,skipped`` to the terminal reporter 
+you will see summary information on skips and xfail-run tests
+at the end of a test run. 
+
+.. _skipif:
+
+Skipping a single function 
+-------------------------------------------
+
+Here is an example for marking a test function to be skipped
+when run on a Python3 interpreter::
+
+    @py.test.mark.skipif("sys.version_info >= (3,0)")
+    def test_function():
+        ...
+
+During test function setup the skipif condition is 
+evaluated by calling ``eval(expr, namespace)``.  The namespace
+contains the  ``sys`` and ``os`` modules and the test 
+``config`` object.  The latter allows you to skip based 
+on a test configuration value e.g. like this::
+
+    @py.test.mark.skipif("not config.getvalue('db')")
+    def test_function(...):
+        ...
+
+Create a shortcut for your conditional skip decorator 
+at module level like this::
+
+    win32only = py.test.mark.skipif("sys.platform != 'win32'")
+
+    @win32only
+    def test_function():
+        ...
+
+
+skip groups of test functions 
+--------------------------------------
+
+As with all metadata function marking you can do it at
+`whole class- or module level`_.  Here is an example 
+for skipping all methods of a test class based on platform::
+
+    class TestPosixCalls:
+        pytestmark = py.test.mark.skipif("sys.platform == 'win32'")
+    
+        def test_function(self):
+            # will not be set up or run under the 'win32' platform
+            #
+
+The mark assigned to ``pytestmark`` will be applied to each test function in the class.
+
+.. _`whole class- or module level`: mark.html#scoped-marking
+
+
+mark a test function as **expected to fail**
+-------------------------------------------------------
+
+You can use the ``xfail`` marker to indicate that you
+expect the test to fail:: 
+
+    @py.test.mark.xfail
+    def test_function():
+        ...
+
+This test will be run but no traceback will be reported
+when it fails. Instead terminal reporting will list it in the
+"expected to fail" or "unexpectedly passing" sections.
+
+Same as with skipif_ you can also selectively expect a failure
+depending on platform::
+
+    @py.test.mark.xfail("sys.version_info >= (3,0)")
+
+    def test_function():
+        ...
+
+
+skipping on a missing import dependency
+--------------------------------------------------
+
+You can use the following import helper at module level 
+or within a test or test setup function::
+
+    docutils = py.test.importorskip("docutils")
+
+If ``docutils`` cannot be imported here, this will lead to a
+skip outcome of the test.  You can also skip depending on whether
+a library comes with a high enough version::
+
+    docutils = py.test.importorskip("docutils", minversion="0.3")
+
+The version will be read from the specified module's ``__version__`` attribute.
+
+imperative skip from within a test or setup function
+------------------------------------------------------
+
+If for some reason you cannot declare skip-conditions
+you can also imperatively produce a Skip-outcome from 
+within test or setup code.  Example::
+
+    def test_function():
+        if not valid_config():
+            py.test.skip("unsupported configuration")
+
+"""
+# XXX py.test.skip, .importorskip and the Skipped class 
+# should also be defined in this plugin, requires thought/changes
+
+import py
+
+def pytest_runtest_setup(item):
+    expr, result = evalexpression(item, 'skipif')
+    if result:
+        py.test.skip(expr)
+
+def pytest_runtest_makereport(__multicall__, item, call):
+    if call.when != "call":
+        return
+    expr, result = evalexpression(item, 'xfail')
+    rep = __multicall__.execute()
+    if result:
+        if call.excinfo:
+            rep.skipped = True
+            rep.failed = rep.passed = False
+        else:
+            rep.skipped = rep.passed = False
+            rep.failed = True
+        rep.keywords['xfail'] = expr 
+    else:
+        if 'xfail' in rep.keywords:
+            del rep.keywords['xfail']
+    return rep
+
+# called by terminalreporter progress reporting
+def pytest_report_teststatus(report):
+    if 'xfail' in report.keywords:
+        if report.skipped:
+            return "xfailed", "x", "xfail"
+        elif report.failed:
+            return "xpassed", "P", "xpass"
+
+# called by the terminalreporter instance/plugin
+def pytest_terminal_summary(terminalreporter):
+    show_xfailed(terminalreporter)
+    show_skipped(terminalreporter)
+
+def show_xfailed(terminalreporter):
+    tr = terminalreporter
+    xfailed = tr.stats.get("xfailed")
+    if xfailed:
+        if not tr.hasopt('xfailed'):
+            if tr.config.getvalue("verbose"):
+                tr.write_line(
+                  "%d expected failures, use --report=xfailed for more info" %
+                  len(xfailed))
+            return
+        tr.write_sep("_", "expected failures")
+        for rep in xfailed:
+            entry = rep.longrepr.reprcrash
+            modpath = rep.item.getmodpath(includemodule=True)
+            pos = "%s %s:%d: " %(modpath, entry.path, entry.lineno)
+            reason = rep.longrepr.reprcrash.message
+            i = reason.find("\n")
+            if i != -1:
+                reason = reason[:i]
+            tr._tw.line("%s %s" %(pos, reason))
+
+    xpassed = terminalreporter.stats.get("xpassed")
+    if xpassed:
+        tr.write_sep("_", "UNEXPECTEDLY PASSING TESTS")
+        for rep in xpassed:
+            fspath, lineno, modpath = rep.item.reportinfo()
+            pos = "%s %s:%d: unexpectedly passing" %(modpath, fspath, lineno)
+            tr._tw.line(pos)
+
+
+def evalexpression(item, keyword):
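+    # look up the mark named `keyword` (e.g. 'skipif' or 'xfail') on the
+    # test function and evaluate its expression arguments; returns the
+    # last evaluated expression and its result, or (None, False)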
+    if isinstance(item, py.test.collect.Function):
+        markholder = getattr(item.obj, keyword, None)
+        result = False
+        if markholder:
+            d = {'os': py.std.os, 'sys': py.std.sys, 'config': item.config}
+            expr, result = None, True
+            for expr in markholder.args:
+                if isinstance(expr, str):
+                    result = eval(expr, d)
+                else:
+                    result = expr
+                if not result:
+                    break
+            return expr, result
+    return None, False
+
+def folded_skips(skipped):
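+    # group skip reports by (path, lineno, message) and return a list
+    # of (count, path, lineno, message) tuples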
+    d = {}
+    for event in skipped:
+        entry = event.longrepr.reprcrash 
+        key = entry.path, entry.lineno, entry.message
+        d.setdefault(key, []).append(event)
+    l = []
+    for key, events in d.items(): 
+        l.append((len(events),) + key)
+    return l 
+
+def show_skipped(terminalreporter):
+    tr = terminalreporter
+    skipped = tr.stats.get('skipped', [])
+    if skipped:
+        if not tr.hasopt('skipped'):
+            if tr.config.getvalue("verbose"):
+                tr.write_line(
+                    "%d skipped tests, use --report=skipped for more info" %
+                    len(skipped))
+            return
+        fskips = folded_skips(skipped)
+        if fskips:
+            tr.write_sep("_", "skipped test summary")
+            for num, fspath, lineno, reason in fskips:
+                tr._tw.line("%s:%d: [%d] %s" %(fspath, lineno, num, reason))

Added: pypy/trunk/py/plugin/pytest_terminal.py
==============================================================================
--- (empty file)
+++ pypy/trunk/py/plugin/pytest_terminal.py	Wed Nov 11 18:54:49 2009
@@ -0,0 +1,468 @@
+"""
+Implements terminal reporting of the full testing process.
+
+This is a good source for looking at the various reporting hooks. 
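+
+For example, a ``conftest.py`` or another plugin can customize how a
+test outcome is reported by implementing one of these hooks (a minimal
+sketch; the category, letter and word values are only illustrative)::
+
+    def pytest_report_teststatus(report):
+        if report.skipped:
+            return "skipped", "s", "SKIP"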
+"""
+import py
+import sys
+
+def pytest_addoption(parser):
+    group = parser.getgroup("terminal reporting", after="general")
+    group._addoption('-v', '--verbose', action="count",
+               dest="verbose", default=0, help="increase verbosity.")
+    group._addoption('-l', '--showlocals',
+               action="store_true", dest="showlocals", default=False,
+               help="show locals in tracebacks (disabled by default).")
+    group.addoption('--report',
+               action="store", dest="report", default=None, metavar="opts",
+               help="comma separated reporting options")
+    group._addoption('--tb', metavar="style", 
+               action="store", dest="tbstyle", default='long',
+               type="choice", choices=['long', 'short', 'no'],
+               help="traceback verboseness (long/short/no).")
+    group._addoption('--fulltrace',
+               action="store_true", dest="fulltrace", default=False,
+               help="don't cut any tracebacks (default is to cut).")
+
+    group = parser.getgroup("debugconfig")
+    group.addoption('--collectonly',
+        action="store_true", dest="collectonly",
+        help="only collect tests, don't execute them.")
+    group.addoption('--traceconfig',
+               action="store_true", dest="traceconfig", default=False,
+               help="trace considerations of conftest.py files.")
+    group._addoption('--nomagic',
+               action="store_true", dest="nomagic", default=False,
+               help="don't reinterpret asserts, no traceback cutting. ")
+    group.addoption('--debug',
+               action="store_true", dest="debug", default=False,
+               help="generate and show internal debugging information.")
+
+
+def pytest_configure(config):
+    if config.option.collectonly:
+        reporter = CollectonlyReporter(config)
+    else:
+        reporter = TerminalReporter(config)
+    # XXX see remote.py's XXX 
+    for attr in 'pytest_terminal_hasmarkup', 'pytest_terminal_fullwidth':
+        if hasattr(config, attr):
+            #print "SETTING TERMINAL OPTIONS", attr, getattr(config, attr)
+            name = attr.split("_")[-1]
+            assert hasattr(reporter._tw, name), name
+            setattr(reporter._tw, name, getattr(config, attr))
+    config.pluginmanager.register(reporter, 'terminalreporter')
+
+def getreportopt(optvalue):
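+    # maps e.g. "skipped,noxfailed" to {'skipped': True, 'xfailed': False}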
+    d = {}
+    if optvalue:
+        for setting in optvalue.split(","):
+            setting = setting.strip()
+            val = True
+            if setting.startswith("no"):
+                val = False
+                setting = setting[2:]
+            d[setting] = val
+    return d
+
+class TerminalReporter:
+    def __init__(self, config, file=None):
+        self.config = config 
+        self.stats = {}       
+        self.curdir = py.path.local()
+        if file is None:
+            file = py.std.sys.stdout
+        self._tw = py.io.TerminalWriter(file)
+        self.currentfspath = None 
+        self.gateway2info = {}
+        self._reportopt = getreportopt(config.getvalue('report'))
+
+    def hasopt(self, name):
+        return self._reportopt.get(name, False)
+
+    def write_fspath_result(self, fspath, res):
+        fspath = self.curdir.bestrelpath(fspath)
+        if fspath != self.currentfspath:
+            self._tw.line()
+            relpath = self.curdir.bestrelpath(fspath)
+            self._tw.write(relpath + " ")
+            self.currentfspath = fspath
+        self._tw.write(res)
+
+    def write_ensure_prefix(self, prefix, extra="", **kwargs):
+        if self.currentfspath != prefix:
+            self._tw.line()
+            self.currentfspath = prefix 
+            self._tw.write(prefix)
+        if extra:
+            self._tw.write(extra, **kwargs)
+            self.currentfspath = -2
+
+    def ensure_newline(self):
+        if self.currentfspath: 
+            self._tw.line()
+            self.currentfspath = None
+
+    def write_line(self, line, **markup):
+        line = str(line)
+        self.ensure_newline()
+        self._tw.line(line, **markup)
+
+    def write_sep(self, sep, title=None, **markup):
+        self.ensure_newline()
+        self._tw.sep(sep, title, **markup)
+
+    def getcategoryletterword(self, rep):
+        res = self.config.hook.pytest_report_teststatus(report=rep)
+        if res:
+            return res
+        for cat in 'skipped failed passed ???'.split():
+            if getattr(rep, cat, None):
+                break 
+        return cat, self.getoutcomeletter(rep), self.getoutcomeword(rep)
+
+    def getoutcomeletter(self, rep):
+        return rep.shortrepr 
+
+    def getoutcomeword(self, rep):
+        if rep.passed: 
+            return "PASS", dict(green=True)
+        elif rep.failed: 
+            return "FAIL", dict(red=True)
+        elif rep.skipped: 
+            return "SKIP"
+        else: 
+            return "???", dict(red=True)
+
+    def pytest_internalerror(self, excrepr):
+        for line in str(excrepr).split("\n"):
+            self.write_line("INTERNALERROR> " + line)
+
+    def pytest_gwmanage_newgateway(self, gateway, platinfo):
+        #self.write_line("%s instantiated gateway from spec %r" %(gateway.id, gateway.spec._spec))
+        d = {}
+        d['version'] = repr_pythonversion(platinfo.version_info)
+        d['id'] = gateway.id
+        d['spec'] = gateway.spec._spec 
+        d['platform'] = platinfo.platform 
+        if self.config.option.verbose:
+            d['extra'] = "- " + platinfo.executable
+        else:
+            d['extra'] = ""
+        d['cwd'] = platinfo.cwd
+        infoline = ("%(id)s %(spec)s -- platform %(platform)s, "
+                        "Python %(version)s "
+                        "cwd: %(cwd)s"
+                        "%(extra)s" % d)
+        self.write_line(infoline)
+        self.gateway2info[gateway] = infoline
+
+    def pytest_gwmanage_rsyncstart(self, source, gateways):
+        targets = ", ".join([gw.id for gw in gateways])
+        msg = "rsyncstart: %s -> %s" %(source, targets)
+        if not self.config.option.verbose:
+            msg += " # use --verbose to see rsync progress"
+        self.write_line(msg)
+
+    def pytest_gwmanage_rsyncfinish(self, source, gateways):
+        targets = ", ".join([gw.id for gw in gateways])
+        self.write_line("rsyncfinish: %s -> %s" %(source, targets))
+
+    def pytest_plugin_registered(self, plugin):
+        if self.config.option.traceconfig: 
+            msg = "PLUGIN registered: %s" %(plugin,)
+            # XXX this event may happen during setup/teardown time,
+            #     when output capturing is active; using self.write_line
+            #     then garbles the captured output
+            self.write_line(msg)
+
+    def pytest_testnodeready(self, node):
+        self.write_line("%s txnode ready to receive tests" %(node.gateway.id,))
+
+    def pytest_testnodedown(self, node, error):
+        if error:
+            self.write_line("%s node down, error: %s" %(node.gateway.id, error))
+
+    def pytest_trace(self, category, msg):
+        if self.config.option.debug or \
+           self.config.option.traceconfig and category.find("config") != -1:
+            self.write_line("[%s] %s" %(category, msg))
+
+    def pytest_rescheduleitems(self, items):
+        if self.config.option.debug:
+            self.write_sep("!", "RESCHEDULING %s " %(items,))
+
+    def pytest_deselected(self, items):
+        self.stats.setdefault('deselected', []).append(items)
+
+    def pytest_itemstart(self, item, node=None):
+        if getattr(self.config.option, 'dist', 'no') != "no":
+            # for dist-testing situations itemstart means we 
+            # queued the item for sending, not interesting (unless debugging) 
+            if self.config.option.debug:
+                line = self._reportinfoline(item)
+                extra = ""
+                if node:
+                    extra = "-> " + str(node.gateway.id)
+                self.write_ensure_prefix(line, extra)
+        else:
+            if self.config.option.verbose:
+                line = self._reportinfoline(item)
+                self.write_ensure_prefix(line, "") 
+            else:
+                # ensure that the path is printed before the
+                # 1st test of a module starts running
+                self.write_fspath_result(self._getfspath(item), "")
+
+    def pytest__teardown_final_logerror(self, report):
+        self.stats.setdefault("error", []).append(report)
+ 
+    def pytest_runtest_logreport(self, report):
+        rep = report
+        cat, letter, word = self.getcategoryletterword(rep)
+        if not letter and not word:
+            # probably passed setup/teardown
+            return
+        if isinstance(word, tuple):
+            word, markup = word
+        else:
+            markup = {}
+        self.stats.setdefault(cat, []).append(rep)
+        if not self.config.option.verbose:
+            self.write_fspath_result(self._getfspath(rep.item), letter)
+        else:
+            line = self._reportinfoline(rep.item)
+            if not hasattr(rep, 'node'):
+                self.write_ensure_prefix(line, word, **markup)
+            else:
+                self.ensure_newline()
+                self._tw.write("%s " % rep.node.gateway.id)
+                self._tw.write(word, **markup)
+                self._tw.write(" " + line)
+                self.currentfspath = -2
+
+    def pytest_collectreport(self, report):
+        if not report.passed:
+            if report.failed:
+                self.stats.setdefault("error", []).append(report)
+                msg = report.longrepr.reprcrash.message 
+                self.write_fspath_result(report.collector.fspath, "E")
+            elif report.skipped:
+                self.stats.setdefault("skipped", []).append(report)
+                self.write_fspath_result(report.collector.fspath, "S")
+
+    def pytest_sessionstart(self, session):
+        self.write_sep("=", "test session starts", bold=True)
+        self._sessionstarttime = py.std.time.time()
+
+        verinfo = ".".join(map(str, sys.version_info[:3]))
+        msg = "python: platform %s -- Python %s" % (sys.platform, verinfo)
+        msg += " -- pytest-%s" % (py.__version__)
+        if self.config.option.verbose or self.config.option.debug or getattr(self.config.option, 'pastebin', None):
+            msg += " -- " + str(sys.executable)
+        self.write_line(msg)
+
+        if self.config.option.debug or self.config.option.traceconfig:
+            self.write_line("using py lib: %s" % (py.path.local(py.__file__).dirpath()))
+        if self.config.option.traceconfig:
+            self.write_line("active plugins:")
+            plugins = []
+            items = self.config.pluginmanager._name2plugin.items()
+            for name, plugin in items:
+                repr_plugin = repr(plugin)
+                fullwidth = getattr(self._tw, 'fullwidth', 65000)
+                if len(repr_plugin)+26 > fullwidth:
+                    repr_plugin = repr_plugin[:(fullwidth-30)] + '...'
+                self.write_line("    %-20s: %s" %(name, repr_plugin))
+        for i, testarg in enumerate(self.config.args):
+            self.write_line("test object %d: %s" %(i+1, testarg))
+
+    def pytest_sessionfinish(self, exitstatus, __multicall__):
+        __multicall__.execute() 
+        self._tw.line("")
+        if exitstatus in (0, 1, 2):
+            self.summary_errors()
+            self.summary_failures()
+            self.config.hook.pytest_terminal_summary(terminalreporter=self)
+        if exitstatus == 2:
+            self._report_keyboardinterrupt()
+        self.summary_deselected()
+        self.summary_stats()
+
+    def pytest_keyboard_interrupt(self, excinfo):
+        self._keyboardinterrupt_memo = excinfo.getrepr()
+
+    def _report_keyboardinterrupt(self):
+        self.write_sep("!", "KEYBOARD INTERRUPT")
+        excrepr = self._keyboardinterrupt_memo
+        if self.config.option.verbose:
+            excrepr.toterminal(self._tw)
+        else:
+            excrepr.reprcrash.toterminal(self._tw)
+
+    def pytest_looponfailinfo(self, failreports, rootdirs):
+        if failreports:
+            self.write_sep("#", "LOOPONFAILING", red=True)
+            for report in failreports:
+                try:
+                    loc = report.longrepr.reprcrash
+                except AttributeError:
+                    loc = str(report.longrepr)[:50]
+                self.write_line(loc, red=True)
+        self.write_sep("#", "waiting for changes")
+        for rootdir in rootdirs:
+            self.write_line("### Watching:   %s" %(rootdir,), bold=True)
+
+    def _reportinfoline(self, item):
+        collect_fspath = self._getfspath(item)
+        fspath, lineno, msg = self._getreportinfo(item)
+        if fspath and fspath != collect_fspath:
+            fspath = "%s <- %s" % (
+                self.curdir.bestrelpath(collect_fspath),
+                self.curdir.bestrelpath(fspath))
+        elif fspath:
+            fspath = self.curdir.bestrelpath(fspath)
+        if lineno is not None:
+            lineno += 1
+        if fspath and lineno and msg:
+            line = "%(fspath)s:%(lineno)s: %(msg)s"
+        elif fspath and msg:
+            line = "%(fspath)s: %(msg)s"
+        elif fspath and lineno:
+            line = "%(fspath)s:%(lineno)s"
+        else:
+            line = "[noreportinfo]"
+        return line % locals() + " "
+        
+    def _getfailureheadline(self, rep):
+        if hasattr(rep, "collector"):
+            return str(rep.collector.fspath)
+        elif hasattr(rep, 'item'):
+            fspath, lineno, msg = self._getreportinfo(rep.item)
+            return msg
+        else:
+            return "test session" 
+
+    def _getreportinfo(self, item):
+        try:
+            return item.__reportinfo
+        except AttributeError:
+            pass
+        reportinfo = item.config.hook.pytest_report_iteminfo(item=item)
+        # cache on item
+        item.__reportinfo = reportinfo
+        return reportinfo
+
+    def _getfspath(self, item):
+        try:
+            return item.fspath
+        except AttributeError:
+            fspath, lineno, msg = self._getreportinfo(item)
+            return fspath
+
+    #
+    # summaries for sessionfinish 
+    #
+
+    def summary_failures(self):
+        if 'failed' in self.stats and self.config.option.tbstyle != "no":
+            self.write_sep("=", "FAILURES")
+            for rep in self.stats['failed']:
+                msg = self._getfailureheadline(rep)
+                self.write_sep("_", msg)
+                self.write_platinfo(rep)
+                rep.toterminal(self._tw)
+
+    def summary_errors(self):
+        if 'error' in self.stats and self.config.option.tbstyle != "no":
+            self.write_sep("=", "ERRORS")
+            for rep in self.stats['error']:
+                msg = self._getfailureheadline(rep)
+                if not hasattr(rep, 'when'):
+                    # collect
+                    msg = "ERROR during collection " + msg
+                elif rep.when == "setup":
+                    msg = "ERROR at setup of " + msg 
+                elif rep.when == "teardown":
+                    msg = "ERROR at teardown of " + msg 
+                self.write_sep("_", msg)
+                self.write_platinfo(rep)
+                rep.toterminal(self._tw)
+
+    def write_platinfo(self, rep):
+        if hasattr(rep, 'node'):
+            info = self.gateway2info.get(
+                rep.node.gateway,
+                "node %r (platinfo not found? strange)" % (rep.node.gateway,))
+            self.write_line(info[:self._tw.fullwidth-1])
+
+    def summary_stats(self):
+        session_duration = py.std.time.time() - self._sessionstarttime
+
+        keys = "failed passed skipped deselected".split()
+        for key in self.stats.keys():
+            if key not in keys:
+                keys.append(key)
+        parts = []
+        for key in keys:
+            val = self.stats.get(key, None)
+            if val:
+                parts.append("%d %s" %(len(val), key))
+        line = ", ".join(parts)
+        # XXX coloring
+        self.write_sep("=", "%s in %.2f seconds" %(line, session_duration))
+
+    def summary_deselected(self):
+        if 'deselected' in self.stats:
+            self.write_sep("=", "%d tests deselected by %r" %(
+                len(self.stats['deselected']), self.config.option.keyword), bold=True)
+
+
+class CollectonlyReporter:
+    INDENT = "  "
+
+    def __init__(self, config, out=None):
+        self.config = config 
+        if out is None:
+            out = py.std.sys.stdout
+        self.out = py.io.TerminalWriter(out)
+        self.indent = ""
+        self._failed = []
+
+    def outindent(self, line):
+        self.out.line(self.indent + str(line))
+
+    def pytest_internalerror(self, excrepr):
+        for line in str(excrepr).split("\n"):
+            self.out.line("INTERNALERROR> " + line)
+
+    def pytest_collectstart(self, collector):
+        self.outindent(collector)
+        self.indent += self.INDENT 
+    
+    def pytest_itemstart(self, item, node=None):
+        self.outindent(item)
+
+    def pytest_collectreport(self, report):
+        if not report.passed:
+            self.outindent("!!! %s !!!" % report.longrepr.reprcrash.message)
+            self._failed.append(report)
+        self.indent = self.indent[:-len(self.INDENT)]
+
+    def pytest_sessionfinish(self, session, exitstatus):
+        if self._failed:
+            self.out.sep("!", "collection failures")
+        for rep in self._failed:
+            rep.toterminal(self.out)
+                
+
+def repr_pythonversion(v=None):
+    if v is None:
+        v = sys.version_info
+    try:
+        return "%s.%s.%s-%s-%s" % v
+    except (TypeError, ValueError):
+        return str(v)
+

Added: pypy/trunk/py/plugin/pytest_tmpdir.py
==============================================================================
--- (empty file)
+++ pypy/trunk/py/plugin/pytest_tmpdir.py	Wed Nov 11 18:54:49 2009
@@ -0,0 +1,21 @@
+"""provide temporary directories to test functions. 
+
+usage example::
+
+    def test_plugin(tmpdir):
+        tmpdir.join("hello").write("hello")
+
+.. _`py.path.local`: ../../path.html
+
+"""
+import py
+
+def pytest_funcarg__tmpdir(request):
+    """return a temporary directory path object
+    unique to each test function invocation,
+    created as a sub directory of the base temporary
+    directory.  The returned object is a `py.path.local`_
+    path object. 
+    """
+    name = request.function.__name__ 
+    return request.config.mktemp(name, numbered=True)

Added: pypy/trunk/py/plugin/pytest_unittest.py
==============================================================================
--- (empty file)
+++ pypy/trunk/py/plugin/pytest_unittest.py	Wed Nov 11 18:54:49 2009
@@ -0,0 +1,79 @@
+"""
+automatically discover and run traditional "unittest.py" style tests. 
+
+Usage
+----------------
+
+This plugin collects and runs Python `unittest.py style`_ tests. 
+It will automatically collect ``unittest.TestCase`` subclasses 
+and their ``test`` methods from the test modules of a project
+(usually following the ``test_*.py`` pattern). 
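+
+For example, a module like the following (a minimal sketch) would be
+collected and run automatically::
+
+    import unittest
+
+    class TestExample(unittest.TestCase):
+        def test_upper(self):
+            self.assertEqual("pypy".upper(), "PYPY")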
+
+This plugin is enabled by default. 
+
+.. _`unittest.py style`: http://docs.python.org/library/unittest.html
+"""
+import py
+import sys
+
+def pytest_pycollect_makeitem(collector, name, obj):
+    if 'unittest' not in sys.modules:
+        return # nobody derived unittest.TestCase
+    try:
+        isunit = issubclass(obj, py.std.unittest.TestCase)
+    except TypeError:
+        pass
+    else:
+        if isunit:
+            return UnitTestCase(name, parent=collector)
+
+class UnitTestCase(py.test.collect.Class):
+    def collect(self):
+        return [UnitTestCaseInstance("()", self)]
+
+    def setup(self):
+        pass
+
+    def teardown(self):
+        pass
+
+_dummy = object()
+class UnitTestCaseInstance(py.test.collect.Instance):
+    def collect(self):
+        loader = py.std.unittest.TestLoader()
+        names = loader.getTestCaseNames(self.obj.__class__)
+        l = []
+        for name in names:
+            callobj = getattr(self.obj, name)
+            if py.builtin.callable(callobj):
+                l.append(UnitTestFunction(name, parent=self))
+        return l
+
+    def _getobj(self):
+        return self.parent.obj(methodName='run')
+        
+class UnitTestFunction(py.test.collect.Function):
+    def __init__(self, name, parent, args=(), obj=_dummy, sort_value=None):
+        super(UnitTestFunction, self).__init__(name, parent)
+        self._args = args
+        if obj is not _dummy:
+            self._obj = obj
+        self._sort_value = sort_value
+        if hasattr(self.parent, 'newinstance'):
+            self.parent.newinstance()
+            self.obj = self._getobj()
+
+    def runtest(self):
+        target = self.obj
+        args = self._args
+        target(*args)
+
+    def setup(self):
+        instance = py.builtin._getimself(self.obj)
+        instance.setUp()
+
+    def teardown(self):
+        instance = py.builtin._getimself(self.obj)
+        instance.tearDown()
+

Modified: pypy/trunk/pypy/annotation/test/autopath.py
==============================================================================
--- pypy/trunk/pypy/annotation/test/autopath.py	(original)
+++ pypy/trunk/pypy/annotation/test/autopath.py	Wed Nov 11 18:54:49 2009
@@ -37,8 +37,7 @@
         partdir = head
         head, tail = os.path.split(head)
         if tail == part:
-            # check if "../py/__init__.py" exists
-            checkfile = os.path.join(partdir, os.pardir, 'py', '__init__.py')
+            checkfile = os.path.join(partdir, os.pardir, 'pypy', '__init__.py')
             if not os.path.exists(checkfile):
                 error = "Cannot find %r" % (os.path.normpath(checkfile),)
             break

Modified: pypy/trunk/pypy/bin/autopath.py
==============================================================================
--- pypy/trunk/pypy/bin/autopath.py	(original)
+++ pypy/trunk/pypy/bin/autopath.py	Wed Nov 11 18:54:49 2009
@@ -37,8 +37,7 @@
         partdir = head
         head, tail = os.path.split(head)
         if tail == part:
-            # check if "../py/__init__.py" exists
-            checkfile = os.path.join(partdir, os.pardir, 'py', '__init__.py')
+            checkfile = os.path.join(partdir, os.pardir, 'pypy', '__init__.py')
             if not os.path.exists(checkfile):
                 error = "Cannot find %r" % (os.path.normpath(checkfile),)
             break

Modified: pypy/trunk/pypy/bin/py.py
==============================================================================
--- pypy/trunk/pypy/bin/py.py	(original)
+++ pypy/trunk/pypy/bin/py.py	Wed Nov 11 18:54:49 2009
@@ -12,7 +12,7 @@
     pass
 
 from pypy.tool import option
-from py.compat.optparse import make_option
+from optparse import make_option
 from pypy.interpreter import main, interactive, error, gateway
 from pypy.config.config import OptionDescription, BoolOption, StrOption
 from pypy.config.config import Config, to_optparse

Modified: pypy/trunk/pypy/config/autopath.py
==============================================================================
--- pypy/trunk/pypy/config/autopath.py	(original)
+++ pypy/trunk/pypy/config/autopath.py	Wed Nov 11 18:54:49 2009
@@ -37,8 +37,7 @@
         partdir = head
         head, tail = os.path.split(head)
         if tail == part:
-            # check if "../py/__init__.py" exists
-            checkfile = os.path.join(partdir, os.pardir, 'py', '__init__.py')
+            checkfile = os.path.join(partdir, os.pardir, 'pypy', '__init__.py')
             if not os.path.exists(checkfile):
                 error = "Cannot find %r" % (os.path.normpath(checkfile),)
             break

Modified: pypy/trunk/pypy/config/config.py
==============================================================================
--- pypy/trunk/pypy/config/config.py	(original)
+++ pypy/trunk/pypy/config/config.py	Wed Nov 11 18:54:49 2009
@@ -1,5 +1,5 @@
 import py
-from py.compat import optparse
+import optparse
 from pypy.tool.pairtype import extendabletype
 
 SUPPRESS_USAGE = optparse.SUPPRESS_USAGE

Modified: pypy/trunk/pypy/config/makerestdoc.py
==============================================================================
--- pypy/trunk/pypy/config/makerestdoc.py	(original)
+++ pypy/trunk/pypy/config/makerestdoc.py	Wed Nov 11 18:54:49 2009
@@ -1,13 +1,13 @@
 import py
-from py.__.rest.rst import Rest, Paragraph, Strong, ListItem, Title, Link
-from py.__.rest.rst import Directive, Em, Quote, Text
+from pypy.tool.rest.rst import Rest, Paragraph, Strong, ListItem, Title, Link
+from pypy.tool.rest.rst import Directive, Em, Quote, Text
 
 from pypy.config.config import ChoiceOption, BoolOption, StrOption, IntOption
 from pypy.config.config import FloatOption, OptionDescription, Option, Config
 from pypy.config.config import ArbitraryOption, DEFAULT_OPTION_NAME
 from pypy.config.config import _getnegation
 
-configdocdir = py.magic.autopath().dirpath().dirpath().join("doc", "config")
+configdocdir = py.path.local(__file__).dirpath().dirpath().join("doc", "config")
 
 def get_fullpath(opt, path):
     if path:
@@ -212,7 +212,7 @@
     """ register a :config: ReST link role for use in documentation. """
     try:
         from docutils.parsers.rst import directives, states, roles
-        from py.__.rest.directive import register_linkrole
+        from pypy.tool.rest.directive import register_linkrole
     except ImportError:
         return
     # enable :config: link role

Modified: pypy/trunk/pypy/config/pypyoption.py
==============================================================================
--- pypy/trunk/pypy/config/pypyoption.py	(original)
+++ pypy/trunk/pypy/config/pypyoption.py	Wed Nov 11 18:54:49 2009
@@ -5,7 +5,7 @@
 from pypy.config.config import ChoiceOption, StrOption, to_optparse, Config
 from pypy.config.config import ConflictConfigError
 
-modulepath = py.magic.autopath().dirpath().dirpath().join("module")
+modulepath = py.path.local(__file__).dirpath().dirpath().join("module")
 all_modules = [p.basename for p in modulepath.listdir()
                if p.check(dir=True, dotfile=False)
                and p.join('__init__.py').check()]

Modified: pypy/trunk/pypy/config/test/test_makerestdoc.py
==============================================================================
--- pypy/trunk/pypy/config/test/test_makerestdoc.py	(original)
+++ pypy/trunk/pypy/config/test/test_makerestdoc.py	Wed Nov 11 18:54:49 2009
@@ -1,7 +1,7 @@
 from pypy.config.config import *
 from pypy.config.makerestdoc import make_cmdline_overview
 
-from py.__.misc.rest import process as restcheck
+from pypy.tool.rest.rest import process as restcheck
 
 tempdir = py.test.ensuretemp('config')
 

Modified: pypy/trunk/pypy/config/test/test_pypyoption.py
==============================================================================
--- pypy/trunk/pypy/config/test/test_pypyoption.py	(original)
+++ pypy/trunk/pypy/config/test/test_pypyoption.py	Wed Nov 11 18:54:49 2009
@@ -3,7 +3,7 @@
 from pypy.config.config import Config, ConfigError
 from pypy.config.translationoption import set_opt_level
 
-thisdir = py.magic.autopath().dirpath()
+thisdir = py.path.local(__file__).dirpath()
 
 def test_required():
     conf = get_pypy_config()

Modified: pypy/trunk/pypy/conftest.py
==============================================================================
--- pypy/trunk/pypy/conftest.py	(original)
+++ pypy/trunk/pypy/conftest.py	Wed Nov 11 18:54:49 2009
@@ -1,5 +1,5 @@
 import py, sys, os
-from py.__.test.outcome import Failed
+from py.impl.test.outcome import Failed
 from pypy.interpreter.gateway import app2interp_temp
 from pypy.interpreter.error import OperationError
 from pypy.tool.pytest import appsupport
@@ -9,18 +9,14 @@
 from pypy.tool.udir import udir
 from pypy.tool.autopath import pypydir
 
-rootdir = py.magic.autopath().dirpath()
+rootdir = py.path.local(__file__).dirpath()
 
 # pytest settings
 pytest_plugins = "resultlog",
 rsyncdirs = ['.', '../lib-python', '../demo']
 rsyncignore = ['_cache']
 
-# XXX workaround for a py.test bug clashing with lib/py symlink
-# do we really need the latter?
-empty_conftest = type(sys)('conftest')
-empty_conftest.__file__ = "?"
-sys.modules['pypy.lib.py.conftest'] = empty_conftest
+collect_ignore = ['./lib/py']
 
 # PyPy's command line extra options (these are added 
 # to py.test's standard options) 
@@ -36,7 +32,7 @@
 option = py.test.config.option
 
 def pytest_addoption(parser):
-    group = parser.addgroup("pypy options")
+    group = parser.getgroup("pypy options")
     group.addoption('--view', action="store_true", dest="view", default=False,
            help="view translation tests' flow graphs with Pygame")
     group.addoption('-A', '--runappdirect', action="store_true",

Modified: pypy/trunk/pypy/doc/config/autopath.py
==============================================================================
--- pypy/trunk/pypy/doc/config/autopath.py	(original)
+++ pypy/trunk/pypy/doc/config/autopath.py	Wed Nov 11 18:54:49 2009
@@ -37,8 +37,7 @@
         partdir = head
         head, tail = os.path.split(head)
         if tail == part:
-            # check if "../py/__init__.py" exists
-            checkfile = os.path.join(partdir, os.pardir, 'py', '__init__.py')
+            checkfile = os.path.join(partdir, os.pardir, 'pypy', '__init__.py')
             if not os.path.exists(checkfile):
                 error = "Cannot find %r" % (os.path.normpath(checkfile),)
             break

Modified: pypy/trunk/pypy/doc/config/generate.py
==============================================================================
--- pypy/trunk/pypy/doc/config/generate.py	(original)
+++ pypy/trunk/pypy/doc/config/generate.py	Wed Nov 11 18:54:49 2009
@@ -3,7 +3,7 @@
 from pypy.config import pypyoption, translationoption, config
 from pypy.doc.config.confrest import all_optiondescrs
 
-thisdir = py.magic.autopath().dirpath()
+thisdir = py.path.local(__file__).dirpath()
 
 for descr in all_optiondescrs:
     prefix = descr._name

Modified: pypy/trunk/pypy/doc/config/makemodules.py
==============================================================================
--- pypy/trunk/pypy/doc/config/makemodules.py	(original)
+++ pypy/trunk/pypy/doc/config/makemodules.py	Wed Nov 11 18:54:49 2009
@@ -2,7 +2,7 @@
 import py
 from pypy.config import pypyoption, translationoption, config
 
-thisdir = py.magic.autopath().dirpath()
+thisdir = py.path.local(__file__).dirpath()
 
 if __name__ == '__main__':
     c = config.Config(pypyoption.pypy_optiondescription).usemodules

Modified: pypy/trunk/pypy/doc/confrest.py
==============================================================================
--- pypy/trunk/pypy/doc/confrest.py	(original)
+++ pypy/trunk/pypy/doc/confrest.py	Wed Nov 11 18:54:49 2009
@@ -34,7 +34,7 @@
         
 
 class Project(Project): 
-    mydir = py.magic.autopath().dirpath()
+    mydir = py.path.local(__file__).dirpath()
 
     title = "PyPy" 
     stylesheet = 'style.css'

Modified: pypy/trunk/pypy/doc/confrest_oldpy.py
==============================================================================
--- pypy/trunk/pypy/doc/confrest_oldpy.py	(original)
+++ pypy/trunk/pypy/doc/confrest_oldpy.py	Wed Nov 11 18:54:49 2009
@@ -1,6 +1,6 @@
 import py
-from py.__.misc.rest import convert_rest_html, strip_html_header 
-from py.__.misc.difftime import worded_time 
+from pypy.tool.rest.rest import convert_rest_html, strip_html_header 
+from pypy.tool.difftime import worded_time 
 
 html = py.xml.html 
 
@@ -104,7 +104,7 @@
     
 
 class Project:
-    mydir = py.magic.autopath().dirpath()
+    mydir = py.path.local(__file__).dirpath()
     title = "py lib"
     prefix_title = ""  # we have a logo already containing "py lib"
     encoding = 'latin1' 
@@ -186,7 +186,9 @@
                      id = 'docinfoline'))
 
         page.contentspace.append(py.xml.raw(content))
-        outputpath.ensure().write(page.unicode().encode(encoding)) 
+        f = outputpath.open('w')
+        f.write(page.unicode().encode(encoding))
+        f.close()
 
 # XXX this function comes from apigen/linker.py, put it
 # somewhere in py lib 

Modified: pypy/trunk/pypy/doc/conftest.py
==============================================================================
--- pypy/trunk/pypy/doc/conftest.py	(original)
+++ pypy/trunk/pypy/doc/conftest.py	Wed Nov 11 18:54:49 2009
@@ -1,12 +1,12 @@
 import py
 
 from pypy.config.makerestdoc import register_config_role 
-docdir = py.magic.autopath().dirpath()
+docdir = py.path.local(__file__).dirpath()
 
 pytest_plugins = "pytest_restdoc"
 
 def pytest_addoption(parser):
-    group = parser.addgroup("pypy-doc options")
+    group = parser.getgroup("pypy-doc options")
     group.addoption('--pypy-doctests', action="store_true",
            dest="pypy_doctests", default=False, 
            help="enable doctests in .txt files")

Modified: pypy/trunk/pypy/doc/statistic/confrest.py
==============================================================================
--- pypy/trunk/pypy/doc/statistic/confrest.py	(original)
+++ pypy/trunk/pypy/doc/statistic/confrest.py	Wed Nov 11 18:54:49 2009
@@ -1,5 +1,5 @@
 import py
-from py.__.doc.confrest import *
+from pypy.doc.confrest import *
 
 class PyPyPage(Page): 
     def fill_menubar(self):
@@ -17,7 +17,7 @@
             " ", id="menubar")
 
 class Project(Project): 
-    mydir = py.magic.autopath().dirpath()
+    mydir = py.path.local(__file__).dirpath()
     title = "PyPy" 
     stylesheet = 'style.css'
     encoding = 'latin1' 

Modified: pypy/trunk/pypy/doc/test_redirections.py
==============================================================================
--- pypy/trunk/pypy/doc/test_redirections.py	(original)
+++ pypy/trunk/pypy/doc/test_redirections.py	Wed Nov 11 18:54:49 2009
@@ -1,6 +1,6 @@
 
 import py 
-redir = py.magic.autopath().dirpath('redirections') 
+redir = py.path.local(__file__).dirpath('redirections') 
 
 def checkexist(path):
     print "checking", path

Modified: pypy/trunk/pypy/doc/tool/makeref.py
==============================================================================
--- pypy/trunk/pypy/doc/tool/makeref.py	(original)
+++ pypy/trunk/pypy/doc/tool/makeref.py	Wed Nov 11 18:54:49 2009
@@ -1,6 +1,6 @@
 
 import py
-py.magic.autopath()
+py.path.local(__file__)
 import pypy
 pypydir = py.path.local(pypy.__file__).dirpath()
 distdir = pypydir.dirpath() 

Modified: pypy/trunk/pypy/doc/tool/mydot.py
==============================================================================
--- pypy/trunk/pypy/doc/tool/mydot.py	(original)
+++ pypy/trunk/pypy/doc/tool/mydot.py	Wed Nov 11 18:54:49 2009
@@ -62,7 +62,7 @@
 
 
 if __name__ == '__main__':
-    from py.compat import optparse
+    import optparse
     parser = optparse.OptionParser()
     parser.add_option("-T", dest="format",
                       help="output format")

Modified: pypy/trunk/pypy/jit/backend/autopath.py
==============================================================================
--- pypy/trunk/pypy/jit/backend/autopath.py	(original)
+++ pypy/trunk/pypy/jit/backend/autopath.py	Wed Nov 11 18:54:49 2009
@@ -37,8 +37,7 @@
         partdir = head
         head, tail = os.path.split(head)
         if tail == part:
-            # check if "../py/__init__.py" exists
-            checkfile = os.path.join(partdir, os.pardir, 'py', '__init__.py')
+            checkfile = os.path.join(partdir, os.pardir, 'pypy', '__init__.py')
             if not os.path.exists(checkfile):
                 error = "Cannot find %r" % (os.path.normpath(checkfile),)
             break

Modified: pypy/trunk/pypy/jit/backend/test/conftest.py
==============================================================================
--- pypy/trunk/pypy/jit/backend/test/conftest.py	(original)
+++ pypy/trunk/pypy/jit/backend/test/conftest.py	Wed Nov 11 18:54:49 2009
@@ -3,7 +3,7 @@
 option = py.test.config.option
 
 def pytest_addoption(parser):
-    group = parser.addgroup('random test options')
+    group = parser.getgroup('random test options')
     group.addoption('--random-seed', action="store", type="int",
                     default=random.randrange(0, 10000),
                     dest="randomseed",

Modified: pypy/trunk/pypy/jit/backend/x86/autopath.py
==============================================================================
--- pypy/trunk/pypy/jit/backend/x86/autopath.py	(original)
+++ pypy/trunk/pypy/jit/backend/x86/autopath.py	Wed Nov 11 18:54:49 2009
@@ -21,7 +21,6 @@
 
 """
 
-
 def __dirinfo(part):
     """ return (partdir, this_dir) and insert parent of partdir
     into sys.path.  If the parent directories don't have the part
@@ -33,13 +32,31 @@
     except NameError:
         head = this_dir = os.path.realpath(os.path.dirname(sys.argv[0]))
 
+    error = None
     while head:
         partdir = head
         head, tail = os.path.split(head)
         if tail == part:
+            checkfile = os.path.join(partdir, os.pardir, 'pypy', '__init__.py')
+            if not os.path.exists(checkfile):
+                error = "Cannot find %r" % (os.path.normpath(checkfile),)
             break
     else:
-        raise EnvironmentError, "'%s' missing in '%r'" % (partdir, this_dir)
+        error = "Cannot find the parent directory %r of the path %r" % (
+            partdir, this_dir)
+    if not error:
+        # check for bogus end-of-line style (e.g. files checked out on
+        # Windows and moved to Unix)
+        f = open(__file__.replace('.pyc', '.py'), 'r')
+        data = f.read()
+        f.close()
+        if data.endswith('\r\n') or data.endswith('\r'):
+            error = ("Bad end-of-line style in the .py files. Typically "
+                     "caused by a zip file or a checkout done on Windows and "
+                     "moved to Unix or vice-versa.")
+    if error:
+        raise EnvironmentError("Invalid source tree - bogus checkout! " +
+                               error)
     
     pypy_root = os.path.join(head, '')
     try:
@@ -109,6 +126,9 @@
 # set guaranteed attributes
 
 pypydir, this_dir = __dirinfo('pypy')
+import py
+libpythondir = str(py.path.local(pypydir).dirpath().join('lib-python', '2.5.2'))
+libpythonmodifieddir = str(py.path.local(libpythondir).dirpath().join('modified-2.5.2'))
 
 if __name__ == '__main__':
     __clone()

Modified: pypy/trunk/pypy/jit/conftest.py
==============================================================================
--- pypy/trunk/pypy/jit/conftest.py	(original)
+++ pypy/trunk/pypy/jit/conftest.py	Wed Nov 11 18:54:49 2009
@@ -3,7 +3,7 @@
 option = py.test.config.option
 
 def pytest_addoption(parser):
-    group = parser.addgroup("JIT options")
+    group = parser.getgroup("JIT options")
     group.addoption('--slow', action="store_true",
            default=False, dest="run_slow_tests",
            help="run all the compiled tests (instead of just a few)")

Modified: pypy/trunk/pypy/jit/tl/autopath.py
==============================================================================
--- pypy/trunk/pypy/jit/tl/autopath.py	(original)
+++ pypy/trunk/pypy/jit/tl/autopath.py	Wed Nov 11 18:54:49 2009
@@ -21,7 +21,6 @@
 
 """
 
-
 def __dirinfo(part):
     """ return (partdir, this_dir) and insert parent of partdir
     into sys.path.  If the parent directories don't have the part
@@ -33,13 +32,31 @@
     except NameError:
         head = this_dir = os.path.realpath(os.path.dirname(sys.argv[0]))
 
+    error = None
     while head:
         partdir = head
         head, tail = os.path.split(head)
         if tail == part:
+            checkfile = os.path.join(partdir, os.pardir, 'pypy', '__init__.py')
+            if not os.path.exists(checkfile):
+                error = "Cannot find %r" % (os.path.normpath(checkfile),)
             break
     else:
-        raise EnvironmentError, "'%s' missing in '%r'" % (partdir, this_dir)
+        error = "Cannot find the parent directory %r of the path %r" % (
+            partdir, this_dir)
+    if not error:
+        # check for bogus end-of-line style (e.g. files checked out on
+        # Windows and moved to Unix)
+        f = open(__file__.replace('.pyc', '.py'), 'r')
+        data = f.read()
+        f.close()
+        if data.endswith('\r\n') or data.endswith('\r'):
+            error = ("Bad end-of-line style in the .py files. Typically "
+                     "caused by a zip file or a checkout done on Windows and "
+                     "moved to Unix or vice-versa.")
+    if error:
+        raise EnvironmentError("Invalid source tree - bogus checkout! " +
+                               error)
     
     pypy_root = os.path.join(head, '')
     try:
@@ -109,6 +126,9 @@
 # set guaranteed attributes
 
 pypydir, this_dir = __dirinfo('pypy')
+import py
+libpythondir = str(py.path.local(pypydir).dirpath().join('lib-python', '2.5.2'))
+libpythonmodifieddir = str(py.path.local(libpythondir).dirpath().join('modified-2.5.2'))
 
 if __name__ == '__main__':
     __clone()

Modified: pypy/trunk/pypy/jit/tl/conftest.py
==============================================================================
--- pypy/trunk/pypy/jit/tl/conftest.py	(original)
+++ pypy/trunk/pypy/jit/tl/conftest.py	Wed Nov 11 18:54:49 2009
@@ -1,5 +1,5 @@
 def pytest_addoption(parser):
-    group = parser.addgroup("pypyjit.py options")
+    group = parser.getgroup("pypyjit.py options")
     group.addoption('--ootype', action="store_true", dest="ootype",
                     default=False,
                     help="use ootype")

Modified: pypy/trunk/pypy/jit/tl/spli/autopath.py
==============================================================================
--- pypy/trunk/pypy/jit/tl/spli/autopath.py	(original)
+++ pypy/trunk/pypy/jit/tl/spli/autopath.py	Wed Nov 11 18:54:49 2009
@@ -37,8 +37,7 @@
         partdir = head
         head, tail = os.path.split(head)
         if tail == part:
-            # check if "../py/__init__.py" exists
-            checkfile = os.path.join(partdir, os.pardir, 'py', '__init__.py')
+            checkfile = os.path.join(partdir, os.pardir, 'pypy', '__init__.py')
             if not os.path.exists(checkfile):
                 error = "Cannot find %r" % (os.path.normpath(checkfile),)
             break

Modified: pypy/trunk/pypy/jit/tl/targettlc.py
==============================================================================
--- pypy/trunk/pypy/jit/tl/targettlc.py	(original)
+++ pypy/trunk/pypy/jit/tl/targettlc.py	Wed Nov 11 18:54:49 2009
@@ -1,6 +1,6 @@
 import time
 import py
-py.magic.autopath()
+py.path.local(__file__)
 from pypy.jit.tl.tlc import interp, interp_nonjit, ConstantPool
 from pypy.jit.metainterp.policy import JitPolicy
 from pypy.jit.backend.hlinfo import highleveljitinfo

Modified: pypy/trunk/pypy/jit/tl/targettlr.py
==============================================================================
--- pypy/trunk/pypy/jit/tl/targettlr.py	(original)
+++ pypy/trunk/pypy/jit/tl/targettlr.py	Wed Nov 11 18:54:49 2009
@@ -1,5 +1,5 @@
 import py
-py.magic.autopath()
+py.path.local(__file__)
 from pypy.jit.tl.tlr import interpret
 from pypy.jit.backend.hlinfo import highleveljitinfo
 

Modified: pypy/trunk/pypy/jit/tl/test/test_pypyjit.py
==============================================================================
--- pypy/trunk/pypy/jit/tl/test/test_pypyjit.py	(original)
+++ pypy/trunk/pypy/jit/tl/test/test_pypyjit.py	Wed Nov 11 18:54:49 2009
@@ -21,7 +21,7 @@
 def check_crasher(func_name):
     try:
         JIT_EXECUTABLE.sysexec(CRASH_FILE, func_name)
-    except py.__.process.cmdexec.ExecutionFailed, e:
+    except py.impl.process.cmdexec.ExecutionFailed, e:
         print "stderr"
         print "------"
         print e.err

Modified: pypy/trunk/pypy/jit/tl/tla/targettla.py
==============================================================================
--- pypy/trunk/pypy/jit/tl/tla/targettla.py	(original)
+++ pypy/trunk/pypy/jit/tl/tla/targettla.py	Wed Nov 11 18:54:49 2009
@@ -1,5 +1,5 @@
 import py
-py.magic.autopath()
+py.path.local(__file__)
 from pypy.jit.tl.tla import tla
 
 

Modified: pypy/trunk/pypy/jit/tl/tla/tla_assembler.py
==============================================================================
--- pypy/trunk/pypy/jit/tl/tla/tla_assembler.py	(original)
+++ pypy/trunk/pypy/jit/tl/tla/tla_assembler.py	Wed Nov 11 18:54:49 2009
@@ -2,7 +2,7 @@
 
 import sys
 import py
-py.magic.autopath()
+py.path.local(__file__)
 from pypy.jit.tl.tla.test_tla import assemble
 
 def usage():

Modified: pypy/trunk/pypy/jit/tool/autopath.py
==============================================================================
--- pypy/trunk/pypy/jit/tool/autopath.py	(original)
+++ pypy/trunk/pypy/jit/tool/autopath.py	Wed Nov 11 18:54:49 2009
@@ -37,8 +37,7 @@
         partdir = head
         head, tail = os.path.split(head)
         if tail == part:
-            # check if "../py/__init__.py" exists
-            checkfile = os.path.join(partdir, os.pardir, 'py', '__init__.py')
+            checkfile = os.path.join(partdir, os.pardir, 'pypy', '__init__.py')
             if not os.path.exists(checkfile):
                 error = "Cannot find %r" % (os.path.normpath(checkfile),)
             break

Modified: pypy/trunk/pypy/lang/gameboy/debug/gameboy_debug_entry_point.py
==============================================================================
--- pypy/trunk/pypy/lang/gameboy/debug/gameboy_debug_entry_point.py	(original)
+++ pypy/trunk/pypy/lang/gameboy/debug/gameboy_debug_entry_point.py	Wed Nov 11 18:54:49 2009
@@ -11,7 +11,7 @@
 
 # ------------------------------------------------------------------------------
 
-ROM_PATH    = str(py.magic.autopath().dirpath().dirpath())+"/rom"
+ROM_PATH    = str(py.path.local(__file__).dirpath().dirpath())+"/rom"
 # filename    = ROM_PATH + "/rom9/rom9.gb"
 filename = "/home/tverwaes/roms/SuperMarioLand.gb"
 SOCKET_PORT = 55682
@@ -83,7 +83,7 @@
 
 # ------------------------------------------------------------------------------ 
     
-MARIO_DIR =  str(py.magic.autopath().dirpath().dirpath()\
+MARIO_DIR =  str(py.path.local(__file__).dirpath().dirpath()\
                         .dirpath().dirpath()\
                         .dirpath().dirpath()) + "/mario"
 

Modified: pypy/trunk/pypy/lang/gameboy/profiling/evaluation/gameboy_evaluation_target.py
==============================================================================
--- pypy/trunk/pypy/lang/gameboy/profiling/evaluation/gameboy_evaluation_target.py	(original)
+++ pypy/trunk/pypy/lang/gameboy/profiling/evaluation/gameboy_evaluation_target.py	Wed Nov 11 18:54:49 2009
@@ -8,7 +8,7 @@
 
 
 debug.DEBUG_PRINT_LOGS = False
-ROM_PATH = str(py.magic.autopath().dirpath().dirpath().dirpath())+"/rom"
+ROM_PATH = str(py.path.local(__file__).dirpath().dirpath().dirpath())+"/rom"
 
 filename = ""
 if len(sys.argv) > 1:

Modified: pypy/trunk/pypy/lang/gameboy/profiling/gameboyTest.py
==============================================================================
--- pypy/trunk/pypy/lang/gameboy/profiling/gameboyTest.py	(original)
+++ pypy/trunk/pypy/lang/gameboy/profiling/gameboyTest.py	Wed Nov 11 18:54:49 2009
@@ -7,7 +7,7 @@
 
 
 debug.DEBUG_PRINT_LOGS = False
-ROM_PATH = str(py.magic.autopath().dirpath().dirpath())+"/rom"
+ROM_PATH = str(py.path.local(__file__).dirpath().dirpath())+"/rom"
 
 filename = ""
 if len(sys.argv) > 1:

Modified: pypy/trunk/pypy/lang/gameboy/test/test_cartridge.py
==============================================================================
--- pypy/trunk/pypy/lang/gameboy/test/test_cartridge.py	(original)
+++ pypy/trunk/pypy/lang/gameboy/test/test_cartridge.py	Wed Nov 11 18:54:49 2009
@@ -8,7 +8,7 @@
 def mapToByte(value):
         return ord(value) & 0xFF
 
-ROM_PATH = str(py.magic.autopath().dirpath().dirpath())+"/rom"
+ROM_PATH = str(py.path.local(__file__).dirpath().dirpath())+"/rom"
 CONTENT = "abcdefghijklmnopqrstuvwxyz1234567890"
 
 MAPPED_CONTENT = map_to_byte(CONTENT)

Modified: pypy/trunk/pypy/lang/gameboy/test/test_rom.py
==============================================================================
--- pypy/trunk/pypy/lang/gameboy/test/test_rom.py	(original)
+++ pypy/trunk/pypy/lang/gameboy/test/test_rom.py	Wed Nov 11 18:54:49 2009
@@ -6,7 +6,7 @@
 from pypy.lang.gameboy.gameboy import *
 
 
-ROM_PATH = str(py.magic.autopath().dirpath().dirpath())+"/rom"
+ROM_PATH = str(py.path.local(__file__).dirpath().dirpath())+"/rom"
 EMULATION_CYCLES = 64
 
 # ------------------------------------------------------------------------------

Modified: pypy/trunk/pypy/lang/gameboy/tool/autopath.py
==============================================================================
--- pypy/trunk/pypy/lang/gameboy/tool/autopath.py	(original)
+++ pypy/trunk/pypy/lang/gameboy/tool/autopath.py	Wed Nov 11 18:54:49 2009
@@ -37,8 +37,7 @@
         partdir = head
         head, tail = os.path.split(head)
         if tail == part:
-            # check if "../py/__init__.py" exists
-            checkfile = os.path.join(partdir, os.pardir, 'py', '__init__.py')
+            checkfile = os.path.join(partdir, os.pardir, 'pypy', '__init__.py')
             if not os.path.exists(checkfile):
                 error = "Cannot find %r" % (os.path.normpath(checkfile),)
             break

Modified: pypy/trunk/pypy/lang/js/autopath.py
==============================================================================
--- pypy/trunk/pypy/lang/js/autopath.py	(original)
+++ pypy/trunk/pypy/lang/js/autopath.py	Wed Nov 11 18:54:49 2009
@@ -37,8 +37,7 @@
         partdir = head
         head, tail = os.path.split(head)
         if tail == part:
-            # check if "../py/__init__.py" exists
-            checkfile = os.path.join(partdir, os.pardir, 'py', '__init__.py')
+            checkfile = os.path.join(partdir, os.pardir, 'pypy', '__init__.py')
             if not os.path.exists(checkfile):
                 error = "Cannot find %r" % (os.path.normpath(checkfile),)
             break

Modified: pypy/trunk/pypy/lang/js/jsparser.py
==============================================================================
--- pypy/trunk/pypy/lang/js/jsparser.py	(original)
+++ pypy/trunk/pypy/lang/js/jsparser.py	Wed Nov 11 18:54:49 2009
@@ -2,7 +2,7 @@
 from pypy.rlib.parsing.parsing import ParseError, Rule
 import py
 
-GFILE = py.magic.autopath().dirpath().join("jsgrammar.txt")
+GFILE = py.path.local(__file__).dirpath().join("jsgrammar.txt")
 
 try:
     t = GFILE.read(mode='U')

Modified: pypy/trunk/pypy/lang/js/test/ecma/conftest.py
==============================================================================
--- pypy/trunk/pypy/lang/js/test/ecma/conftest.py	(original)
+++ pypy/trunk/pypy/lang/js/test/ecma/conftest.py	Wed Nov 11 18:54:49 2009
@@ -2,13 +2,13 @@
 from pypy.lang.js.interpreter import *
 from pypy.lang.js.jsobj import W_Array, JsBaseExcept
 from pypy.rlib.parsing.parsing import ParseError
-from py.__.test.outcome import Failed, ExceptionFailure
+from py.impl.test.outcome import Failed, ExceptionFailure
 import pypy.lang.js as js
 from pypy.lang.js import interpreter
 
 interpreter.TEST = True
 
-rootdir = py.magic.autopath().dirpath()
+rootdir = py.path.local(__file__).dirpath()
 exclusionlist = ['shell.js', 'browser.js']
 
 def pytest_addoption(parser):

Modified: pypy/trunk/pypy/lang/js/test/test_interactive.py
==============================================================================
--- pypy/trunk/pypy/lang/js/test/test_interactive.py	(original)
+++ pypy/trunk/pypy/lang/js/test/test_interactive.py	Wed Nov 11 18:54:49 2009
@@ -20,7 +20,7 @@
         return child
 
     def spawn(self, argv):
-        return self._spawn(str(py.magic.autopath().dirpath().dirpath().join('js_interactive.py')), argv)
+        return self._spawn(str(py.path.local(__file__).dirpath().dirpath().join('js_interactive.py')), argv)
     
     def prompt_send(self, message):
         self.child.expect('js>')

Modified: pypy/trunk/pypy/lang/js/test/test_parser.py
==============================================================================
--- pypy/trunk/pypy/lang/js/test/test_parser.py	(original)
+++ pypy/trunk/pypy/lang/js/test/test_parser.py	Wed Nov 11 18:54:49 2009
@@ -9,7 +9,7 @@
 from pypy import conftest
 import sys
 
-GFILE = py.magic.autopath().dirpath().dirpath().join("jsgrammar.txt")
+GFILE = py.path.local(__file__).dirpath().dirpath().join("jsgrammar.txt")
 
 try:
     t = GFILE.read(mode='U')

Modified: pypy/trunk/pypy/lang/prolog/interpreter/autopath.py
==============================================================================
--- pypy/trunk/pypy/lang/prolog/interpreter/autopath.py	(original)
+++ pypy/trunk/pypy/lang/prolog/interpreter/autopath.py	Wed Nov 11 18:54:49 2009
@@ -37,8 +37,7 @@
         partdir = head
         head, tail = os.path.split(head)
         if tail == part:
-            # check if "../py/__init__.py" exists
-            checkfile = os.path.join(partdir, os.pardir, 'py', '__init__.py')
+            checkfile = os.path.join(partdir, os.pardir, 'pypy', '__init__.py')
             if not os.path.exists(checkfile):
                 error = "Cannot find %r" % (os.path.normpath(checkfile),)
             break

Modified: pypy/trunk/pypy/lang/prolog/interpreter/conftest.py
==============================================================================
--- pypy/trunk/pypy/lang/prolog/interpreter/conftest.py	(original)
+++ pypy/trunk/pypy/lang/prolog/interpreter/conftest.py	Wed Nov 11 18:54:49 2009
@@ -1,6 +1,6 @@
 import py, sys
 
-rootdir = py.magic.autopath().dirpath()
+rootdir = py.path.local(__file__).dirpath()
 
 Option = py.test.config.Option
 

Modified: pypy/trunk/pypy/lang/prolog/interpreter/interactive.py
==============================================================================
--- pypy/trunk/pypy/lang/prolog/interpreter/interactive.py	(original)
+++ pypy/trunk/pypy/lang/prolog/interpreter/interactive.py	Wed Nov 11 18:54:49 2009
@@ -7,7 +7,7 @@
 
 import py
 import sys
-#sys.path.append(str(py.magic.autopath().dirpath().dirpath()))
+#sys.path.append(str(py.path.local(__file__).dirpath().dirpath()))
 
 from pypy.rlib.parsing.parsing import ParseError
 from pypy.rlib.parsing.deterministic import LexerError

Modified: pypy/trunk/pypy/lang/prolog/interpreter/parsing.py
==============================================================================
--- pypy/trunk/pypy/lang/prolog/interpreter/parsing.py	(original)
+++ pypy/trunk/pypy/lang/prolog/interpreter/parsing.py	Wed Nov 11 18:54:49 2009
@@ -3403,7 +3403,7 @@
 # generated code between this line and its other occurence
  
 if __name__ == '__main__':
-    f = py.magic.autopath()
+    f = py.path.local(__file__)
     oldcontent = f.read()
     s = "# GENERATED CODE BETWEEN THIS LINE AND ITS OTHER OCCURENCE\n".lower()
     pre, gen, after = oldcontent.split(s)

Modified: pypy/trunk/pypy/lang/scheme/autopath.py
==============================================================================
--- pypy/trunk/pypy/lang/scheme/autopath.py	(original)
+++ pypy/trunk/pypy/lang/scheme/autopath.py	Wed Nov 11 18:54:49 2009
@@ -37,8 +37,7 @@
         partdir = head
         head, tail = os.path.split(head)
         if tail == part:
-            # check if "../py/__init__.py" exists
-            checkfile = os.path.join(partdir, os.pardir, 'py', '__init__.py')
+            checkfile = os.path.join(partdir, os.pardir, 'pypy', '__init__.py')
             if not os.path.exists(checkfile):
                 error = "Cannot find %r" % (os.path.normpath(checkfile),)
             break

Modified: pypy/trunk/pypy/lang/scheme/execution.py
==============================================================================
--- pypy/trunk/pypy/lang/scheme/execution.py	(original)
+++ pypy/trunk/pypy/lang/scheme/execution.py	Wed Nov 11 18:54:49 2009
@@ -19,7 +19,7 @@
         except (TypeError, AttributeError):
             pass
 
-de_file = py.magic.autopath().dirpath().join("r5rs_derived_expr.ss")
+de_file = py.path.local(__file__).dirpath().join("r5rs_derived_expr.ss")
 de_code = de_file.read()
 de_expr_lst = parse(de_code)
 

Modified: pypy/trunk/pypy/lang/scheme/test/test_interactive.py
==============================================================================
--- pypy/trunk/pypy/lang/scheme/test/test_interactive.py	(original)
+++ pypy/trunk/pypy/lang/scheme/test/test_interactive.py	Wed Nov 11 18:54:49 2009
@@ -20,7 +20,7 @@
         return child
 
     def spawn(self, argv=[]):
-        path = py.magic.autopath()/".."/".."/"interactive.py"
+        path = py.path.local(__file__)/".."/".."/"interactive.py"
         return self._spawn(str(path), argv)
 
     def test_interactive(self):

Modified: pypy/trunk/pypy/lang/smalltalk/test/test_miniimage.py
==============================================================================
--- pypy/trunk/pypy/lang/smalltalk/test/test_miniimage.py	(original)
+++ pypy/trunk/pypy/lang/smalltalk/test/test_miniimage.py	Wed Nov 11 18:54:49 2009
@@ -12,7 +12,7 @@
 
 def setup_module(module, filename='mini.image'):
     space = objspace.ObjSpace()
-    module.mini_image = py.magic.autopath().dirpath().dirpath().join(filename)
+    module.mini_image = py.path.local(__file__).dirpath().dirpath().join(filename)
     module.reader = open_miniimage(space)
     reader.initialize()
     module.image = squeakimage.SqueakImage()

Modified: pypy/trunk/pypy/lang/smalltalk/tool/analyseimage.py
==============================================================================
--- pypy/trunk/pypy/lang/smalltalk/tool/analyseimage.py	(original)
+++ pypy/trunk/pypy/lang/smalltalk/tool/analyseimage.py	Wed Nov 11 18:54:49 2009
@@ -6,7 +6,7 @@
 from pypy.lang.smalltalk import interpreter 
 import sys
 
-mini_image = py.magic.autopath().dirpath().dirpath().join('mini.image')
+mini_image = py.path.local(__file__).dirpath().dirpath().join('mini.image')
 
 def get_miniimage(space):
     return squeakimage.ImageReader(space, squeakimage.Stream(mini_image.open()))

Modified: pypy/trunk/pypy/lang/smalltalk/tool/autopath.py
==============================================================================
--- pypy/trunk/pypy/lang/smalltalk/tool/autopath.py	(original)
+++ pypy/trunk/pypy/lang/smalltalk/tool/autopath.py	Wed Nov 11 18:54:49 2009
@@ -37,8 +37,7 @@
         partdir = head
         head, tail = os.path.split(head)
         if tail == part:
-            # check if "../py/__init__.py" exists
-            checkfile = os.path.join(partdir, os.pardir, 'py', '__init__.py')
+            checkfile = os.path.join(partdir, os.pardir, 'pypy', '__init__.py')
             if not os.path.exists(checkfile):
                 error = "Cannot find %r" % (os.path.normpath(checkfile),)
             break

Modified: pypy/trunk/pypy/lib/app_test/ctypes_tests/conftest.py
==============================================================================
--- pypy/trunk/pypy/lib/app_test/ctypes_tests/conftest.py	(original)
+++ pypy/trunk/pypy/lib/app_test/ctypes_tests/conftest.py	Wed Nov 11 18:54:49 2009
@@ -11,7 +11,7 @@
     from pypy.translator.platform import platform
     from pypy.translator.tool.cbuild import ExternalCompilationInfo
     udir = py.test.ensuretemp('_ctypes_test')
-    cfile = py.magic.autopath().dirpath().join("_ctypes_test.c")
+    cfile = py.path.local(__file__).dirpath().join("_ctypes_test.c")
 
     if sys.platform == 'win32':
         libraries = ['oleaut32']

Modified: pypy/trunk/pypy/lib/distributed/socklayer.py
==============================================================================
--- pypy/trunk/pypy/lib/distributed/socklayer.py	(original)
+++ pypy/trunk/pypy/lib/distributed/socklayer.py	Wed Nov 11 18:54:49 2009
@@ -1,7 +1,7 @@
 
 import py
 from socket import socket
-from py.__.green.msgstruct import decodemessage, message
+from py.impl.green.msgstruct import decodemessage, message
 from socket import socket, AF_INET, SOCK_STREAM
 import marshal
 import sys

Modified: pypy/trunk/pypy/lib/test2/autopath.py
==============================================================================
--- pypy/trunk/pypy/lib/test2/autopath.py	(original)
+++ pypy/trunk/pypy/lib/test2/autopath.py	Wed Nov 11 18:54:49 2009
@@ -37,8 +37,7 @@
         partdir = head
         head, tail = os.path.split(head)
         if tail == part:
-            # check if "../py/__init__.py" exists
-            checkfile = os.path.join(partdir, os.pardir, 'py', '__init__.py')
+            checkfile = os.path.join(partdir, os.pardir, 'pypy', '__init__.py')
             if not os.path.exists(checkfile):
                 error = "Cannot find %r" % (os.path.normpath(checkfile),)
             break

Modified: pypy/trunk/pypy/module/__builtin__/test/autopath.py
==============================================================================
--- pypy/trunk/pypy/module/__builtin__/test/autopath.py	(original)
+++ pypy/trunk/pypy/module/__builtin__/test/autopath.py	Wed Nov 11 18:54:49 2009
@@ -37,8 +37,7 @@
         partdir = head
         head, tail = os.path.split(head)
         if tail == part:
-            # check if "../py/__init__.py" exists
-            checkfile = os.path.join(partdir, os.pardir, 'py', '__init__.py')
+            checkfile = os.path.join(partdir, os.pardir, 'pypy', '__init__.py')
             if not os.path.exists(checkfile):
                 error = "Cannot find %r" % (os.path.normpath(checkfile),)
             break

Modified: pypy/trunk/pypy/module/__builtin__/test/test_import.py
==============================================================================
--- pypy/trunk/pypy/module/__builtin__/test/test_import.py	(original)
+++ pypy/trunk/pypy/module/__builtin__/test/test_import.py	Wed Nov 11 18:54:49 2009
@@ -76,7 +76,7 @@
         code = py.code.Source(p.join("x.py").read()).compile()
         s3 = marshal.dumps(code)
         s2 = struct.pack("i", os.stat(str(p.join("x.py")))[stat.ST_MTIME])
-        p.join("x.pyc").write(imp.get_magic() + s2 + s3)
+        p.join("x.pyc").write(imp.get_magic() + s2 + s3, mode='wb')
     else:
         w = space.wrap
         w_modname = w("compiled.x")
@@ -92,7 +92,7 @@
             stream.close()
         if space.config.objspace.usepycfiles:
             # also create a lone .pyc file
-            p.join('lone.pyc').write(p.join('x.pyc').read())
+            p.join('lone.pyc').write(p.join('x.pyc').read(), mode='wb')
 
     return str(root)
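
The explicit mode='wb' in the two writes above keeps the .pyc payload
(magic number, mtime field and marshalled code object) from being mangled
by newline translation on Windows. A hedged sketch of the same pattern as a
standalone helper; the helper name is made up for illustration:

    import imp, marshal, os, stat, struct
    import py

    def write_pyc(dirpath, source):
        # dirpath: a py.path.local directory, source: a .py file inside it
        code = py.code.Source(source.read()).compile()
        mtime = os.stat(str(source))[stat.ST_MTIME]
        data = imp.get_magic() + struct.pack("i", mtime) + marshal.dumps(code)
        # binary mode: the payload is bytes, not text
        dirpath.join(source.purebasename + ".pyc").write(data, mode='wb')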
 

Modified: pypy/trunk/pypy/module/_codecs/test/autopath.py
==============================================================================
--- pypy/trunk/pypy/module/_codecs/test/autopath.py	(original)
+++ pypy/trunk/pypy/module/_codecs/test/autopath.py	Wed Nov 11 18:54:49 2009
@@ -37,8 +37,7 @@
         partdir = head
         head, tail = os.path.split(head)
         if tail == part:
-            # check if "../py/__init__.py" exists
-            checkfile = os.path.join(partdir, os.pardir, 'py', '__init__.py')
+            checkfile = os.path.join(partdir, os.pardir, 'pypy', '__init__.py')
             if not os.path.exists(checkfile):
                 error = "Cannot find %r" % (os.path.normpath(checkfile),)
             break

Modified: pypy/trunk/pypy/module/_file/test/test_file_extra.py
==============================================================================
--- pypy/trunk/pypy/module/_file/test/test_file_extra.py	(original)
+++ pypy/trunk/pypy/module/_file/test/test_file_extra.py	Wed Nov 11 18:54:49 2009
@@ -19,7 +19,7 @@
 
 
 def setup_module(mod):
-    udir.join('sample').write(SAMPLE)
+    udir.join('sample').write(SAMPLE, 'wb')
 
 
 class BaseROTests:

Modified: pypy/trunk/pypy/module/_sre/test/autopath.py
==============================================================================
--- pypy/trunk/pypy/module/_sre/test/autopath.py	(original)
+++ pypy/trunk/pypy/module/_sre/test/autopath.py	Wed Nov 11 18:54:49 2009
@@ -37,8 +37,7 @@
         partdir = head
         head, tail = os.path.split(head)
         if tail == part:
-            # check if "../py/__init__.py" exists
-            checkfile = os.path.join(partdir, os.pardir, 'py', '__init__.py')
+            checkfile = os.path.join(partdir, os.pardir, 'pypy', '__init__.py')
             if not os.path.exists(checkfile):
                 error = "Cannot find %r" % (os.path.normpath(checkfile),)
             break

Modified: pypy/trunk/pypy/module/_sre/test/test_app_sre.py
==============================================================================
--- pypy/trunk/pypy/module/_sre/test/test_app_sre.py	(original)
+++ pypy/trunk/pypy/module/_sre/test/test_app_sre.py	Wed Nov 11 18:54:49 2009
@@ -3,7 +3,7 @@
 from py.test import raises, skip
 from pypy.interpreter.gateway import app2interp_temp
 from pypy.conftest import gettestobjspace, option
-from py.__.test.outcome import Skipped
+from py.impl.test.outcome import Skipped
 
 def init_globals_hack(space):
     space.appexec([space.wrap(autopath.this_dir)], """(this_dir):

Modified: pypy/trunk/pypy/module/bz2/test/test_bz2_compdecomp.py
==============================================================================
--- pypy/trunk/pypy/module/bz2/test/test_bz2_compdecomp.py	(original)
+++ pypy/trunk/pypy/module/bz2/test/test_bz2_compdecomp.py	Wed Nov 11 18:54:49 2009
@@ -25,7 +25,7 @@
 
     mod.TEXT = 'root:x:0:0:root:/root:/bin/bash\nbin:x:1:1:bin:/bin:\ndaemon:x:2:2:daemon:/sbin:\nadm:x:3:4:adm:/var/adm:\nlp:x:4:7:lp:/var/spool/lpd:\nsync:x:5:0:sync:/sbin:/bin/sync\nshutdown:x:6:0:shutdown:/sbin:/sbin/shutdown\nhalt:x:7:0:halt:/sbin:/sbin/halt\nmail:x:8:12:mail:/var/spool/mail:\nnews:x:9:13:news:/var/spool/news:\nuucp:x:10:14:uucp:/var/spool/uucp:\noperator:x:11:0:operator:/root:\ngames:x:12:100:games:/usr/games:\ngopher:x:13:30:gopher:/usr/lib/gopher-data:\nftp:x:14:50:FTP User:/var/ftp:/bin/bash\nnobody:x:65534:65534:Nobody:/home:\npostfix:x:100:101:postfix:/var/spool/postfix:\nniemeyer:x:500:500::/home/niemeyer:/bin/bash\npostgres:x:101:102:PostgreSQL Server:/var/lib/pgsql:/bin/bash\nmysql:x:102:103:MySQL server:/var/lib/mysql:/bin/bash\nwww:x:103:104::/var/www:/bin/false\n'
     mod.DATA = DATA
-    mod.BUGGY_DATA = py.magic.autopath().dirpath().join('data.bz2').read()
+    mod.BUGGY_DATA = py.path.local(__file__).dirpath().join('data.bz2').read()
     mod.decompress = decompress
 
 class AppTestBZ2Compressor(CheckAllocation):

Modified: pypy/trunk/pypy/module/bz2/test/test_bz2_file.py
==============================================================================
--- pypy/trunk/pypy/module/bz2/test/test_bz2_file.py	(original)
+++ pypy/trunk/pypy/module/bz2/test/test_bz2_file.py	Wed Nov 11 18:54:49 2009
@@ -15,12 +15,12 @@
     def create_temp_file(crlf=False):
         f = py.test.ensuretemp("bz2").join("foo")
         data = (DATA, DATA_CRLF)[crlf]
-        f.write(data)
+        f.write(data, 'wb')
 
     def create_broken_temp_file():
         f = py.test.ensuretemp("bz2").join("foo")
         data = DATA[:100]
-        f.write(data)
+        f.write(data, 'wb')
     
     def decompress(data):
         import popen2

Modified: pypy/trunk/pypy/module/pypyjit/test/conftest.py
==============================================================================
--- pypy/trunk/pypy/module/pypyjit/test/conftest.py	(original)
+++ pypy/trunk/pypy/module/pypyjit/test/conftest.py	Wed Nov 11 18:54:49 2009
@@ -1,5 +1,5 @@
 def pytest_addoption(parser):
-    group = parser.addgroup("pypyjit options")
+    group = parser.getgroup("pypyjit options")
     group.addoption("--pypy", action="store", default=None, dest="pypy_c",
                     help="the location of the JIT enabled pypy-c")
 

Modified: pypy/trunk/pypy/module/sys/test/autopath.py
==============================================================================
--- pypy/trunk/pypy/module/sys/test/autopath.py	(original)
+++ pypy/trunk/pypy/module/sys/test/autopath.py	Wed Nov 11 18:54:49 2009
@@ -37,8 +37,7 @@
         partdir = head
         head, tail = os.path.split(head)
         if tail == part:
-            # check if "../py/__init__.py" exists
-            checkfile = os.path.join(partdir, os.pardir, 'py', '__init__.py')
+            checkfile = os.path.join(partdir, os.pardir, 'pypy', '__init__.py')
             if not os.path.exists(checkfile):
                 error = "Cannot find %r" % (os.path.normpath(checkfile),)
             break

Modified: pypy/trunk/pypy/objspace/std/test/test_complexobject.py
==============================================================================
--- pypy/trunk/pypy/objspace/std/test/test_complexobject.py	(original)
+++ pypy/trunk/pypy/objspace/std/test/test_complexobject.py	Wed Nov 11 18:54:49 2009
@@ -67,7 +67,7 @@
                 sys.path.append(%r)
                 import helper
                 return helper
-        """ % (str(py.magic.autopath().dirpath())))
+        """ % (str(py.path.local(__file__).dirpath())))
 
     def test_div(self):
         h = self.helper

Modified: pypy/trunk/pypy/rlib/parsing/ebnfparse.py
==============================================================================
--- pypy/trunk/pypy/rlib/parsing/ebnfparse.py	(original)
+++ pypy/trunk/pypy/rlib/parsing/ebnfparse.py	Wed Nov 11 18:54:49 2009
@@ -2125,7 +2125,7 @@
 # generated code between this line and its other occurence
 
 if __name__ == '__main__':
-    f = py.magic.autopath()
+    f = py.path.local(__file__)
     oldcontent = f.read()
     s = "# GENERATED CODE BETWEEN THIS LINE AND ITS OTHER OCCURENCE\n".lower()
     pre, gen, after = oldcontent.split(s)

Modified: pypy/trunk/pypy/rlib/parsing/makepackrat.py
==============================================================================
--- pypy/trunk/pypy/rlib/parsing/makepackrat.py	(original)
+++ pypy/trunk/pypy/rlib/parsing/makepackrat.py	Wed Nov 11 18:54:49 2009
@@ -718,7 +718,7 @@
 
 
 def test_generate():
-    f = py.magic.autopath().dirpath().join("pypackrat.py")
+    f = py.path.local(__file__).dirpath().join("pypackrat.py")
     from pypackrat import PyPackratSyntaxParser
     p = PyPackratSyntaxParser(syntax)
     t = p.file()

Modified: pypy/trunk/pypy/rlib/parsing/regexparse.py
==============================================================================
--- pypy/trunk/pypy/rlib/parsing/regexparse.py	(original)
+++ pypy/trunk/pypy/rlib/parsing/regexparse.py	Wed Nov 11 18:54:49 2009
@@ -1966,7 +1966,7 @@
 
 
 def test_generate():
-    f = py.magic.autopath()
+    f = py.path.local(__file__)
     oldcontent = f.read()
     s = "# GENERATED CODE BETWEEN THIS LINE AND ITS OTHER OCCURENCE\n".lower()
     pre, gen, after = oldcontent.split(s)

Modified: pypy/trunk/pypy/rlib/parsing/test/autopath.py
==============================================================================
--- pypy/trunk/pypy/rlib/parsing/test/autopath.py	(original)
+++ pypy/trunk/pypy/rlib/parsing/test/autopath.py	Wed Nov 11 18:54:49 2009
@@ -37,8 +37,7 @@
         partdir = head
         head, tail = os.path.split(head)
         if tail == part:
-            # check if "../py/__init__.py" exists
-            checkfile = os.path.join(partdir, os.pardir, 'py', '__init__.py')
+            checkfile = os.path.join(partdir, os.pardir, 'pypy', '__init__.py')
             if not os.path.exists(checkfile):
                 error = "Cannot find %r" % (os.path.normpath(checkfile),)
             break

Modified: pypy/trunk/pypy/rlib/parsing/test/test_pythonlexer.py
==============================================================================
--- pypy/trunk/pypy/rlib/parsing/test/test_pythonlexer.py	(original)
+++ pypy/trunk/pypy/rlib/parsing/test/test_pythonlexer.py	Wed Nov 11 18:54:49 2009
@@ -230,7 +230,7 @@
         assert tokens[i * 3].name == 'String'
 
 def test_self():
-    s = py.magic.autopath().read()
+    s = py.path.local(__file__).read()
     tokens = pythonlexer.tokenize(s)
     print tokens
 
@@ -263,7 +263,7 @@
         "op": "operator",
     }
     import tokenize, token
-    s = py.magic.autopath().read()
+    s = py.path.local(__file__).read()
     tokens = pythonlex(s)
     print [t.name for t in tokens][:20]
     tokens2 = list(tokenize.generate_tokens(iter(s.splitlines(True)).next))

Modified: pypy/trunk/pypy/rlib/parsing/test/test_pythonparse.py
==============================================================================
--- pypy/trunk/pypy/rlib/parsing/test/test_pythonparse.py	(original)
+++ pypy/trunk/pypy/rlib/parsing/test/test_pythonparse.py	Wed Nov 11 18:54:49 2009
@@ -7,7 +7,7 @@
 from pypy.rlib.parsing.parsing import PackratParser, Symbol, ParseError, Rule
 from pypy.rlib.parsing.ebnfparse import parse_ebnf, make_parse_function
 
-grammar = py.magic.autopath().dirpath().join("pygrammar.txt").read(mode='rt')
+grammar = py.path.local(__file__).dirpath().join("pygrammar.txt").read(mode='rt')
 
 
 def test_parse_grammar():
@@ -240,12 +240,12 @@
         t = self.ToAST.transform(t)
 
     def test_parse_this(self):
-        s = py.magic.autopath().read()
+        s = py.path.local(__file__).read()
         t = self.parse(s)
         t = self.ToAST.transform(t)
 
     def test_parsing(self):
-        s = py.magic.autopath().dirpath().dirpath().join("parsing.py").read()
+        s = py.path.local(__file__).dirpath().dirpath().join("parsing.py").read()
         t = self.parse(s)
         t = self.ToAST.transform(t)
 

Modified: pypy/trunk/pypy/rlib/rsdl/eci.py
==============================================================================
--- pypy/trunk/pypy/rlib/rsdl/eci.py	(original)
+++ pypy/trunk/pypy/rlib/rsdl/eci.py	Wed Nov 11 18:54:49 2009
@@ -9,7 +9,7 @@
             includes = ['SDL.h'],
             include_dirs = ['/Library/Frameworks/SDL.framework/Headers'],
             link_files = [
-                str(py.magic.autopath().dirpath().join('macosx-sdl-main/SDLMain.m')),
+                str(py.path.local(__file__).dirpath().join('macosx-sdl-main/SDLMain.m')),
             ],
             frameworks = ['SDL', 'Cocoa']
         )

Modified: pypy/trunk/pypy/rlib/rsdl/test/autopath.py
==============================================================================
--- pypy/trunk/pypy/rlib/rsdl/test/autopath.py	(original)
+++ pypy/trunk/pypy/rlib/rsdl/test/autopath.py	Wed Nov 11 18:54:49 2009
@@ -37,8 +37,7 @@
         partdir = head
         head, tail = os.path.split(head)
         if tail == part:
-            # check if "../py/__init__.py" exists
-            checkfile = os.path.join(partdir, os.pardir, 'py', '__init__.py')
+            checkfile = os.path.join(partdir, os.pardir, 'pypy', '__init__.py')
             if not os.path.exists(checkfile):
                 error = "Cannot find %r" % (os.path.normpath(checkfile),)
             break

Modified: pypy/trunk/pypy/rlib/test/test_listsort.py
==============================================================================
--- pypy/trunk/pypy/rlib/test/test_listsort.py	(original)
+++ pypy/trunk/pypy/rlib/test/test_listsort.py	Wed Nov 11 18:54:49 2009
@@ -35,7 +35,7 @@
         sorttest(lst1)
 
 def test_file():
-    for fn in py.magic.autopath().dirpath().listdir():
+    for fn in py.path.local(__file__).dirpath().listdir():
         if fn.ext == '.py': 
             lines1 = fn.readlines()
             sorttest(lines1)

Modified: pypy/trunk/pypy/rpython/microbench/autopath.py
==============================================================================
--- pypy/trunk/pypy/rpython/microbench/autopath.py	(original)
+++ pypy/trunk/pypy/rpython/microbench/autopath.py	Wed Nov 11 18:54:49 2009
@@ -37,8 +37,7 @@
         partdir = head
         head, tail = os.path.split(head)
         if tail == part:
-            # check if "../py/__init__.py" exists
-            checkfile = os.path.join(partdir, os.pardir, 'py', '__init__.py')
+            checkfile = os.path.join(partdir, os.pardir, 'pypy', '__init__.py')
             if not os.path.exists(checkfile):
                 error = "Cannot find %r" % (os.path.normpath(checkfile),)
             break

Modified: pypy/trunk/pypy/rpython/module/test/test_ll_os_path.py
==============================================================================
--- pypy/trunk/pypy/rpython/module/test/test_ll_os_path.py	(original)
+++ pypy/trunk/pypy/rpython/module/test/test_ll_os_path.py	Wed Nov 11 18:54:49 2009
@@ -11,7 +11,7 @@
 from pypy.tool.udir import udir
 
 def test_exists():
-    filename = impl.to_rstr(str(py.magic.autopath()))
+    filename = impl.to_rstr(str(py.path.local(__file__)))
     assert impl.ll_os_path_exists(filename) == True
     assert not impl.ll_os_path_exists(impl.to_rstr(
         "strange_filename_that_looks_improbable.sde"))

Modified: pypy/trunk/pypy/rpython/test/test_rbuiltin.py
==============================================================================
--- pypy/trunk/pypy/rpython/test/test_rbuiltin.py	(original)
+++ pypy/trunk/pypy/rpython/test/test_rbuiltin.py	Wed Nov 11 18:54:49 2009
@@ -294,7 +294,7 @@
         def f(fn):
             fn = hlstr(fn)
             return os.path.exists(fn)
-        filename = self.string_to_ll(str(py.magic.autopath()))
+        filename = self.string_to_ll(str(py.path.local(__file__)))
         assert self.interpret(f, [filename]) == True
         #assert self.interpret(f, [
         #    self.string_to_ll("strange_filename_that_looks_improbable.sde")]) == False
@@ -308,7 +308,7 @@
             fn = hlstr(fn)
             return os.path.isdir(fn)
         assert self.interpret(f, [self.string_to_ll("/")]) == True
-        assert self.interpret(f, [self.string_to_ll(str(py.magic.autopath()))]) == False
+        assert self.interpret(f, [self.string_to_ll(str(py.path.local(__file__)))]) == False
         assert self.interpret(f, [self.string_to_ll("another/unlikely/directory/name")]) == False
 
     def test_pbc_isTrue(self):

Modified: pypy/trunk/pypy/test_all.py
==============================================================================
--- pypy/trunk/pypy/test_all.py	(original)
+++ pypy/trunk/pypy/test_all.py	Wed Nov 11 18:54:49 2009
@@ -1,6 +1,6 @@
 #! /usr/bin/env python
 
-if __name__ == '__main__': 
+if __name__ == '__main__':
     import tool.autopath
-    import py 
-    py.test.cmdline.main() 
+    import py
+    py.cmdline.pytest()

Modified: pypy/trunk/pypy/tool/algo/test/autopath.py
==============================================================================
--- pypy/trunk/pypy/tool/algo/test/autopath.py	(original)
+++ pypy/trunk/pypy/tool/algo/test/autopath.py	Wed Nov 11 18:54:49 2009
@@ -37,8 +37,7 @@
         partdir = head
         head, tail = os.path.split(head)
         if tail == part:
-            # check if "../py/__init__.py" exists
-            checkfile = os.path.join(partdir, os.pardir, 'py', '__init__.py')
+            checkfile = os.path.join(partdir, os.pardir, 'pypy', '__init__.py')
             if not os.path.exists(checkfile):
                 error = "Cannot find %r" % (os.path.normpath(checkfile),)
             break

Modified: pypy/trunk/pypy/tool/ansi_mandelbrot.py
==============================================================================
--- pypy/trunk/pypy/tool/ansi_mandelbrot.py	(original)
+++ pypy/trunk/pypy/tool/ansi_mandelbrot.py	Wed Nov 11 18:54:49 2009
@@ -1,6 +1,6 @@
 import sys
 
-from py.__.io.terminalwriter import ansi_print, get_terminal_width
+from py.impl.io.terminalwriter import ansi_print, get_terminal_width
 
 """
 Black       0;30     Dark Gray     1;30

Modified: pypy/trunk/pypy/tool/ansi_print.py
==============================================================================
--- pypy/trunk/pypy/tool/ansi_print.py	(original)
+++ pypy/trunk/pypy/tool/ansi_print.py	Wed Nov 11 18:54:49 2009
@@ -4,7 +4,7 @@
 
 import sys
 
-from py.__.io.terminalwriter import ansi_print
+from py.impl.io.terminalwriter import ansi_print
 from pypy.tool.ansi_mandelbrot import Driver
 
 class AnsiLog:

Modified: pypy/trunk/pypy/tool/autopath.py
==============================================================================
--- pypy/trunk/pypy/tool/autopath.py	(original)
+++ pypy/trunk/pypy/tool/autopath.py	Wed Nov 11 18:54:49 2009
@@ -37,8 +37,7 @@
         partdir = head
         head, tail = os.path.split(head)
         if tail == part:
-            # check if "../py/__init__.py" exists
-            checkfile = os.path.join(partdir, os.pardir, 'py', '__init__.py')
+            checkfile = os.path.join(partdir, os.pardir, 'pypy', '__init__.py')
             if not os.path.exists(checkfile):
                 error = "Cannot find %r" % (os.path.normpath(checkfile),)
             break

Modified: pypy/trunk/pypy/tool/bench/pypyresult.py
==============================================================================
--- pypy/trunk/pypy/tool/bench/pypyresult.py	(original)
+++ pypy/trunk/pypy/tool/bench/pypyresult.py	Wed Nov 11 18:54:49 2009
@@ -52,7 +52,7 @@
             
 
 if __name__ == "__main__":
-    x = py.magic.autopath().dirpath("bench-unix.benchmark_result")
+    x = py.path.local(__file__).dirpath("bench-unix.benchmark_result")
     db = ResultDB()
     db.parsepickle(x)
     

Added: pypy/trunk/pypy/tool/difftime.py
==============================================================================
--- (empty file)
+++ pypy/trunk/pypy/tool/difftime.py	Wed Nov 11 18:54:49 2009
@@ -0,0 +1,32 @@
+import py 
+
+_time_desc = {
+         1 : 'second', 60 : 'minute', 3600 : 'hour', 86400 : 'day',
+         2628000 : 'month', 31536000 : 'year', }
+
+def worded_diff_time(ctime):
+    difftime = py.std.time.time() - ctime
+    keys = _time_desc.keys()
+    keys.sort()
+    for i, key in py.builtin.enumerate(keys):
+        if key >= difftime:
+            break
+    l = []
+    keylist = keys[:i]
+
+    keylist.reverse()
+    for key in keylist[:1]:
+        div = int(difftime / key)
+        if div==0:
+            break
+        difftime -= div * key
+        plural = div > 1 and 's' or ''
+        l.append('%d %s%s' %(div, _time_desc[key], plural))
+    return ", ".join(l) + " ago "
+
+_months = ['Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun',
+           'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec']
+
+def worded_time(ctime):
+    tm = py.std.time.gmtime(ctime)
+    return "%s %d, %d" % (_months[tm.tm_mon-1], tm.tm_mday, tm.tm_year)

Modified: pypy/trunk/pypy/tool/genstatistic.py
==============================================================================
--- pypy/trunk/pypy/tool/genstatistic.py	(original)
+++ pypy/trunk/pypy/tool/genstatistic.py	Wed Nov 11 18:54:49 2009
@@ -1,7 +1,7 @@
 
 import autopath
 import py
-from py.__.misc.cmdline import countloc 
+from py.impl.misc.cmdline import countloc 
 from py.xml import raw
 
 pypydir = py.path.local(autopath.pypydir)

Modified: pypy/trunk/pypy/tool/option.py
==============================================================================
--- pypy/trunk/pypy/tool/option.py	(original)
+++ pypy/trunk/pypy/tool/option.py	Wed Nov 11 18:54:49 2009
@@ -3,7 +3,7 @@
 import os
 from pypy.config.pypyoption import get_pypy_config
 from pypy.config.config import Config, OptionDescription, to_optparse
-from py.compat import optparse
+import optparse
 
 extra_useage = """For detailed descriptions of all the options see
 http://codespeak.net/pypy/dist/pypy/doc/config/commandline.html"""

Modified: pypy/trunk/pypy/tool/pytest/appsupport.py
==============================================================================
--- pypy/trunk/pypy/tool/pytest/appsupport.py	(original)
+++ pypy/trunk/pypy/tool/pytest/appsupport.py	Wed Nov 11 18:54:49 2009
@@ -1,9 +1,10 @@
 import autopath
 import py
-from py.__.magic import exprinfo
+import py.impl.code.assertion
+from py.impl.code import _assertionold as exprinfo
 from pypy.interpreter import gateway
 from pypy.interpreter.error import OperationError
-from py.__.test.outcome import ExceptionFailure
+from py.impl.test.outcome import ExceptionFailure
 
 # ____________________________________________________________
 

Modified: pypy/trunk/pypy/tool/pytest/autopath.py
==============================================================================
--- pypy/trunk/pypy/tool/pytest/autopath.py	(original)
+++ pypy/trunk/pypy/tool/pytest/autopath.py	Wed Nov 11 18:54:49 2009
@@ -37,8 +37,7 @@
         partdir = head
         head, tail = os.path.split(head)
         if tail == part:
-            # check if "../py/__init__.py" exists
-            checkfile = os.path.join(partdir, os.pardir, 'py', '__init__.py')
+            checkfile = os.path.join(partdir, os.pardir, 'pypy', '__init__.py')
             if not os.path.exists(checkfile):
                 error = "Cannot find %r" % (os.path.normpath(checkfile),)
             break

Modified: pypy/trunk/pypy/tool/pytest/genreportdata.py
==============================================================================
--- pypy/trunk/pypy/tool/pytest/genreportdata.py	(original)
+++ pypy/trunk/pypy/tool/pytest/genreportdata.py	Wed Nov 11 18:54:49 2009
@@ -3,7 +3,7 @@
 import py
 import sys
         
-mydir = py.magic.autopath().dirpath().realpath()
+mydir = py.path.local(__file__).dirpath().realpath()
 from pypy.tool.pytest import htmlreport 
 from pypy.tool.pytest import confpath 
 

Modified: pypy/trunk/pypy/tool/pytest/htmlreport.py
==============================================================================
--- pypy/trunk/pypy/tool/pytest/htmlreport.py	(original)
+++ pypy/trunk/pypy/tool/pytest/htmlreport.py	Wed Nov 11 18:54:49 2009
@@ -232,7 +232,7 @@
         t = self.rep.render_latest_table(self.rep.results)
         assert unicode(t)
 
-mydir = py.magic.autopath().dirpath()
+mydir = py.path.local(__file__).dirpath()
 
 def getpicklepath(): 
     return mydir.join('.htmlreport.pickle')

Modified: pypy/trunk/pypy/tool/pytest/test/test_new_count.py
==============================================================================
--- pypy/trunk/pypy/tool/pytest/test/test_new_count.py	(original)
+++ pypy/trunk/pypy/tool/pytest/test/test_new_count.py	Wed Nov 11 18:54:49 2009
@@ -2,7 +2,7 @@
 import py
 #from pypy.tool.pytest.confpath import testresultdir
 from pypy.tool.pytest.result import ResultFromMime
-testpath = py.magic.autopath().dirpath('data')
+testpath = py.path.local(__file__).dirpath('data')
 
 class TestResultCache:
 

Added: pypy/trunk/pypy/tool/rest/__init__.py
==============================================================================

Added: pypy/trunk/pypy/tool/rest/convert.py
==============================================================================
--- (empty file)
+++ pypy/trunk/pypy/tool/rest/convert.py	Wed Nov 11 18:54:49 2009
@@ -0,0 +1,163 @@
+import py
+
+ExecutionFailed = py.process.cmdexec.Error
+# utility functions to convert between various formats
+
+format_to_dotargument = {"png": "png",
+                         "eps": "ps",
+                         "ps":  "ps",
+                         "pdf": "ps",
+                        }
+
+def ps2eps(ps):
+    # XXX write a pure python version
+    if not py.path.local.sysfind("ps2epsi") and \
+           not py.path.local.sysfind("ps2eps"):
+        raise SystemExit("neither ps2eps nor ps2epsi found")
+    try:
+        eps = ps.new(ext=".eps")
+        py.process.cmdexec('ps2epsi "%s" "%s"' % (ps, eps))
+    except ExecutionFailed:
+        py.process.cmdexec('ps2eps -l -f "%s"' % ps)
+
+def ps2pdf(ps, compat_level="1.2"):
+    if not py.path.local.sysfind("gs"):
+        raise SystemExit("ERROR: gs not found")
+    pdf = ps.new(ext=".pdf")
+    options = dict(OPTIONS="-dSAFER -dCompatibilityLevel=%s" % compat_level,
+                   infile=ps, outfile=pdf)
+    cmd = ('gs %(OPTIONS)s -q -dNOPAUSE -dBATCH -sDEVICE=pdfwrite '
+           '"-sOutputFile=%(outfile)s" %(OPTIONS)s -c .setpdfwrite '
+           '-f "%(infile)s"') % options
+    py.process.cmdexec(cmd)
+    return pdf
+
+def eps2pdf(eps):
+    # XXX write a pure python version
+    if not py.path.local.sysfind("epstopdf"):
+        raise SystemExit("ERROR: epstopdf not found")
+    py.process.cmdexec('epstopdf "%s"' % eps)
+
+def dvi2eps(dvi, dest=None):
+    if dest is None:
+        dest = dvi.new(ext=".eps")
+    command = 'dvips -q -E -n 1 -D 600 -p 1 -o "%s" "%s"' % (dest, dvi)
+    if not py.path.local.sysfind("dvips"):
+        raise SystemExit("ERROR: dvips not found")
+    py.process.cmdexec(command)
+
+def convert_dot(fn, new_extension):
+    if not py.path.local.sysfind("dot"):
+        raise SystemExit("ERROR: dot not found")
+    result = fn.new(ext=new_extension)
+    print result
+    arg = "-T%s" % (format_to_dotargument[new_extension], )
+    py.std.os.system('dot "%s" "%s" > "%s"' % (arg, fn, result))
+    if new_extension == "eps":
+        ps = result.new(ext="ps")
+        result.move(ps)
+        ps2eps(ps)
+        ps.remove()
+    elif new_extension == "pdf":
+        # convert to eps file first, to get the bounding box right
+        eps = result.new(ext="eps")
+        ps = result.new(ext="ps")
+        result.move(ps)
+        ps2eps(ps)
+        eps2pdf(eps)
+        ps.remove()
+        eps.remove()
+    return result
+ 
+
+class latexformula2png(object):
+    def __init__(self, formula, dest, temp=None):
+        self.formula = formula
+        try:
+            import Image
+            self.Image = Image
+            self.scale = 2 # create a larger image
+            self.upscale = 5 # create the image upscale times larger, then scale it down
+        except ImportError:
+            self.scale = 2
+            self.upscale = 1
+            self.Image = None
+        self.output_format = ('pngmono', 'pnggray', 'pngalpha')[2]
+        if temp is None:
+            temp = py.test.ensuretemp("latexformula")
+        self.temp = temp
+        self.latex = self.temp.join('formula.tex')
+        self.dvi = self.temp.join('formula.dvi')
+        self.eps = self.temp.join('formula.eps')
+        self.png = self.temp.join('formula.png')
+        self.saveas(dest)
+
+    def saveas(self, dest):
+        self.gen_latex()
+        self.gen_dvi()
+        dvi2eps(self.dvi, self.eps)
+        self.gen_png()
+        self.scale_image()
+        self.png.copy(dest)
+
+    def gen_latex(self):
+        self.latex.write ("""
+        \\documentclass{article}
+        \\pagestyle{empty}
+        \\begin{document}
+
+        %s
+        \\pagebreak
+        
+        \\end{document}
+        """ % (self.formula))
+
+    def gen_dvi(self):
+        origdir = py.path.local()
+        self.temp.chdir()
+        py.process.cmdexec('latex "%s"' % (self.latex))
+        origdir.chdir()
+
+    def gen_png(self):
+        tempdir = py.path.local.mkdtemp()
+        
+        re_bbox = py.std.re.compile('%%BoundingBox:\s*(\d+) (\d+) (\d+) (\d+)')
+        eps = self.eps.read()
+        x1, y1, x2, y2 = [int(i) for i in re_bbox.search(eps).groups()]
+        X = x2 - x1 + 2
+        Y = y2 - y1 + 2
+        mx = -x1
+        my = -y1
+        ps = self.temp.join('temp.ps')
+        source = self.eps
+        ps.write("""
+        1 1 1 setrgbcolor
+        newpath
+        -1 -1 moveto
+        %(X)d  -1 lineto
+        %(X)d %(Y)d lineto
+        -1 %(Y)d lineto
+        closepath
+        fill
+        %(mx)d %(my)d translate
+        0 0 0 setrgbcolor
+        (%(source)s) run
+        
+        """ % locals())
+
+        sx = int((x2 - x1) * self.scale * self.upscale)
+        sy = int((y2 - y1) * self.scale * self.upscale)
+        res = 72 * self.scale * self.upscale
+        command = ('gs -q -g%dx%d -r%dx%d -sDEVICE=%s -sOutputFile="%s" '
+                   '-dNOPAUSE -dBATCH "%s"') % (
+                    sx, sy, res, res, self.output_format, self.png, ps)
+        py.process.cmdexec(command)
+
+    def scale_image(self):
+        if self.Image is None:
+            return
+        image = self.Image.open(str(self.png))
+        image.resize((image.size[0] / self.upscale,
+                      image.size[1] / self.upscale),
+                     self.Image.ANTIALIAS).save(str(self.png))
+
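
A hedged sketch of how the converters above are meant to be driven; the
input path is made up, and the external tools (dot, gs, ps2epsi or ps2eps,
epstopdf) must be available on PATH:

    import py
    from pypy.tool.rest.convert import convert_dot

    dot = py.path.local("doc/image/translation.dot")   # hypothetical .dot file
    png = convert_dot(dot, "png")   # runs dot -Tpng, returns the new path
    pdf = convert_dot(dot, "pdf")   # goes dot -> ps -> eps -> pdf to fix the bounding box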

Added: pypy/trunk/pypy/tool/rest/directive.py
==============================================================================
--- (empty file)
+++ pypy/trunk/pypy/tool/rest/directive.py	Wed Nov 11 18:54:49 2009
@@ -0,0 +1,115 @@
+# XXX this file is messy since it tries to deal with several docutils versions
+import py
+
+from pypy.tool.rest.convert import convert_dot, latexformula2png
+
+import sys
+import docutils
+from docutils import nodes
+from docutils.parsers.rst import directives, states, roles
+from docutils.parsers.rst.directives import images
+
+if hasattr(images, "image"):
+    directives_are_functions = True
+else:
+    directives_are_functions = False
+
+try:
+    from docutils.utils import unescape # docutils version > 0.3.5
+except ImportError:
+    from docutils.parsers.rst.states import unescape # docutils 0.3.5
+
+if not directives_are_functions:
+    ImageClass = images.Image
+
+else:
+    class ImageClass(object):
+        option_spec = images.image.options
+        def run(self):
+            return images.image(u'image',
+                                self.arguments,
+                                self.options,
+                                self.content,
+                                self.lineno,
+                                self.content_offset,
+                                self.block_text,
+                                self.state,
+                                self.state_machine)
+
+
+backend_to_image_format = {"html": "png", "latex": "pdf"}
+
+class GraphvizDirective(ImageClass):
+    def convert(self, fn, path):
+        path = py.path.local(path).dirpath()
+        dot = path.join(fn)
+        result = convert_dot(dot, backend_to_image_format[_backend])
+        return result.relto(path)
+
+    def run(self):
+        newname = self.convert(self.arguments[0],
+                               self.state.document.settings._source)
+        text = self.block_text.replace("graphviz", "image", 1)
+        self.block_text = text.replace(self.arguments[0], newname, 1)
+        self.name = u'image'
+        self.arguments = [newname]
+        return ImageClass.run(self)
+    
+    def old_interface(self):
+        def f(name, arguments, options, content, lineno,
+              content_offset, block_text, state, state_machine):
+            for arg in "name arguments options content lineno " \
+                       "content_offset block_text state state_machine".split():
+                setattr(self, arg, locals()[arg])
+            return self.run()
+        f.arguments = (1, 0, 1)
+        f.options = self.option_spec
+        return f
+
+
+_backend = None
+def set_backend_and_register_directives(backend):
+    #XXX this is only used to work around the inflexibility of docutils:
+    # a directive does not know the target format
+    global _backend
+    _backend = backend
+    if not directives_are_functions:
+        directives.register_directive("graphviz", GraphvizDirective)
+    else:
+        directives.register_directive("graphviz",
+                                      GraphvizDirective().old_interface())
+    roles.register_canonical_role("latexformula", latexformula_role)
+
+def latexformula_role(name, rawtext, text, lineno, inliner,
+                      options={}, content=[]):
+    if _backend == 'latex':
+        options['format'] = 'latex'
+        return roles.raw_role(name, rawtext, text, lineno, inliner,
+                              options, content)
+    else:
+        # XXX: make the place of the image directory configurable
+        sourcedir = py.path.local(inliner.document.settings._source).dirpath()
+        imagedir = sourcedir.join("img")
+        if not imagedir.check():
+            imagedir.mkdir()
+        # create halfway senseful imagename:
+        # use hash of formula + alphanumeric characters of it
+        # could
+        imagename = "%s_%s.png" % (
+            hash(text), "".join([c for c in text if c.isalnum()]))
+        image = imagedir.join(imagename)
+        latexformula2png(unescape(text, True), image)
+        imagenode = nodes.image(image.relto(sourcedir), uri=image.relto(sourcedir))
+        return [imagenode], []
+latexformula_role.content = True
+latexformula_role.options = {}
+
+def register_linkrole(role_name, callback):
+    def source_role(name, rawtext, text, lineno, inliner, options={},
+                    content=[]):
+        text, target = callback(name, text)
+        reference_node = nodes.reference(rawtext, text, name=text, refuri=target)
+        return [reference_node], []
+    source_role.content = True
+    source_role.options = {}
+    roles.register_canonical_role(role_name, source_role)
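
A hedged usage sketch for register_linkrole() above; the role name, callback
and URL scheme are made up for illustration:

    from pypy.tool.rest.directive import register_linkrole

    def resolve_source(name, text):
        # turn :source:`pypy/doc/index.txt` into a link into the repository
        return text, "http://codespeak.net/svn/pypy/trunk/" + text

    register_linkrole("source", resolve_source)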

Added: pypy/trunk/pypy/tool/rest/rest.py
==============================================================================
--- (empty file)
+++ pypy/trunk/pypy/tool/rest/rest.py	Wed Nov 11 18:54:49 2009
@@ -0,0 +1,81 @@
+import py
+import sys, os, traceback
+import re
+
+if hasattr(sys.stdout, 'fileno') and os.isatty(sys.stdout.fileno()):
+    def log(msg):
+        print msg 
+else:
+    def log(msg):
+        pass
+
+def convert_rest_html(source, source_path, stylesheet=None, encoding='latin1'):
+    """ return an html document for the given ReST input (latin1-encoded by default).
+        source  a ReST-string
+        sourcepath where to look for includes (basically)
+        stylesheet path (to be used if any)
+    """
+    from pypy.tool.rest import directive
+    from docutils.core import publish_string
+    directive.set_backend_and_register_directives("html")
+    kwargs = {
+        'stylesheet' : stylesheet, 
+        'stylesheet_path': None,
+        'traceback' : 1, 
+        'embed_stylesheet': 0,
+        'output_encoding' : encoding, 
+        #'halt' : 0, # 'info',
+        'halt_level' : 2, 
+    }
+    # docutils uses os.getcwd() :-(
+    source_path = os.path.abspath(str(source_path))
+    prevdir = os.getcwd()
+    try:
+        #os.chdir(os.path.dirname(source_path))
+        return publish_string(source, source_path, writer_name='html',
+                              settings_overrides=kwargs)
+    finally:
+        os.chdir(prevdir)
+
+def process(txtpath, encoding='latin1'):
+    """ process a textfile """
+    log("processing %s" % txtpath)
+    assert txtpath.check(ext='.txt')
+    if isinstance(txtpath, py.path.svnwc):
+        txtpath = txtpath.localpath
+    htmlpath = txtpath.new(ext='.html')
+    #svninfopath = txtpath.localpath.new(ext='.svninfo')
+
+    style = txtpath.dirpath('style.css')
+    if style.check():
+        stylesheet = style.basename
+    else:
+        stylesheet = None
+    content = unicode(txtpath.read(), encoding)
+    doc = convert_rest_html(content, txtpath, stylesheet=stylesheet, encoding=encoding)
+    htmlpath.write(doc)
+    #log("wrote %r" % htmlpath)
+    #if txtpath.check(svnwc=1, versioned=1): 
+    #    info = txtpath.info()
+    #    svninfopath.dump(info) 
+
+rex1 = re.compile(ur'.*<body>(.*)</body>.*', re.MULTILINE | re.DOTALL)
+rex2 = re.compile(ur'.*<div class="document">(.*)</div>.*', re.MULTILINE | re.DOTALL)
+
+def strip_html_header(string, encoding='utf8'):
+    """ return the content of the body-tag """ 
+    uni = unicode(string, encoding)
+    for rex in rex1,rex2: 
+        match = rex.search(uni) 
+        if not match: 
+            break 
+        uni = match.group(1) 
+    return uni 
+
+class Project: # used for confrest.py files 
+    def __init__(self, sourcepath):
+        self.sourcepath = sourcepath
+    def process(self, path):
+        return process(path)
+    def get_htmloutputpath(self, path):
+        return path.new(ext='html')

Added: pypy/trunk/pypy/tool/rest/rst.py
==============================================================================
--- (empty file)
+++ pypy/trunk/pypy/tool/rest/rst.py	Wed Nov 11 18:54:49 2009
@@ -0,0 +1,417 @@
+
+""" reStructuredText generation tools
+
+    provides an api to build a tree from nodes, which can be converted to
+    ReStructuredText on demand
+
+    note that not all of ReST is supported, a usable subset is offered, but
+    certain features aren't supported, and also certain details (like how links
+    are generated, or how escaping is done) can not be controlled
+"""
+
+from __future__ import generators
+
+import py
+
+def escape(txt):
+    """escape ReST markup"""
+    if not isinstance(txt, str) and not isinstance(txt, unicode):
+        txt = str(txt)
+    # XXX this takes a very naive approach to escaping, but it seems to be
+    # sufficient...
+    for c in '\\*`|:_':
+        txt = txt.replace(c, '\\%s' % (c,))
+    return txt
+
+class RestError(Exception):
+    """ raised on containment errors (wrong parent) """
+
+class AbstractMetaclass(type):
+    def __new__(cls, *args):
+        obj = super(AbstractMetaclass, cls).__new__(cls, *args)
+        parent_cls = obj.parentclass
+        if parent_cls is None:
+            return obj
+        if not isinstance(parent_cls, list):
+            class_list = [parent_cls]
+        else:
+            class_list = parent_cls
+        if obj.allow_nesting:
+            class_list.append(obj)
+        
+        for _class in class_list:
+            if not _class.allowed_child:
+                _class.allowed_child = {obj:True}
+            else:
+                _class.allowed_child[obj] = True
+        return obj
+
+class AbstractNode(object):
+    """ Base class implementing rest generation
+    """
+    sep = ''
+    __metaclass__ = AbstractMetaclass
+    parentclass = None # this exists to allow parent to know what
+        # children can exist
+    allow_nesting = False
+    allowed_child = {}
+    defaults = {}
+    
+    _reg_whitespace = py.std.re.compile('\s+')
+
+    def __init__(self, *args, **kwargs):
+        self.parent = None
+        self.children = []
+        for child in args:
+            self._add(child)
+        for arg in kwargs:
+            setattr(self, arg, kwargs[arg])
+    
+    def join(self, *children):
+        """ add child nodes
+        
+            returns a reference to self
+        """
+        for child in children:
+            self._add(child)
+        return self
+    
+    def add(self, child):
+        """ adds a child node
+            
+            returns a reference to the child
+        """
+        self._add(child)
+        return child
+        
+    def _add(self, child):
+        if child.__class__ not in self.allowed_child:
+            raise RestError("%r cannot be child of %r" % \
+                (child.__class__, self.__class__))
+        self.children.append(child)
+        child.parent = self
+    
+    def __getitem__(self, item):
+        return self.children[item]
+    
+    def __setitem__(self, item, value):
+        self.children[item] = value
+
+    def text(self):
+        """ return a ReST string representation of the node """
+        return self.sep.join([child.text() for child in self.children])
+    
+    def wordlist(self):
+        """ return a list of ReST strings for this node and its children """ 
+        return [self.text()]
+
+class Rest(AbstractNode):
+    """ Root node of a document """
+    
+    sep = "\n\n"
+    def __init__(self, *args, **kwargs):
+        AbstractNode.__init__(self, *args, **kwargs)
+        self.links = {}
+    
+    def render_links(self, check=False):
+        """render the link attachments of the document"""
+        assert not check, "Link checking not implemented"
+        if not self.links:
+            return ""
+        link_texts = []
+        # XXX this could check for duplicates and remove them...
+        for link, target in self.links.iteritems():
+            link_texts.append(".. _`%s`: %s" % (escape(link), target))
+        return "\n" + "\n".join(link_texts) + "\n\n"
+
+    def text(self):
+        outcome = []
+        if (isinstance(self.children[0], Transition) or
+                isinstance(self.children[-1], Transition)):
+            raise ValueError, ('document must not begin or end with a '
+                               'transition')
+        for child in self.children:
+            outcome.append(child.text())
+        
+        # always a trailing newline
+        text = self.sep.join([i for i in outcome if i]) + "\n"
+        return text + self.render_links()
+
+class Transition(AbstractNode):
+    """ a horizontal line """
+    parentclass = Rest
+
+    def __init__(self, char='-', width=80, *args, **kwargs):
+        self.char = char
+        self.width = width
+        super(Transition, self).__init__(*args, **kwargs)
+        
+    def text(self):
+        return (self.width - 1) * self.char
+
+class Paragraph(AbstractNode):
+    """ simple paragraph """
+
+    parentclass = Rest
+    sep = " "
+    indent = ""
+    width = 80
+    
+    def __init__(self, *args, **kwargs):
+        # make shortcut
+        args = list(args)
+        for num, arg in py.builtin.enumerate(args):
+            if isinstance(arg, str):
+                args[num] = Text(arg)
+        super(Paragraph, self).__init__(*args, **kwargs)
+    
+    def text(self):
+        texts = []
+        for child in self.children:
+            texts += child.wordlist()
+        
+        buf = []
+        outcome = []
+        lgt = len(self.indent)
+        
+        def grab(buf):
+            outcome.append(self.indent + self.sep.join(buf))
+        
+        texts.reverse()
+        while texts:
+            next = texts[-1]
+            if not next:
+                texts.pop()
+                continue
+            if lgt + len(self.sep) + len(next) <= self.width or not buf:
+                buf.append(next)
+                lgt += len(next) + len(self.sep)
+                texts.pop()
+            else:
+                grab(buf)
+                lgt = len(self.indent)
+                buf = []
+        grab(buf)
+        return "\n".join(outcome)
+    
+class SubParagraph(Paragraph):
+    """ indented sub paragraph """
+
+    indent = " "
+    
+class Title(Paragraph):
+    """ title element """
+
+    parentclass = Rest
+    belowchar = "="
+    abovechar = ""
+    
+    def text(self):
+        txt = self._get_text()
+        lines = []
+        if self.abovechar:
+            lines.append(self.abovechar * len(txt))
+        lines.append(txt)
+        if self.belowchar:
+            lines.append(self.belowchar * len(txt))
+        return "\n".join(lines)
+
+    def _get_text(self):
+        txt = []
+        for node in self.children:
+            txt += node.wordlist()
+        return ' '.join(txt)
+
+class AbstractText(AbstractNode):
+    parentclass = [Paragraph, Title]
+    start = ""
+    end = ""
+    def __init__(self, _text):
+        self._text = _text
+    
+    def text(self):
+        text = self.escape(self._text)
+        return self.start + text + self.end
+
+    def escape(self, text):
+        if not isinstance(text, str) and not isinstance(text, unicode):
+            text = str(text)
+        if self.start:
+            text = text.replace(self.start, '\\%s' % (self.start,))
+        if self.end and self.end != self.start:
+            text = text.replace(self.end, '\\%s' % (self.end,))
+        return text
+    
+class Text(AbstractText):
+    def wordlist(self):
+        text = escape(self._text)
+        return self._reg_whitespace.split(text)
+
+class LiteralBlock(AbstractText):
+    parentclass = Rest
+    start = '::\n\n'
+
+    def text(self):
+        if not self._text.strip():
+            return ''
+        text = self.escape(self._text).split('\n')
+        for i, line in py.builtin.enumerate(text):
+            if line.strip():
+                text[i] = '  %s' % (line,)
+        return self.start + '\n'.join(text)
+
+class Em(AbstractText):
+    start = "*"
+    end = "*"
+
+class Strong(AbstractText):
+    start = "**"
+    end = "**"
+
+class Quote(AbstractText):
+    start = '``'
+    end = '``'
+
+class Anchor(AbstractText):
+    start = '_`'
+    end = '`'
+
+class Footnote(AbstractText):
+    def __init__(self, note, symbol=False):
+        raise NotImplementedError('XXX')
+
+class Citation(AbstractText):
+    def __init__(self, text, cite):
+        raise NotImplementedError('XXX')
+
+class ListItem(Paragraph):
+    allow_nesting = True
+    item_chars = '*+-'
+    
+    def text(self):
+        idepth = self.get_indent_depth()
+        indent = self.indent + (idepth + 1) * '  '
+        txt = '\n\n'.join(self.render_children(indent))
+        ret = []
+        item_char = self.item_chars[idepth]
+        ret += [indent[len(item_char)+1:], item_char, ' ', txt[len(indent):]]
+        return ''.join(ret)
+    
+    def render_children(self, indent):
+        txt = []
+        buffer = []
+        def render_buffer(fro, to):
+            if not fro:
+                return
+            p = Paragraph(indent=indent, *fro)
+            p.parent = self.parent
+            to.append(p.text())
+        for child in self.children:
+            if isinstance(child, AbstractText):
+                buffer.append(child)
+            else:
+                if buffer:
+                    render_buffer(buffer, txt)
+                    buffer = []
+                txt.append(child.text())
+
+        render_buffer(buffer, txt)
+        return txt
+
+    def get_indent_depth(self):
+        depth = 0
+        current = self
+        while (current.parent is not None and
+                isinstance(current.parent, ListItem)):
+            depth += 1
+            current = current.parent
+        return depth
+
+class OrderedListItem(ListItem):
+    item_chars = ["#."] * 5
+
+class DListItem(ListItem):
+    item_chars = None
+    def __init__(self, term, definition, *args, **kwargs):
+        self.term = term
+        super(DListItem, self).__init__(definition, *args, **kwargs)
+
+    def text(self):
+        idepth = self.get_indent_depth()
+        indent = self.indent + (idepth + 1) * '  '
+        txt = '\n\n'.join(self.render_children(indent))
+        ret = []
+        ret += [indent[2:], self.term, '\n', txt]
+        return ''.join(ret)
+
+class Link(AbstractText):
+    start = '`'
+    end = '`_'
+
+    def __init__(self, _text, target):
+        self._text = _text
+        self.target = target
+        self.rest = None
+    
+    def text(self):
+        if self.rest is None:
+            self.rest = self.find_rest()
+        if self.rest.links.get(self._text, self.target) != self.target:
+            raise ValueError('link name %r already in use for a different '
+                             'target' % (self._text,))
+        self.rest.links[self._text] = self.target
+        return AbstractText.text(self)
+
+    def find_rest(self):
+        # XXX little overkill, but who cares...
+        next = self
+        while next.parent is not None:
+            next = next.parent
+        return next
+
+class InternalLink(AbstractText):
+    start = '`'
+    end = '`_'
+    
+class LinkTarget(Paragraph):
+    def __init__(self, name, target):
+        self.name = name
+        self.target = target
+    
+    def text(self):
+        return ".. _`%s`:%s\n" % (self.name, self.target)
+
+class Substitution(AbstractText):
+    def __init__(self, text, **kwargs):
+        raise NotImplementedError('XXX')
+
+class Directive(Paragraph):
+    indent = '   '
+    def __init__(self, name, *args, **options):
+        self.name = name
+        self.content = options.pop('content', [])
+        children = list(args)
+        super(Directive, self).__init__(*children)
+        self.options = options
+        
+    def text(self):
+        # XXX not very pretty...
+        namechunksize = len(self.name) + 2
+        self.children.insert(0, Text('X' * namechunksize))
+        txt = super(Directive, self).text()
+        txt = '.. %s::%s' % (self.name, txt[namechunksize + 3:],)
+        options = '\n'.join(['   :%s: %s' % (k, v) for (k, v) in
+                             self.options.iteritems()])
+        if options:
+            txt += '\n%s' % (options,)
+
+        if self.content:
+            txt += '\n'
+            for item in self.content:
+                assert item.parentclass == Rest, 'only top-level items allowed'
+                assert not item.indent
+                item.indent = '   '
+                txt += '\n' + item.text()
+        
+        return txt
+
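
The node classes above build a document tree whose text() methods emit ReST
source.  A minimal usage sketch (assuming the Rest, Title and Paragraph
classes defined earlier in this file accept child nodes the same way the
classes shown here do):

    # sketch only -- Rest, Title and Paragraph come from earlier in this module
    doc = Rest(Title(Text("Example")),
               Paragraph(Text("some"), Strong("emphasised"), Text("words")))
    print doc.text()    # emits the assembled ReST document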

Modified: pypy/trunk/pypy/tool/statistic_over_time.py
==============================================================================
--- pypy/trunk/pypy/tool/statistic_over_time.py	(original)
+++ pypy/trunk/pypy/tool/statistic_over_time.py	Wed Nov 11 18:54:49 2009
@@ -1,5 +1,5 @@
 import py
-from py.__.misc.cmdline.countloc import get_loccount
+from py.impl.misc.cmdline.countloc import get_loccount
 import datetime
 import time
 

Modified: pypy/trunk/pypy/tool/test/autopath.py
==============================================================================
--- pypy/trunk/pypy/tool/test/autopath.py	(original)
+++ pypy/trunk/pypy/tool/test/autopath.py	Wed Nov 11 18:54:49 2009
@@ -37,8 +37,7 @@
         partdir = head
         head, tail = os.path.split(head)
         if tail == part:
-            # check if "../py/__init__.py" exists
-            checkfile = os.path.join(partdir, os.pardir, 'py', '__init__.py')
+            checkfile = os.path.join(partdir, os.pardir, 'pypy', '__init__.py')
             if not os.path.exists(checkfile):
                 error = "Cannot find %r" % (os.path.normpath(checkfile),)
             break
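
This hunk recurs for every autopath.py stub touched by the commit: the stub no
longer insists on a py-lib checkout sitting next to the pypy package, it only
checks that it really found the pypy package root.  A sketch with a
hypothetical checkout location:

    import os
    partdir = '/home/user/pypy-trunk/pypy'        # hypothetical path
    # before: os.path.join(partdir, os.pardir, 'py', '__init__.py')
    #         -> required a sibling py/ checkout under the trunk
    checkfile = os.path.join(partdir, os.pardir, 'pypy', '__init__.py')
    # -> '/home/user/pypy-trunk/pypy/../pypy/__init__.py', i.e. the package itself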

Modified: pypy/trunk/pypy/tool/test/test_conftest1.py
==============================================================================
--- pypy/trunk/pypy/tool/test/test_conftest1.py	(original)
+++ pypy/trunk/pypy/tool/test/test_conftest1.py	Wed Nov 11 18:54:49 2009
@@ -1,7 +1,7 @@
 
 import py
 
-innertest = py.magic.autopath().dirpath('conftest1_innertest.py')
+innertest = py.path.local(__file__).dirpath('conftest1_innertest.py')
 pytest_plugins = "pytest_pytester"
 
 class TestPyPyTests:
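
The other substitution repeated throughout the commit: py.magic.autopath(),
which returned a py.path.local for the calling module's file, is replaced by
the explicit spelling.  A minimal sketch of the pattern:

    import py
    here = py.path.local(__file__).dirpath()          # directory of this module
    innertest = here.join('conftest1_innertest.py')   # same result as before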

Modified: pypy/trunk/pypy/tool/test/test_pytestsupport.py
==============================================================================
--- pypy/trunk/pypy/tool/test/test_pytestsupport.py	(original)
+++ pypy/trunk/pypy/tool/test/test_pytestsupport.py	Wed Nov 11 18:54:49 2009
@@ -1,5 +1,5 @@
 import autopath
-from py.__.magic import exprinfo
+from py.impl.code import _assertionold as exprinfo
 from pypy.interpreter.error import OperationError
 from pypy.interpreter.gateway import app2interp_temp
 from pypy.interpreter.argument import Arguments

Modified: pypy/trunk/pypy/tool/udir.py
==============================================================================
--- pypy/trunk/pypy/tool/udir.py	(original)
+++ pypy/trunk/pypy/tool/udir.py	Wed Nov 11 18:54:49 2009
@@ -35,7 +35,7 @@
         dir = local(dir)
     if basename is None:
         try:
-            p = py.magic.autopath().dirpath()
+            p = py.path.local(__file__).dirpath()
             basename = svn_info(py.path.svnwc(p).info().url)
         except:
             basename = ''

Modified: pypy/trunk/pypy/translator/autopath.py
==============================================================================
--- pypy/trunk/pypy/translator/autopath.py	(original)
+++ pypy/trunk/pypy/translator/autopath.py	Wed Nov 11 18:54:49 2009
@@ -37,8 +37,7 @@
         partdir = head
         head, tail = os.path.split(head)
         if tail == part:
-            # check if "../py/__init__.py" exists
-            checkfile = os.path.join(partdir, os.pardir, 'py', '__init__.py')
+            checkfile = os.path.join(partdir, os.pardir, 'pypy', '__init__.py')
             if not os.path.exists(checkfile):
                 error = "Cannot find %r" % (os.path.normpath(checkfile),)
             break

Modified: pypy/trunk/pypy/translator/benchmark/autopath.py
==============================================================================
--- pypy/trunk/pypy/translator/benchmark/autopath.py	(original)
+++ pypy/trunk/pypy/translator/benchmark/autopath.py	Wed Nov 11 18:54:49 2009
@@ -37,8 +37,7 @@
         partdir = head
         head, tail = os.path.split(head)
         if tail == part:
-            # check if "../py/__init__.py" exists
-            checkfile = os.path.join(partdir, os.pardir, 'py', '__init__.py')
+            checkfile = os.path.join(partdir, os.pardir, 'pypy', '__init__.py')
             if not os.path.exists(checkfile):
                 error = "Cannot find %r" % (os.path.normpath(checkfile),)
             break

Modified: pypy/trunk/pypy/translator/benchmark/benchmarks.py
==============================================================================
--- pypy/trunk/pypy/translator/benchmark/benchmarks.py	(original)
+++ pypy/trunk/pypy/translator/benchmark/benchmarks.py	Wed Nov 11 18:54:49 2009
@@ -34,7 +34,7 @@
 
 def external_dependency(dirname, svnurl, revision):
     """Check out (if necessary) a given fixed revision of a svn url."""
-    dirpath = py.magic.autopath().dirpath().join(dirname)
+    dirpath = py.path.local(__file__).dirpath().join(dirname)
     revtag = dirpath.join('-svn-rev-')
     if dirpath.check():
         if not revtag.check() or int(revtag.read()) != revision:
@@ -70,13 +70,13 @@
     return get_result(txt, PYSTONE_PATTERN)
 
 def run_richards(executable='/usr/local/bin/python', n=5):
-    richards = py.magic.autopath().dirpath().dirpath().join('goal').join('richards.py')
+    richards = py.path.local(__file__).dirpath().dirpath().join('goal').join('richards.py')
     txt = run_cmd('"%s" %s %s' % (executable, richards, n))
     return get_result(txt, RICHARDS_PATTERN)
 
 def run_translate(executable='/usr/local/bin/python'):
-    translate = py.magic.autopath().dirpath().dirpath().join('goal').join('translate.py')
-    target = py.magic.autopath().dirpath().dirpath().join('goal').join('targetrpystonedalone.py')
+    translate = py.path.local(__file__).dirpath().dirpath().join('goal').join('translate.py')
+    target = py.path.local(__file__).dirpath().dirpath().join('goal').join('targetrpystonedalone.py')
     argstr = '%s %s --batch --backendopt --no-compile %s > /dev/null 2> /dev/null'
     T = time.time()
     status = os.system(argstr%(executable, translate, target))
@@ -87,7 +87,7 @@
 
 def run_docutils(executable='/usr/local/bin/python'):
     docutilssvnpath = 'docutils'    # subdir of the local dir
-    translatetxt = py.magic.autopath().dirpath().dirpath().dirpath().join('doc').join('translation.txt')
+    translatetxt = py.path.local(__file__).dirpath().dirpath().dirpath().join('doc').join('translation.txt')
     command = """import sys
 sys.modules['unicodedata'] = sys # docutils need 'import unicodedata' to work, but no more...
 sys.path[0:0] = ['%s', '%s/extras']
@@ -123,7 +123,7 @@
         templess is some simple templating language, to check out use
         'svn co -r100 http://johnnydebris.net/templess/trunk templess'
     """
-    here = py.magic.autopath().dirpath()
+    here = py.path.local(__file__).dirpath()
     pypath = os.path.dirname(os.path.dirname(py.__file__))
     templessdir = here.join('templess')
     testscript = templessdir.join('test/oneshot.py')
@@ -143,7 +143,7 @@
 
 def run_gadfly(executable='/usr/local/bin/python'):
     """ run some tests in the gadfly pure Python database """
-    here = py.magic.autopath().dirpath()
+    here = py.path.local(__file__).dirpath()
     gadfly = here.join('gadfly')
     testscript = gadfly.join('test', 'testsubset.py')
     command = 'PYTHONPATH="%s" "%s" "%s"' % (gadfly, executable, testscript)
@@ -167,7 +167,7 @@
 
 def run_mako(executable='/usr/local/bin/python'):
     """ run some tests in the mako templating system """
-    here = py.magic.autopath().dirpath()
+    here = py.path.local(__file__).dirpath()
     mako = here.join('mako')
     testscript = mako.join('examples', 'bench', 'basic.py')
     command = 'PYTHONPATH="%s" "%s" "%s" mako' % (mako.join('lib'),

Modified: pypy/trunk/pypy/translator/benchmark/jitbench.py
==============================================================================
--- pypy/trunk/pypy/translator/benchmark/jitbench.py	(original)
+++ pypy/trunk/pypy/translator/benchmark/jitbench.py	Wed Nov 11 18:54:49 2009
@@ -14,7 +14,7 @@
     response.read()
 
 def run_richards(executable='python'):
-    richards = str(py.magic.autopath().dirpath().dirpath().join('goal').join('richards.py'))
+    richards = str(py.path.local(__file__).dirpath().dirpath().join('goal').join('richards.py'))
     pipe = subprocess.Popen([executable, richards], stdout=subprocess.PIPE,
                             stderr=subprocess.PIPE)
     return pipe.communicate()

Modified: pypy/trunk/pypy/translator/c/autopath.py
==============================================================================
--- pypy/trunk/pypy/translator/c/autopath.py	(original)
+++ pypy/trunk/pypy/translator/c/autopath.py	Wed Nov 11 18:54:49 2009
@@ -37,8 +37,7 @@
         partdir = head
         head, tail = os.path.split(head)
         if tail == part:
-            # check if "../py/__init__.py" exists
-            checkfile = os.path.join(partdir, os.pardir, 'py', '__init__.py')
+            checkfile = os.path.join(partdir, os.pardir, 'pypy', '__init__.py')
             if not os.path.exists(checkfile):
                 error = "Cannot find %r" % (os.path.normpath(checkfile),)
             break

Modified: pypy/trunk/pypy/translator/c/test/autopath.py
==============================================================================
--- pypy/trunk/pypy/translator/c/test/autopath.py	(original)
+++ pypy/trunk/pypy/translator/c/test/autopath.py	Wed Nov 11 18:54:49 2009
@@ -37,8 +37,7 @@
         partdir = head
         head, tail = os.path.split(head)
         if tail == part:
-            # check if "../py/__init__.py" exists
-            checkfile = os.path.join(partdir, os.pardir, 'py', '__init__.py')
+            checkfile = os.path.join(partdir, os.pardir, 'pypy', '__init__.py')
             if not os.path.exists(checkfile):
                 error = "Cannot find %r" % (os.path.normpath(checkfile),)
             break

Modified: pypy/trunk/pypy/translator/c/test/test_extfunc.py
==============================================================================
--- pypy/trunk/pypy/translator/c/test/test_extfunc.py	(original)
+++ pypy/trunk/pypy/translator/c/test/test_extfunc.py	Wed Nov 11 18:54:49 2009
@@ -141,7 +141,7 @@
     os.unlink(filename)
 
 def test_os_access():
-    filename = str(py.magic.autopath())
+    filename = str(py.path.local(__file__))
     def call_access(path, mode):
         return os.access(path, mode)
     f = compile(call_access, [str, int])
@@ -150,7 +150,7 @@
 
 
 def test_os_stat():
-    filename = str(py.magic.autopath())
+    filename = str(py.path.local(__file__))
     has_blksize = hasattr(os.stat_result, 'st_blksize')
     has_blocks = hasattr(os.stat_result, 'st_blocks')
     def call_stat():
@@ -189,7 +189,7 @@
 def test_os_fstat():
     if os.environ.get('PYPY_CC', '').startswith('tcc'):
         py.test.skip("segfault with tcc :-(")
-    filename = str(py.magic.autopath())
+    filename = str(py.path.local(__file__))
     def call_fstat():
         fd = os.open(filename, os.O_RDONLY, 0777)
         st = os.fstat(fd)

Modified: pypy/trunk/pypy/translator/cli/conftest.py
==============================================================================
--- pypy/trunk/pypy/translator/cli/conftest.py	(original)
+++ pypy/trunk/pypy/translator/cli/conftest.py	Wed Nov 11 18:54:49 2009
@@ -1,5 +1,5 @@
 def pytest_addoption(parser):
-    group = parser.addgroup("pypy-cli options")
+    group = parser.getgroup("pypy-cli options")
     group.addoption('--source', action="store_true", dest="source", default=False,
             help="only generate IL source, don't compile")
     group.addoption('--wd', action="store_true", dest="wd", default=False,
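
These conftest hooks switch from parser.addgroup() to parser.getgroup(), which
returns the named option group and creates it if it does not exist yet, so
several conftests can attach options to the same group.  A sketch with a
hypothetical option:

    def pytest_addoption(parser):
        group = parser.getgroup("example options")     # fetched or created
        group.addoption('--example-flag', action='store_true', default=False,
                        help='hypothetical flag, for illustration only')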

Modified: pypy/trunk/pypy/translator/cli/test/autopath.py
==============================================================================
--- pypy/trunk/pypy/translator/cli/test/autopath.py	(original)
+++ pypy/trunk/pypy/translator/cli/test/autopath.py	Wed Nov 11 18:54:49 2009
@@ -37,8 +37,7 @@
         partdir = head
         head, tail = os.path.split(head)
         if tail == part:
-            # check if "../py/__init__.py" exists
-            checkfile = os.path.join(partdir, os.pardir, 'py', '__init__.py')
+            checkfile = os.path.join(partdir, os.pardir, 'pypy', '__init__.py')
             if not os.path.exists(checkfile):
                 error = "Cannot find %r" % (os.path.normpath(checkfile),)
             break

Modified: pypy/trunk/pypy/translator/driver.py
==============================================================================
--- pypy/trunk/pypy/translator/driver.py	(original)
+++ pypy/trunk/pypy/translator/driver.py	Wed Nov 11 18:54:49 2009
@@ -9,7 +9,7 @@
 from pypy.annotation import model as annmodel
 from pypy.annotation.listdef import s_list_of_strings
 from pypy.annotation import policy as annpolicy
-from py.compat import optparse
+import optparse
 from pypy.tool.udir import udir
 from pypy.rlib.jit import DEBUG_OFF, DEBUG_DETAILED, DEBUG_PROFILE, DEBUG_STEPS
 

Modified: pypy/trunk/pypy/translator/goal/autopath.py
==============================================================================
--- pypy/trunk/pypy/translator/goal/autopath.py	(original)
+++ pypy/trunk/pypy/translator/goal/autopath.py	Wed Nov 11 18:54:49 2009
@@ -37,8 +37,7 @@
         partdir = head
         head, tail = os.path.split(head)
         if tail == part:
-            # check if "../py/__init__.py" exists
-            checkfile = os.path.join(partdir, os.pardir, 'py', '__init__.py')
+            checkfile = os.path.join(partdir, os.pardir, 'pypy', '__init__.py')
             if not os.path.exists(checkfile):
                 error = "Cannot find %r" % (os.path.normpath(checkfile),)
             break

Modified: pypy/trunk/pypy/translator/goal/targetgbfullprofiling.py
==============================================================================
--- pypy/trunk/pypy/translator/goal/targetgbfullprofiling.py	(original)
+++ pypy/trunk/pypy/translator/goal/targetgbfullprofiling.py	Wed Nov 11 18:54:49 2009
@@ -3,7 +3,7 @@
 from pypy.lang.gameboy.profiling.gameboy_profiling_implementation import GameBoyProfiler
 
 
-ROM_PATH = str(py.magic.autopath().dirpath().dirpath().dirpath())+"/lang/gameboy/rom"
+ROM_PATH = str(py.path.local(__file__).dirpath().dirpath().dirpath())+"/lang/gameboy/rom"
 
 def entry_point(argv=None):
     if argv is not None and len(argv) > 1:

Modified: pypy/trunk/pypy/translator/goal/targetgbimplementation.py
==============================================================================
--- pypy/trunk/pypy/translator/goal/targetgbimplementation.py	(original)
+++ pypy/trunk/pypy/translator/goal/targetgbimplementation.py	Wed Nov 11 18:54:49 2009
@@ -3,7 +3,7 @@
 from pypy.lang.gameboy.gameboy_implementation import GameBoyImplementation
 
 
-ROM_PATH = str(py.magic.autopath().dirpath().dirpath().dirpath())+"/lang/gameboy/rom"
+ROM_PATH = str(py.path.local(__file__).dirpath().dirpath().dirpath())+"/lang/gameboy/rom"
 
 print ROM_PATH
 

Modified: pypy/trunk/pypy/translator/goal/targetgbrom4.py
==============================================================================
--- pypy/trunk/pypy/translator/goal/targetgbrom4.py	(original)
+++ pypy/trunk/pypy/translator/goal/targetgbrom4.py	Wed Nov 11 18:54:49 2009
@@ -4,7 +4,7 @@
 from pypy.lang.gameboy.gameboy import GameBoy
 
 
-ROM_PATH = str(py.magic.autopath().dirpath().dirpath().dirpath())+"/lang/gameboy/rom"
+ROM_PATH = str(py.path.local(__file__).dirpath().dirpath().dirpath())+"/lang/gameboy/rom"
 EMULATION_CYCLES = 1<<24
 
 

Modified: pypy/trunk/pypy/translator/goal/targetpreimportedpypy.py
==============================================================================
--- pypy/trunk/pypy/translator/goal/targetpreimportedpypy.py	(original)
+++ pypy/trunk/pypy/translator/goal/targetpreimportedpypy.py	Wed Nov 11 18:54:49 2009
@@ -23,7 +23,7 @@
     "random",
 ]
 
-thisdir = py.magic.autopath().dirpath()
+thisdir = py.path.local(__file__).dirpath()
 
 try:
     this_dir = os.path.dirname(__file__)

Modified: pypy/trunk/pypy/translator/goal/targetpypystandalone.py
==============================================================================
--- pypy/trunk/pypy/translator/goal/targetpypystandalone.py	(original)
+++ pypy/trunk/pypy/translator/goal/targetpypystandalone.py	Wed Nov 11 18:54:49 2009
@@ -11,7 +11,7 @@
 from pypy.tool.option import make_objspace
 from pypy.translator.goal.nanos import setup_nanos
 
-thisdir = py.magic.autopath().dirpath()
+thisdir = py.path.local(__file__).dirpath()
 
 try:
     this_dir = os.path.dirname(__file__)

Modified: pypy/trunk/pypy/translator/goal/test2/autopath.py
==============================================================================
--- pypy/trunk/pypy/translator/goal/test2/autopath.py	(original)
+++ pypy/trunk/pypy/translator/goal/test2/autopath.py	Wed Nov 11 18:54:49 2009
@@ -37,8 +37,7 @@
         partdir = head
         head, tail = os.path.split(head)
         if tail == part:
-            # check if "../py/__init__.py" exists
-            checkfile = os.path.join(partdir, os.pardir, 'py', '__init__.py')
+            checkfile = os.path.join(partdir, os.pardir, 'pypy', '__init__.py')
             if not os.path.exists(checkfile):
                 error = "Cannot find %r" % (os.path.normpath(checkfile),)
             break

Modified: pypy/trunk/pypy/translator/goal/translate.py
==============================================================================
--- pypy/trunk/pypy/translator/goal/translate.py	(original)
+++ pypy/trunk/pypy/translator/goal/translate.py	Wed Nov 11 18:54:49 2009
@@ -84,7 +84,7 @@
 
 import py
 # we want 2.4 expand_default functionality
-optparse = py.compat.optparse
+import optparse
 from pypy.tool.ansi_print import ansi_log
 log = py.log.Producer("translation")
 py.log.setconsumer("translation", ansi_log)
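
py.compat.optparse bundled a recent optparse for old interpreters; importing
the stdlib module directly relies on running under Python 2.4 or newer, where
the expand_default behaviour the comment above refers to (substituting
%default in help strings) is part of the standard library.  A sketch with a
hypothetical option:

    import optparse
    parser = optparse.OptionParser()
    parser.add_option('--level', default='2',          # hypothetical option
                      help='optimisation level (default: %default)')
    # with Python >= 2.4, %default above expands to "2" in --help output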

Modified: pypy/trunk/pypy/translator/interactive.py
==============================================================================
--- pypy/trunk/pypy/translator/interactive.py	(original)
+++ pypy/trunk/pypy/translator/interactive.py	Wed Nov 11 18:54:49 2009
@@ -1,4 +1,4 @@
-from py.compat import optparse
+import optparse
 
 import autopath
 from pypy.translator.translator import TranslationContext

Modified: pypy/trunk/pypy/translator/jvm/conftest.py
==============================================================================
--- pypy/trunk/pypy/translator/jvm/conftest.py	(original)
+++ pypy/trunk/pypy/translator/jvm/conftest.py	Wed Nov 11 18:54:49 2009
@@ -1,6 +1,6 @@
 
 def pytest_addoption(parser):
-    group = parser.addgroup("pypy-jvm options")
+    group = parser.getgroup("pypy-jvm options")
     group.addoption('--java', action='store', dest='java', default='java',
             help='Define the java executable to use')
     group.addoption('--javac', action='store', dest='javac',

Modified: pypy/trunk/pypy/translator/jvm/genjvm.py
==============================================================================
--- pypy/trunk/pypy/translator/jvm/genjvm.py	(original)
+++ pypy/trunk/pypy/translator/jvm/genjvm.py	Wed Nov 11 18:54:49 2009
@@ -83,7 +83,7 @@
         self.jasmin_files = None
         
         # Determine various paths:
-        self.thisdir = py.magic.autopath().dirpath()
+        self.thisdir = py.path.local(__file__).dirpath()
         self.rootdir = self.thisdir.join('src')
         self.srcdir = self.rootdir.join('pypy')
         self.jnajar = self.rootdir.join('jna.jar')

Modified: pypy/trunk/pypy/translator/microbench/pybench/autopath.py
==============================================================================
--- pypy/trunk/pypy/translator/microbench/pybench/autopath.py	(original)
+++ pypy/trunk/pypy/translator/microbench/pybench/autopath.py	Wed Nov 11 18:54:49 2009
@@ -37,8 +37,7 @@
         partdir = head
         head, tail = os.path.split(head)
         if tail == part:
-            # check if "../py/__init__.py" exists
-            checkfile = os.path.join(partdir, os.pardir, 'py', '__init__.py')
+            checkfile = os.path.join(partdir, os.pardir, 'pypy', '__init__.py')
             if not os.path.exists(checkfile):
                 error = "Cannot find %r" % (os.path.normpath(checkfile),)
             break

Modified: pypy/trunk/pypy/translator/platform/__init__.py
==============================================================================
--- pypy/trunk/pypy/translator/platform/__init__.py	(original)
+++ pypy/trunk/pypy/translator/platform/__init__.py	Wed Nov 11 18:54:49 2009
@@ -6,7 +6,7 @@
 import sys, py, os
 
 from pypy.tool.ansi_print import ansi_log
-from py.__.code import safe_repr
+from py.impl.code.code import safe_repr
 log = py.log.Producer("platform")
 py.log.setconsumer("platform", ansi_log)
 
@@ -33,9 +33,9 @@
 
     def __repr__(self):
         if self.err:
-            return "<CompilationError err=%s>" % safe_repr._repr(self.err)
+            return "<CompilationError err=%s>" % safe_repr(self.err)
         else:
-            return "<CompilationError out=%s>" % safe_repr._repr(self.out)
+            return "<CompilationError out=%s>" % safe_repr(self.out)
 
     __str__ = __repr__
 

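safe_repr now lives in py.impl.code.code and is called as a plain function
(rather than through the old safe_repr._repr helper); it is used here,
presumably, to keep large or misbehaving compiler output printable in the
exception repr.  A sketch with made-up compiler output:

    from py.impl.code.code import safe_repr
    err_text = "ld: library not found for -lfoo"    # made-up error text
    print "<CompilationError err=%s>" % safe_repr(err_text)
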
Modified: pypy/trunk/pypy/translator/platform/test/test_darwin.py
==============================================================================
--- pypy/trunk/pypy/translator/platform/test/test_darwin.py	(original)
+++ pypy/trunk/pypy/translator/platform/test/test_darwin.py	Wed Nov 11 18:54:49 2009
@@ -32,7 +32,7 @@
             return 0;
         }
         ''')
-        includedir = py.magic.autopath().dirpath().join('include')
+        includedir = py.path.local(__file__).dirpath().join('include')
         eci = ExternalCompilationInfo(frameworks=('Cocoa',),
                                       include_dirs=(includedir,))
         executable = self.platform.compile([objcfile], eci)

Modified: pypy/trunk/pypy/translator/platform/test/test_maemo.py
==============================================================================
--- pypy/trunk/pypy/translator/platform/test/test_maemo.py	(original)
+++ pypy/trunk/pypy/translator/platform/test/test_maemo.py	Wed Nov 11 18:54:49 2009
@@ -26,7 +26,7 @@
             return 0;
         }
         ''')
-        includedir = py.magic.autopath().dirpath().join('include')
+        includedir = py.path.local(__file__).dirpath().join('include')
         eci = ExternalCompilationInfo(include_dirs=(includedir,))
         executable = self.platform.compile([cfile], eci)
         res = self.platform.execute(executable)

Modified: pypy/trunk/pypy/translator/sandbox/autopath.py
==============================================================================
--- pypy/trunk/pypy/translator/sandbox/autopath.py	(original)
+++ pypy/trunk/pypy/translator/sandbox/autopath.py	Wed Nov 11 18:54:49 2009
@@ -37,8 +37,7 @@
         partdir = head
         head, tail = os.path.split(head)
         if tail == part:
-            # check if "../py/__init__.py" exists
-            checkfile = os.path.join(partdir, os.pardir, 'py', '__init__.py')
+            checkfile = os.path.join(partdir, os.pardir, 'pypy', '__init__.py')
             if not os.path.exists(checkfile):
                 error = "Cannot find %r" % (os.path.normpath(checkfile),)
             break

Modified: pypy/trunk/pypy/translator/sandbox/test/autopath.py
==============================================================================
--- pypy/trunk/pypy/translator/sandbox/test/autopath.py	(original)
+++ pypy/trunk/pypy/translator/sandbox/test/autopath.py	Wed Nov 11 18:54:49 2009
@@ -37,8 +37,7 @@
         partdir = head
         head, tail = os.path.split(head)
         if tail == part:
-            # check if "../py/__init__.py" exists
-            checkfile = os.path.join(partdir, os.pardir, 'py', '__init__.py')
+            checkfile = os.path.join(partdir, os.pardir, 'pypy', '__init__.py')
             if not os.path.exists(checkfile):
                 error = "Cannot find %r" % (os.path.normpath(checkfile),)
             break

Modified: pypy/trunk/pypy/translator/test/autopath.py
==============================================================================
--- pypy/trunk/pypy/translator/test/autopath.py	(original)
+++ pypy/trunk/pypy/translator/test/autopath.py	Wed Nov 11 18:54:49 2009
@@ -37,8 +37,7 @@
         partdir = head
         head, tail = os.path.split(head)
         if tail == part:
-            # check if "../py/__init__.py" exists
-            checkfile = os.path.join(partdir, os.pardir, 'py', '__init__.py')
+            checkfile = os.path.join(partdir, os.pardir, 'pypy', '__init__.py')
             if not os.path.exists(checkfile):
                 error = "Cannot find %r" % (os.path.normpath(checkfile),)
             break

Modified: pypy/trunk/pypy/translator/test/test_driver.py
==============================================================================
--- pypy/trunk/pypy/translator/test/test_driver.py	(original)
+++ pypy/trunk/pypy/translator/test/test_driver.py	Wed Nov 11 18:54:49 2009
@@ -1,7 +1,7 @@
 import py
 
 from pypy.translator.driver import TranslationDriver
-from py.compat import optparse
+import optparse
 
 def test_ctr():
     td = TranslationDriver()

Modified: pypy/trunk/pypy/translator/tool/autopath.py
==============================================================================
--- pypy/trunk/pypy/translator/tool/autopath.py	(original)
+++ pypy/trunk/pypy/translator/tool/autopath.py	Wed Nov 11 18:54:49 2009
@@ -37,8 +37,7 @@
         partdir = head
         head, tail = os.path.split(head)
         if tail == part:
-            # check if "../py/__init__.py" exists
-            checkfile = os.path.join(partdir, os.pardir, 'py', '__init__.py')
+            checkfile = os.path.join(partdir, os.pardir, 'pypy', '__init__.py')
             if not os.path.exists(checkfile):
                 error = "Cannot find %r" % (os.path.normpath(checkfile),)
             break


